summaryrefslogtreecommitdiff
path: root/src/calculation.rs
blob: 64b0f1ecdc34870df1b233f79ac02e971cd02780 (plain)
use std::collections::{HashSet, VecDeque};
use std::io::Write;
use std::path::PathBuf;

use crate::{FileInfo, FilenameOperation, Patch, PatchId, SpanNode, SpanNodeId};

/// Removes every element of `vec` for which `f` returns `Ok(false)`,
/// propagating the first error `f` returns.
///
/// On `Err`, removals decided before the failing element are already
/// applied, while the failing element and everything after it are kept —
/// the same state the previous scan-and-`remove` loop left behind.
/// Implemented on top of [`Vec::retain`] so the whole pass is O(n)
/// instead of O(n²) from repeated mid-vector `remove` calls.
fn try_retain<T, E>(vec: &mut Vec<T>, f: impl Fn(&T) -> Result<bool, E>) -> Result<(), E> {
	let mut first_error = None;
	vec.retain(|element| {
		if first_error.is_some() {
			// After an error, keep the remainder untouched.
			return true;
		}
		match f(element) {
			Ok(keep) => keep,
			Err(error) => {
				first_error = Some(error);
				true
			}
		}
	});

	match first_error {
		Some(error) => Err(error),
		None => Ok(()),
	}
}

/// Fallible version of `Iterator::any`: returns `Ok(true)` as soon as `f`
/// succeeds with `true`, stopping at the first match or the first error.
fn try_any<T, E>(vec: &[T], f: impl Fn(&T) -> Result<bool, E>) -> Result<bool, E> {
	for outcome in vec.iter().map(f) {
		if outcome? {
			return Ok(true);
		}
	}

	Ok(false)
}

pub fn depends_on<Error: Clone>(
	a: &Patch,
	b: &PatchId,
	get_patch: impl Fn(&PatchId) -> Result<Patch, Error>,
) -> Result<bool, Error> {
	let mut queue = VecDeque::new();
	if a.dependencies.iter().any(|id| id == b) {
		return Ok(true);
	}
	queue.extend(a.dependencies.iter().cloned());

	while let Some(patch_id) = queue.pop_front() {
		let patch = get_patch(&patch_id)?;
		if patch.dependencies.iter().any(|id| id == b) {
			return Ok(true);
		}
		queue.extend(patch.dependencies.iter().cloned());
	}

	Ok(false)
}

/// Filters `patches` down to the most relevant active ones.
///
/// Walking `patches` in order, each active patch removes any already-kept
/// patch it depends on, and is itself kept only if no already-kept patch
/// depends on it. Inactive patches are skipped entirely.
pub fn most_relevant_patches<'a, Error: Clone>(
	patches: impl IntoIterator<Item = &'a PatchId>,
	active_patches: &HashSet<PatchId>,
	get_patch: &impl Fn(&PatchId) -> Result<Patch, Error>,
) -> Result<Vec<PatchId>, Error> {
	let mut kept = Vec::new();

	for candidate_id in patches {
		if !active_patches.contains(candidate_id) {
			continue;
		}

		let candidate = get_patch(candidate_id)?;

		// Drop every previously kept patch that the candidate depends on.
		try_retain(&mut kept, |kept_id| {
			Ok(!depends_on(&candidate, kept_id, get_patch)?)
		})?;

		// Keep the candidate unless some kept patch depends on it.
		let superseded = try_any(&kept, |kept_id| {
			depends_on(&get_patch(kept_id)?, &candidate.id, get_patch)
		})?;
		if !superseded {
			kept.push(candidate.id.clone());
		}
	}

	Ok(kept)
}

/// Extracts the file name carried by a filename operation.
///
/// `Add` and `Rename` carry a name; `Delete` carries none.
pub const fn file_name_from_operation(operation: &FilenameOperation) -> Option<&PathBuf> {
	match operation {
		FilenameOperation::Delete => None,
		FilenameOperation::Add { filename } => Some(filename),
		FilenameOperation::Rename { filename } => Some(filename),
	}
}

/// Resolves the possible name(s) of `file` under `active_patches`.
///
/// Reduces the file's name changes to the most relevant active ones, then
/// returns the name each of those operations produces, in the order the
/// name changes are stored (`None` for a deletion).
pub fn file_name<Error: Clone>(
	file: &FileInfo,
	active_patches: &HashSet<PatchId>,
	get_patch: &impl Fn(&PatchId) -> Result<Patch, Error>,
) -> Result<Vec<Option<PathBuf>>, Error> {
	let relevant: HashSet<PatchId> = most_relevant_patches(
		file.name_changes.iter().map(|(id, _)| id),
		active_patches,
		get_patch,
	)?
	.into_iter()
	.collect();

	let mut names = Vec::new();
	for (id, operation) in &file.name_changes {
		if relevant.contains(id) {
			names.push(file_name_from_operation(operation).cloned());
		}
	}

	Ok(names)
}

/// The patches still relevant to a span node after filtering through
/// `most_relevant_patches`, split by what they did to the span.
pub struct RelevantSpanPatches {
	/// Relevant patches that added the span.
	pub additions: Vec<PatchId>,
	/// Relevant patches that deleted the span.
	pub deletions: Vec<PatchId>,
}

/// Determines which of a span node's adding and deleting patches are still
/// relevant under `active_patches`.
///
/// Relevance is computed once over the combined `added_by`/`deleted_by`
/// set, then each list is filtered down to the survivors, preserving the
/// original order.
pub fn span_liveness<Error: Clone>(
	node: &SpanNode,
	active_patches: &HashSet<PatchId>,
	get_patch: &impl Fn(&PatchId) -> Result<Patch, Error>,
) -> Result<RelevantSpanPatches, Error> {
	let relevant: HashSet<PatchId> = most_relevant_patches(
		node.added_by.iter().chain(node.deleted_by.iter()),
		active_patches,
		get_patch,
	)?
	.into_iter()
	.collect();

	Ok(RelevantSpanPatches {
		additions: node
			.added_by
			.iter()
			.filter(|id| relevant.contains(*id))
			.cloned()
			.collect(),
		deletions: node
			.deleted_by
			.iter()
			.filter(|id| relevant.contains(*id))
			.cloned()
			.collect(),
	})
}

/// Collects the span nodes that have at least one relevant addition or
/// deletion under `active_patches`, in iteration order.
pub fn conflicting_nodes<Error: Clone>(
	nodes: impl Iterator<Item = SpanNode>,
	active_patches: &HashSet<PatchId>,
	get_patch: &impl Fn(&PatchId) -> Result<Patch, Error>,
) -> Result<Vec<SpanNode>, Error> {
	let mut result = Vec::new();

	for node in nodes {
		let liveness = span_liveness(&node, active_patches, get_patch)?;
		let has_relevant_patches =
			!(liveness.additions.is_empty() && liveness.deletions.is_empty());
		if has_relevant_patches {
			result.push(node);
		}
	}

	Ok(result)
}

/// The full content of a file as an ordered sequence of spans.
pub struct FileContent(pub Vec<FileContentSpan>);

/// One region of file content: either a conflicted region with labelled
/// alternatives, or a conflict-free run of nodes.
pub enum FileContentSpan {
	Conflicted(ConflictedFileContentSpan),
	Unconflicted(UnconflictedFileContentSpan),
}

/// A conflicted region: one alternative content run per patch label.
pub struct ConflictedFileContentSpan(pub Vec<(String, UnconflictedFileContentSpan)>);

/// A conflict-free run of content nodes.
pub struct UnconflictedFileContentSpan(pub Vec<FileContentNode>);

/// A single span node's contribution to the file content.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileContentNode {
	// Identifier of the span node this content came from.
	id: SpanNodeId,
	// Patch this content is attributed to.
	patch: PatchId,
	// Raw bytes of the span; empty when the node represents a deletion.
	content: Vec<u8>,
}

/// Returns whether every node in `a` maps, via its patch label, onto one
/// of the labelled alternatives already present in `b` — i.e. whether `a`
/// can be merged into the existing conflicted span without introducing a
/// new label. Vacuously true when `a` is empty.
fn mergable(
	a: &[FileContentNode],
	b: &ConflictedFileContentSpan,
	get_patch_label: impl Fn(&PatchId) -> String,
) -> bool {
	a.iter().all(|node| {
		let label = get_patch_label(&node.patch);
		b.0.iter().any(|(existing_label, _)| existing_label == &label)
	})
}

/// Computes the renderable content of `file` under `active_patches`,
/// grouping span nodes into unconflicted runs and conflicted regions
/// keyed by patch label.
///
/// The span graph is walked breadth-first from `file.root_span` in
/// passes: all nodes of the current pass are processed, the content
/// nodes they produce are folded into `output`, and their successors form
/// the next pass.
pub fn file_content<Error: Clone>(
	file: &FileInfo,
	active_patches: &HashSet<PatchId>,
	get_patch: &impl Fn(&PatchId) -> Result<Patch, Error>,
	get_patch_label: impl Fn(&PatchId) -> String,
) -> Result<FileContent, Error> {
	let mut output = Vec::new();
	let mut completed_nodes = HashSet::new();

	// Nodes to process in the current pass.
	let mut queue = VecDeque::new();
	queue.push_back(file.root_span.clone());

	// Successors discovered during the current pass; swapped in afterwards.
	let mut queue_next = VecDeque::new();

	// Content nodes produced by the current pass.
	let mut conflicting_nodes = Vec::new();

	while !queue.is_empty() || !queue_next.is_empty() {
		while let Some(node_id) = queue.pop_front() {
			let node = file
				.spans
				.get(&node_id)
				.expect("todo: handle non-existent span-nodes");

			let relevant_patches = span_liveness(node, active_patches, get_patch)?;

			// Defer this node until all of its predecessors are complete.
			// NOTE(review): if a predecessor is only reachable in a later
			// pass (or never), this re-queueing spins forever within the
			// inner loop — confirm the span graph rules that out.
			if !node
				.preceding_nodes
				.iter()
				.all(|id| completed_nodes.contains(id))
			{
				queue.push_back(node_id);
				continue;
			}

			// A relevant addition contributes the span's bytes, sliced out
			// of the adding patch's contents.
			if !relevant_patches.additions.is_empty() {
				let patch = get_patch(&node.span.patch)?;
				let content_start = node.span.start;
				let content_end = content_start + node.span.len;
				let span_contents = Vec::from(&patch.contents[content_start..content_end]);

				conflicting_nodes.push(FileContentNode {
					id: node_id.clone(),
					patch: patch.id.clone(),
					content: span_contents,
				});
			}

			// A relevant deletion contributes an empty-content node.
			if !relevant_patches.deletions.is_empty() {
				conflicting_nodes.push(FileContentNode {
					id: node_id.clone(),
					patch: node.span.patch.clone(),
					content: Vec::new(),
				});
			}

			completed_nodes.insert(node_id);
			for node_id in &node.successor_nodes {
				queue_next.push_back(node_id.clone());
			}
		}

		// Exactly one node in this pass: no conflict, so extend the
		// trailing unconflicted run (or start a new one).
		#[allow(clippy::collapsible_else_if)]
		if conflicting_nodes.len() == 1 {
			if let Some(FileContentSpan::Unconflicted(node)) = output.last_mut() {
				node.0.push(conflicting_nodes[0].clone());
			} else {
				output.push(FileContentSpan::Unconflicted(UnconflictedFileContentSpan(
					vec![conflicting_nodes[0].clone()],
				)));
			}
		} else {
			// Multiple nodes conflict; group them by patch label, merging
			// into the trailing conflicted span when every label matches.
			// NOTE(review): a pass that produced ZERO nodes also lands
			// here and can push an empty Conflicted span, which then
			// blocks merging of adjacent unconflicted runs — confirm
			// this is intended.
			if let Some(FileContentSpan::Conflicted(span)) = output.last_mut()
				&& mergable(&conflicting_nodes, span, &get_patch_label)
			{
				// Route each node to the alternative with its label.
				'outer: for node_a in conflicting_nodes {
					for (label_b, span_b) in &mut span.0 {
						let label_a = get_patch_label(&node_a.patch);
						if &label_a == label_b {
							span_b.0.push(node_a);
							continue 'outer;
						}
					}
				}
			} else {
				output.push(FileContentSpan::Conflicted(ConflictedFileContentSpan(
					conflicting_nodes
						.iter()
						.map(|node| {
							(
								get_patch_label(&node.patch),
								UnconflictedFileContentSpan(vec![(node.clone())]),
							)
						})
						.collect(),
				)));
			}
		}

		// Reassigned rather than cleared: the merge branch above consumed
		// the vector by value.
		conflicting_nodes = Vec::new();

		// Advance to the next pass.
		(queue, queue_next) = (queue_next, queue);
		queue_next.clear();
	}

	Ok(FileContent(output))
}

/// Maps byte offsets in written file content back to the span nodes that
/// produced them (see `write_file_content`).
pub struct FileContentMap {
	// (byte offset of the node's first content byte, span node id) pairs,
	// in write order.
	nodes: Vec<(usize, SpanNodeId)>,
}

/// Serialises `labelled_file_content` into `writer`, recording the byte
/// offset at which each node's content begins.
///
/// Conflicted regions are rendered as one `======= <label>` header per
/// alternative, followed by that alternative's node contents.
///
/// # Errors
/// Propagates any I/O error from `writer`.
pub fn write_file_content(
	writer: &mut impl Write,
	labelled_file_content: &FileContent,
) -> std::io::Result<FileContentMap> {
	let mut nodes = Vec::new();
	let mut offset = 0;

	for span in &labelled_file_content.0 {
		match span {
			FileContentSpan::Unconflicted(unconflicted) => {
				for node in &unconflicted.0 {
					nodes.push((offset, node.id.clone()));
					writer.write_all(&node.content)?;
					offset += node.content.len();
				}
			}
			FileContentSpan::Conflicted(conflicted) => {
				for (label, alternative) in &conflicted.0 {
					let header = format!("======= {label}\n");
					writer.write_all(header.as_bytes())?;
					offset += header.len();

					for node in &alternative.0 {
						nodes.push((offset, node.id.clone()));
						writer.write_all(&node.content)?;
						offset += node.content.len();
					}
				}
			}
		}
	}

	Ok(FileContentMap { nodes })
}