diff --git a/cli/src/merge_tools/builtin.rs b/cli/src/merge_tools/builtin.rs index 5d7cd9e0a5..a20df95613 100644 --- a/cli/src/merge_tools/builtin.rs +++ b/cli/src/merge_tools/builtin.rs @@ -1014,7 +1014,9 @@ mod tests { to_file_id(base_tree.path_value(path).unwrap()), to_file_id(right_tree.path_value(path).unwrap()), ]); - let content = extract_as_single_hunk(&merge, store, path).block_on(); + let content = extract_as_single_hunk(&merge, store, path) + .block_on() + .unwrap(); let slices = content.map(|ContentHunk(buf)| buf.as_slice()); let merge_result = files::merge(&slices); let sections = make_merge_sections(merge_result).unwrap(); diff --git a/cli/src/merge_tools/mod.rs b/cli/src/merge_tools/mod.rs index 71c71cf573..9d738a03ca 100644 --- a/cli/src/merge_tools/mod.rs +++ b/cli/src/merge_tools/mod.rs @@ -307,7 +307,7 @@ impl MergeEditor { }); }; let content = - extract_as_single_hunk(&simplified_file_merge, tree.store(), repo_path).block_on(); + extract_as_single_hunk(&simplified_file_merge, tree.store(), repo_path).block_on()?; match &self.tool { MergeTool::Builtin => { diff --git a/lib/src/backend.rs b/lib/src/backend.rs index 03be444fa7..29e94a672a 100644 --- a/lib/src/backend.rs +++ b/lib/src/backend.rs @@ -27,7 +27,7 @@ use crate::content_hash::ContentHash; use crate::index::Index; use crate::merge::Merge; use crate::object_id::{id_type, ObjectId}; -use crate::repo_path::{RepoPath, RepoPathComponent, RepoPathComponentBuf}; +use crate::repo_path::{RepoPath, RepoPathBuf, RepoPathComponent, RepoPathComponentBuf}; use crate::signing::SignResult; id_type!( @@ -199,6 +199,12 @@ pub enum BackendError { hash: String, source: Box<dyn std::error::Error + Send + Sync>, }, #[error("Error when reading file content for file {} with id {}", path.as_internal_file_string(), id.hex())] ReadFile { path: RepoPathBuf, id: FileId, source: Box<dyn std::error::Error + Send + Sync>, }, #[error("Could not write object of type {object_type}")] WriteObject { object_type: &'static str, diff --git a/lib/src/conflicts.rs b/lib/src/conflicts.rs 
index 3314c5bbad..1c10f39b5f 100644 --- a/lib/src/conflicts.rs +++ b/lib/src/conflicts.rs @@ -17,7 +17,7 @@ use std::io::{Read, Write}; use std::iter::zip; -use futures::StreamExt; +use futures::{StreamExt, TryStreamExt}; use itertools::Itertools; use regex::bytes::Regex; @@ -77,21 +77,32 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result<()> Ok(()) } -async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk { +async fn get_file_contents( + store: &Store, + path: &RepoPath, + term: &Option<FileId>, +) -> BackendResult<ContentHunk> { match term { Some(id) => { let mut content = vec![]; store .read_file_async(path, id) - .await - .unwrap() + .await? .read_to_end(&mut content) - .unwrap(); - ContentHunk(content) + .map_err(|err| BackendError::ReadFile { + path: path.to_owned(), + id: id.clone(), + source: format!( + "Failed to read file contents for {}: {err}", + path.as_internal_file_string() + ) + .into(), + })?; + Ok(ContentHunk(content)) } // If the conflict had removed the file on one side, we pretend that the file // was empty there. 
- None => Ok(ContentHunk(vec![])), } } @@ -99,12 +110,12 @@ pub async fn extract_as_single_hunk( merge: &Merge<Option<FileId>>, store: &Store, path: &RepoPath, -) -> Merge<ContentHunk> { +) -> BackendResult<Merge<ContentHunk>> { let builder: MergeBuilder<ContentHunk> = futures::stream::iter(merge.iter()) .then(|term| get_file_contents(store, path, term)) - .collect() - .await; - builder.build() + .try_collect() + .await?; + Ok(builder.build()) } /// A type similar to `MergedTreeValue` but with associated data to include in @@ -183,7 +194,7 @@ async fn materialize_tree_value_no_access_denied( let mut contents = vec![]; if let Some(file_merge) = conflict.to_file_merge() { let file_merge = file_merge.simplify(); - let content = extract_as_single_hunk(&file_merge, store, path).await; + let content = extract_as_single_hunk(&file_merge, store, path).await?; materialize_merge_result(&content, &mut contents) .expect("Failed to materialize conflict to in-memory buffer"); } else { @@ -453,7 +464,7 @@ pub async fn update_from_content( // conflicts (for example) are not converted to regular files in the working // copy. let mut old_content = Vec::with_capacity(content.len()); - let merge_hunk = extract_as_single_hunk(simplified_file_ids, store, path).await; + let merge_hunk = extract_as_single_hunk(simplified_file_ids, store, path).await?; materialize_merge_result(&merge_hunk, &mut old_content).unwrap(); if content == old_content { return Ok(file_ids.clone()); diff --git a/lib/tests/test_conflicts.rs b/lib/tests/test_conflicts.rs index 78924f34c5..6876508945 100644 --- a/lib/tests/test_conflicts.rs +++ b/lib/tests/test_conflicts.rs @@ -991,7 +991,9 @@ fn materialize_conflict_string( conflict: &Merge<Option<FileId>>, ) -> String { let mut result: Vec<u8> = vec![]; - let contents = extract_as_single_hunk(conflict, store, path).block_on(); + let contents = extract_as_single_hunk(conflict, store, path) + .block_on() + .unwrap(); materialize_merge_result(&contents, &mut result).unwrap(); String::from_utf8(result).unwrap() }