From 7e138d1f7ffa4ce2c9773d3315b634c7849cd582 Mon Sep 17 00:00:00 2001 From: Martin von Zweigbergk Date: Wed, 18 Oct 2023 16:58:19 -0700 Subject: [PATCH 1/3] cleanup: import `futures::executor::block_on()` instead of qualifying It seems we'll end up using `block_on()` quite a bit, at least until we're done transitioning to async, and the function name doesn't conflict with anything else, so let's always import it when we need it. --- lib/src/git_backend.rs | 30 ++++++++++++++---------------- lib/src/local_backend.rs | 10 +++++----- lib/src/merged_tree.rs | 3 ++- lib/src/store.rs | 12 +++++++----- 4 files changed, 28 insertions(+), 27 deletions(-) diff --git a/lib/src/git_backend.rs b/lib/src/git_backend.rs index 8c4a89b6d8..6cc305f54a 100644 --- a/lib/src/git_backend.rs +++ b/lib/src/git_backend.rs @@ -931,6 +931,7 @@ fn bytes_vec_from_json(value: &serde_json::Value) -> Vec { #[cfg(test)] mod tests { use assert_matches::assert_matches; + use futures::executor::block_on; use test_case::test_case; use super::*; @@ -1013,7 +1014,7 @@ mod tests { .collect_vec(); assert_eq!(git_refs, vec![git_commit_id2]); - let commit = futures::executor::block_on(backend.read_commit(&commit_id)).unwrap(); + let commit = block_on(backend.read_commit(&commit_id)).unwrap(); assert_eq!(&commit.change_id, &change_id); assert_eq!(commit.parents, vec![CommitId::from_bytes(&[0; 20])]); assert_eq!(commit.predecessors, vec![]); @@ -1042,7 +1043,7 @@ mod tests { ); assert_eq!(commit.committer.timestamp.tz_offset, -480); - let root_tree = futures::executor::block_on(backend.read_tree( + let root_tree = block_on(backend.read_tree( &RepoPath::root(), &TreeId::from_bytes(root_tree_id.as_bytes()), )) .unwrap(); @@ -1056,7 +1057,7 @@ mod tests { &TreeValue::Tree(TreeId::from_bytes(dir_tree_id.as_bytes())) ); - let dir_tree = futures::executor::block_on(backend.read_tree( + let dir_tree = block_on(backend.read_tree( &RepoPath::from_internal_string("dir"), &TreeId::from_bytes(dir_tree_id.as_bytes()), )) @@ 
-1079,7 +1080,7 @@ mod tests { &TreeValue::Symlink(SymlinkId::from_bytes(blob2.as_bytes())) ); - let commit2 = futures::executor::block_on(backend.read_commit(&commit_id2)).unwrap(); + let commit2 = block_on(backend.read_commit(&commit_id2)).unwrap(); assert_eq!(commit2.parents, vec![commit_id.clone()]); assert_eq!(commit.predecessors, vec![]); assert_eq!( @@ -1118,10 +1119,9 @@ mod tests { // read_commit() without import_head_commits() works as of now. This might be // changed later. - assert!(futures::executor::block_on( - backend.read_commit(&CommitId::from_bytes(git_commit_id.as_bytes())) - ) - .is_ok()); + assert!( + block_on(backend.read_commit(&CommitId::from_bytes(git_commit_id.as_bytes()))).is_ok() + ); assert!( backend .cached_extra_metadata_table() @@ -1209,7 +1209,7 @@ mod tests { // Only root commit as parent commit.parents = vec![backend.root_commit_id().clone()]; let first_id = backend.write_commit(commit.clone()).unwrap().0; - let first_commit = futures::executor::block_on(backend.read_commit(&first_id)).unwrap(); + let first_commit = block_on(backend.read_commit(&first_id)).unwrap(); assert_eq!(first_commit, commit); let first_git_commit = git_repo.find_commit(git_id(&first_id)).unwrap(); assert_eq!(first_git_commit.parent_ids().collect_vec(), vec![]); @@ -1217,7 +1217,7 @@ mod tests { // Only non-root commit as parent commit.parents = vec![first_id.clone()]; let second_id = backend.write_commit(commit.clone()).unwrap().0; - let second_commit = futures::executor::block_on(backend.read_commit(&second_id)).unwrap(); + let second_commit = block_on(backend.read_commit(&second_id)).unwrap(); assert_eq!(second_commit, commit); let second_git_commit = git_repo.find_commit(git_id(&second_id)).unwrap(); assert_eq!( @@ -1228,7 +1228,7 @@ mod tests { // Merge commit commit.parents = vec![first_id.clone(), second_id.clone()]; let merge_id = backend.write_commit(commit.clone()).unwrap().0; - let merge_commit = 
futures::executor::block_on(backend.read_commit(&merge_id)).unwrap(); + let merge_commit = block_on(backend.read_commit(&merge_id)).unwrap(); assert_eq!(merge_commit, commit); let merge_git_commit = git_repo.find_commit(git_id(&merge_id)).unwrap(); assert_eq!( @@ -1278,8 +1278,7 @@ mod tests { // When writing a tree-level conflict, the root tree on the git side has the // individual trees as subtrees. let read_commit_id = backend.write_commit(commit.clone()).unwrap().0; - let read_commit = - futures::executor::block_on(backend.read_commit(&read_commit_id)).unwrap(); + let read_commit = block_on(backend.read_commit(&read_commit_id)).unwrap(); assert_eq!(read_commit, commit); let git_commit = git_repo .find_commit(Oid::from_bytes(read_commit_id.as_bytes()).unwrap()) @@ -1308,8 +1307,7 @@ mod tests { // regular git tree. commit.root_tree = MergedTreeId::resolved(create_tree(5)); let read_commit_id = backend.write_commit(commit.clone()).unwrap().0; - let read_commit = - futures::executor::block_on(backend.read_commit(&read_commit_id)).unwrap(); + let read_commit = block_on(backend.read_commit(&read_commit_id)).unwrap(); assert_eq!(read_commit, commit); let git_commit = git_repo .find_commit(Oid::from_bytes(read_commit_id.as_bytes()).unwrap()) @@ -1375,7 +1373,7 @@ mod tests { let (commit_id2, mut actual_commit2) = backend.write_commit(commit2.clone()).unwrap(); // The returned matches the ID assert_eq!( - futures::executor::block_on(backend.read_commit(&commit_id2)).unwrap(), + block_on(backend.read_commit(&commit_id2)).unwrap(), actual_commit2 ); assert_ne!(commit_id2, commit_id1); diff --git a/lib/src/local_backend.rs b/lib/src/local_backend.rs index f02e8c89f0..b1c87c2681 100644 --- a/lib/src/local_backend.rs +++ b/lib/src/local_backend.rs @@ -461,6 +461,7 @@ fn conflict_term_to_proto(part: &ConflictTerm) -> crate::protos::local_store::co #[cfg(test)] mod tests { use assert_matches::assert_matches; + use futures::executor::block_on; use super::*; use 
crate::backend::MillisSinceEpoch; @@ -492,26 +493,25 @@ mod tests { // Only root commit as parent commit.parents = vec![backend.root_commit_id().clone()]; let first_id = backend.write_commit(commit.clone()).unwrap().0; - let first_commit = futures::executor::block_on(backend.read_commit(&first_id)).unwrap(); + let first_commit = block_on(backend.read_commit(&first_id)).unwrap(); assert_eq!(first_commit, commit); // Only non-root commit as parent commit.parents = vec![first_id.clone()]; let second_id = backend.write_commit(commit.clone()).unwrap().0; - let second_commit = futures::executor::block_on(backend.read_commit(&second_id)).unwrap(); + let second_commit = block_on(backend.read_commit(&second_id)).unwrap(); assert_eq!(second_commit, commit); // Merge commit commit.parents = vec![first_id.clone(), second_id.clone()]; let merge_id = backend.write_commit(commit.clone()).unwrap().0; - let merge_commit = futures::executor::block_on(backend.read_commit(&merge_id)).unwrap(); + let merge_commit = block_on(backend.read_commit(&merge_id)).unwrap(); assert_eq!(merge_commit, commit); // Merge commit with root as one parent commit.parents = vec![first_id, backend.root_commit_id().clone()]; let root_merge_id = backend.write_commit(commit.clone()).unwrap().0; - let root_merge_commit = - futures::executor::block_on(backend.read_commit(&root_merge_id)).unwrap(); + let root_merge_commit = block_on(backend.read_commit(&root_merge_id)).unwrap(); assert_eq!(root_merge_commit, commit); } diff --git a/lib/src/merged_tree.rs b/lib/src/merged_tree.rs index c650e98697..98c42be4a0 100644 --- a/lib/src/merged_tree.rs +++ b/lib/src/merged_tree.rs @@ -20,6 +20,7 @@ use std::iter::zip; use std::sync::Arc; use std::{iter, vec}; +use futures::executor::block_on; use futures::stream::StreamExt; use itertools::Itertools; @@ -891,7 +892,7 @@ impl Iterator for TreeDiffIterator<'_> { let tree_after = after.is_tree(); let post_subdir = if (tree_before || tree_after) && 
!self.matcher.visit(&path).is_nothing() { - let (before_tree, after_tree) = futures::executor::block_on(async { + let (before_tree, after_tree) = block_on(async { let before_tree = Self::tree(dir.tree1.as_ref(), &path, &before); let after_tree = Self::tree(dir.tree2.as_ref(), &path, &after); futures::join!(before_tree, after_tree) diff --git a/lib/src/store.rs b/lib/src/store.rs index ed743ac84e..46fe7e7074 100644 --- a/lib/src/store.rs +++ b/lib/src/store.rs @@ -20,6 +20,8 @@ use std::fmt::{Debug, Formatter}; use std::io::Read; use std::sync::{Arc, RwLock}; +use futures::executor::block_on; + use crate::backend; use crate::backend::{ Backend, BackendResult, ChangeId, CommitId, ConflictId, FileId, MergedTreeId, SymlinkId, @@ -97,7 +99,7 @@ impl Store { } pub fn get_commit(self: &Arc, id: &CommitId) -> BackendResult { - futures::executor::block_on(self.get_commit_async(id)) + block_on(self.get_commit_async(id)) } pub async fn get_commit_async(self: &Arc, id: &CommitId) -> BackendResult { @@ -132,7 +134,7 @@ impl Store { } pub fn get_tree(self: &Arc, dir: &RepoPath, id: &TreeId) -> BackendResult { - futures::executor::block_on(self.get_tree_async(dir, id)) + block_on(self.get_tree_async(dir, id)) } pub async fn get_tree_async( @@ -192,7 +194,7 @@ impl Store { } pub fn read_file(&self, path: &RepoPath, id: &FileId) -> BackendResult> { - futures::executor::block_on(self.read_file_async(path, id)) + block_on(self.read_file_async(path, id)) } pub async fn read_file_async( @@ -208,7 +210,7 @@ impl Store { } pub fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> BackendResult { - futures::executor::block_on(self.read_symlink_async(path, id)) + block_on(self.read_symlink_async(path, id)) } pub async fn read_symlink_async( @@ -228,7 +230,7 @@ impl Store { path: &RepoPath, id: &ConflictId, ) -> BackendResult>> { - let backend_conflict = futures::executor::block_on(self.backend.read_conflict(path, id))?; + let backend_conflict = 
block_on(self.backend.read_conflict(path, id))?; Ok(Merge::from_backend_conflict(backend_conflict)) } From 551c77b759fa44feac57228cd120abf945fb2908 Mon Sep 17 00:00:00 2001 From: Martin von Zweigbergk Date: Wed, 18 Oct 2023 17:20:59 -0700 Subject: [PATCH 2/3] conflicts: reduce some duplication in tests by extracting a closure --- lib/tests/test_conflicts.rs | 33 +++++++++++++-------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/lib/tests/test_conflicts.rs b/lib/tests/test_conflicts.rs index 4cf2fc3d04..b2a6bfa85c 100644 --- a/lib/tests/test_conflicts.rs +++ b/lib/tests/test_conflicts.rs @@ -629,23 +629,20 @@ fn test_update_conflict_from_content() { // If the content is unchanged compared to the materialized value, we get the // old conflict id back. let materialized = materialize_conflict_string(store, &path, &conflict); - let result = update_from_content(&conflict, store, &path, materialized.as_bytes()).unwrap(); - assert_eq!(result, conflict); + let parse = |content| update_from_content(&conflict, store, &path, content).unwrap(); + assert_eq!(parse(materialized.as_bytes()), conflict); // If the conflict is resolved, we get None back to indicate that. - let result = - update_from_content(&conflict, store, &path, b"resolved 1\nline 2\nresolved 3\n").unwrap(); let expected_file_id = testutils::write_file(store, &path, "resolved 1\nline 2\nresolved 3\n"); - assert_eq!(result, Merge::normal(expected_file_id)); + assert_eq!( + parse(b"resolved 1\nline 2\nresolved 3\n"), + Merge::normal(expected_file_id) + ); // If the conflict is partially resolved, we get a new conflict back. 
- let new_conflict = update_from_content( - &conflict, - store, - &path, + let new_conflict = parse( b"resolved 1\nline 2\n<<<<<<<\n%%%%%%%\n-line 3\n+left 3\n+++++++\nright 3\n>>>>>>>\n", - ) - .unwrap(); + ); assert_ne!(new_conflict, conflict); // Calculate expected new FileIds let new_base_file_id = testutils::write_file(store, &path, "resolved 1\nline 2\nline 3\n"); @@ -676,21 +673,17 @@ fn test_update_conflict_from_content_modify_delete() { // If the content is unchanged compared to the materialized value, we get the // old conflict id back. let materialized = materialize_conflict_string(store, &path, &conflict); - let result = update_from_content(&conflict, store, &path, materialized.as_bytes()).unwrap(); - assert_eq!(result, conflict); + let parse = |content| update_from_content(&conflict, store, &path, content).unwrap(); + assert_eq!(parse(materialized.as_bytes()), conflict); // If the conflict is resolved, we get None back to indicate that. - let result = update_from_content(&conflict, store, &path, b"resolved\n").unwrap(); let expected_file_id = testutils::write_file(store, &path, "resolved\n"); - assert_eq!(result, Merge::normal(expected_file_id)); + assert_eq!(parse(b"resolved\n"), Merge::normal(expected_file_id)); // If the conflict is modified, we get a new conflict back. - let new_conflict = update_from_content(&conflict, - store, - &path, + let new_conflict = parse( b"<<<<<<<\n%%%%%%%\n line 1\n-line 2 before\n+line 2 modified after\n line 3\n+++++++\n>>>>>>>\n", - ) - .unwrap(); + ); // Calculate expected new FileIds let new_base_file_id = testutils::write_file(store, &path, "line 1\nline 2 before\nline 3\n"); let new_left_file_id = From f903ac5d7c563cd3b49564c1d2288b0293fa3c4d Mon Sep 17 00:00:00 2001 From: Martin von Zweigbergk Date: Wed, 18 Oct 2023 16:57:19 -0700 Subject: [PATCH 3/3] conflicts: make materialization async We need to let async-ness propagate up from the backend because `block_on()` doesn't like to be called recursively. 
The conflict materialization code is a good place to make async because it doesn't depend on anything that isn't already async-ready. --- Cargo.lock | 1 + cli/Cargo.toml | 3 ++- cli/src/commands/mod.rs | 9 ++++++++- cli/src/diff_util.rs | 17 +++++++++++++++-- cli/src/merge_tools/builtin.rs | 3 ++- cli/src/merge_tools/external.rs | 5 +++-- cli/src/merge_tools/mod.rs | 3 ++- lib/src/conflicts.rs | 22 ++++++++++++++-------- lib/src/local_working_copy.rs | 14 ++++++++++---- lib/tests/test_conflicts.rs | 7 ++++--- 10 files changed, 61 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 43aabc134b..d11b22eccd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1001,6 +1001,7 @@ dependencies = [ "crossterm 0.26.1", "dirs", "esl01-renderdag", + "futures 0.3.28", "git2", "glob", "hex", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index abdfd47685..9d4c4e87df 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -39,6 +39,7 @@ criterion = { workspace = true, optional = true } crossterm = { workspace = true } dirs = { workspace = true } esl01-renderdag = { workspace = true } +futures = { workspace = true } git2 = { workspace = true } glob = { workspace = true } hex = { workspace = true } @@ -89,4 +90,4 @@ watchman = ["jj-lib/watchman"] [package.metadata.binstall] # The archive name is jj, not jj-cli. Also, `cargo binstall` gets # confused by the `v` before versions in archive name. 
-pkg-url="{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }" +pkg-url = "{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }" diff --git a/cli/src/commands/mod.rs b/cli/src/commands/mod.rs index 6ad313b298..461ea05e47 100644 --- a/cli/src/commands/mod.rs +++ b/cli/src/commands/mod.rs @@ -29,6 +29,7 @@ use std::{fmt, fs, io}; use clap::builder::NonEmptyStringValueParser; use clap::parser::ValueSource; use clap::{ArgGroup, Command, CommandFactory, FromArgMatches, Subcommand}; +use futures::executor::block_on; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; use jj_lib::backend::{CommitId, ObjectId, TreeValue}; @@ -1521,7 +1522,13 @@ fn cmd_cat(ui: &mut Ui, command: &CommandHelper, args: &CatArgs) -> Result<(), C } Err(conflict) => { let mut contents = vec![]; - conflicts::materialize(&conflict, repo.store(), &path, &mut contents).unwrap(); + block_on(conflicts::materialize( + &conflict, + repo.store(), + &path, + &mut contents, + )) + .unwrap(); ui.request_pager(); ui.stdout_formatter().write_all(&contents)?; } diff --git a/cli/src/diff_util.rs b/cli/src/diff_util.rs index f3b8a05fd2..c61f13417e 100644 --- a/cli/src/diff_util.rs +++ b/cli/src/diff_util.rs @@ -18,6 +18,7 @@ use std::io; use std::ops::Range; use std::sync::Arc; +use futures::executor::block_on; use itertools::Itertools; use jj_lib::backend::{ObjectId, TreeValue}; use jj_lib::commit::Commit; @@ -363,7 +364,13 @@ fn diff_content( } None => { let mut content = vec![]; - conflicts::materialize(value, repo.store(), path, &mut content).unwrap(); + block_on(conflicts::materialize( + value, + repo.store(), + path, + &mut content, + )) + .unwrap(); Ok(content) } Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) => { @@ -516,7 +523,13 @@ fn git_diff_part( None => { mode = "100644".to_string(); hash = "0000000000".to_string(); - conflicts::materialize(value, repo.store(), path, &mut content).unwrap(); + 
block_on(conflicts::materialize( + value, + repo.store(), + path, + &mut content, + )) + .unwrap(); } Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => { panic!("Unexpected {value:?} in diff at path {path:?}"); diff --git a/cli/src/merge_tools/builtin.rs b/cli/src/merge_tools/builtin.rs index 705ed9bb2b..1808367640 100644 --- a/cli/src/merge_tools/builtin.rs +++ b/cli/src/merge_tools/builtin.rs @@ -542,6 +542,7 @@ pub fn edit_merge_builtin( #[cfg(test)] mod tests { + use futures::executor::block_on; use jj_lib::conflicts::extract_as_single_hunk; use jj_lib::repo::Repo; use testutils::TestRepo; @@ -724,7 +725,7 @@ mod tests { to_file_id(right_tree.path_value(&path)), ], ); - let content = extract_as_single_hunk(&merge, store, &path); + let content = block_on(extract_as_single_hunk(&merge, store, &path)); let slices = content.map(|ContentHunk(buf)| buf.as_slice()); let merge_result = files::merge(slices); let sections = make_merge_sections(merge_result).unwrap(); diff --git a/cli/src/merge_tools/external.rs b/cli/src/merge_tools/external.rs index a4ee7e88aa..5cd574af25 100644 --- a/cli/src/merge_tools/external.rs +++ b/cli/src/merge_tools/external.rs @@ -6,6 +6,7 @@ use std::process::{Command, ExitStatus, Stdio}; use std::sync::Arc; use config::ConfigError; +use futures::executor::block_on; use itertools::Itertools; use jj_lib::backend::{FileId, MergedTreeId, TreeValue}; use jj_lib::conflicts::{self, materialize_merge_result}; @@ -357,12 +358,12 @@ pub fn run_mergetool_external( } let new_file_ids = if editor.merge_tool_edits_conflict_markers { - conflicts::update_from_content( + block_on(conflicts::update_from_content( &file_merge, tree.store(), repo_path, output_file_contents.as_slice(), - )? + ))? 
} else { let new_file_id = tree .store() diff --git a/cli/src/merge_tools/mod.rs b/cli/src/merge_tools/mod.rs index 7b9a6e40dd..965a7d56b4 100644 --- a/cli/src/merge_tools/mod.rs +++ b/cli/src/merge_tools/mod.rs @@ -18,6 +18,7 @@ mod external; use std::sync::Arc; use config::ConfigError; +use futures::executor::block_on; use jj_lib::backend::MergedTreeId; use jj_lib::conflicts::extract_as_single_hunk; use jj_lib::gitignore::GitIgnoreFile; @@ -112,7 +113,7 @@ pub fn run_mergetool( sides: file_merge.num_sides(), }); }; - let content = extract_as_single_hunk(&file_merge, tree.store(), repo_path); + let content = block_on(extract_as_single_hunk(&file_merge, tree.store(), repo_path)); let editor = get_merge_tool_from_settings(ui, settings)?; match editor { diff --git a/lib/src/conflicts.rs b/lib/src/conflicts.rs index c7c9be761c..b09a1badda 100644 --- a/lib/src/conflicts.rs +++ b/lib/src/conflicts.rs @@ -17,6 +17,7 @@ use std::io::Write; use std::iter::zip; +use futures::StreamExt; use itertools::Itertools; use crate::backend::{BackendResult, FileId, TreeValue}; @@ -57,12 +58,13 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result Ok(()) } -fn get_file_contents(store: &Store, path: &RepoPath, term: &Option) -> ContentHunk { +async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option) -> ContentHunk { match term { Some(id) => { let mut content = vec![]; store - .read_file(path, id) + .read_file_async(path, id) + .await .unwrap() .read_to_end(&mut content) .unwrap(); @@ -74,22 +76,26 @@ fn get_file_contents(store: &Store, path: &RepoPath, term: &Option) -> C } } -pub fn extract_as_single_hunk( +pub async fn extract_as_single_hunk( merge: &Merge>, store: &Store, path: &RepoPath, ) -> Merge { - merge.map(|term| get_file_contents(store, path, term)) + let builder: MergeBuilder = futures::stream::iter(merge.iter()) + .then(|term| get_file_contents(store, path, term)) + .collect() + .await; + builder.build() } -pub fn materialize( 
+pub async fn materialize( conflict: &Merge>, store: &Store, path: &RepoPath, output: &mut dyn Write, ) -> std::io::Result<()> { if let Some(file_merge) = conflict.to_file_merge() { - let content = extract_as_single_hunk(&file_merge, store, path); + let content = extract_as_single_hunk(&file_merge, store, path).await; materialize_merge_result(&content, output) } else { // Unless all terms are regular files, we can't do much better than to try to @@ -285,7 +291,7 @@ fn parse_conflict_hunk(input: &[u8]) -> Merge { /// Parses conflict markers in `content` and returns an updated version of /// `file_ids` with the new contents. If no (valid) conflict markers remain, a /// single resolves `FileId` will be returned. -pub fn update_from_content( +pub async fn update_from_content( file_ids: &Merge>, store: &Store, path: &RepoPath, @@ -297,7 +303,7 @@ pub fn update_from_content( // conflicts (for example) are not converted to regular files in the working // copy. let mut old_content = Vec::with_capacity(content.len()); - let merge_hunk = extract_as_single_hunk(file_ids, store, path); + let merge_hunk = extract_as_single_hunk(file_ids, store, path).await; materialize_merge_result(&merge_hunk, &mut old_content).unwrap(); if content == old_content { return Ok(file_ids.clone()); diff --git a/lib/src/local_working_copy.rs b/lib/src/local_working_copy.rs index da52f06499..1d7c54753a 100644 --- a/lib/src/local_working_copy.rs +++ b/lib/src/local_working_copy.rs @@ -30,6 +30,7 @@ use std::sync::mpsc::{channel, Sender}; use std::sync::Arc; use std::time::UNIX_EPOCH; +use futures::executor::block_on; use itertools::Itertools; use once_cell::unsync::OnceCell; use prost::Message; @@ -955,12 +956,12 @@ impl TreeState { message: format!("Failed to open file {}", disk_path.display()), err: err.into(), })?; - let new_file_ids = conflicts::update_from_content( + let new_file_ids = block_on(conflicts::update_from_content( &old_file_ids, self.store.as_ref(), repo_path, &content, - )?; + ))?; 
match new_file_ids.into_resolved() { Ok(file_id) => { #[cfg(windows)] @@ -1062,8 +1063,13 @@ impl TreeState { err: err.into(), })?; let mut conflict_data = vec![]; - conflicts::materialize(conflict, self.store.as_ref(), path, &mut conflict_data) - .expect("Failed to materialize conflict to in-memory buffer"); + block_on(conflicts::materialize( + conflict, + self.store.as_ref(), + path, + &mut conflict_data, + )) + .expect("Failed to materialize conflict to in-memory buffer"); file.write_all(&conflict_data) .map_err(|err| CheckoutError::Other { message: format!("Failed to write conflict to file {}", disk_path.display()), diff --git a/lib/tests/test_conflicts.rs b/lib/tests/test_conflicts.rs index b2a6bfa85c..352ff3d6b8 100644 --- a/lib/tests/test_conflicts.rs +++ b/lib/tests/test_conflicts.rs @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +use futures::executor::block_on; use jj_lib::backend::FileId; use jj_lib::conflicts::{ extract_as_single_hunk, materialize_merge_result, parse_conflict, update_from_content, @@ -629,7 +630,7 @@ fn test_update_conflict_from_content() { // If the content is unchanged compared to the materialized value, we get the // old conflict id back. let materialized = materialize_conflict_string(store, &path, &conflict); - let parse = |content| update_from_content(&conflict, store, &path, content).unwrap(); + let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap(); assert_eq!(parse(materialized.as_bytes()), conflict); // If the conflict is resolved, we get None back to indicate that. @@ -673,7 +674,7 @@ fn test_update_conflict_from_content_modify_delete() { // If the content is unchanged compared to the materialized value, we get the // old conflict id back. 
let materialized = materialize_conflict_string(store, &path, &conflict); - let parse = |content| update_from_content(&conflict, store, &path, content).unwrap(); + let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap(); assert_eq!(parse(materialized.as_bytes()), conflict); // If the conflict is resolved, we get None back to indicate that. @@ -704,7 +705,7 @@ fn materialize_conflict_string( conflict: &Merge>, ) -> String { let mut result: Vec = vec![]; - let contents = extract_as_single_hunk(conflict, store, path); + let contents = block_on(extract_as_single_hunk(conflict, store, path)); materialize_merge_result(&contents, &mut result).unwrap(); String::from_utf8(result).unwrap() }