Make conflicts materialization async #2401

Merged 3 commits on Oct 20, 2023

1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion cli/Cargo.toml
@@ -39,6 +39,7 @@ criterion = { workspace = true, optional = true }
crossterm = { workspace = true }
dirs = { workspace = true }
esl01-renderdag = { workspace = true }
futures = { workspace = true }
git2 = { workspace = true }
glob = { workspace = true }
hex = { workspace = true }
@@ -89,4 +90,4 @@ watchman = ["jj-lib/watchman"]
[package.metadata.binstall]
# The archive name is jj, not jj-cli. Also, `cargo binstall` gets
# confused by the `v` before versions in archive name.
pkg-url="{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
pkg-url = "{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
9 changes: 8 additions & 1 deletion cli/src/commands/mod.rs
@@ -29,6 +29,7 @@ use std::{fmt, fs, io};
use clap::builder::NonEmptyStringValueParser;
use clap::parser::ValueSource;
use clap::{ArgGroup, Command, CommandFactory, FromArgMatches, Subcommand};
use futures::executor::block_on;
use indexmap::{IndexMap, IndexSet};
use itertools::Itertools;
use jj_lib::backend::{CommitId, ObjectId, TreeValue};
@@ -1521,7 +1522,13 @@ fn cmd_cat(ui: &mut Ui, command: &CommandHelper, args: &CatArgs) -> Result<(), C
}
Err(conflict) => {
let mut contents = vec![];
conflicts::materialize(&conflict, repo.store(), &path, &mut contents).unwrap();
block_on(conflicts::materialize(
&conflict,
repo.store(),
&path,
&mut contents,
))
.unwrap();
ui.request_pager();
ui.stdout_formatter().write_all(&contents)?;
}
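
Not part of the PR itself: a minimal, self-contained sketch of the bridging pattern the CLI changes above rely on, where a synchronous call site drives the now-async function to completion with futures::executor::block_on. The async writer below is an illustrative stand-in, not jj's conflicts::materialize.

use std::io::Write;

use futures::executor::block_on;

// Illustrative stand-in for an async producer such as conflicts::materialize.
async fn write_conflict_markers(output: &mut dyn Write) -> std::io::Result<()> {
    output.write_all(b"<<<<<<<\n%%%%%%%\n>>>>>>>\n")
}

fn main() -> std::io::Result<()> {
    let mut buf = Vec::new();
    // block_on runs the future on the current thread until it completes,
    // so the surrounding function keeps its synchronous signature.
    block_on(write_conflict_markers(&mut buf))?;
    assert_eq!(buf, b"<<<<<<<\n%%%%%%%\n>>>>>>>\n".to_vec());
    Ok(())
}
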
17 changes: 15 additions & 2 deletions cli/src/diff_util.rs
@@ -18,6 +18,7 @@ use std::io;
use std::ops::Range;
use std::sync::Arc;

use futures::executor::block_on;
use itertools::Itertools;
use jj_lib::backend::{ObjectId, TreeValue};
use jj_lib::commit::Commit;
@@ -363,7 +364,13 @@ fn diff_content(
}
None => {
let mut content = vec![];
conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
block_on(conflicts::materialize(
value,
repo.store(),
path,
&mut content,
))
.unwrap();
Ok(content)
}
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) => {
@@ -516,7 +523,13 @@ fn git_diff_part(
None => {
mode = "100644".to_string();
hash = "0000000000".to_string();
conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
block_on(conflicts::materialize(
value,
repo.store(),
path,
&mut content,
))
.unwrap();
}
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => {
panic!("Unexpected {value:?} in diff at path {path:?}");
3 changes: 2 additions & 1 deletion cli/src/merge_tools/builtin.rs
@@ -542,6 +542,7 @@ pub fn edit_merge_builtin(

#[cfg(test)]
mod tests {
use futures::executor::block_on;
use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::repo::Repo;
use testutils::TestRepo;
@@ -724,7 +725,7 @@ mod tests {
to_file_id(right_tree.path_value(&path)),
],
);
let content = extract_as_single_hunk(&merge, store, &path);
let content = block_on(extract_as_single_hunk(&merge, store, &path));
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
let merge_result = files::merge(slices);
let sections = make_merge_sections(merge_result).unwrap();
5 changes: 3 additions & 2 deletions cli/src/merge_tools/external.rs
@@ -6,6 +6,7 @@ use std::process::{Command, ExitStatus, Stdio};
use std::sync::Arc;

use config::ConfigError;
use futures::executor::block_on;
use itertools::Itertools;
use jj_lib::backend::{FileId, MergedTreeId, TreeValue};
use jj_lib::conflicts::{self, materialize_merge_result};
@@ -357,12 +358,12 @@ pub fn run_mergetool_external(
}

let new_file_ids = if editor.merge_tool_edits_conflict_markers {
conflicts::update_from_content(
block_on(conflicts::update_from_content(
&file_merge,
tree.store(),
repo_path,
output_file_contents.as_slice(),
)?
))?
} else {
let new_file_id = tree
.store()
3 changes: 2 additions & 1 deletion cli/src/merge_tools/mod.rs
@@ -18,6 +18,7 @@ mod external;
use std::sync::Arc;

use config::ConfigError;
use futures::executor::block_on;
use jj_lib::backend::MergedTreeId;
use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::gitignore::GitIgnoreFile;
@@ -112,7 +113,7 @@ pub fn run_mergetool(
sides: file_merge.num_sides(),
});
};
let content = extract_as_single_hunk(&file_merge, tree.store(), repo_path);
let content = block_on(extract_as_single_hunk(&file_merge, tree.store(), repo_path));

let editor = get_merge_tool_from_settings(ui, settings)?;
match editor {
22 changes: 14 additions & 8 deletions lib/src/conflicts.rs
@@ -17,6 +17,7 @@
use std::io::Write;
use std::iter::zip;

use futures::StreamExt;
use itertools::Itertools;

use crate::backend::{BackendResult, FileId, TreeValue};
@@ -57,12 +58,13 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result
Ok(())
}

fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
match term {
Some(id) => {
let mut content = vec![];
store
.read_file(path, id)
.read_file_async(path, id)
.await
.unwrap()
.read_to_end(&mut content)
.unwrap();
@@ -74,22 +76,26 @@ fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> C
}
}

pub fn extract_as_single_hunk(
pub async fn extract_as_single_hunk(
merge: &Merge<Option<FileId>>,
store: &Store,
path: &RepoPath,
) -> Merge<ContentHunk> {
merge.map(|term| get_file_contents(store, path, term))
let builder: MergeBuilder<ContentHunk> = futures::stream::iter(merge.iter())
.then(|term| get_file_contents(store, path, term))
.collect()
.await;
builder.build()
}

pub fn materialize(
pub async fn materialize(
conflict: &Merge<Option<TreeValue>>,
store: &Store,
path: &RepoPath,
output: &mut dyn Write,
) -> std::io::Result<()> {
if let Some(file_merge) = conflict.to_file_merge() {
let content = extract_as_single_hunk(&file_merge, store, path);
let content = extract_as_single_hunk(&file_merge, store, path).await;
materialize_merge_result(&content, output)
} else {
// Unless all terms are regular files, we can't do much better than to try to
@@ -285,7 +291,7 @@ fn parse_conflict_hunk(input: &[u8]) -> Merge<ContentHunk> {
/// Parses conflict markers in `content` and returns an updated version of
/// `file_ids` with the new contents. If no (valid) conflict markers remain, a
/// single resolved `FileId` will be returned.
pub fn update_from_content(
pub async fn update_from_content(
file_ids: &Merge<Option<FileId>>,
store: &Store,
path: &RepoPath,
@@ -297,7 +303,7 @@ pub fn update_from_content(
// conflicts (for example) are not converted to regular files in the working
// copy.
let mut old_content = Vec::with_capacity(content.len());
let merge_hunk = extract_as_single_hunk(file_ids, store, path);
let merge_hunk = extract_as_single_hunk(file_ids, store, path).await;
materialize_merge_result(&merge_hunk, &mut old_content).unwrap();
if content == old_content {
return Ok(file_ids.clone());
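
Not part of the PR itself: a minimal sketch of the stream pattern the new extract_as_single_hunk uses, mapping each term through an async loader with StreamExt::then and collecting the outputs after every future has resolved. The loader and types below are illustrative stand-ins, not jj's Store or Merge API.

use futures::executor::block_on;
use futures::StreamExt;

// Illustrative stand-in for an async per-term loader such as get_file_contents.
async fn load_contents(term: &str) -> String {
    format!("contents of {term}")
}

async fn load_all(terms: &[&str]) -> Vec<String> {
    // `then` awaits the loader for one term at a time, in order; the resulting
    // stream of outputs is then collected into a Vec.
    futures::stream::iter(terms)
        .then(|term| load_contents(term))
        .collect()
        .await
}

fn main() {
    let hunks = block_on(load_all(&["base", "left", "right"]));
    assert_eq!(hunks.len(), 3);
    assert_eq!(hunks[0], "contents of base");
}
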
30 changes: 14 additions & 16 deletions lib/src/git_backend.rs
@@ -931,6 +931,7 @@ fn bytes_vec_from_json(value: &serde_json::Value) -> Vec<u8> {
#[cfg(test)]
mod tests {
use assert_matches::assert_matches;
use futures::executor::block_on;
use test_case::test_case;

use super::*;
@@ -1013,7 +1014,7 @@ mod tests {
.collect_vec();
assert_eq!(git_refs, vec![git_commit_id2]);

let commit = futures::executor::block_on(backend.read_commit(&commit_id)).unwrap();
let commit = block_on(backend.read_commit(&commit_id)).unwrap();
assert_eq!(&commit.change_id, &change_id);
assert_eq!(commit.parents, vec![CommitId::from_bytes(&[0; 20])]);
assert_eq!(commit.predecessors, vec![]);
@@ -1042,7 +1043,7 @@
);
assert_eq!(commit.committer.timestamp.tz_offset, -480);

let root_tree = futures::executor::block_on(backend.read_tree(
let root_tree = block_on(backend.read_tree(
&RepoPath::root(),
&TreeId::from_bytes(root_tree_id.as_bytes()),
))
@@ -1056,7 +1057,7 @@ mod tests {
&TreeValue::Tree(TreeId::from_bytes(dir_tree_id.as_bytes()))
);

let dir_tree = futures::executor::block_on(backend.read_tree(
let dir_tree = block_on(backend.read_tree(
&RepoPath::from_internal_string("dir"),
&TreeId::from_bytes(dir_tree_id.as_bytes()),
))
@@ -1079,7 +1080,7 @@ mod tests {
&TreeValue::Symlink(SymlinkId::from_bytes(blob2.as_bytes()))
);

let commit2 = futures::executor::block_on(backend.read_commit(&commit_id2)).unwrap();
let commit2 = block_on(backend.read_commit(&commit_id2)).unwrap();
assert_eq!(commit2.parents, vec![commit_id.clone()]);
assert_eq!(commit.predecessors, vec![]);
assert_eq!(
@@ -1118,10 +1119,9 @@

// read_commit() without import_head_commits() works as of now. This might be
// changed later.
assert!(futures::executor::block_on(
backend.read_commit(&CommitId::from_bytes(git_commit_id.as_bytes()))
)
.is_ok());
assert!(
block_on(backend.read_commit(&CommitId::from_bytes(git_commit_id.as_bytes()))).is_ok()
);
assert!(
backend
.cached_extra_metadata_table()
@@ -1209,15 +1209,15 @@ mod tests {
// Only root commit as parent
commit.parents = vec![backend.root_commit_id().clone()];
let first_id = backend.write_commit(commit.clone()).unwrap().0;
let first_commit = futures::executor::block_on(backend.read_commit(&first_id)).unwrap();
let first_commit = block_on(backend.read_commit(&first_id)).unwrap();
assert_eq!(first_commit, commit);
let first_git_commit = git_repo.find_commit(git_id(&first_id)).unwrap();
assert_eq!(first_git_commit.parent_ids().collect_vec(), vec![]);

// Only non-root commit as parent
commit.parents = vec![first_id.clone()];
let second_id = backend.write_commit(commit.clone()).unwrap().0;
let second_commit = futures::executor::block_on(backend.read_commit(&second_id)).unwrap();
let second_commit = block_on(backend.read_commit(&second_id)).unwrap();
assert_eq!(second_commit, commit);
let second_git_commit = git_repo.find_commit(git_id(&second_id)).unwrap();
assert_eq!(
@@ -1228,7 +1228,7 @@ mod tests {
// Merge commit
commit.parents = vec![first_id.clone(), second_id.clone()];
let merge_id = backend.write_commit(commit.clone()).unwrap().0;
let merge_commit = futures::executor::block_on(backend.read_commit(&merge_id)).unwrap();
let merge_commit = block_on(backend.read_commit(&merge_id)).unwrap();
assert_eq!(merge_commit, commit);
let merge_git_commit = git_repo.find_commit(git_id(&merge_id)).unwrap();
assert_eq!(
@@ -1278,8 +1278,7 @@ mod tests {
// When writing a tree-level conflict, the root tree on the git side has the
// individual trees as subtrees.
let read_commit_id = backend.write_commit(commit.clone()).unwrap().0;
let read_commit =
futures::executor::block_on(backend.read_commit(&read_commit_id)).unwrap();
let read_commit = block_on(backend.read_commit(&read_commit_id)).unwrap();
assert_eq!(read_commit, commit);
let git_commit = git_repo
.find_commit(Oid::from_bytes(read_commit_id.as_bytes()).unwrap())
@@ -1308,8 +1307,7 @@ mod tests {
// regular git tree.
commit.root_tree = MergedTreeId::resolved(create_tree(5));
let read_commit_id = backend.write_commit(commit.clone()).unwrap().0;
let read_commit =
futures::executor::block_on(backend.read_commit(&read_commit_id)).unwrap();
let read_commit = block_on(backend.read_commit(&read_commit_id)).unwrap();
assert_eq!(read_commit, commit);
let git_commit = git_repo
.find_commit(Oid::from_bytes(read_commit_id.as_bytes()).unwrap())
@@ -1375,7 +1373,7 @@ mod tests {
let (commit_id2, mut actual_commit2) = backend.write_commit(commit2.clone()).unwrap();
// The returned commit matches the ID
assert_eq!(
futures::executor::block_on(backend.read_commit(&commit_id2)).unwrap(),
block_on(backend.read_commit(&commit_id2)).unwrap(),
actual_commit2
);
assert_ne!(commit_id2, commit_id1);
10 changes: 5 additions & 5 deletions lib/src/local_backend.rs
@@ -461,6 +461,7 @@ fn conflict_term_to_proto(part: &ConflictTerm) -> crate::protos::local_store::co
#[cfg(test)]
mod tests {
use assert_matches::assert_matches;
use futures::executor::block_on;

use super::*;
use crate::backend::MillisSinceEpoch;
@@ -492,26 +493,25 @@ mod tests {
// Only root commit as parent
commit.parents = vec![backend.root_commit_id().clone()];
let first_id = backend.write_commit(commit.clone()).unwrap().0;
let first_commit = futures::executor::block_on(backend.read_commit(&first_id)).unwrap();
let first_commit = block_on(backend.read_commit(&first_id)).unwrap();
assert_eq!(first_commit, commit);

// Only non-root commit as parent
commit.parents = vec![first_id.clone()];
let second_id = backend.write_commit(commit.clone()).unwrap().0;
let second_commit = futures::executor::block_on(backend.read_commit(&second_id)).unwrap();
let second_commit = block_on(backend.read_commit(&second_id)).unwrap();
assert_eq!(second_commit, commit);

// Merge commit
commit.parents = vec![first_id.clone(), second_id.clone()];
let merge_id = backend.write_commit(commit.clone()).unwrap().0;
let merge_commit = futures::executor::block_on(backend.read_commit(&merge_id)).unwrap();
let merge_commit = block_on(backend.read_commit(&merge_id)).unwrap();
assert_eq!(merge_commit, commit);

// Merge commit with root as one parent
commit.parents = vec![first_id, backend.root_commit_id().clone()];
let root_merge_id = backend.write_commit(commit.clone()).unwrap().0;
let root_merge_commit =
futures::executor::block_on(backend.read_commit(&root_merge_id)).unwrap();
let root_merge_commit = block_on(backend.read_commit(&root_merge_id)).unwrap();
assert_eq!(root_merge_commit, commit);
}
