diff --git a/lib/src/default_index/rev_walk.rs b/lib/src/default_index/rev_walk.rs
index af0da9a37d..47ac17d242 100644
--- a/lib/src/default_index/rev_walk.rs
+++ b/lib/src/default_index/rev_walk.rs
@@ -22,57 +22,114 @@ use std::ops::Range;
 use smallvec::SmallVec;
 
 use super::composite::CompositeIndex;
-use super::entry::{IndexEntry, IndexPosition, SmallIndexPositionsVec};
+use super::entry::{IndexPosition, SmallIndexPositionsVec};
 
-pub(super) trait RevWalkIndex<'a> {
+/// Like `Iterator`, but doesn't borrow the `index` internally.
+pub(super) trait RevWalk<I: ?Sized> {
+    type Item;
+
+    /// Advances the iteration and returns the next item.
+    ///
+    /// The caller must provide the same `index` instance.
+    ///
+    /// Returns `None` when the iteration is finished. Once `None` is returned,
+    /// this will never resume. In other words, a `RevWalk` is fused.
+    fn next(&mut self, index: &I) -> Option<Self::Item>;
+
+    // The following methods are provided for convenience. They are not supposed
+    // to be reimplemented.
+
+    /// Reattaches the underlying `index`.
+    fn attach(self, index: I) -> RevWalkBorrowedIndexIter<I, Self>
+    where
+        Self: Sized,
+        I: Sized,
+    {
+        RevWalkBorrowedIndexIter { index, walk: self }
+    }
+}
+
+/// Adapter that turns `RevWalk` into `Iterator` by attaching borrowed `index`.
+#[derive(Clone, Debug)]
+#[must_use]
+pub(super) struct RevWalkBorrowedIndexIter<I, W> {
+    // TODO: `index: I` will be a reference type.
+    index: I,
+    walk: W,
+}
+
+impl<I, W> RevWalkBorrowedIndexIter<I, W> {
+    /// Turns into `'static`-lifetime walk object by detaching the index.
+    pub fn detach(self) -> W {
+        self.walk
+    }
+}
+
+impl<I, W: RevWalk<I>> Iterator for RevWalkBorrowedIndexIter<I, W> {
+    type Item = W::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.walk.next(&self.index)
+    }
+}
+
+impl<I, W: RevWalk<I>> FusedIterator for RevWalkBorrowedIndexIter<I, W> {}
+
+/// Adapter that turns `RevWalk` into `Iterator` by attaching owned `index`.
+#[derive(Clone, Debug)]
+#[must_use]
+pub(super) struct RevWalkOwnedIndexIter<I, W> {
+    index: I,
+    walk: W,
+}
+
+impl<I, W: RevWalk<I>> Iterator for RevWalkOwnedIndexIter<I, W> {
+    type Item = W::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.walk.next(&self.index)
+    }
+}
+
+impl<I, W: RevWalk<I>> FusedIterator for RevWalkOwnedIndexIter<I, W> {}
+
+pub(super) trait RevWalkIndex {
     type Position: Copy + Ord;
     type AdjacentPositions: IntoIterator<Item = Self::Position>;
 
-    fn entry_by_pos(&self, pos: Self::Position) -> IndexEntry<'a>;
-    fn adjacent_positions(&self, entry: &IndexEntry<'_>) -> Self::AdjacentPositions;
+    fn adjacent_positions(&self, pos: Self::Position) -> Self::AdjacentPositions;
 }
 
-impl<'a> RevWalkIndex<'a> for CompositeIndex<'a> {
+impl RevWalkIndex for CompositeIndex<'_> {
     type Position = IndexPosition;
     type AdjacentPositions = SmallIndexPositionsVec;
 
-    fn entry_by_pos(&self, pos: Self::Position) -> IndexEntry<'a> {
-        CompositeIndex::entry_by_pos(self, pos)
-    }
-
-    fn adjacent_positions(&self, entry: &IndexEntry<'_>) -> Self::AdjacentPositions {
-        entry.parent_positions()
+    fn adjacent_positions(&self, pos: Self::Position) -> Self::AdjacentPositions {
+        self.entry_by_pos(pos).parent_positions()
     }
 }
 
 #[derive(Clone)]
-pub(super) struct RevWalkDescendantsIndex<'a> {
-    index: CompositeIndex<'a>,
+pub(super) struct RevWalkDescendantsIndex {
     children_map: HashMap<IndexPosition, DescendantIndexPositionsVec>,
 }
 
 // See SmallIndexPositionsVec for the array size.
 type DescendantIndexPositionsVec = SmallVec<[Reverse<IndexPosition>; 4]>;
 
-impl<'a> RevWalkDescendantsIndex<'a> {
-    fn build<'b>(
-        index: CompositeIndex<'a>,
-        entries: impl IntoIterator<Item = IndexEntry<'b>>,
-    ) -> Self {
+impl RevWalkDescendantsIndex {
+    fn build(index: CompositeIndex, positions: impl IntoIterator<Item = IndexPosition>) -> Self {
         // For dense set, it's probably cheaper to use `Vec` instead of `HashMap`.
         let mut children_map: HashMap<IndexPosition, DescendantIndexPositionsVec> = HashMap::new();
-        for entry in entries {
-            children_map.entry(entry.position()).or_default(); // mark head node
-            for parent_pos in entry.parent_positions() {
+        for pos in positions {
+            children_map.entry(pos).or_default(); // mark head node
+            for parent_pos in index.entry_by_pos(pos).parent_positions() {
                 let parent = children_map.entry(parent_pos).or_default();
-                parent.push(Reverse(entry.position()));
+                parent.push(Reverse(pos));
             }
         }
 
-        RevWalkDescendantsIndex {
-            index,
-            children_map,
-        }
+        RevWalkDescendantsIndex { children_map }
     }
 
     fn contains_pos(&self, pos: IndexPosition) -> bool {
@@ -80,16 +137,12 @@ impl<'a> RevWalkDescendantsIndex<'a> {
     }
 }
 
-impl<'a> RevWalkIndex<'a> for RevWalkDescendantsIndex<'a> {
+impl RevWalkIndex for RevWalkDescendantsIndex {
    type Position = Reverse<IndexPosition>;
    type AdjacentPositions = DescendantIndexPositionsVec;
 
-    fn entry_by_pos(&self, pos: Self::Position) -> IndexEntry<'a> {
-        self.index.entry_by_pos(pos.0)
-    }
-
-    fn adjacent_positions(&self, entry: &IndexEntry<'_>) -> Self::AdjacentPositions {
-        self.children_map[&entry.position()].clone()
+    fn adjacent_positions(&self, pos: Self::Position) -> Self::AdjacentPositions {
+        self.children_map[&pos.0].clone()
     }
 }
 
@@ -225,7 +278,10 @@ impl<'a> RevWalkBuilder<'a> {
         let mut queue = RevWalkQueue::with_min_pos(min_pos);
         queue.extend_wanted(self.wanted, ());
         queue.extend_unwanted(self.unwanted);
-        RevWalkImpl { index, queue }
+        RevWalkBorrowedIndexIter {
+            index,
+            walk: RevWalkImpl { queue },
+        }
     }
 
     /// Walks ancestors within the `generation_range`.
@@ -240,10 +296,12 @@ impl<'a> RevWalkBuilder<'a> {
         let item_range = RevWalkItemGenerationRange::from_filter_range(generation_range.clone());
         queue.extend_wanted(self.wanted, Reverse(item_range));
         queue.extend_unwanted(self.unwanted);
-        RevWalkGenerationRangeImpl {
+        RevWalkBorrowedIndexIter {
             index,
-            queue,
-            generation_end: generation_range.end,
+            walk: RevWalkGenerationRangeImpl {
+                queue,
+                generation_end: generation_range.end,
+            },
         }
     }
 
@@ -274,14 +332,18 @@ impl<'a> RevWalkBuilder<'a> {
         self,
         root_positions: impl IntoIterator<Item = IndexPosition>,
     ) -> RevWalkDescendants<'a> {
+        let index = self.index;
         let root_positions = HashSet::from_iter(root_positions);
-        let candidate_entries = self
+        let candidate_positions = self
             .ancestors_until_roots(root_positions.iter().copied())
             .collect();
-        RevWalkDescendants {
-            candidate_entries,
-            root_positions,
-            reachable_positions: HashSet::new(),
+        RevWalkBorrowedIndexIter {
+            index,
+            walk: RevWalkDescendantsImpl {
+                candidate_positions,
+                root_positions,
+                reachable_positions: HashSet::new(),
+            },
         }
     }
 
@@ -296,11 +358,11 @@ impl<'a> RevWalkBuilder<'a> {
         self,
         root_positions: impl IntoIterator<Item = IndexPosition>,
         generation_range: Range<u32>,
-    ) -> RevWalkDescendantsGenerationRange<'a> {
+    ) -> RevWalkDescendantsGenerationRange {
         let index = self.index;
         let root_positions = Vec::from_iter(root_positions);
-        let entries = self.ancestors_until_roots(root_positions.iter().copied());
-        let descendants_index = RevWalkDescendantsIndex::build(index, entries);
+        let positions = self.ancestors_until_roots(root_positions.iter().copied());
+        let descendants_index = RevWalkDescendantsIndex::build(index, positions);
 
         let mut queue = RevWalkQueue::with_min_pos(Reverse(IndexPosition::MAX));
         let item_range = RevWalkItemGenerationRange::from_filter_range(generation_range.clone());
@@ -310,42 +372,42 @@ impl<'a> RevWalkBuilder<'a> {
                 queue.push_wanted(Reverse(pos), Reverse(item_range));
             }
         }
-        RevWalkGenerationRangeImpl {
+        RevWalkOwnedIndexIter {
             index: descendants_index,
-            queue,
-            generation_end: generation_range.end,
+            walk: RevWalkGenerationRangeImpl {
+                queue,
+                generation_end: generation_range.end,
+            },
         }
     }
 }
 
-pub(super) type RevWalkAncestors<'a> = RevWalkImpl<'a, CompositeIndex<'a>>;
+pub(super) type RevWalkAncestors<'a> =
+    RevWalkBorrowedIndexIter<CompositeIndex<'a>, RevWalkImpl<IndexPosition>>;
 
 #[derive(Clone)]
 #[must_use]
-pub(super) struct RevWalkImpl<'a, I: RevWalkIndex<'a>> {
-    index: I,
-    queue: RevWalkQueue<I::Position, ()>,
+pub(super) struct RevWalkImpl<P> {
+    queue: RevWalkQueue<P, ()>,
 }
 
-impl<'a, I: RevWalkIndex<'a>> Iterator for RevWalkImpl<'a, I> {
-    type Item = IndexEntry<'a>;
+impl<I: RevWalkIndex + ?Sized> RevWalk<I> for RevWalkImpl<I::Position> {
+    type Item = I::Position;
 
-    fn next(&mut self) -> Option<IndexEntry<'a>> {
+    fn next(&mut self, index: &I) -> Option<Self::Item> {
         while let Some(item) = self.queue.pop() {
             self.queue.skip_while_eq(&item.pos);
             if item.is_wanted() {
-                let entry = self.index.entry_by_pos(item.pos);
                 self.queue
-                    .extend_wanted(self.index.adjacent_positions(&entry), ());
-                return Some(entry);
+                    .extend_wanted(index.adjacent_positions(item.pos), ());
+                return Some(item.pos);
             } else if self.queue.items.len() == self.queue.unwanted_count {
                 // No more wanted entries to walk
                 debug_assert!(!self.queue.items.iter().any(|x| x.is_wanted()));
                 return None;
             } else {
-                let entry = self.index.entry_by_pos(item.pos);
                 self.queue
-                    .extend_unwanted(self.index.adjacent_positions(&entry));
+                    .extend_unwanted(index.adjacent_positions(item.pos));
             }
         }
 
@@ -358,25 +420,25 @@ impl<'a, I: RevWalkIndex<'a>> Iterator for RevWalkImpl<'a, I> {
 }
 
 pub(super) type RevWalkAncestorsGenerationRange<'a> =
-    RevWalkGenerationRangeImpl<'a, CompositeIndex<'a>>;
-pub(super) type RevWalkDescendantsGenerationRange<'a> =
-    RevWalkGenerationRangeImpl<'a, RevWalkDescendantsIndex<'a>>;
+    RevWalkBorrowedIndexIter<CompositeIndex<'a>, RevWalkGenerationRangeImpl<IndexPosition>>;
+pub(super) type RevWalkDescendantsGenerationRange = RevWalkOwnedIndexIter<
+    RevWalkDescendantsIndex,
+    RevWalkGenerationRangeImpl<Reverse<IndexPosition>>,
+>;
 
 #[derive(Clone)]
 #[must_use]
-pub(super) struct RevWalkGenerationRangeImpl<'a, I: RevWalkIndex<'a>> {
-    index: I,
+pub(super) struct RevWalkGenerationRangeImpl<P> {
     // Sort item generations in ascending order
-    queue: RevWalkQueue<I::Position, Reverse<RevWalkItemGenerationRange>>,
+    queue: RevWalkQueue<P, Reverse<RevWalkItemGenerationRange>>,
     generation_end: u32,
 }
 
-impl<'a, I: RevWalkIndex<'a>> RevWalkGenerationRangeImpl<'a, I> {
-    fn enqueue_wanted_adjacents(
-        &mut self,
-        entry: &IndexEntry<'_>,
-        gen: RevWalkItemGenerationRange,
-    ) {
+impl<P: Copy + Ord> RevWalkGenerationRangeImpl<P> {
+    fn enqueue_wanted_adjacents<I>(&mut self, index: &I, pos: P, gen: RevWalkItemGenerationRange)
+    where
+        I: RevWalkIndex<Position = P> + ?Sized,
+    {
         // `gen.start` is incremented from 0, which should never overflow
         if gen.start + 1 >= self.generation_end {
             return;
@@ -386,17 +448,16 @@ impl<'a, I: RevWalkIndex<'a>> RevWalkGenerationRangeImpl<'a, I> {
             end: gen.end.saturating_add(1),
         };
         self.queue
-            .extend_wanted(self.index.adjacent_positions(entry), Reverse(succ_gen));
+            .extend_wanted(index.adjacent_positions(pos), Reverse(succ_gen));
     }
 }
 
-impl<'a, I: RevWalkIndex<'a>> Iterator for RevWalkGenerationRangeImpl<'a, I> {
-    type Item = IndexEntry<'a>;
+impl<I: RevWalkIndex + ?Sized> RevWalk<I> for RevWalkGenerationRangeImpl<I::Position> {
+    type Item = I::Position;
 
-    fn next(&mut self) -> Option<IndexEntry<'a>> {
+    fn next(&mut self, index: &I) -> Option<Self::Item> {
         while let Some(item) = self.queue.pop() {
             if let RevWalkWorkItemState::Wanted(Reverse(mut pending_gen)) = item.state {
-                let entry = self.index.entry_by_pos(item.pos);
                 let mut some_in_range = pending_gen.contains_end(self.generation_end);
                 while let Some(x) = self.queue.pop_eq(&item.pos) {
                     // Merge overlapped ranges to reduce number of the queued items.
@@ -408,26 +469,25 @@ impl<'a, I: RevWalkIndex<'a>> Iterator for RevWalkGenerationRangeImpl<'a, I> {
                         pending_gen = if let Some(merged) = pending_gen.try_merge_end(gen) {
                             merged
                         } else {
-                            self.enqueue_wanted_adjacents(&entry, pending_gen);
+                            self.enqueue_wanted_adjacents(index, item.pos, pending_gen);
                             gen
                         };
                     } else {
                         unreachable!("no more unwanted items of the same entry");
                     }
                 }
-                self.enqueue_wanted_adjacents(&entry, pending_gen);
+                self.enqueue_wanted_adjacents(index, item.pos, pending_gen);
                 if some_in_range {
-                    return Some(entry);
+                    return Some(item.pos);
                 }
             } else if self.queue.items.len() == self.queue.unwanted_count {
                 // No more wanted entries to walk
                 debug_assert!(!self.queue.items.iter().any(|x| x.is_wanted()));
                 return None;
             } else {
-                let entry = self.index.entry_by_pos(item.pos);
                 self.queue.skip_while_eq(&item.pos);
                 self.queue
-                    .extend_unwanted(self.index.adjacent_positions(&entry));
+                    .extend_unwanted(index.adjacent_positions(item.pos));
             }
         }
 
@@ -479,10 +539,13 @@ impl RevWalkItemGenerationRange {
 }
 
 /// Walks descendants from the roots, in order of ascending index position.
+pub(super) type RevWalkDescendants<'a> =
+    RevWalkBorrowedIndexIter<CompositeIndex<'a>, RevWalkDescendantsImpl>;
+
 #[derive(Clone)]
 #[must_use]
-pub(super) struct RevWalkDescendants<'a> {
-    candidate_entries: Vec<IndexEntry<'a>>,
+pub(super) struct RevWalkDescendantsImpl {
+    candidate_positions: Vec<IndexPosition>,
     root_positions: HashSet<IndexPosition>,
     reachable_positions: HashSet<IndexPosition>,
 }
 
@@ -490,41 +553,39 @@ pub(super) struct RevWalkDescendants<'a> {
 impl RevWalkDescendants<'_> {
     /// Builds a set of index positions reachable from the roots.
     ///
-    /// This is equivalent to `.map(|entry| entry.position()).collect()` on
-    /// the new iterator, but returns the internal buffer instead.
+    /// This is equivalent to `.collect()` on the new iterator, but returns the
+    /// internal buffer instead.
     pub fn collect_positions_set(mut self) -> HashSet<IndexPosition> {
         self.by_ref().for_each(drop);
-        self.reachable_positions
+        self.walk.reachable_positions
     }
 }
 
-impl<'a> Iterator for RevWalkDescendants<'a> {
-    type Item = IndexEntry<'a>;
+impl RevWalk<CompositeIndex<'_>> for RevWalkDescendantsImpl {
+    type Item = IndexPosition;
 
-    fn next(&mut self) -> Option<Self::Item> {
-        while let Some(candidate) = self.candidate_entries.pop() {
-            if self.root_positions.contains(&candidate.position())
-                || candidate
+    fn next(&mut self, index: &CompositeIndex) -> Option<Self::Item> {
+        while let Some(candidate_pos) = self.candidate_positions.pop() {
+            if self.root_positions.contains(&candidate_pos)
+                || index
+                    .entry_by_pos(candidate_pos)
                     .parent_positions()
                     .iter()
                     .any(|parent_pos| self.reachable_positions.contains(parent_pos))
             {
-                self.reachable_positions.insert(candidate.position());
-                return Some(candidate);
+                self.reachable_positions.insert(candidate_pos);
+                return Some(candidate_pos);
             }
         }
         None
     }
 }
 
-impl FusedIterator for RevWalkDescendants<'_> {}
-
 /// Computes ancestors set lazily.
 ///
 /// This is similar to `RevWalk` functionality-wise, but implemented with the
 /// different design goals:
 ///
-/// * lazy updates with no lifetimed fields
 /// * optimized for dense ancestors set
 /// * optimized for testing set membership
 /// * no iterator API (which could be implemented on top)
@@ -649,7 +710,7 @@ mod tests {
                .wanted_heads(to_positions_vec(index, wanted))
                .unwanted_roots(to_positions_vec(index, unwanted))
                .ancestors()
-                .map(|entry| entry.commit_id())
+                .map(|pos| index.entry_by_pos(pos).commit_id())
                .collect_vec()
        };
 
@@ -737,25 +798,25 @@ mod tests {
                .wanted_heads(to_positions_vec(index, heads))
                .ancestors_until_roots(to_positions_vec(index, roots))
        };
-        let to_commit_id = |entry: IndexEntry| entry.commit_id();
+        let to_commit_id = |pos| index.entry_by_pos(pos).commit_id();
 
        let mut iter = make_iter(&[id_6.clone(), id_7.clone()], &[id_3.clone()]);
-        assert_eq!(iter.queue.items.len(), 2);
+        assert_eq!(iter.walk.queue.items.len(), 2);
        assert_eq!(iter.next().map(to_commit_id), Some(id_7.clone()));
        assert_eq!(iter.next().map(to_commit_id), Some(id_6.clone()));
        assert_eq!(iter.next().map(to_commit_id), Some(id_5.clone()));
-        assert_eq!(iter.queue.items.len(), 2);
+        assert_eq!(iter.walk.queue.items.len(), 2);
        assert_eq!(iter.next().map(to_commit_id), Some(id_4.clone()));
-        assert_eq!(iter.queue.items.len(), 1); // id_1 shouldn't be queued
+        assert_eq!(iter.walk.queue.items.len(), 1); // id_1 shouldn't be queued
        assert_eq!(iter.next().map(to_commit_id), Some(id_3.clone()));
-        assert_eq!(iter.queue.items.len(), 0); // id_2 shouldn't be queued
+        assert_eq!(iter.walk.queue.items.len(), 0); // id_2 shouldn't be queued
        assert!(iter.next().is_none());
 
        let iter = make_iter(&[id_6.clone(), id_7.clone(), id_2.clone()], &[id_3.clone()]);
-        assert_eq!(iter.queue.items.len(), 2); // id_2 shouldn't be queued
+        assert_eq!(iter.walk.queue.items.len(), 2); // id_2 shouldn't be queued
 
        let iter = make_iter(&[id_6.clone(), id_7.clone()], &[]);
-        assert!(iter.queue.items.is_empty()); // no ids should be queued
+        assert!(iter.walk.queue.items.is_empty()); // no ids should be queued
    }
 
    #[test]
@@ -798,7 +859,7 @@ mod tests {
                .wanted_heads(to_positions_vec(index, wanted))
                .unwanted_roots(to_positions_vec(index, unwanted))
                .ancestors_filtered_by_generation(range)
-                .map(|entry| entry.commit_id())
+                .map(|pos| index.entry_by_pos(pos).commit_id())
                .collect_vec()
        };
 
@@ -875,7 +936,7 @@ mod tests {
            RevWalkBuilder::new(index)
                .wanted_heads(to_positions_vec(index, wanted))
                .ancestors_filtered_by_generation(range)
-                .map(|entry| entry.commit_id())
+                .map(|pos| index.entry_by_pos(pos).commit_id())
                .collect_vec()
        };
 
@@ -945,7 +1006,7 @@ mod tests {
            RevWalkBuilder::new(index)
                .wanted_heads(to_positions_vec(index, heads))
                .descendants_filtered_by_generation(to_positions_vec(index, roots), range)
-                .map(|entry| entry.commit_id())
+                .map(|Reverse(pos)| index.entry_by_pos(pos).commit_id())
                .collect_vec()
        };
 
diff --git a/lib/src/default_index/revset_engine.rs b/lib/src/default_index/revset_engine.rs
index 08e4a1d4ca..1f655bbbb5 100644
--- a/lib/src/default_index/revset_engine.rs
+++ b/lib/src/default_index/revset_engine.rs
@@ -23,7 +23,7 @@ use std::sync::Arc;
 
 use itertools::Itertools;
 
-use super::rev_walk::RevWalkBuilder;
+use super::rev_walk::{RevWalk, RevWalkBuilder};
 use super::revset_graph_iterator::RevsetGraphIterator;
 use crate::backend::{ChangeId, CommitId, MillisSinceEpoch};
 use crate::default_index::{AsCompositeIndex, CompositeIndex, IndexEntry, IndexPosition};
@@ -214,51 +214,33 @@ impl ToPredicateFn for EagerRevset {
     }
 }
 
-struct RevWalkRevset<F> {
-    walk: F,
+struct RevWalkRevset<W> {
+    walk: W,
 }
 
-impl<F> RevWalkRevset<F>
-where
-    // Returns trait object because we can't express the following constraints
-    // without using named lifetime and type parameter:
-    //
-    //     for<'index>
-    //         F: Fn(CompositeIndex<'index>) -> _,
-    //         F::Output: Iterator<Item = IndexEntry<'index>> + 'index
-    //
-    // There's a workaround, but it doesn't help infer closure types.
-    // https://github.com/rust-lang/rust/issues/47815
-    // https://users.rust-lang.org/t/hrtb-on-multiple-generics/34255
-    F: Fn(CompositeIndex<'_>) -> Box<dyn Iterator<Item = IndexEntry<'_>> + '_>,
-{
-    fn new(walk: F) -> Self {
-        RevWalkRevset { walk }
-    }
-}
-
-impl<F> fmt::Debug for RevWalkRevset<F> {
+impl<W> fmt::Debug for RevWalkRevset<W> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("RevWalkRevset").finish_non_exhaustive()
     }
 }
 
-impl<F> InternalRevset for RevWalkRevset<F>
+impl<W> InternalRevset for RevWalkRevset<W>
 where
-    F: Fn(CompositeIndex<'_>) -> Box<dyn Iterator<Item = IndexEntry<'_>> + '_>,
+    W: for<'index> RevWalk<CompositeIndex<'index>, Item = IndexPosition> + Clone,
 {
     fn entries<'a, 'index: 'a>(
         &'a self,
         index: CompositeIndex<'index>,
     ) -> Box<dyn Iterator<Item = IndexEntry<'index>> + 'a> {
-        (self.walk)(index)
+        let positions = self.walk.clone().attach(index);
+        Box::new(positions.map(move |pos| index.entry_by_pos(pos)))
     }
 
     fn positions<'a, 'index: 'a>(
         &'a self,
         index: CompositeIndex<'index>,
     ) -> Box<dyn Iterator<Item = IndexPosition> + 'a> {
-        Box::new(self.entries(index).map(|entry| entry.position()))
+        Box::new(self.walk.clone().attach(index))
     }
 
     fn into_predicate<'a>(self: Box<Self>) -> Box<dyn ToPredicateFn + 'a>
     where
@@ -269,24 +251,19 @@ where
     {
         self
     }
 }
 
-impl<F> ToPredicateFn for RevWalkRevset<F>
+impl<W> ToPredicateFn for RevWalkRevset<W>
 where
-    F: Fn(CompositeIndex<'_>) -> Box<dyn Iterator<Item = IndexEntry<'_>> + '_>,
+    W: for<'index> RevWalk<CompositeIndex<'index>, Item = IndexPosition> + Clone,
 {
     fn to_predicate_fn<'a, 'index: 'a>(
         &'a self,
         index: CompositeIndex<'index>,
     ) -> Box<dyn FnMut(&IndexEntry<'_>) -> bool + 'a> {
-        predicate_fn_from_entries(self.entries(index))
+        let positions = self.walk.clone().attach(index);
+        predicate_fn_from_positions(positions)
     }
 }
 
-fn predicate_fn_from_entries<'index, 'iter>(
-    iter: impl Iterator<Item = IndexEntry<'index>> + 'iter,
-) -> Box<dyn FnMut(&IndexEntry<'_>) -> bool + 'iter> {
-    predicate_fn_from_positions(iter.map(|entry| entry.position()))
-}
-
 fn predicate_fn_from_positions<'iter>(
     iter: impl Iterator<Item = IndexPosition> + 'iter,
 ) -> Box<dyn FnMut(&IndexEntry<'_>) -> bool + 'iter> {
@@ -742,24 +719,17 @@ impl<'index> EvaluationContext<'index> {
             }
             ResolvedExpression::Ancestors { heads, generation } => {
                 let head_set = self.evaluate(heads)?;
-                let head_positions = head_set.positions(index).collect_vec();
+                let head_positions = head_set.positions(index);
+                let builder = RevWalkBuilder::new(index).wanted_heads(head_positions);
                 if generation == &GENERATION_RANGE_FULL {
-                    Ok(Box::new(RevWalkRevset::new(move |index| {
-                        Box::new(
-                            RevWalkBuilder::new(index)
-                                .wanted_heads(head_positions.iter().copied())
-                                .ancestors(),
-                        )
-                    })))
+                    let walk = builder.ancestors().detach();
+                    Ok(Box::new(RevWalkRevset { walk }))
                 } else {
                     let generation = to_u32_generation_range(generation)?;
-                    Ok(Box::new(RevWalkRevset::new(move |index| {
-                        Box::new(
-                            RevWalkBuilder::new(index)
-                                .wanted_heads(head_positions.iter().copied())
-                                .ancestors_filtered_by_generation(generation.clone()),
-                        )
-                    })))
+                    let walk = builder
+                        .ancestors_filtered_by_generation(generation)
+                        .detach();
+                    Ok(Box::new(RevWalkRevset { walk }))
                 }
             }
             ResolvedExpression::Range {
@@ -777,27 +747,19 @@ impl<'index> EvaluationContext<'index> {
                     head_set.positions(index),
                     root_positions.iter().copied(),
                     |pos1, pos2| pos1.cmp(pos2).reverse(),
-                )
-                .collect_vec();
+                );
+                let builder = RevWalkBuilder::new(index)
+                    .wanted_heads(head_positions)
+                    .unwanted_roots(root_positions);
                 if generation == &GENERATION_RANGE_FULL {
-                    Ok(Box::new(RevWalkRevset::new(move |index| {
-                        Box::new(
-                            RevWalkBuilder::new(index)
-                                .wanted_heads(head_positions.iter().copied())
-                                .unwanted_roots(root_positions.iter().copied())
-                                .ancestors(),
-                        )
-                    })))
+                    let walk = builder.ancestors().detach();
+                    Ok(Box::new(RevWalkRevset { walk }))
                 } else {
                     let generation = to_u32_generation_range(generation)?;
-                    Ok(Box::new(RevWalkRevset::new(move |index| {
-                        Box::new(
-                            RevWalkBuilder::new(index)
-                                .wanted_heads(head_positions.iter().copied())
-                                .unwanted_roots(root_positions.iter().copied())
-                                .ancestors_filtered_by_generation(generation.clone()),
-                        )
-                    })))
+                    let walk = builder
+                        .ancestors_filtered_by_generation(generation)
+                        .detach();
+                    Ok(Box::new(RevWalkRevset { walk }))
                 }
             }
             ResolvedExpression::DagRange {
@@ -806,23 +768,21 @@ impl<'index> EvaluationContext<'index> {
                 generation_from_roots,
             } => {
                 let root_set = self.evaluate(roots)?;
-                let root_positions = root_set.positions(index).collect_vec();
+                let root_positions = root_set.positions(index);
                 let head_set = self.evaluate(heads)?;
-                let head_positions = head_set.positions(index).collect_vec();
+                let head_positions = head_set.positions(index);
+                let builder = RevWalkBuilder::new(index).wanted_heads(head_positions);
                 if generation_from_roots == &(1..2) {
-                    let root_positions_set: HashSet<_> = root_positions.iter().copied().collect();
-                    let candidates = RevWalkRevset::new(move |index| {
-                        Box::new(
-                            RevWalkBuilder::new(index)
-                                .wanted_heads(head_positions.iter().copied())
-                                .ancestors_until_roots(root_positions.iter().copied()),
-                        )
-                    });
+                    let root_positions: HashSet<_> = root_positions.collect();
+                    let walk = builder
+                        .ancestors_until_roots(root_positions.iter().copied())
+                        .detach();
+                    let candidates = RevWalkRevset { walk };
                     let predicate = as_pure_predicate_fn(move |_index, entry| {
                         entry
                             .parent_positions()
                             .iter()
-                            .any(|parent_pos| root_positions_set.contains(parent_pos))
+                            .any(|parent_pos| root_positions.contains(parent_pos))
                     });
                     // TODO: Suppose heads include all visible heads, ToPredicateFn version can be
                     // optimized to only test the predicate()
@@ -831,24 +791,19 @@ impl<'index> EvaluationContext<'index> {
                        predicate,
                    }))
                } else if generation_from_roots == &GENERATION_RANGE_FULL {
-                let mut positions = RevWalkBuilder::new(index)
-                    .wanted_heads(head_positions)
-                    .descendants(root_positions)
-                    .map(|entry| entry.position())
-                    .collect_vec();
+                let mut positions = builder.descendants(root_positions).collect_vec();
                positions.reverse();
                Ok(Box::new(EagerRevset { positions }))
            } else {
                // For small generation range, it might be better to build a reachable map
                // with generation bit set, which can be calculated incrementally from roots:
                //   reachable[pos] = (reachable[parent_pos] | ...) << 1
-                let mut positions = RevWalkBuilder::new(index)
-                    .wanted_heads(head_positions)
+                let mut positions = builder
                    .descendants_filtered_by_generation(
                        root_positions,
                        to_u32_generation_range(generation_from_roots)?,
                    )
-                    .map(|entry| entry.position())
+                    .map(|Reverse(pos)| pos)
                    .collect_vec();
                positions.reverse();
                Ok(Box::new(EagerRevset { positions }))