Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Fix bug: do not store the sync dag block #4259

Open
wants to merge 16 commits into
base: dag-master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions flexidag/src/blockdag.rs
Original file line number Diff line number Diff line change
Expand Up @@ -223,10 +223,12 @@ impl BlockDAG {
);
let reachability_store = self.storage.reachability_store.clone();

let mut merge_set = ghostdata
.unordered_mergeset_without_selected_parent()
.filter(|hash| self.storage.reachability_store.read().has(*hash).unwrap())
.collect::<Vec<_>>()
let mut merge_set = self
.ghost_dag_manager()
.unordered_mergeset_without_selected_parent(
ghostdata.selected_parent,
&header.parents(),
)
.into_iter();
let add_block_result = {
let mut reachability_writer = reachability_store.write();
Expand Down Expand Up @@ -482,8 +484,8 @@ impl BlockDAG {
let dag_state = self.get_dag_state(previous_pruning_point)?;
let next_ghostdata = self.ghostdata(&dag_state.tips)?;
info!(
"start to calculate the mergeset and tips for tips: {:?}, and last pruning point: {:?} and next ghostdata: {:?}",
dag_state.tips, previous_pruning_point, next_ghostdata,
"start to calculate the mergeset and tips for tips: {:?}, and last pruning point: {:?} and next ghostdata's selected parents: {:?} and blues set are {:?}",
dag_state.tips, previous_pruning_point, next_ghostdata.selected_parent, next_ghostdata.mergeset_blues,
);
let next_pruning_point = self.pruning_point_manager().next_pruning_point(
previous_pruning_point,
Expand Down
35 changes: 27 additions & 8 deletions flexidag/src/ghostdag/protocol.rs
Original file line number Diff line number Diff line change
Expand Up @@ -210,22 +210,29 @@ impl<
}
}

let remote_blue_set = blue_blocks
.iter()
.map(|header| header.id())
.collect::<HashSet<_>>();
if new_block_data
.mergeset_blues
.iter()
.skip(1)
.cloned()
.collect::<HashSet<_>>()
!= blue_blocks
!= remote_blue_set
{
warn!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
let ghostdata = self.ghostdag(&header.parents_hash())?;
if ghostdata
.mergeset_blues
.iter()
.map(|header| header.id())
.skip(1)
.cloned()
.collect::<HashSet<_>>()
{
if header.number() < 10000000 {
// no bail before 10000000
warn!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
} else {
bail!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
!= remote_blue_set
{
bail!("The ghost data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), ghostdata.mergeset_blues);
Comment on lines +213 to +235
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Improve blue set verification logic and error handling

The current implementation has several issues:

  1. Silently falling back to ghostdag when blue sets don't match could mask underlying problems
  2. The warning message could be more descriptive about the blue set mismatch
  3. The second verification step could mask underlying issues

Consider this improved implementation:

        let remote_blue_set = blue_blocks
            .iter()
            .map(|header| header.id())
            .collect::<HashSet<_>>();
-        if new_block_data
+        let calculated_blue_set = new_block_data
            .mergeset_blues
            .iter()
            .skip(1)
            .cloned()
-            .collect::<HashSet<_>>()
-            != remote_blue_set
-        {
-            warn!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
-            let ghostdata = self.ghostdag(&header.parents_hash())?;
-            if ghostdata
-                .mergeset_blues
-                .iter()
-                .skip(1)
-                .cloned()
-                .collect::<HashSet<_>>()
-                != remote_blue_set
-            {
-                bail!("The ghost data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), ghostdata.mergeset_blues);
-            }
-        }
+            .collect::<HashSet<_>>();
+        
+        if calculated_blue_set != remote_blue_set {
+            let missing_blues: Vec<_> = remote_blue_set.difference(&calculated_blue_set).collect();
+            let extra_blues: Vec<_> = calculated_blue_set.difference(&remote_blue_set).collect();
+            
+            bail!(
+                "Blue set mismatch for block {:?}:\nExpected blues: {:?}\nCalculated blues: {:?}\nMissing blues: {:?}\nExtra blues: {:?}",
+                header.id(),
+                remote_blue_set,
+                calculated_blue_set,
+                missing_blues,
+                extra_blues
+            );
+        }

This change:

  1. Provides more detailed error messages showing the exact differences
  2. Removes the redundant second verification step
  3. Fails fast when a mismatch is detected
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
let remote_blue_set = blue_blocks
.iter()
.map(|header| header.id())
.collect::<HashSet<_>>();
if new_block_data
.mergeset_blues
.iter()
.skip(1)
.cloned()
.collect::<HashSet<_>>()
!= blue_blocks
!= remote_blue_set
{
warn!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
let ghostdata = self.ghostdag(&header.parents_hash())?;
if ghostdata
.mergeset_blues
.iter()
.map(|header| header.id())
.skip(1)
.cloned()
.collect::<HashSet<_>>()
{
if header.number() < 10000000 {
// no bail before 10000000
warn!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
} else {
bail!("The data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), new_block_data.mergeset_blues);
!= remote_blue_set
{
bail!("The ghost data of blue set is not equal when executing the block: {:?}, for {:?}, checking data: {:?}", header.id(), blue_blocks.iter().map(|header| header.id()).collect::<Vec<_>>(), ghostdata.mergeset_blues);
let remote_blue_set = blue_blocks
.iter()
.map(|header| header.id())
.collect::<HashSet<_>>();
let calculated_blue_set = new_block_data
.mergeset_blues
.iter()
.skip(1)
.cloned()
.collect::<HashSet<_>>();
if calculated_blue_set != remote_blue_set {
let missing_blues: Vec<_> = remote_blue_set.difference(&calculated_blue_set).collect();
let extra_blues: Vec<_> = calculated_blue_set.difference(&remote_blue_set).collect();
bail!(
"Blue set mismatch for block {:?}:\nExpected blues: {:?}\nCalculated blues: {:?}\nMissing blues: {:?}\nExtra blues: {:?}",
header.id(),
remote_blue_set,
calculated_blue_set,
missing_blues,
extra_blues
);
}

}
}

Expand Down Expand Up @@ -377,12 +384,20 @@ impl<
*candidate_blue_anticone_size = (*candidate_blue_anticone_size).checked_add(1).unwrap();
if *candidate_blue_anticone_size > self.k {
// k-cluster violation: The candidate's blue anticone exceeded k
info!(
"Checking blue candidate: {} failed, blue anticone exceeded k",
blue_candidate
);
return Ok(ColoringState::Red);
}

if *candidate_blues_anticone_sizes.get(&block).unwrap() == self.k {
// k-cluster violation: A block in candidate's blue anticone already
// has k blue blocks in its own anticone
info!(
"Checking blue candidate: {} failed, block {} has k blue blocks in its anticone",
blue_candidate, block
);
return Ok(ColoringState::Red);
}

Expand Down Expand Up @@ -431,6 +446,10 @@ impl<
// The maximum length of new_block_data.mergeset_blues can be K+1 because
// it contains the selected parent.
if new_block_data.mergeset_blues.len() as KType == self.k.checked_add(1).unwrap() {
info!(
"Checking blue candidate: {} failed, mergeset blues size is K+1",
blue_candidate
);
return Ok(ColoringOutput::Red);
}

Expand Down
5 changes: 5 additions & 0 deletions flexidag/src/prune/pruning_point_manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,11 @@ impl<T: ReachabilityStoreReader + Clone> PruningPointManagerT<T> {
min_required_blue_score_for_next_pruning_point
);

debug!("previous_pruning_point: {:?}, previous_ghostdata: {:?}, next_ghostdata: {:?}, pruning_depth: {:?}, pruning_finality: {:?}",
previous_pruning_point, previous_ghostdata, next_ghostdata,
pruning_depth, pruning_finality,
);

let mut latest_pruning_ghost_data = previous_ghostdata.to_compact();
if min_required_blue_score_for_next_pruning_point + pruning_depth
<= next_ghostdata.blue_score
Expand Down
Loading
Loading