From 5ad369d383bbc080a5fc289b45317d95e1b70f58 Mon Sep 17 00:00:00 2001 From: Wei Chen Date: Tue, 11 Jun 2024 14:37:33 +0800 Subject: [PATCH 1/2] refactor(electrum): make `LocalChain` generic --- crates/bitcoind_rpc/tests/test_emitter.rs | 8 +- crates/chain/src/chain_data.rs | 6 + crates/chain/src/local_chain.rs | 549 +++++++++++++-------- crates/chain/src/spk_client.rs | 45 +- crates/chain/tests/common/mod.rs | 3 +- crates/chain/tests/test_local_chain.rs | 201 +++++--- crates/electrum/src/bdk_electrum_client.rs | 36 +- crates/sqlite/src/store.rs | 49 +- 8 files changed, 556 insertions(+), 341 deletions(-) diff --git a/crates/bitcoind_rpc/tests/test_emitter.rs b/crates/bitcoind_rpc/tests/test_emitter.rs index 5e0622f27..be963e875 100644 --- a/crates/bitcoind_rpc/tests/test_emitter.rs +++ b/crates/bitcoind_rpc/tests/test_emitter.rs @@ -4,7 +4,7 @@ use bdk_bitcoind_rpc::Emitter; use bdk_chain::{ bitcoin::{Address, Amount, Txid}, keychain::Balance, - local_chain::{CheckPoint, LocalChain}, + local_chain::{ChangeSet, CheckPoint, LocalChain}, Append, BlockId, IndexedTxGraph, SpkTxOutIndex, }; use bdk_testenv::{anyhow, TestEnv}; @@ -48,7 +48,7 @@ pub fn test_sync_local_chain() -> anyhow::Result<()> { assert_eq!( local_chain.apply_update(emission.checkpoint,)?, - BTreeMap::from([(height, Some(hash))]), + ChangeSet::from_iter([(height, Some(BlockId { height, hash }))]), "chain update changeset is unexpected", ); } @@ -94,11 +94,11 @@ pub fn test_sync_local_chain() -> anyhow::Result<()> { assert_eq!( local_chain.apply_update(emission.checkpoint,)?, if exp_height == exp_hashes.len() - reorged_blocks.len() { - core::iter::once((height, Some(hash))) + core::iter::once((height, Some(BlockId { height, hash }))) .chain((height + 1..exp_hashes.len() as u32).map(|h| (h, None))) .collect::() } else { - BTreeMap::from([(height, Some(hash))]) + ChangeSet::from_iter([(height, Some(BlockId { height, hash }))]) }, "chain update changeset is unexpected", ); diff --git a/crates/chain/src/chain_data.rs b/crates/chain/src/chain_data.rs index ae0976de5..798c02031 100644 --- a/crates/chain/src/chain_data.rs +++ b/crates/chain/src/chain_data.rs @@ -103,6 +103,12 @@ pub struct BlockId { pub hash: BlockHash, } +impl AsRef for BlockId { + fn as_ref(&self) -> &BlockId { + self + } +} + impl Anchor for BlockId { fn anchor_block(&self) -> Self { *self diff --git a/crates/chain/src/local_chain.rs b/crates/chain/src/local_chain.rs index 2c396cb33..1796161e1 100644 --- a/crates/chain/src/local_chain.rs +++ b/crates/chain/src/local_chain.rs @@ -1,10 +1,10 @@ //! The [`LocalChain`] is a local implementation of [`ChainOracle`]. use core::convert::Infallible; -use core::ops::RangeBounds; +use core::ops::{Deref, RangeBounds}; use crate::collections::BTreeMap; -use crate::{BlockId, ChainOracle}; +use crate::{Append, BlockId, ChainOracle}; use alloc::sync::Arc; use bitcoin::block::Header; use bitcoin::BlockHash; @@ -13,7 +13,48 @@ use bitcoin::BlockHash; /// /// The key represents the block height, and the value either represents added a new [`CheckPoint`] /// (if [`Some`]), or removing a [`CheckPoint`] (if [`None`]). 
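To make the new changeset representation concrete, a minimal sketch (not part of the patch) of how the generic `ChangeSet` introduced below behaves, relying only on its `FromIterator` and `Deref` impls and an all-zero placeholder hash: a `Some(block)` entry adds or replaces the checkpoint at that height, while `None` removes it.

    use bdk_chain::{
        bitcoin::{hashes::Hash, BlockHash},
        local_chain::ChangeSet,
        BlockId,
    };

    fn main() {
        let hash = BlockHash::all_zeros(); // placeholder hash, illustration only
        let changeset = ChangeSet::from_iter([
            (2, Some(BlockId { height: 2, hash })), // add/replace the checkpoint at height 2
            (3, None),                              // remove whatever checkpoint sat at height 3
        ]);
        assert_eq!(changeset.get(&3), Some(&None)); // reads go through `Deref` to the inner BTreeMap
    }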
-pub type ChangeSet = BTreeMap>; +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(crate = "serde_crate") +)] +pub struct ChangeSet(BTreeMap>); + +impl Default for ChangeSet { + fn default() -> Self { + ChangeSet(BTreeMap::new()) + } +} + +impl Append for ChangeSet { + fn append(&mut self, other: Self) { + for (key, value) in other.0 { + self.0.insert(key, value); + } + } + + fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl FromIterator<(u32, Option)> for ChangeSet { + fn from_iter)>>(iter: I) -> Self { + let mut map = BTreeMap::new(); + for (key, value) in iter { + map.insert(key, value); + } + ChangeSet(map) + } +} + +impl Deref for ChangeSet { + type Target = BTreeMap>; + fn deref(&self) -> &Self::Target { + &self.0 + } +} /// A [`LocalChain`] checkpoint is used to find the agreement point between two chains and as a /// transaction anchor. @@ -24,29 +65,52 @@ pub type ChangeSet = BTreeMap>; /// cheaply clone a [`CheckPoint`] without copying the whole list and to view the entire chain /// without holding a lock on [`LocalChain`]. #[derive(Debug, Clone)] -pub struct CheckPoint(Arc); +pub struct CheckPoint(Arc>); /// The internal contents of [`CheckPoint`]. #[derive(Debug, Clone)] -struct CPInner { - /// Block id (hash and height). - block: BlockId, +struct CPInner { + /// Block (hash and height). + block: B, /// Previous checkpoint (if any). - prev: Option>, + prev: Option>>, } -impl PartialEq for CheckPoint { +impl> PartialEq for CheckPoint +where + B: Copy + Clone + core::fmt::Debug + core::cmp::PartialEq, +{ fn eq(&self, other: &Self) -> bool { - let self_cps = self.iter().map(|cp| cp.block_id()); - let other_cps = other.iter().map(|cp| cp.block_id()); + let self_cps = self.iter().map(|cp| *cp.inner()); + let other_cps = other.iter().map(|cp| *cp.inner()); self_cps.eq(other_cps) } } -impl CheckPoint { - /// Construct a new base block at the front of a linked list. - pub fn new(block: BlockId) -> Self { - Self(Arc::new(CPInner { block, prev: None })) +impl CheckPoint { + /// Construct a checkpoint from the given `header` and block `height`. + /// + /// If `header` is of the genesis block, the checkpoint won't have a [`prev`] node. Otherwise, + /// we return a checkpoint linked with the previous block. + /// + /// [`prev`]: CheckPoint::prev + pub fn from_header(header: &bitcoin::block::Header, height: u32) -> CheckPoint { + let hash = header.block_hash(); + let this_block_id = BlockId { height, hash }; + + let prev_height = match height.checked_sub(1) { + Some(h) => h, + None => return CheckPoint::new(this_block_id), + }; + + let prev_block_id = BlockId { + height: prev_height, + hash: header.prev_blockhash, + }; + + CheckPoint::new(prev_block_id) + .push(this_block_id) + .expect("must construct checkpoint") } /// Construct a checkpoint from a list of [`BlockId`]s in ascending height order. @@ -71,37 +135,58 @@ impl CheckPoint { Ok(acc) } - /// Construct a checkpoint from the given `header` and block `height`. - /// - /// If `header` is of the genesis block, the checkpoint won't have a [`prev`] node. Otherwise, - /// we return a checkpoint linked with the previous block. + /// Inserts `block_id` at its height within the chain. /// - /// [`prev`]: CheckPoint::prev - pub fn from_header(header: &bitcoin::block::Header, height: u32) -> Self { - let hash = header.block_hash(); - let this_block_id = BlockId { height, hash }; + /// The effect of `insert` depends on whether a height already exists. 
If it doesn't the + /// `block_id` we inserted and all pre-existing blocks higher than it will be re-inserted after + /// it. If the height already existed and has a conflicting block hash then it will be purged + /// along with all block followin it. The returned chain will have a tip of the `block_id` + /// passed in. Of course, if the `block_id` was already present then this just returns `self`. + #[must_use] + pub fn insert(self, block_id: BlockId) -> Self { + assert_ne!(block_id.height, 0, "cannot insert the genesis block"); - let prev_height = match height.checked_sub(1) { - Some(h) => h, - None => return Self::new(this_block_id), - }; + let mut cp = self.clone(); + let mut tail = vec![]; + let base = loop { + if cp.height() == block_id.height { + if cp.hash() == block_id.hash { + return self; + } + // if we have a conflict we just return the inserted block because the tail is by + // implication invalid. + tail = vec![]; + break cp.prev().expect("can't be called on genesis block"); + } - let prev_block_id = BlockId { - height: prev_height, - hash: header.prev_blockhash, + if cp.height() < block_id.height { + break cp; + } + + tail.push(cp.block_id()); + cp = cp.prev().expect("will break before genesis block"); }; - CheckPoint::new(prev_block_id) - .push(this_block_id) - .expect("must construct checkpoint") + base.extend(core::iter::once(block_id).chain(tail.into_iter().rev())) + .expect("tail is in order") + } +} + +impl> CheckPoint +where + B: Copy + Clone + core::fmt::Debug, +{ + /// Construct a new base block at the front of a linked list. + pub fn new(block: B) -> Self { + Self(Arc::new(CPInner { block, prev: None })) } /// Puts another checkpoint onto the linked list representing the blockchain. /// /// Returns an `Err(self)` if the block you are pushing on is not at a greater height that the one you /// are pushing on to. - pub fn push(self, block: BlockId) -> Result { - if self.height() < block.height { + pub fn push(self, block: B) -> Result { + if self.height() < block.as_ref().height { Ok(Self(Arc::new(CPInner { block, prev: Some(self.0), @@ -115,7 +200,7 @@ impl CheckPoint { /// /// Returns an `Err(self)` if there is block which does not have a greater height than the /// previous one. - pub fn extend(self, blocks: impl IntoIterator) -> Result { + pub fn extend(self, blocks: impl IntoIterator) -> Result { let mut curr = self.clone(); for block in blocks { curr = curr.push(block).map_err(|_| self.clone())?; @@ -123,28 +208,33 @@ impl CheckPoint { Ok(curr) } + /// Get reference to the inner type. + pub fn inner(&self) -> &B { + &self.0.block + } + /// Get the [`BlockId`] of the checkpoint. pub fn block_id(&self) -> BlockId { - self.0.block + *self.0.block.as_ref() } /// Get the height of the checkpoint. pub fn height(&self) -> u32 { - self.0.block.height + self.0.block.as_ref().height } /// Get the block hash of the checkpoint. pub fn hash(&self) -> BlockHash { - self.0.block.hash + self.0.block.as_ref().hash } /// Get the previous checkpoint in the chain - pub fn prev(&self) -> Option { + pub fn prev(&self) -> Option> { self.0.prev.clone().map(CheckPoint) } /// Iterate from this checkpoint in descending height. - pub fn iter(&self) -> CheckPointIter { + pub fn iter(&self) -> CheckPointIter { self.clone().into_iter() } @@ -159,7 +249,7 @@ impl CheckPoint { /// /// Note that we always iterate checkpoints in reverse height order (iteration starts at tip /// height). 
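A short sketch of the descending iteration order (illustration only; the hashes are all-zero placeholders, since `range` and `iter` only inspect heights):

    use bdk_chain::{
        bitcoin::{hashes::Hash, BlockHash},
        local_chain::CheckPoint,
        BlockId,
    };

    fn main() {
        let hash = BlockHash::all_zeros(); // placeholder hash, illustration only
        let tip = CheckPoint::new(BlockId { height: 0, hash })
            .extend((1..=4).map(|height| BlockId { height, hash }))
            .expect("heights are strictly increasing");

        // Both `iter` and `range` walk from the tip downwards.
        let heights: Vec<u32> = tip.range(2..=4).map(|cp| cp.height()).collect();
        assert_eq!(heights, vec![4, 3, 2]);
    }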
- pub fn range(&self, range: R) -> impl Iterator + pub fn range(&self, range: R) -> impl Iterator> where R: RangeBounds, { @@ -178,75 +268,65 @@ impl CheckPoint { }) } - /// Inserts `block_id` at its height within the chain. + /// Constructs a [`CheckPoint`] from a [`BTreeMap`] of height to Block. /// - /// The effect of `insert` depends on whether a height already exists. If it doesn't the - /// `block_id` we inserted and all pre-existing blocks higher than it will be re-inserted after - /// it. If the height already existed and has a conflicting block hash then it will be purged - /// along with all block followin it. The returned chain will have a tip of the `block_id` - /// passed in. Of course, if the `block_id` was already present then this just returns `self`. - #[must_use] - pub fn insert(self, block_id: BlockId) -> Self { - assert_ne!(block_id.height, 0, "cannot insert the genesis block"); - - let mut cp = self.clone(); - let mut tail = vec![]; - let base = loop { - if cp.height() == block_id.height { - if cp.hash() == block_id.hash { - return self; - } - // if we have a conflict we just return the inserted block because the tail is by - // implication invalid. - tail = vec![]; - break cp.prev().expect("can't be called on genesis block"); - } + /// The [`BTreeMap`] enforces the height order. However, the caller must ensure the blocks are + /// all of the same chain. + pub fn from_blocks(blocks: BTreeMap) -> Result { + if !blocks.contains_key(&0) { + return Err(MissingGenesisError); + } - if cp.height() < block_id.height { - break cp; + let mut tip: Option> = None; + for block in &blocks { + match tip { + Some(curr) => tip = Some(curr.push(*block.1).expect("BTreeMap is ordered")), + None => tip = Some(CheckPoint::new(*block.1)), } + } - tail.push(cp.block_id()); - cp = cp.prev().expect("will break before genesis block"); - }; - - base.extend(core::iter::once(block_id).chain(tail.into_iter().rev())) - .expect("tail is in order") + Ok(tip.expect("already checked to have genesis")) } /// Apply `changeset` to the checkpoint. - fn apply_changeset(mut self, changeset: &ChangeSet) -> Result { + fn apply_changeset( + mut self, + changeset: &ChangeSet, + ) -> Result, MissingGenesisError> + where + B: Clone, + { if let Some(start_height) = changeset.keys().next().cloned() { // changes after point of agreement let mut extension = BTreeMap::default(); // point of agreement - let mut base: Option = None; + let mut base: Option> = None; for cp in self.iter() { if cp.height() >= start_height { - extension.insert(cp.height(), cp.hash()); + extension.insert(cp.height(), *cp.inner()); } else { base = Some(cp); break; } } - for (&height, &hash) in changeset { - match hash { - Some(hash) => { - extension.insert(height, hash); + for (height, block) in changeset.iter() { + match block { + Some(block) => { + extension.insert(*height, *block); } None => { - extension.remove(&height); + extension.remove(height); } }; } let new_tip = match base { Some(base) => base - .extend(extension.into_iter().map(BlockId::from)) + .extend(extension.values().copied()) .expect("extension is strictly greater than base"), - None => LocalChain::from_blocks(extension)?.tip(), + None => CheckPoint::from_blocks(extension)?, }; self = new_tip; } @@ -256,12 +336,12 @@ impl CheckPoint { } /// Iterates over checkpoints backwards. 
-pub struct CheckPointIter { - current: Option>, +pub struct CheckPointIter { + current: Option>>, } -impl Iterator for CheckPointIter { - type Item = CheckPoint; +impl Iterator for CheckPointIter { + type Item = CheckPoint; fn next(&mut self) -> Option { let current = self.current.clone()?; @@ -270,9 +350,9 @@ impl Iterator for CheckPointIter { } } -impl IntoIterator for CheckPoint { - type Item = CheckPoint; - type IntoIter = CheckPointIter; +impl IntoIterator for CheckPoint { + type Item = CheckPoint; + type IntoIter = CheckPointIter; fn into_iter(self) -> Self::IntoIter { CheckPointIter { @@ -282,12 +362,24 @@ impl IntoIterator for CheckPoint { } /// This is a local implementation of [`ChainOracle`]. -#[derive(Debug, Clone, PartialEq)] -pub struct LocalChain { - tip: CheckPoint, +#[derive(Debug, Clone)] +pub struct LocalChain = BlockId> { + tip: CheckPoint, } -impl ChainOracle for LocalChain { +impl> PartialEq for LocalChain +where + B: Copy + Clone + core::fmt::Debug + core::cmp::PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.tip == other.tip + } +} + +impl> ChainOracle for LocalChain +where + B: Copy + Clone + core::fmt::Debug, +{ type Error = Infallible; fn is_block_in_chain( @@ -308,19 +400,14 @@ impl ChainOracle for LocalChain { } fn get_chain_tip(&self) -> Result { - Ok(self.tip.block_id()) + Ok(*self.tip.block_id().as_ref()) } } -impl LocalChain { - /// Get the genesis hash. - pub fn genesis_hash(&self) -> BlockHash { - self.tip.get(0).expect("genesis must exist").hash() - } - +impl LocalChain { /// Construct [`LocalChain`] from genesis `hash`. #[must_use] - pub fn from_genesis_hash(hash: BlockHash) -> (Self, ChangeSet) { + pub fn from_genesis_hash(hash: BlockHash) -> (Self, ChangeSet) { let height = 0; let chain = Self { tip: CheckPoint::new(BlockId { height, hash }), @@ -329,31 +416,6 @@ impl LocalChain { (chain, changeset) } - /// Construct a [`LocalChain`] from an initial `changeset`. - pub fn from_changeset(changeset: ChangeSet) -> Result { - let genesis_entry = changeset.get(&0).copied().flatten(); - let genesis_hash = match genesis_entry { - Some(hash) => hash, - None => return Err(MissingGenesisError), - }; - - let (mut chain, _) = Self::from_genesis_hash(genesis_hash); - chain.apply_changeset(&changeset)?; - - debug_assert!(chain._check_changeset_is_applied(&changeset)); - - Ok(chain) - } - - /// Construct a [`LocalChain`] from a given `checkpoint` tip. - pub fn from_tip(tip: CheckPoint) -> Result { - let genesis_cp = tip.iter().last().expect("must have at least one element"); - if genesis_cp.height() != 0 { - return Err(MissingGenesisError); - } - Ok(Self { tip }) - } - /// Constructs a [`LocalChain`] from a [`BTreeMap`] of height to [`BlockHash`]. /// /// The [`BTreeMap`] enforces the height order. However, the caller must ensure the blocks are @@ -363,7 +425,7 @@ impl LocalChain { return Err(MissingGenesisError); } - let mut tip: Option = None; + let mut tip: Option> = None; for block in &blocks { match tip { Some(curr) => { @@ -381,30 +443,35 @@ impl LocalChain { }) } - /// Get the highest checkpoint. - pub fn tip(&self) -> CheckPoint { - self.tip.clone() - } - - /// Applies the given `update` to the chain. - /// - /// The method returns [`ChangeSet`] on success. This represents the changes applied to `self`. - /// - /// There must be no ambiguity about which of the existing chain's blocks are still valid and - /// which are now invalid. 
That is, the new chain must implicitly connect to a definite block in - /// the existing chain and invalidate the block after it (if it exists) by including a block at - /// the same height but with a different hash to explicitly exclude it as a connection point. - /// - /// # Errors + /// Update the chain with a given [`Header`] connecting it with the previous block. /// - /// An error will occur if the update does not correctly connect with `self`. + /// This is a convenience method to call [`apply_header_connected_to`] with the `connected_to` + /// parameter being `height-1:prev_blockhash`. If there is no previous block (i.e. genesis), we + /// use the current block as `connected_to`. /// - /// [module-level documentation]: crate::local_chain - pub fn apply_update(&mut self, update: CheckPoint) -> Result { - let (new_tip, changeset) = merge_chains(self.tip.clone(), update)?; - self.tip = new_tip; - self._check_changeset_is_applied(&changeset); - Ok(changeset) + /// [`apply_header_connected_to`]: LocalChain::apply_header_connected_to + pub fn apply_header( + &mut self, + header: &Header, + height: u32, + ) -> Result, CannotConnectError> { + let connected_to = match height.checked_sub(1) { + Some(prev_height) => BlockId { + height: prev_height, + hash: header.prev_blockhash, + }, + None => BlockId { + height, + hash: header.block_hash(), + }, + }; + self.apply_header_connected_to(header, height, connected_to) + .map_err(|err| match err { + ApplyHeaderError::InconsistentBlocks => { + unreachable!("connected_to is derived from the block so is always consistent") + } + ApplyHeaderError::CannotConnect(err) => err, + }) } /// Update the chain with a given [`Header`] at `height` which you claim is connected to a existing block in the chain. @@ -433,7 +500,7 @@ impl LocalChain { header: &Header, height: u32, connected_to: BlockId, - ) -> Result { + ) -> Result, ApplyHeaderError> { let this = BlockId { height, hash: header.block_hash(), @@ -461,40 +528,75 @@ impl LocalChain { self.apply_update(update) .map_err(ApplyHeaderError::CannotConnect) } +} - /// Update the chain with a given [`Header`] connecting it with the previous block. +impl> LocalChain +where + B: Copy + Clone + core::fmt::Debug + PartialEq, +{ + /// Construct a [`LocalChain`] from an initial `changeset`. + pub fn from_changeset(changeset: ChangeSet) -> Result { + let genesis_entry = changeset.0.get(&0).copied().flatten(); + let genesis_block = match genesis_entry { + Some(block) => block, + None => return Err(MissingGenesisError), + }; + + let mut chain = Self { + tip: CheckPoint::new(genesis_block), + }; + chain.apply_changeset(&changeset)?; + + debug_assert!(chain._check_changeset_is_applied(&changeset)); + + Ok(chain) + } + + /// Get the genesis hash. + pub fn genesis_hash(&self) -> BlockHash { + self.tip.get(0).expect("genesis must exist").hash() + } + + /// Construct a [`LocalChain`] from a given `checkpoint` tip. + pub fn from_tip(tip: CheckPoint) -> Result { + let genesis_cp = tip.iter().last().expect("must have at least one element"); + if genesis_cp.height() != 0 { + return Err(MissingGenesisError); + } + Ok(Self { tip }) + } + + /// Get the highest checkpoint. + pub fn tip(&self) -> CheckPoint { + self.tip.clone() + } + + /// Applies the given `update` to the chain. /// - /// This is a convenience method to call [`apply_header_connected_to`] with the `connected_to` - /// parameter being `height-1:prev_blockhash`. If there is no previous block (i.e. genesis), we - /// use the current block as `connected_to`. 
+ /// The method returns [`ChangeSet`] on success. This represents the changes applied to `self`. /// - /// [`apply_header_connected_to`]: LocalChain::apply_header_connected_to - pub fn apply_header( + /// There must be no ambiguity about which of the existing chain's blocks are still valid and + /// which are now invalid. That is, the new chain must implicitly connect to a definite block in + /// the existing chain and invalidate the block after it (if it exists) by including a block at + /// the same height but with a different hash to explicitly exclude it as a connection point. + /// + /// # Errors + /// + /// An error will occur if the update does not correctly connect with `self`. + /// + /// [module-level documentation]: crate::local_chain + pub fn apply_update( &mut self, - header: &Header, - height: u32, - ) -> Result { - let connected_to = match height.checked_sub(1) { - Some(prev_height) => BlockId { - height: prev_height, - hash: header.prev_blockhash, - }, - None => BlockId { - height, - hash: header.block_hash(), - }, - }; - self.apply_header_connected_to(header, height, connected_to) - .map_err(|err| match err { - ApplyHeaderError::InconsistentBlocks => { - unreachable!("connected_to is derived from the block so is always consistent") - } - ApplyHeaderError::CannotConnect(err) => err, - }) + update: CheckPoint, + ) -> Result, CannotConnectError> { + let (new_tip, changeset) = merge_chains(self.tip.clone(), update)?; + self.tip = new_tip; + self._check_changeset_is_applied(&changeset); + Ok(changeset) } /// Apply the given `changeset`. - pub fn apply_changeset(&mut self, changeset: &ChangeSet) -> Result<(), MissingGenesisError> { + pub fn apply_changeset(&mut self, changeset: &ChangeSet) -> Result<(), MissingGenesisError> { let old_tip = self.tip.clone(); let new_tip = old_tip.apply_changeset(changeset)?; self.tip = new_tip; @@ -507,26 +609,35 @@ impl LocalChain { /// # Errors /// /// Replacing the block hash of an existing checkpoint will result in an error. - pub fn insert_block(&mut self, block_id: BlockId) -> Result { - if let Some(original_cp) = self.tip.get(block_id.height) { + pub fn insert_block(&mut self, block: B) -> Result, AlterCheckPointError> { + if let Some(original_cp) = self.tip.get(block.as_ref().height) { let original_hash = original_cp.hash(); - if original_hash != block_id.hash { + if original_hash != block.as_ref().hash { return Err(AlterCheckPointError { - height: block_id.height, + height: block.as_ref().height, original_hash, - update_hash: Some(block_id.hash), + update_hash: Some(block.as_ref().hash), }); } return Ok(ChangeSet::default()); } - let mut changeset = ChangeSet::default(); - changeset.insert(block_id.height, Some(block_id.hash)); + let mut changeset = ChangeSet::::default(); + changeset.0.insert(block.as_ref().height, Some(block)); self.apply_changeset(&changeset) .map_err(|_| AlterCheckPointError { height: 0, original_hash: self.genesis_hash(), - update_hash: changeset.get(&0).cloned().flatten(), + update_hash: Some( + changeset + .0 + .get(&0) + .cloned() + .flatten() + .expect("must have block") + .as_ref() + .hash, + ), })?; Ok(changeset) } @@ -540,16 +651,19 @@ impl LocalChain { /// /// This will fail with [`MissingGenesisError`] if the caller attempts to disconnect from the /// genesis block. 
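A hypothetical sketch of `disconnect_from` (placeholder hashes): every checkpoint at or above the disconnected height is recorded as removed, and the checkpoint below it becomes the new tip.

    use bdk_chain::{
        bitcoin::{hashes::Hash, BlockHash},
        local_chain::{ChangeSet, LocalChain},
        BlockId,
    };

    fn main() {
        let hash = BlockHash::all_zeros(); // placeholder hash, illustration only
        let mut chain = LocalChain::from_blocks([(0, hash), (1, hash), (2, hash), (3, hash)].into())
            .expect("blocks include genesis");

        let changeset = chain
            .disconnect_from(BlockId { height: 2, hash })
            .expect("not disconnecting from genesis");
        assert_eq!(chain.tip().height(), 1);
        assert_eq!(changeset, ChangeSet::from_iter([(2, None), (3, None)]));
    }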
- pub fn disconnect_from(&mut self, block_id: BlockId) -> Result { - let mut remove_from = Option::::None; + pub fn disconnect_from( + &mut self, + block_id: BlockId, + ) -> Result, MissingGenesisError> { + let mut remove_from = Option::>::None; let mut changeset = ChangeSet::default(); for cp in self.tip().iter() { let cp_id = cp.block_id(); - if cp_id.height < block_id.height { + if cp_id.as_ref().height < block_id.height { break; } - changeset.insert(cp_id.height, None); - if cp_id == block_id { + changeset.0.insert(cp_id.as_ref().height, None); + if cp_id.as_ref() == &block_id { remove_from = Some(cp); } } @@ -568,35 +682,37 @@ impl LocalChain { /// Derives an initial [`ChangeSet`], meaning that it can be applied to an empty chain to /// recover the current chain. - pub fn initial_changeset(&self) -> ChangeSet { - self.tip - .iter() - .map(|cp| { - let block_id = cp.block_id(); - (block_id.height, Some(block_id.hash)) - }) - .collect() + pub fn initial_changeset(&self) -> ChangeSet { + ChangeSet( + self.tip + .iter() + .map(|cp| { + let block = *cp.inner(); + (block.as_ref().height, Some(block)) + }) + .collect(), + ) } /// Iterate over checkpoints in descending height order. - pub fn iter_checkpoints(&self) -> CheckPointIter { + pub fn iter_checkpoints(&self) -> CheckPointIter { CheckPointIter { current: Some(self.tip.0.clone()), } } - fn _check_changeset_is_applied(&self, changeset: &ChangeSet) -> bool { + fn _check_changeset_is_applied(&self, changeset: &ChangeSet) -> bool { let mut curr_cp = self.tip.clone(); - for (height, exp_hash) in changeset.iter().rev() { + for (height, exp_block) in changeset.0.iter().rev() { match curr_cp.get(*height) { Some(query_cp) => { - if query_cp.height() != *height || Some(query_cp.hash()) != *exp_hash { + if query_cp.height() != *height || Some(*query_cp.inner()) != *exp_block { return false; } curr_cp = query_cp; } None => { - if exp_hash.is_some() { + if exp_block.is_some() { return false; } } @@ -610,7 +726,7 @@ impl LocalChain { /// This is a shorthand for calling [`CheckPoint::get`] on the [`tip`]. /// /// [`tip`]: LocalChain::tip - pub fn get(&self, height: u32) -> Option { + pub fn get(&self, height: u32) -> Option> { self.tip.get(height) } @@ -622,7 +738,7 @@ impl LocalChain { /// This is a shorthand for calling [`CheckPoint::range`] on the [`tip`]. /// /// [`tip`]: LocalChain::tip - pub fn range(&self, range: R) -> impl Iterator + pub fn range(&self, range: R) -> impl Iterator> where R: RangeBounds, { @@ -725,17 +841,20 @@ impl std::error::Error for ApplyHeaderError {} /// /// On success, a tuple is returned `(changeset, can_replace)`. If `can_replace` is true, then the /// `update_tip` can replace the `original_tip`. 
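As a worked illustration of this merge (hypothetical, with placeholder hashes): an update that agrees with the original chain at height 1 but carries a different hash at height 2 yields a changeset that swaps in the new block, and the update tip becomes the chain tip. `apply_update` is the public entry point that drives `merge_chains`.

    use bdk_chain::{
        bitcoin::{hashes::Hash, BlockHash},
        local_chain::{ChangeSet, LocalChain},
        BlockId,
    };

    fn main() {
        let hash_a = BlockHash::all_zeros(); // placeholder hashes, illustration only
        let hash_b = BlockHash::from_byte_array([1u8; 32]);

        let (mut chain, _) = LocalChain::from_genesis_hash(hash_a);
        let _ = chain.insert_block(BlockId { height: 1, hash: hash_a }).expect("new height");
        let _ = chain.insert_block(BlockId { height: 2, hash: hash_a }).expect("new height");

        // An update that agrees at height 1 but replaces the block at height 2.
        let update = chain
            .get(1)
            .expect("height 1 exists")
            .push(BlockId { height: 2, hash: hash_b })
            .expect("pushed block is higher than the update base");
        let changeset = chain.apply_update(update).expect("update connects at height 1");
        assert_eq!(chain.tip().hash(), hash_b);
        assert_eq!(
            changeset,
            ChangeSet::from_iter([(2, Some(BlockId { height: 2, hash: hash_b }))]),
        );
    }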
-fn merge_chains( - original_tip: CheckPoint, - update_tip: CheckPoint, -) -> Result<(CheckPoint, ChangeSet), CannotConnectError> { - let mut changeset = ChangeSet::default(); +fn merge_chains( + original_tip: CheckPoint, + update_tip: CheckPoint, +) -> Result<(CheckPoint, ChangeSet), CannotConnectError> +where + B: AsRef + core::fmt::Debug + Copy + Clone + PartialEq, +{ + let mut changeset = ChangeSet::::default(); let mut orig = original_tip.iter(); let mut update = update_tip.iter(); let mut curr_orig = None; let mut curr_update = None; - let mut prev_orig: Option = None; - let mut prev_update: Option = None; + let mut prev_orig: Option> = None; + let mut prev_update: Option> = None; let mut point_of_agreement_found = false; let mut prev_orig_was_invalidated = false; let mut potentially_invalidated_heights = vec![]; @@ -761,7 +880,7 @@ fn merge_chains( match (curr_orig.as_ref(), curr_update.as_ref()) { // Update block that doesn't exist in the original chain (o, Some(u)) if Some(u.height()) > o.map(|o| o.height()) => { - changeset.insert(u.height(), Some(u.hash())); + changeset.0.insert(u.height(), Some(*u.inner())); prev_update = curr_update.take(); } // Original block that isn't in the update @@ -813,9 +932,9 @@ fn merge_chains( } else { // We have an invalidation height so we set the height to the updated hash and // also purge all the original chain block hashes above this block. - changeset.insert(u.height(), Some(u.hash())); + changeset.0.insert(u.height(), Some(*u.inner())); for invalidated_height in potentially_invalidated_heights.drain(..) { - changeset.insert(invalidated_height, None); + changeset.0.insert(invalidated_height, None); } prev_orig_was_invalidated = true; } diff --git a/crates/chain/src/spk_client.rs b/crates/chain/src/spk_client.rs index fdc3be35b..1ebaf9ee9 100644 --- a/crates/chain/src/spk_client.rs +++ b/crates/chain/src/spk_client.rs @@ -1,5 +1,6 @@ //! Helper types for spk-based blockchain clients. +use crate::BlockId; use crate::{ collections::BTreeMap, local_chain::CheckPoint, ConfirmationTimeHeightAnchor, TxGraph, }; @@ -11,12 +12,15 @@ use core::{fmt::Debug, marker::PhantomData, ops::RangeBounds}; /// /// A client sync fetches relevant chain data for a known list of scripts, transaction ids and /// outpoints. The sync process also updates the chain from the given [`CheckPoint`]. -pub struct SyncRequest { +pub struct SyncRequest = BlockId> +where + B: Clone, +{ /// A checkpoint for the current chain [`LocalChain::tip`]. /// The sync process will return a new chain update that extends this tip. /// /// [`LocalChain::tip`]: crate::local_chain::LocalChain::tip - pub chain_tip: CheckPoint, + pub chain_tip: CheckPoint, /// Transactions that spend from or to these indexed script pubkeys. pub spks: Box + Send>, /// Transactions with these txids. @@ -25,9 +29,12 @@ pub struct SyncRequest { pub outpoints: Box + Send>, } -impl SyncRequest { +impl> SyncRequest +where + B: Clone, +{ /// Construct a new [`SyncRequest`] from a given `cp` tip. - pub fn from_chain_tip(cp: CheckPoint) -> Self { + pub fn from_chain_tip(cp: CheckPoint) -> Self { Self { chain_tip: cp, spks: Box::new(core::iter::empty()), @@ -175,11 +182,14 @@ impl SyncRequest { /// Data returned from a spk-based blockchain client sync. /// /// See also [`SyncRequest`]. -pub struct SyncResult { +pub struct SyncResult = BlockId, A = ConfirmationTimeHeightAnchor> +where + B: Clone, +{ /// The update to apply to the receiving [`TxGraph`]. 
pub graph_update: TxGraph, /// The update to apply to the receiving [`LocalChain`](crate::local_chain::LocalChain). - pub chain_update: CheckPoint, + pub chain_update: CheckPoint, } /// Data required to perform a spk-based blockchain client full scan. @@ -188,20 +198,26 @@ pub struct SyncResult { /// data until some stop gap number of scripts is found that have no data. This operation is /// generally only used when importing or restoring previously used keychains in which the list of /// used scripts is not known. The full scan process also updates the chain from the given [`CheckPoint`]. -pub struct FullScanRequest { +pub struct FullScanRequest = BlockId> +where + B: Clone, +{ /// A checkpoint for the current [`LocalChain::tip`]. /// The full scan process will return a new chain update that extends this tip. /// /// [`LocalChain::tip`]: crate::local_chain::LocalChain::tip - pub chain_tip: CheckPoint, + pub chain_tip: CheckPoint, /// Iterators of script pubkeys indexed by the keychain index. pub spks_by_keychain: BTreeMap + Send>>, } -impl FullScanRequest { +impl> FullScanRequest +where + B: Clone, +{ /// Construct a new [`FullScanRequest`] from a given `chain_tip`. #[must_use] - pub fn from_chain_tip(chain_tip: CheckPoint) -> Self { + pub fn from_chain_tip(chain_tip: CheckPoint) -> Self { Self { chain_tip, spks_by_keychain: BTreeMap::new(), @@ -218,7 +234,7 @@ impl FullScanRequest { #[cfg(feature = "miniscript")] #[must_use] pub fn from_keychain_txout_index( - chain_tip: CheckPoint, + chain_tip: CheckPoint, index: &crate::keychain::KeychainTxOutIndex, ) -> Self where @@ -316,11 +332,14 @@ impl FullScanRequest { /// Data returned from a spk-based blockchain client full scan. /// /// See also [`FullScanRequest`]. -pub struct FullScanResult { +pub struct FullScanResult = BlockId, A = ConfirmationTimeHeightAnchor> +where + B: Clone, +{ /// The update to apply to the receiving [`LocalChain`](crate::local_chain::LocalChain). pub graph_update: TxGraph, /// The update to apply to the receiving [`TxGraph`]. - pub chain_update: CheckPoint, + pub chain_update: CheckPoint, /// Last active indices for the corresponding keychains (`K`). pub last_active_indices: BTreeMap, } diff --git a/crates/chain/tests/common/mod.rs b/crates/chain/tests/common/mod.rs index 3fad37f93..d171dd8b9 100644 --- a/crates/chain/tests/common/mod.rs +++ b/crates/chain/tests/common/mod.rs @@ -34,9 +34,8 @@ macro_rules! local_chain { macro_rules! 
chain_update { [ $(($height:expr, $hash:expr)), * ] => {{ #[allow(unused_mut)] - bdk_chain::local_chain::LocalChain::from_blocks([$(($height, $hash).into()),*].into_iter().collect()) + bdk_chain::local_chain::CheckPoint::from_blocks([$(($height, BlockId{ height: $height, hash: $hash}).into()),*].into_iter().collect()) .expect("chain must have genesis block") - .tip() }}; } diff --git a/crates/chain/tests/test_local_chain.rs b/crates/chain/tests/test_local_chain.rs index 6819e3da1..9f1d2edc7 100644 --- a/crates/chain/tests/test_local_chain.rs +++ b/crates/chain/tests/test_local_chain.rs @@ -16,23 +16,26 @@ use proptest::prelude::*; mod common; #[derive(Debug)] -struct TestLocalChain<'a> { +struct TestLocalChain<'a, B: AsRef> { name: &'static str, - chain: LocalChain, - update: CheckPoint, - exp: ExpectedResult<'a>, + chain: LocalChain, + update: CheckPoint, + exp: ExpectedResult<'a, B>, } #[derive(Debug, PartialEq)] -enum ExpectedResult<'a> { +enum ExpectedResult<'a, B> { Ok { - changeset: &'a [(u32, Option)], - init_changeset: &'a [(u32, Option)], + changeset: &'a [(u32, Option)], + init_changeset: &'a [(u32, Option)], }, Err(CannotConnectError), } -impl<'a> TestLocalChain<'a> { +impl<'a, B: AsRef> TestLocalChain<'a, B> +where + B: Copy + std::fmt::Debug + PartialEq, +{ fn run(mut self) { println!("[TestLocalChain] test: {}", self.name); let got_changeset = match self.chain.apply_update(self.update) { @@ -55,13 +58,13 @@ impl<'a> TestLocalChain<'a> { } => { assert_eq!( got_changeset, - changeset.iter().cloned().collect(), + ChangeSet::from_iter(changeset.iter().cloned().collect::>()), "{}: unexpected changeset", self.name ); assert_eq!( self.chain.initial_changeset(), - init_changeset.iter().cloned().collect(), + ChangeSet::from_iter(init_changeset.iter().cloned().collect::>()), "{}: unexpected initial changeset", self.name ); @@ -83,7 +86,7 @@ fn update_local_chain() { update: chain_update![(0, h!("A"))], exp: ExpectedResult::Ok { changeset: &[], - init_changeset: &[(0, Some(h!("A")))], + init_changeset: &[(0, Some(BlockId { height: 0, hash: h!("A")}))], }, }, TestLocalChain { @@ -91,8 +94,8 @@ fn update_local_chain() { chain: local_chain![(0, h!("A"))], update: chain_update![(0, h!("A")), (1, h!("B"))], exp: ExpectedResult::Ok { - changeset: &[(1, Some(h!("B")))], - init_changeset: &[(0, Some(h!("A"))), (1, Some(h!("B")))], + changeset: &[(1, Some(BlockId{height: 1, hash: h!("B")}))], + init_changeset: &[(0, Some(BlockId { height: 0 , hash: h!("A")})), (1, Some(BlockId { height: 1, hash: h!("B")}))], }, }, TestLocalChain { @@ -117,7 +120,7 @@ fn update_local_chain() { update: chain_update![(0, h!("A"))], exp: ExpectedResult::Ok { changeset: &[], - init_changeset: &[(0, Some(h!("A")))], + init_changeset: &[(0, Some(BlockId { height: 0, hash: h!("A")}))], }, }, // Introduce an older checkpoint (B) @@ -129,8 +132,12 @@ fn update_local_chain() { chain: local_chain![(0, h!("_")), (2, h!("C")), (3, h!("D"))], update: chain_update![(0, h!("_")), (1, h!("B")), (2, h!("C"))], exp: ExpectedResult::Ok { - changeset: &[(1, Some(h!("B")))], - init_changeset: &[(0, Some(h!("_"))), (1, Some(h!("B"))), (2, Some(h!("C"))), (3, Some(h!("D")))], + changeset: &[(1, Some(BlockId{height: 1, hash: h!("B")}))], + init_changeset: &[ + (0, Some(BlockId{ height: 0, hash: h!("_")})), + (1, Some(BlockId{ height: 1, hash: h!("B")})), + (2, Some(BlockId{ height: 2, hash: h!("C")})), + (3, Some(BlockId{ height: 3, hash: h!("D")}))], }, }, // Introduce an older checkpoint (A) that is not directly behind PoA @@ -142,8 
+149,12 @@ fn update_local_chain() { chain: local_chain![(0, h!("_")), (3, h!("B")), (4, h!("C"))], update: chain_update![(0, h!("_")), (2, h!("A")), (4, h!("C"))], exp: ExpectedResult::Ok { - changeset: &[(2, Some(h!("A")))], - init_changeset: &[(0, Some(h!("_"))), (2, Some(h!("A"))), (3, Some(h!("B"))), (4, Some(h!("C")))], + changeset: &[(2, Some(BlockId{height: 2, hash: h!("A")}))], + init_changeset: &[ + (0, Some(BlockId{ height: 0, hash: h!("_")})), + (2, Some(BlockId{ height: 2, hash: h!("A")})), + (3, Some(BlockId{ height: 3, hash: h!("B")})), + (4, Some(BlockId{ height: 4, hash: h!("C")}))], } }, // Introduce an older checkpoint (B) that is not the oldest checkpoint @@ -155,8 +166,12 @@ fn update_local_chain() { chain: local_chain![(0, h!("_")), (1, h!("A")), (3, h!("C"))], update: chain_update![(0, h!("_")), (2, h!("B")), (3, h!("C"))], exp: ExpectedResult::Ok { - changeset: &[(2, Some(h!("B")))], - init_changeset: &[(0, Some(h!("_"))), (1, Some(h!("A"))), (2, Some(h!("B"))), (3, Some(h!("C")))], + changeset: &[(2, Some(BlockId{height: 2, hash: h!("B")}))], + init_changeset: &[ + (0, Some(BlockId{ height: 0, hash: h!("_")})), + (1, Some(BlockId{ height: 1, hash: h!("A")})), + (2, Some(BlockId{ height: 2, hash: h!("B")})), + (3, Some(BlockId{ height: 3, hash: h!("C")}))], } }, // Introduce two older checkpoints below the PoA @@ -168,8 +183,12 @@ fn update_local_chain() { chain: local_chain![(0, h!("_")), (3, h!("C"))], update: chain_update![(0, h!("_")), (1, h!("A")), (2, h!("B")), (3, h!("C"))], exp: ExpectedResult::Ok { - changeset: &[(1, Some(h!("A"))), (2, Some(h!("B")))], - init_changeset: &[(0, Some(h!("_"))), (1, Some(h!("A"))), (2, Some(h!("B"))), (3, Some(h!("C")))], + changeset: &[(1, Some(BlockId{height: 1, hash: h!("A")})), (2, Some(BlockId{height: 2, hash: h!("B")}))], + init_changeset: &[ + (0, Some(BlockId{ height: 0 , hash: h!("_")})), + (1, Some(BlockId{ height: 1, hash: h!("A")})), + (2, Some(BlockId { height: 2, hash: h!("B")})), + (3, Some(BlockId{ height: 3, hash: h!("C")}))], }, }, TestLocalChain { @@ -177,8 +196,8 @@ fn update_local_chain() { chain: local_chain![(0, h!("im-wrong")), (1, h!("we-agree"))], update: chain_update![(0, h!("fix")), (1, h!("we-agree"))], exp: ExpectedResult::Ok { - changeset: &[(0, Some(h!("fix")))], - init_changeset: &[(0, Some(h!("fix"))), (1, Some(h!("we-agree")))], + changeset: &[(0, Some(BlockId{ height: 0, hash: h!("fix")}))], + init_changeset: &[(0, Some(BlockId { height: 0, hash: h!("fix")})), (1, Some(BlockId{ height: 1, hash: h!("we-agree")}))], }, }, // B and C are in both chain and update @@ -191,13 +210,13 @@ fn update_local_chain() { chain: local_chain![(0, h!("_")), (2, h!("B")), (3, h!("C"))], update: chain_update![(0, h!("_")), (1, h!("A")), (2, h!("B")), (3, h!("C")), (4, h!("D"))], exp: ExpectedResult::Ok { - changeset: &[(1, Some(h!("A"))), (4, Some(h!("D")))], + changeset: &[(1, Some(BlockId{height: 1, hash: h!("A")})), (4, Some(BlockId{height: 4, hash: h!("D")}))], init_changeset: &[ - (0, Some(h!("_"))), - (1, Some(h!("A"))), - (2, Some(h!("B"))), - (3, Some(h!("C"))), - (4, Some(h!("D"))), + (0, Some(BlockId{height: 0, hash: h!("_")})), + (1, Some(BlockId{height: 1, hash: h!("A")})), + (2, Some(BlockId{height: 2, hash: h!("B")})), + (3, Some(BlockId{height: 3, hash: h!("C")})), + (4, Some(BlockId{height: 4, hash: h!("D")})), ], }, }, @@ -225,16 +244,16 @@ fn update_local_chain() { update: chain_update![(0, h!("_")), (2, h!("B'")), (3, h!("C'")), (4, h!("D"))], exp: ExpectedResult::Ok { changeset: &[ - (2, 
Some(h!("B'"))), - (3, Some(h!("C'"))), - (4, Some(h!("D"))), + (2, Some(BlockId{height: 2, hash: h!("B'")})), + (3, Some(BlockId{height: 3, hash: h!("C'")})), + (4, Some(BlockId{height: 4, hash: h!("D")})), (5, None), ], init_changeset: &[ - (0, Some(h!("_"))), - (2, Some(h!("B'"))), - (3, Some(h!("C'"))), - (4, Some(h!("D"))), + (0, Some(BlockId{height: 0, hash: h!("_")})), + (2, Some(BlockId{height: 2, hash: h!("B'")})), + (3, Some(BlockId{height: 3, hash: h!("C'")})), + (4, Some(BlockId{height: 4, hash: h!("D")})), ], }, }, @@ -249,16 +268,16 @@ fn update_local_chain() { update: chain_update![(0, h!("_")), (1, h!("B'")), (2, h!("C'")), (3, h!("D"))], exp: ExpectedResult::Ok { changeset: &[ - (1, Some(h!("B'"))), - (2, Some(h!("C'"))), - (3, Some(h!("D"))), + (1, Some(BlockId{height: 1, hash: h!("B'")})), + (2, Some(BlockId{height: 2, hash: h!("C'")})), + (3, Some(BlockId{height: 3, hash: h!("D")})), (4, None) ], init_changeset: &[ - (0, Some(h!("_"))), - (1, Some(h!("B'"))), - (2, Some(h!("C'"))), - (3, Some(h!("D"))), + (0, Some(BlockId{height: 0, hash: h!("_")})), + (1, Some(BlockId{height: 1, hash: h!("B'")})), + (2, Some(BlockId{height: 2, hash: h!("C'")})), + (3, Some(BlockId{height: 3, hash: h!("D")})), ], }, }, @@ -284,16 +303,16 @@ fn update_local_chain() { update: chain_update![(0, h!("A")), (2, h!("C")), (4, h!("E")), (5, h!("F"))], exp: ExpectedResult::Ok { changeset: &[ - (2, Some(h!("C"))), - (5, Some(h!("F"))), + (2, Some(BlockId{height: 2, hash: h!("C")})), + (5, Some(BlockId{height: 5, hash: h!("F")})), ], init_changeset: &[ - (0, Some(h!("A"))), - (1, Some(h!("B"))), - (2, Some(h!("C"))), - (3, Some(h!("D"))), - (4, Some(h!("E"))), - (5, Some(h!("F"))), + (0, Some(BlockId{height: 0, hash: h!("A")})), + (1, Some(BlockId{height: 1, hash: h!("B")})), + (2, Some(BlockId{height: 2, hash: h!("C")})), + (3, Some(BlockId{height: 3, hash: h!("D")})), + (4, Some(BlockId{height: 4, hash: h!("E")})), + (5, Some(BlockId{height: 5, hash: h!("F")})), ], }, }, @@ -307,14 +326,14 @@ fn update_local_chain() { update: chain_update![(0, h!("_")), (2, h!("C")), (3, h!("D'"))], exp: ExpectedResult::Ok { changeset: &[ - (3, Some(h!("D'"))), + (3, Some(BlockId{height: 3, hash: h!("D'")})), (4, None), (5, None), ], init_changeset: &[ - (0, Some(h!("_"))), - (2, Some(h!("C"))), - (3, Some(h!("D'"))), + (0, Some(BlockId{height: 0, hash: h!("_")})), + (2, Some(BlockId{height: 2, hash: h!("C")})), + (3, Some(BlockId{height: 3, hash: h!("D'")})), ], }, }, @@ -325,36 +344,54 @@ fn update_local_chain() { #[test] fn local_chain_insert_block() { - struct TestCase { - original: LocalChain, + struct TestCase> { + original: LocalChain, insert: (u32, BlockHash), - expected_result: Result, - expected_final: LocalChain, + expected_result: Result, AlterCheckPointError>, + expected_final: LocalChain, } let test_cases = [ TestCase { original: local_chain![(0, h!("_"))], insert: (5, h!("block5")), - expected_result: Ok([(5, Some(h!("block5")))].into()), + expected_result: Ok(ChangeSet::from_iter([( + 5, + Some(BlockId { + height: 5, + hash: h!("block5"), + }), + )])), expected_final: local_chain![(0, h!("_")), (5, h!("block5"))], }, TestCase { original: local_chain![(0, h!("_")), (3, h!("A"))], insert: (4, h!("B")), - expected_result: Ok([(4, Some(h!("B")))].into()), + expected_result: Ok(ChangeSet::from_iter([( + 4, + Some(BlockId { + height: 4, + hash: h!("B"), + }), + )])), expected_final: local_chain![(0, h!("_")), (3, h!("A")), (4, h!("B"))], }, TestCase { original: local_chain![(0, h!("_")), (4, h!("B"))], 
insert: (3, h!("A")), - expected_result: Ok([(3, Some(h!("A")))].into()), + expected_result: Ok(ChangeSet::from_iter([( + 3, + Some(BlockId { + height: 3, + hash: h!("A"), + }), + )])), expected_final: local_chain![(0, h!("_")), (3, h!("A")), (4, h!("B"))], }, TestCase { original: local_chain![(0, h!("_")), (2, h!("K"))], insert: (2, h!("K")), - expected_result: Ok([].into()), + expected_result: Ok(ChangeSet::default()), expected_final: local_chain![(0, h!("_")), (2, h!("K"))], }, TestCase { @@ -383,12 +420,12 @@ fn local_chain_insert_block() { #[test] fn local_chain_disconnect_from() { - struct TestCase { + struct TestCase> { name: &'static str, - original: LocalChain, + original: LocalChain, disconnect_from: (u32, BlockHash), - exp_result: Result, - exp_final: LocalChain, + exp_result: Result, MissingGenesisError>, + exp_final: LocalChain, } let test_cases = [ @@ -535,8 +572,8 @@ fn checkpoint_from_block_ids() { #[test] fn checkpoint_query() { - struct TestCase { - chain: LocalChain, + struct TestCase> { + chain: LocalChain, /// The heights we want to call [`CheckPoint::query`] with, represented as an inclusive /// range. /// @@ -663,13 +700,13 @@ fn local_chain_apply_header_connected_to() { } } - struct TestCase { + struct TestCase> { name: &'static str, - chain: LocalChain, + chain: LocalChain, header: Header, height: u32, connected_to: BlockId, - exp_result: Result)>, ApplyHeaderError>, + exp_result: Result)>, ApplyHeaderError>, } let test_cases = [ @@ -703,7 +740,7 @@ fn local_chain_apply_header_connected_to() { header, height, connected_to, - exp_result: Ok(vec![(height, Some(hash))]), + exp_result: Ok(vec![(height, Some(BlockId { height, hash }))]), } }, { @@ -735,7 +772,16 @@ fn local_chain_apply_header_connected_to() { height: 3, hash: h!("C"), }, - exp_result: Ok(vec![(prev_height, Some(prev_hash)), (height, Some(hash))]), + exp_result: Ok(vec![ + ( + prev_height, + Some(BlockId { + height: prev_height, + hash: prev_hash, + }), + ), + (height, Some(BlockId { height, hash })), + ]), } }, { @@ -818,7 +864,10 @@ fn generate_height_range_bounds( ) } -fn generate_checkpoints(max_height: u32, max_count: usize) -> impl Strategy { +fn generate_checkpoints( + max_height: u32, + max_count: usize, +) -> impl Strategy> { proptest::collection::btree_set(1..max_height, 0..max_count).prop_map(|mut heights| { heights.insert(0); // must have genesis CheckPoint::from_block_ids(heights.into_iter().map(|height| { diff --git a/crates/electrum/src/bdk_electrum_client.rs b/crates/electrum/src/bdk_electrum_client.rs index 5f7d59b22..80101f167 100644 --- a/crates/electrum/src/bdk_electrum_client.rs +++ b/crates/electrum/src/bdk_electrum_client.rs @@ -84,7 +84,7 @@ impl BdkElectrumClient { /// - `fetch_prev_txouts`: specifies whether or not we want previous `TxOut`s for fee pub fn full_scan( &self, - request: FullScanRequest, + request: FullScanRequest, stop_gap: usize, batch_size: usize, fetch_prev_txouts: bool, @@ -106,7 +106,7 @@ impl BdkElectrumClient { .iter() .take(10) .map(|cp| (cp.height(), cp)) - .collect::>(); + .collect::>>(); if !request_spks.is_empty() { if !scanned_spks.is_empty() { @@ -187,7 +187,7 @@ impl BdkElectrumClient { /// [`full_scan`]: Self::full_scan pub fn sync( &self, - request: SyncRequest, + request: SyncRequest, batch_size: usize, fetch_prev_txouts: bool, ) -> Result { @@ -202,7 +202,7 @@ impl BdkElectrumClient { .iter() .take(10) .map(|cp| (cp.height(), cp)) - .collect::>(); + .collect::>>(); self.populate_with_txids(&cps, &mut full_scan_res.graph_update, request.txids)?; 
self.populate_with_outpoints(&cps, &mut full_scan_res.graph_update, request.outpoints)?; @@ -227,7 +227,7 @@ impl BdkElectrumClient { /// Checkpoints (in `cps`) are used to create anchors. The `tx_cache` is self-explanatory. fn populate_with_spks( &self, - cps: &BTreeMap, + cps: &BTreeMap>, graph_update: &mut TxGraph, spks: &mut impl Iterator, stop_gap: usize, @@ -299,7 +299,7 @@ impl BdkElectrumClient { /// Checkpoints (in `cps`) are used to create anchors. The `tx_cache` is self-explanatory. fn populate_with_outpoints( &self, - cps: &BTreeMap, + cps: &BTreeMap>, graph_update: &mut TxGraph, outpoints: impl IntoIterator, ) -> Result<(), Error> { @@ -352,7 +352,7 @@ impl BdkElectrumClient { /// Populate the `graph_update` with transactions/anchors of the provided `txids`. fn populate_with_txids( &self, - cps: &BTreeMap, + cps: &BTreeMap>, graph_update: &mut TxGraph, txids: impl IntoIterator, ) -> Result<(), Error> { @@ -394,11 +394,13 @@ impl BdkElectrumClient { /// /// This can be transformed into a [`FullScanResult`] with either [`ConfirmationHeightAnchor`] or /// [`ConfirmationTimeHeightAnchor`] anchor types. -pub struct ElectrumFullScanResult(FullScanResult); +pub struct ElectrumFullScanResult(FullScanResult); impl ElectrumFullScanResult { /// Return [`FullScanResult`] with [`ConfirmationHeightAnchor`]. - pub fn with_confirmation_height_anchor(self) -> FullScanResult { + pub fn with_confirmation_height_anchor( + self, + ) -> FullScanResult { self.0 } @@ -408,7 +410,7 @@ impl ElectrumFullScanResult { pub fn with_confirmation_time_height_anchor( self, client: &BdkElectrumClient, - ) -> Result, Error> { + ) -> Result, Error> { let res = self.0; Ok(FullScanResult { graph_update: try_into_confirmation_time_result(res.graph_update, &client.inner)?, @@ -422,11 +424,11 @@ impl ElectrumFullScanResult { /// /// This can be transformed into a [`SyncResult`] with either [`ConfirmationHeightAnchor`] or /// [`ConfirmationTimeHeightAnchor`] anchor types. -pub struct ElectrumSyncResult(SyncResult); +pub struct ElectrumSyncResult(SyncResult); impl ElectrumSyncResult { /// Return [`SyncResult`] with [`ConfirmationHeightAnchor`]. - pub fn with_confirmation_height_anchor(self) -> SyncResult { + pub fn with_confirmation_height_anchor(self) -> SyncResult { self.0 } @@ -436,7 +438,7 @@ impl ElectrumSyncResult { pub fn with_confirmation_time_height_anchor( self, client: &BdkElectrumClient, - ) -> Result, Error> { + ) -> Result, Error> { let res = self.0; Ok(SyncResult { graph_update: try_into_confirmation_time_result(res.graph_update, &client.inner)?, @@ -476,8 +478,8 @@ fn try_into_confirmation_time_result( /// Return a [`CheckPoint`] of the latest tip, that connects with `prev_tip`. fn construct_update_tip( client: &impl ElectrumApi, - prev_tip: CheckPoint, -) -> Result<(CheckPoint, Option), Error> { + prev_tip: CheckPoint, +) -> Result<(CheckPoint, Option), Error> { let HeaderNotification { height, .. } = client.block_headers_subscribe()?; let new_tip_height = height as u32; @@ -501,7 +503,7 @@ fn construct_update_tip( // Find the "point of agreement" (if any). 
let agreement_cp = { - let mut agreement_cp = Option::::None; + let mut agreement_cp = Option::>::None; for cp in prev_tip.iter() { let cp_block = cp.block_id(); let hash = match new_blocks.get(&cp_block.height) { @@ -550,7 +552,7 @@ fn construct_update_tip( /// /// [tx status](https://electrumx-spesmilo.readthedocs.io/en/latest/protocol-basics.html#status) fn determine_tx_anchor( - cps: &BTreeMap, + cps: &BTreeMap>, raw_height: i32, txid: Txid, ) -> Option { diff --git a/crates/sqlite/src/store.rs b/crates/sqlite/src/store.rs index beeb9e0aa..111cbfbd7 100644 --- a/crates/sqlite/src/store.rs +++ b/crates/sqlite/src/store.rs @@ -13,7 +13,8 @@ use std::sync::{Arc, Mutex}; use crate::Error; use bdk_chain::{ - indexed_tx_graph, keychain, local_chain, tx_graph, Anchor, Append, DescriptorExt, DescriptorId, + indexed_tx_graph, keychain, local_chain, tx_graph, Anchor, Append, BlockId, DescriptorExt, + DescriptorId, }; use bdk_persist::CombinedChangeSet; @@ -141,14 +142,14 @@ impl Store { db_transaction: &rusqlite::Transaction, chain_changeset: &local_chain::ChangeSet, ) -> Result<(), Error> { - for (height, hash) in chain_changeset.iter() { - match hash { + for (height, block) in chain_changeset.iter() { + match block { // add new hash at height - Some(hash) => { + Some(block) => { let insert_block_stmt = &mut db_transaction .prepare_cached("INSERT INTO block (hash, height) VALUES (:hash, :height)") .expect("insert block statement"); - let hash = hash.to_string(); + let hash = block.hash.to_string(); insert_block_stmt .execute(named_params! {":hash": hash, ":height": height }) .map_err(Error::Sqlite)?; @@ -171,7 +172,7 @@ impl Store { /// Select all blocks. fn select_blocks( db_transaction: &rusqlite::Transaction, - ) -> Result>, Error> { + ) -> Result>, Error> { let mut select_blocks_stmt = db_transaction .prepare_cached("SELECT height, hash FROM block") .expect("select blocks statement"); @@ -180,7 +181,10 @@ impl Store { .query_map([], |row| { let height = row.get_unwrap::(0); let hash = row.get_unwrap::(1); - let hash = Some(BlockHash::from_str(hash.as_str()).expect("block hash")); + let hash = Some(BlockId { + height, + hash: BlockHash::from_str(hash.as_str()).expect("block hash"), + }); Ok((height, hash)) }) .map_err(Error::Sqlite)?; @@ -515,7 +519,7 @@ where let db_transaction = self.db_transaction()?; let network = Self::select_network(&db_transaction)?; - let chain = Self::select_blocks(&db_transaction)?; + let chain = local_chain::ChangeSet::from_iter(Self::select_blocks(&db_transaction)?); let keychains_added = Self::select_keychains(&db_transaction)?; let last_revealed = Self::select_last_revealed(&db_transaction)?; let txs = Self::select_txs(&db_transaction)?; @@ -658,12 +662,29 @@ mod test { BlockHash::from_str("000000006c02c8ea6e4ff69651f7fcde348fb9d557a06e6957b65552002a7820") .unwrap(); - let block_changeset = [ - (0, Some(block_hash_0)), - (1, Some(block_hash_1)), - (2, Some(block_hash_2)), - ] - .into(); + let block_changeset = local_chain::ChangeSet::from_iter([ + ( + 0, + Some(BlockId { + height: 0, + hash: block_hash_0, + }), + ), + ( + 1, + Some(BlockId { + height: 1, + hash: block_hash_1, + }), + ), + ( + 2, + Some(BlockId { + height: 2, + hash: block_hash_2, + }), + ), + ]); let ext_keychain = Keychain::External { account: 0, From 1bea902c68626dab4397b63bec710cffe07aeaa9 Mon Sep 17 00:00:00 2001 From: Wei Chen Date: Sat, 15 Jun 2024 14:38:17 +0800 Subject: [PATCH 2/2] refactor(electrum): implement merkle proofs WIP --- crates/electrum/src/bdk_electrum_client.rs | 412 
+++++++------------- crates/electrum/tests/test_electrum.rs | 37 +- example-crates/example_electrum/src/main.rs | 13 +- example-crates/wallet_electrum/src/main.rs | 4 +- 4 files changed, 161 insertions(+), 305 deletions(-) diff --git a/crates/electrum/src/bdk_electrum_client.rs b/crates/electrum/src/bdk_electrum_client.rs index 80101f167..c93a499e8 100644 --- a/crates/electrum/src/bdk_electrum_client.rs +++ b/crates/electrum/src/bdk_electrum_client.rs @@ -1,14 +1,16 @@ use bdk_chain::{ - bitcoin::{OutPoint, ScriptBuf, Transaction, Txid}, - collections::{BTreeMap, HashMap, HashSet}, + bitcoin::{BlockHash, OutPoint, ScriptBuf, Transaction, Txid}, + collections::{BTreeMap, HashMap}, local_chain::CheckPoint, spk_client::{FullScanRequest, FullScanResult, SyncRequest, SyncResult}, tx_graph::TxGraph, - BlockId, ConfirmationHeightAnchor, ConfirmationTimeHeightAnchor, + Anchor, BlockId, ConfirmationTimeHeightAnchor, }; -use core::str::FromStr; use electrum_client::{ElectrumApi, Error, HeaderNotification}; -use std::sync::{Arc, Mutex}; +use std::{ + collections::BTreeSet, + sync::{Arc, Mutex}, +}; /// We include a chain suffix of a certain length for the purpose of robustness. const CHAIN_SUFFIX_LENGTH: u32 = 8; @@ -88,87 +90,32 @@ impl BdkElectrumClient { stop_gap: usize, batch_size: usize, fetch_prev_txouts: bool, - ) -> Result, Error> { - let mut request_spks = request.spks_by_keychain; - - // We keep track of already-scanned spks just in case a reorg happens and we need to do a - // rescan. We need to keep track of this as iterators in `keychain_spks` are "unbounded" so - // cannot be collected. In addition, we keep track of whether an spk has an active tx - // history for determining the `last_active_index`. - // * key: (keychain, spk_index) that identifies the spk. - // * val: (script_pubkey, has_tx_history). - let mut scanned_spks = BTreeMap::<(K, u32), (ScriptBuf, bool)>::new(); - - let update = loop { - let (tip, _) = construct_update_tip(&self.inner, request.chain_tip.clone())?; - let mut graph_update = TxGraph::::default(); - let cps = tip - .iter() - .take(10) - .map(|cp| (cp.height(), cp)) - .collect::>>(); - - if !request_spks.is_empty() { - if !scanned_spks.is_empty() { - scanned_spks.append( - &mut self.populate_with_spks( - &cps, - &mut graph_update, - &mut scanned_spks - .iter() - .map(|(i, (spk, _))| (i.clone(), spk.clone())), - stop_gap, - batch_size, - )?, - ); - } - for (keychain, keychain_spks) in &mut request_spks { - scanned_spks.extend( - self.populate_with_spks( - &cps, - &mut graph_update, - keychain_spks, - stop_gap, - batch_size, - )? - .into_iter() - .map(|(spk_i, spk)| ((keychain.clone(), spk_i), spk)), - ); - } - } - - // check for reorgs during scan process - let server_blockhash = self.inner.block_header(tip.height() as usize)?.block_hash(); - if tip.hash() != server_blockhash { - continue; // reorg + ) -> Result, Error> { + let (tip, latest_blocks) = + fetch_tip_and_latest_blocks(&self.inner, request.chain_tip.clone())?; + let mut graph_update = TxGraph::::default(); + let mut last_active_indices = BTreeMap::::new(); + + for (keychain, keychain_spks) in request.spks_by_keychain { + if let Some(last_active_index) = + self.populate_with_spks(&mut graph_update, keychain_spks, stop_gap, batch_size)? + { + last_active_indices.insert(keychain, last_active_index); } + } - // Fetch previous `TxOut`s for fee calculation if flag is enabled. 
- if fetch_prev_txouts { - self.fetch_prev_txout(&mut graph_update)?; - } + let chain_update = chain_update(tip, &latest_blocks, graph_update.all_anchors())?; - let chain_update = tip; - - let keychain_update = request_spks - .into_keys() - .filter_map(|k| { - scanned_spks - .range((k.clone(), u32::MIN)..=(k.clone(), u32::MAX)) - .rev() - .find(|(_, (_, active))| *active) - .map(|((_, i), _)| (k, *i)) - }) - .collect::>(); - - break FullScanResult { - graph_update, - chain_update, - last_active_indices: keychain_update, - }; - }; + // Fetch previous `TxOut`s for fee calculation if flag is enabled. + if fetch_prev_txouts { + self.fetch_prev_txout(&mut graph_update)?; + } - Ok(ElectrumFullScanResult(update)) + Ok(FullScanResult { + graph_update, + chain_update, + last_active_indices, + }) } /// Sync a set of scripts with the blockchain (via an Electrum client) for the data specified @@ -190,32 +137,31 @@ impl BdkElectrumClient { request: SyncRequest, batch_size: usize, fetch_prev_txouts: bool, - ) -> Result { + ) -> Result { let full_scan_req = FullScanRequest::from_chain_tip(request.chain_tip.clone()) .set_spks_for_keychain((), request.spks.enumerate().map(|(i, spk)| (i as u32, spk))); - let mut full_scan_res = self - .full_scan(full_scan_req, usize::MAX, batch_size, false)? - .with_confirmation_height_anchor(); + let mut full_scan_res = self.full_scan(full_scan_req, usize::MAX, batch_size, false)?; + let (tip, latest_blocks) = + fetch_tip_and_latest_blocks(&self.inner, request.chain_tip.clone())?; - let (tip, _) = construct_update_tip(&self.inner, request.chain_tip)?; - let cps = tip - .iter() - .take(10) - .map(|cp| (cp.height(), cp)) - .collect::>>(); + self.populate_with_txids(&mut full_scan_res.graph_update, request.txids)?; + self.populate_with_outpoints(&mut full_scan_res.graph_update, request.outpoints)?; - self.populate_with_txids(&cps, &mut full_scan_res.graph_update, request.txids)?; - self.populate_with_outpoints(&cps, &mut full_scan_res.graph_update, request.outpoints)?; + let chain_update = chain_update( + tip, + &latest_blocks, + full_scan_res.graph_update.all_anchors(), + )?; // Fetch previous `TxOut`s for fee calculation if flag is enabled. if fetch_prev_txouts { self.fetch_prev_txout(&mut full_scan_res.graph_update)?; } - Ok(ElectrumSyncResult(SyncResult { - chain_update: full_scan_res.chain_update, + Ok(SyncResult { + chain_update, graph_update: full_scan_res.graph_update, - })) + }) } /// Populate the `graph_update` with transactions/anchors associated with the given `spks`. @@ -227,70 +173,46 @@ impl BdkElectrumClient { /// Checkpoints (in `cps`) are used to create anchors. The `tx_cache` is self-explanatory. 
fn populate_with_spks( &self, - cps: &BTreeMap>, - graph_update: &mut TxGraph, - spks: &mut impl Iterator, + graph_update: &mut TxGraph, + mut spks: impl Iterator, stop_gap: usize, batch_size: usize, - ) -> Result, Error> { + ) -> Result, Error> { let mut unused_spk_count = 0_usize; - let mut scanned_spks = BTreeMap::new(); + let mut last_active_index = Option::::None; loop { let spks = (0..batch_size) .map_while(|_| spks.next()) .collect::>(); if spks.is_empty() { - return Ok(scanned_spks); + return Ok(last_active_index); } let spk_histories = self .inner .batch_script_get_history(spks.iter().map(|(_, s)| s.as_script()))?; - for ((spk_index, spk), spk_history) in spks.into_iter().zip(spk_histories) { + for ((spk_index, _spk), spk_history) in spks.into_iter().zip(spk_histories) { if spk_history.is_empty() { - scanned_spks.insert(spk_index, (spk, false)); unused_spk_count += 1; if unused_spk_count > stop_gap { - return Ok(scanned_spks); + return Ok(last_active_index); } continue; } else { - scanned_spks.insert(spk_index, (spk, true)); + last_active_index = Some(spk_index); unused_spk_count = 0; } for tx_res in spk_history { let _ = graph_update.insert_tx(self.fetch_tx(tx_res.tx_hash)?); - if let Some(anchor) = determine_tx_anchor(cps, tx_res.height, tx_res.tx_hash) { - let _ = graph_update.insert_anchor(tx_res.tx_hash, anchor); - } + self.validate_merkle_for_anchor(graph_update, tx_res.tx_hash, tx_res.height)?; } } } } - // Helper function which fetches the `TxOut`s of our relevant transactions' previous transactions, - // which we do not have by default. This data is needed to calculate the transaction fee. - fn fetch_prev_txout( - &self, - graph_update: &mut TxGraph, - ) -> Result<(), Error> { - let full_txs: Vec> = - graph_update.full_txs().map(|tx_node| tx_node.tx).collect(); - for tx in full_txs { - for vin in &tx.input { - let outpoint = vin.previous_output; - let vout = outpoint.vout; - let prev_tx = self.fetch_tx(outpoint.txid)?; - let txout = prev_tx.output[vout as usize].clone(); - let _ = graph_update.insert_txout(outpoint, txout); - } - } - Ok(()) - } - /// Populate the `graph_update` with associated transactions/anchors of `outpoints`. /// /// Transactions in which the outpoint resides, and transactions that spend from the outpoint are @@ -299,8 +221,7 @@ impl BdkElectrumClient { /// Checkpoints (in `cps`) are used to create anchors. The `tx_cache` is self-explanatory. fn populate_with_outpoints( &self, - cps: &BTreeMap>, - graph_update: &mut TxGraph, + graph_update: &mut TxGraph, outpoints: impl IntoIterator, ) -> Result<(), Error> { for outpoint in outpoints { @@ -324,9 +245,7 @@ impl BdkElectrumClient { if !has_residing && res.tx_hash == op_txid { has_residing = true; let _ = graph_update.insert_tx(Arc::clone(&op_tx)); - if let Some(anchor) = determine_tx_anchor(cps, res.height, res.tx_hash) { - let _ = graph_update.insert_anchor(res.tx_hash, anchor); - } + self.validate_merkle_for_anchor(graph_update, res.tx_hash, res.height)?; } if !has_spending && res.tx_hash != op_txid { @@ -340,9 +259,7 @@ impl BdkElectrumClient { continue; } let _ = graph_update.insert_tx(Arc::clone(&res_tx)); - if let Some(anchor) = determine_tx_anchor(cps, res.height, res.tx_hash) { - let _ = graph_update.insert_anchor(res.tx_hash, anchor); - } + self.validate_merkle_for_anchor(graph_update, res.tx_hash, res.height)?; } } } @@ -352,8 +269,7 @@ impl BdkElectrumClient { /// Populate the `graph_update` with transactions/anchors of the provided `txids`. 
fn populate_with_txids( &self, - cps: &BTreeMap>, - graph_update: &mut TxGraph, + graph_update: &mut TxGraph, txids: impl IntoIterator, ) -> Result<(), Error> { for txid in txids { @@ -371,122 +287,90 @@ impl BdkElectrumClient { // because of restrictions of the Electrum API, we have to use the `script_get_history` // call to get confirmation status of our transaction - let anchor = match self + if let Some(r) = self .inner .script_get_history(spk)? .into_iter() .find(|r| r.tx_hash == txid) { - Some(r) => determine_tx_anchor(cps, r.height, txid), - None => continue, - }; + self.validate_merkle_for_anchor(graph_update, txid, r.height)?; + } let _ = graph_update.insert_tx(tx); - if let Some(anchor) = anchor { - let _ = graph_update.insert_anchor(txid, anchor); - } } Ok(()) } -} -/// The result of [`BdkElectrumClient::full_scan`]. -/// -/// This can be transformed into a [`FullScanResult`] with either [`ConfirmationHeightAnchor`] or -/// [`ConfirmationTimeHeightAnchor`] anchor types. -pub struct ElectrumFullScanResult(FullScanResult); - -impl ElectrumFullScanResult { - /// Return [`FullScanResult`] with [`ConfirmationHeightAnchor`]. - pub fn with_confirmation_height_anchor( - self, - ) -> FullScanResult { - self.0 - } - - /// Return [`FullScanResult`] with [`ConfirmationTimeHeightAnchor`]. - /// - /// This requires additional calls to the Electrum server. - pub fn with_confirmation_time_height_anchor( - self, - client: &BdkElectrumClient, - ) -> Result, Error> { - let res = self.0; - Ok(FullScanResult { - graph_update: try_into_confirmation_time_result(res.graph_update, &client.inner)?, - chain_update: res.chain_update, - last_active_indices: res.last_active_indices, - }) - } -} - -/// The result of [`BdkElectrumClient::sync`]. -/// -/// This can be transformed into a [`SyncResult`] with either [`ConfirmationHeightAnchor`] or -/// [`ConfirmationTimeHeightAnchor`] anchor types. -pub struct ElectrumSyncResult(SyncResult); - -impl ElectrumSyncResult { - /// Return [`SyncResult`] with [`ConfirmationHeightAnchor`]. - pub fn with_confirmation_height_anchor(self) -> SyncResult { - self.0 + // Helper function which checks if a transaction is confirmed by validating the merkle proof. + // An anchor is inserted if the transaction is validated to be in a confirmed block. + fn validate_merkle_for_anchor( + &self, + graph_update: &mut TxGraph, + txid: Txid, + confirmation_height: i32, + ) -> Result<(), Error> { + if let Ok(merkle_res) = self + .inner + .transaction_get_merkle(&txid, confirmation_height as usize) + { + let header = self.inner.block_header(merkle_res.block_height)?; + let is_confirmed_tx = electrum_client::utils::validate_merkle_proof( + &txid, + &header.merkle_root, + &merkle_res, + ); + + if is_confirmed_tx { + let _ = graph_update.insert_anchor( + txid, + ConfirmationTimeHeightAnchor { + confirmation_height: merkle_res.block_height as u32, + confirmation_time: header.time as u64, + anchor_block: BlockId { + height: merkle_res.block_height as u32, + hash: header.block_hash(), + }, + }, + ); + } + } + Ok(()) } - /// Return [`SyncResult`] with [`ConfirmationTimeHeightAnchor`]. - /// - /// This requires additional calls to the Electrum server. 
- pub fn with_confirmation_time_height_anchor( - self, - client: &BdkElectrumClient, - ) -> Result, Error> { - let res = self.0; - Ok(SyncResult { - graph_update: try_into_confirmation_time_result(res.graph_update, &client.inner)?, - chain_update: res.chain_update, - }) + // Helper function which fetches the `TxOut`s of our relevant transactions' previous transactions, + // which we do not have by default. This data is needed to calculate the transaction fee. + fn fetch_prev_txout( + &self, + graph_update: &mut TxGraph, + ) -> Result<(), Error> { + let full_txs: Vec> = + graph_update.full_txs().map(|tx_node| tx_node.tx).collect(); + for tx in full_txs { + for vin in &tx.input { + let outpoint = vin.previous_output; + let vout = outpoint.vout; + let prev_tx = self.fetch_tx(outpoint.txid)?; + let txout = prev_tx.output[vout as usize].clone(); + let _ = graph_update.insert_txout(outpoint, txout); + } + } + Ok(()) } } -fn try_into_confirmation_time_result( - graph_update: TxGraph, - client: &impl ElectrumApi, -) -> Result, Error> { - let relevant_heights = graph_update - .all_anchors() - .iter() - .map(|(a, _)| a.confirmation_height) - .collect::>(); - - let height_to_time = relevant_heights - .clone() - .into_iter() - .zip( - client - .batch_block_header(relevant_heights)? - .into_iter() - .map(|bh| bh.time as u64), - ) - .collect::>(); - - Ok(graph_update.map_anchors(|a| ConfirmationTimeHeightAnchor { - anchor_block: a.anchor_block, - confirmation_height: a.confirmation_height, - confirmation_time: height_to_time[&a.confirmation_height], - })) -} - -/// Return a [`CheckPoint`] of the latest tip, that connects with `prev_tip`. -fn construct_update_tip( +/// Return a [`CheckPoint`] of the latest tip, that connects with `prev_tip`. The latest blocks are +/// fetched to construct anchor updates with the proper [`BlockHash`] in case of re-org. +fn fetch_tip_and_latest_blocks( client: &impl ElectrumApi, prev_tip: CheckPoint, -) -> Result<(CheckPoint, Option), Error> { +) -> Result<(CheckPoint, BTreeMap), Error> { let HeaderNotification { height, .. } = client.block_headers_subscribe()?; let new_tip_height = height as u32; // If electrum returns a tip height that is lower than our previous tip, then checkpoints do // not need updating. We just return the previous tip and use that as the point of agreement. if new_tip_height < prev_tip.height() { - return Ok((prev_tip.clone(), Some(prev_tip.height()))); + return Ok((prev_tip, BTreeMap::new())); } // Atomically fetch the latest `CHAIN_SUFFIX_LENGTH` count of blocks from Electrum. We use this @@ -529,6 +413,7 @@ fn construct_update_tip( let agreement_height = agreement_cp.as_ref().map(CheckPoint::height); let new_tip = new_blocks + .clone() .into_iter() // Prune `new_blocks` to only include blocks that are actually new. .filter(|(height, _)| Some(*height) > agreement_height) @@ -541,51 +426,28 @@ fn construct_update_tip( }) .expect("must have at least one checkpoint"); - Ok((new_tip, agreement_height)) + Ok((new_tip, new_blocks)) } -/// A [tx status] comprises of a concatenation of `tx_hash:height:`s. We transform a single one of -/// these concatenations into a [`ConfirmationHeightAnchor`] if possible. -/// -/// We use the lowest possible checkpoint as the anchor block (from `cps`). If an anchor block -/// cannot be found, or the transaction is unconfirmed, [`None`] is returned. 
-/// -/// [tx status](https://electrumx-spesmilo.readthedocs.io/en/latest/protocol-basics.html#status) -fn determine_tx_anchor( - cps: &BTreeMap>, - raw_height: i32, - txid: Txid, -) -> Option { - // The electrum API has a weird quirk where an unconfirmed transaction is presented with a - // height of 0. To avoid invalid representation in our data structures, we manually set - // transactions residing in the genesis block to have height 0, then interpret a height of 0 as - // unconfirmed for all other transactions. - if txid - == Txid::from_str("4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b") - .expect("must deserialize genesis coinbase txid") - { - let anchor_block = cps.values().next()?.block_id(); - return Some(ConfirmationHeightAnchor { - anchor_block, - confirmation_height: 0, - }); - } - match raw_height { - h if h <= 0 => { - debug_assert!(h == 0 || h == -1, "unexpected height ({}) from electrum", h); - None - } - h => { - let h = h as u32; - let anchor_block = cps.range(h..).next().map(|(_, cp)| cp.block_id())?; - if h > anchor_block.height { - None - } else { - Some(ConfirmationHeightAnchor { - anchor_block, - confirmation_height: h, - }) - } +// Add a corresponding checkpoint per anchor height if it does not yet exist. Checkpoints should not +// surpass `latest_blocks`. +fn chain_update( + mut tip: CheckPoint, + latest_blocks: &BTreeMap, + anchors: &BTreeSet<(A, Txid)>, +) -> Result, Error> { + for anchor in anchors { + let height = anchor.0.anchor_block().height; + + // Checkpoint uses the `BlockHash` from `latest_blocks` so that the hash will be consistent + // in case of a re-org. + if tip.get(height).is_none() && height <= tip.height() { + let hash = match latest_blocks.get(&height) { + Some(&hash) => hash, + None => anchor.0.anchor_block().hash, + }; + tip = tip.insert(BlockId { hash, height }); } } + Ok(tip) } diff --git a/crates/electrum/tests/test_electrum.rs b/crates/electrum/tests/test_electrum.rs index c105ecca2..11582353c 100644 --- a/crates/electrum/tests/test_electrum.rs +++ b/crates/electrum/tests/test_electrum.rs @@ -62,14 +62,11 @@ fn scan_detects_confirmed_tx() -> anyhow::Result<()> { // Sync up to tip. env.wait_until_electrum_sees_block()?; - let update = client - .sync( - SyncRequest::from_chain_tip(recv_chain.tip()) - .chain_spks(core::iter::once(spk_to_track)), - 5, - true, - )? - .with_confirmation_time_height_anchor(&client)?; + let update = client.sync( + SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks(core::iter::once(spk_to_track)), + 5, + true, + )?; let _ = recv_chain .apply_update(update.chain_update) @@ -155,13 +152,11 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> { // Sync up to tip. env.wait_until_electrum_sees_block()?; - let update = client - .sync( - SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]), - 5, - false, - )? - .with_confirmation_time_height_anchor(&client)?; + let update = client.sync( + SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]), + 5, + false, + )?; let _ = recv_chain .apply_update(update.chain_update) @@ -186,13 +181,11 @@ fn tx_can_become_unconfirmed_after_reorg() -> anyhow::Result<()> { env.reorg_empty_blocks(depth)?; env.wait_until_electrum_sees_block()?; - let update = client - .sync( - SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]), - 5, - false, - )? 
- .with_confirmation_time_height_anchor(&client)?; + let update = client.sync( + SyncRequest::from_chain_tip(recv_chain.tip()).chain_spks([spk_to_track.clone()]), + 5, + false, + )?; let _ = recv_chain .apply_update(update.chain_update) diff --git a/example-crates/example_electrum/src/main.rs b/example-crates/example_electrum/src/main.rs index 8467d2699..eee406399 100644 --- a/example-crates/example_electrum/src/main.rs +++ b/example-crates/example_electrum/src/main.rs @@ -193,8 +193,7 @@ fn main() -> anyhow::Result<()> { let res = client .full_scan::<_>(request, stop_gap, scan_options.batch_size, false) - .context("scanning the blockchain")? - .with_confirmation_height_anchor(); + .context("scanning the blockchain")?; ( res.chain_update, res.graph_update, @@ -313,8 +312,7 @@ fn main() -> anyhow::Result<()> { let res = client .sync(request, scan_options.batch_size, false) - .context("scanning the blockchain")? - .with_confirmation_height_anchor(); + .context("scanning the blockchain")?; // drop lock on graph and chain drop((graph, chain)); @@ -341,7 +339,12 @@ fn main() -> anyhow::Result<()> { let (_, keychain_changeset) = graph.index.reveal_to_target_multi(&keychain_update); indexed_tx_graph_changeset.append(keychain_changeset.into()); } - indexed_tx_graph_changeset.append(graph.apply_update(graph_update)); + indexed_tx_graph_changeset.append(graph.apply_update(graph_update.map_anchors(|a| { + ConfirmationHeightAnchor { + confirmation_height: a.confirmation_height, + anchor_block: a.anchor_block, + } + }))); (chain_changeset, indexed_tx_graph_changeset) }; diff --git a/example-crates/wallet_electrum/src/main.rs b/example-crates/wallet_electrum/src/main.rs index e6c01c20b..75d13f1a8 100644 --- a/example-crates/wallet_electrum/src/main.rs +++ b/example-crates/wallet_electrum/src/main.rs @@ -57,9 +57,7 @@ fn main() -> Result<(), anyhow::Error> { }) .inspect_spks_for_all_keychains(|_, _, _| std::io::stdout().flush().expect("must flush")); - let mut update = client - .full_scan(request, STOP_GAP, BATCH_SIZE, false)? - .with_confirmation_time_height_anchor(&client)?; + let mut update = client.full_scan(request, STOP_GAP, BATCH_SIZE, false)?; let now = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs(); let _ = update.graph_update.update_last_seen_unconfirmed(now);
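// Illustrative caller-side sketch (not taken from this patch): with this change, `full_scan` and
// `sync` return `FullScanResult` / `SyncResult` with `ConfirmationTimeHeightAnchor` anchors
// directly, so the old `with_confirmation_height_anchor()` / `with_confirmation_time_height_anchor()`
// conversion step disappears. A caller that still wants `ConfirmationHeightAnchor` data can map the
// anchors itself, as `example_electrum` does above. `chain` (a `LocalChain`), `graph` (an
// `IndexedTxGraph`), `request`, `STOP_GAP` and `BATCH_SIZE` are assumed to exist in the caller's
// own code and are placeholders here.
use bdk_chain::ConfirmationHeightAnchor;

// Scan and apply the chain update as-is; the checkpoint tip now carries merkle-validated anchors.
let res = client.full_scan(request, STOP_GAP, BATCH_SIZE, /* fetch_prev_txouts: */ true)?;
let _chain_changeset = chain.apply_update(res.chain_update)?;

// Optionally downgrade the anchors before applying the graph update, mirroring `example_electrum`.
let _graph_changeset = graph.apply_update(res.graph_update.map_anchors(|a| ConfirmationHeightAnchor {
    confirmation_height: a.confirmation_height,
    anchor_block: a.anchor_block,
}));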