From d0de1447a3d8c52e27a933aac6b21891beff68ff Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 19 Aug 2024 15:56:16 +0200 Subject: [PATCH] Update the world ... we update LDK, lightning-liquidity, BDK, rust-bitcoin, rust-esplora-client, rust-electrum-client, etc. --- Cargo.toml | 38 ++-- bindings/ldk_node.udl | 76 +++++-- src/balance.rs | 100 +++++++-- src/builder.rs | 118 ++++++----- src/config.rs | 4 +- src/error.rs | 35 +++- src/event.rs | 90 ++++---- src/fee_estimator.rs | 33 ++- src/graph.rs | 4 +- src/io/mod.rs | 42 ++++ src/io/sqlite_store/mod.rs | 30 ++- src/io/test_utils.rs | 3 +- src/io/utils.rs | 173 ++++++++++++++- src/io/vss_store.rs | 13 +- src/lib.rs | 99 +++++---- src/logger.rs | 2 +- src/message_handler.rs | 20 ++ src/payment/bolt11.rs | 19 +- src/payment/bolt12.rs | 46 ++-- src/payment/onchain.rs | 6 +- src/payment/store.rs | 8 +- src/payment/unified_qr.rs | 3 +- src/sweep.rs | 6 +- src/tx_broadcaster.rs | 11 +- src/types.rs | 25 +-- src/uniffi_types.rs | 3 +- src/wallet/mod.rs | 359 +++++++++++++++++--------------- src/wallet/persist.rs | 187 +++++++++++++++++ src/wallet/ser.rs | 346 ++++++++++++++++++++++++++++++ tests/common/mod.rs | 12 +- tests/integration_tests_rust.rs | 9 +- 31 files changed, 1439 insertions(+), 481 deletions(-) create mode 100644 src/wallet/persist.rs create mode 100644 src/wallet/ser.rs diff --git a/Cargo.toml b/Cargo.toml index 39f3b947d..89443e031 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,14 +28,14 @@ panic = 'abort' # Abort on panic default = [] [dependencies] -lightning = { version = "0.0.123", features = ["std"] } -lightning-invoice = { version = "0.31.0" } -lightning-net-tokio = { version = "0.0.123" } -lightning-persister = { version = "0.0.123" } -lightning-background-processor = { version = "0.0.123", features = ["futures"] } -lightning-rapid-gossip-sync = { version = "0.0.123" } -lightning-transaction-sync = { version = "0.0.123", features = ["esplora-async-https", "time"] } -lightning-liquidity = { version = "=0.1.0-alpha.4", features = ["std"] } +lightning = { version = "0.0.124", features = ["std"] } +lightning-invoice = { version = "0.32.0" } +lightning-net-tokio = { version = "0.0.124" } +lightning-persister = { version = "0.0.124" } +lightning-background-processor = { version = "0.0.124", features = ["futures"] } +lightning-rapid-gossip-sync = { version = "0.0.124" } +lightning-transaction-sync = { version = "0.0.124", features = ["esplora-async-https", "time"] } +lightning-liquidity = { version = "0.1.0-alpha.5", features = ["std"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std"] } #lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main" } @@ -55,18 +55,20 @@ lightning-liquidity = { version = "=0.1.0-alpha.4", features = ["std"] } #lightning-transaction-sync = { path = "../rust-lightning/lightning-transaction-sync", features = ["esplora-async"] } #lightning-liquidity = { path = "../lightning-liquidity", features = ["std"] } -bdk = { version = "0.29.0", default-features = false, features = ["std", "async-interface", "use-esplora-async", "sqlite-bundled", "keys-bip39"]} +bdk_chain = { version = "=0.19.0", default-features = false, features = ["std"] } +bdk_esplora = { version = "=0.18.0", default-features = false, features = ["async-https-rustls"]} +bdk_wallet = { version = "=1.0.0-beta.4", default-features = false, features = ["std", "keys-bip39"]} reqwest = { version = "0.11", default-features = false, features = ["json", 
"rustls-tls"] } rusqlite = { version = "0.28.0", features = ["bundled"] } -bitcoin = "0.30.2" +bitcoin = "0.32.2" bip39 = "2.0.0" -bip21 = { version = "0.3.1", features = ["std"], default-features = false } +bip21 = { version = "0.5", features = ["std"], default-features = false } rand = "0.8.5" chrono = { version = "0.4", default-features = false, features = ["clock"] } -tokio = { version = "1.37", default-features = false, features = [ "rt-multi-thread", "time", "sync" ] } -esplora-client = { version = "0.6", default-features = false } +tokio = { version = "1.37", default-features = false, features = [ "rt-multi-thread", "time", "sync", "macros" ] } +esplora-client = { version = "0.9", default-features = false } libc = "0.2" uniffi = { version = "0.26.0", features = ["build"], optional = true } @@ -78,18 +80,18 @@ prost = { version = "0.11.6", default-features = false} winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] -lightning = { version = "0.0.123", features = ["std", "_test_utils"] } +lightning = { version = "0.0.124", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -electrum-client = { version = "0.15.1", default-features = true } -bitcoincore-rpc = { version = "0.17.0", default-features = false } +electrum-client = { version = "0.21.0", default-features = true } +bitcoincore-rpc = { version = "0.19.0", default-features = false } proptest = "1.0.0" regex = "1.5.6" [target.'cfg(not(no_download))'.dev-dependencies] -electrsd = { version = "0.26.0", features = ["legacy", "esplora_a33e97e1", "bitcoind_25_0"] } +electrsd = { version = "0.29.0", features = ["legacy", "esplora_a33e97e1", "bitcoind_25_0"] } [target.'cfg(no_download)'.dev-dependencies] -electrsd = { version = "0.26.0", features = ["legacy"] } +electrsd = { version = "0.29.0", features = ["legacy"] } [target.'cfg(cln_test)'.dev-dependencies] clightningrpc = { version = "0.3.0-beta.8", default-features = false } diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 6663604a2..96490f2b7 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -79,7 +79,7 @@ interface Node { [Throws=NodeError] void close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id); [Throws=NodeError] - void force_close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id); + void force_close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id, string? reason); [Throws=NodeError] void update_channel_config([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id, ChannelConfig channel_config); [Throws=NodeError] @@ -92,7 +92,6 @@ interface Node { sequence list_peers(); sequence list_channels(); NetworkGraph network_graph(); - [Throws=NodeError] string sign_message([ByRef]sequence msg); boolean verify_signature([ByRef]sequence msg, [ByRef]string sig, [ByRef]PublicKey pkey); }; @@ -130,9 +129,9 @@ interface Bolt12Payment { [Throws=NodeError] PaymentId send_using_amount([ByRef]Offer offer, u64 amount_msat, u64? quantity, string? payer_note); [Throws=NodeError] - Offer receive(u64 amount_msat, [ByRef]string description, u64? quantity); + Offer receive(u64 amount_msat, [ByRef]string description, u32? expiry_secs, u64? quantity); [Throws=NodeError] - Offer receive_variable_amount([ByRef]string description); + Offer receive_variable_amount([ByRef]string description, u32? 
expiry_secs); [Throws=NodeError] Bolt12Invoice request_refund_payment([ByRef]Refund refund); [Throws=NodeError] @@ -183,7 +182,6 @@ enum NodeError { "WalletOperationFailed", "WalletOperationTimeout", "OnchainTxSigningFailed", - "MessageSigningFailed", "TxSyncFailed", "TxSyncTimeout", "GossipUpdateFailed", @@ -251,7 +249,7 @@ enum BuildError { [Enum] interface Event { PaymentSuccessful(PaymentId? payment_id, PaymentHash payment_hash, u64? fee_paid_msat); - PaymentFailed(PaymentId? payment_id, PaymentHash payment_hash, PaymentFailureReason? reason); + PaymentFailed(PaymentId? payment_id, PaymentHash? payment_hash, PaymentFailureReason? reason); PaymentReceived(PaymentId? payment_id, PaymentHash payment_hash, u64 amount_msat); PaymentClaimable(PaymentId payment_id, PaymentHash payment_hash, u64 claimable_amount_msat, u32? claim_deadline); ChannelPending(ChannelId channel_id, UserChannelId user_channel_id, ChannelId former_temporary_channel_id, PublicKey counterparty_node_id, OutPoint funding_txo); @@ -266,12 +264,15 @@ enum PaymentFailureReason { "PaymentExpired", "RouteNotFound", "UnexpectedError", + "UnknownRequiredFeatures", + "InvoiceRequestExpired", + "InvoiceRequestRejected", }; [Enum] interface ClosureReason { CounterpartyForceClosed(UntrustedString peer_msg); - HolderForceClosed(); + HolderForceClosed(boolean? broadcasted_latest_txn); LegacyCooperativeClosure(); CounterpartyInitiatedCooperativeClosure(); LocallyInitiatedCooperativeClosure(); @@ -283,6 +284,7 @@ interface ClosureReason { CounterpartyCoopClosedUnfundedChannel(); FundingBatchClosure(); HTLCsTimedOut(); + PeerFeerateTooLow(u32 peer_feerate_sat_per_kw, u32 required_feerate_sat_per_kw); }; [Enum] @@ -368,7 +370,7 @@ dictionary ChannelDetails { boolean is_outbound; boolean is_channel_ready; boolean is_usable; - boolean is_public; + boolean is_announced; u16? cltv_expiry_delta; u64 counterparty_unspendable_punishment_reserve; u64? 
counterparty_outbound_htlc_minimum_msat; @@ -393,12 +395,58 @@ dictionary PeerDetails { [Enum] interface LightningBalance { - ClaimableOnChannelClose ( ChannelId channel_id, PublicKey counterparty_node_id, u64 amount_satoshis ); - ClaimableAwaitingConfirmations ( ChannelId channel_id, PublicKey counterparty_node_id, u64 amount_satoshis, u32 confirmation_height ); - ContentiousClaimable ( ChannelId channel_id, PublicKey counterparty_node_id, u64 amount_satoshis, u32 timeout_height, PaymentHash payment_hash, PaymentPreimage payment_preimage ); - MaybeTimeoutClaimableHTLC ( ChannelId channel_id, PublicKey counterparty_node_id, u64 amount_satoshis, u32 claimable_height, PaymentHash payment_hash); - MaybePreimageClaimableHTLC ( ChannelId channel_id, PublicKey counterparty_node_id, u64 amount_satoshis, u32 expiry_height, PaymentHash payment_hash); - CounterpartyRevokedOutputClaimable ( ChannelId channel_id, PublicKey counterparty_node_id, u64 amount_satoshis ); + ClaimableOnChannelClose ( + ChannelId channel_id, + PublicKey counterparty_node_id, + u64 amount_satoshis, + u64 transaction_fee_satoshis, + u64 outbound_payment_htlc_rounded_msat, + u64 outbound_forwarded_htlc_rounded_msat, + u64 inbound_claiming_htlc_rounded_msat, + u64 inbound_htlc_rounded_msat + ); + ClaimableAwaitingConfirmations ( + ChannelId channel_id, + PublicKey counterparty_node_id, + u64 amount_satoshis, + u32 confirmation_height, + BalanceSource source + ); + ContentiousClaimable ( + ChannelId channel_id, + PublicKey counterparty_node_id, + u64 amount_satoshis, + u32 timeout_height, + PaymentHash payment_hash, + PaymentPreimage payment_preimage + ); + MaybeTimeoutClaimableHTLC ( + ChannelId channel_id, + PublicKey counterparty_node_id, + u64 amount_satoshis, + u32 claimable_height, + PaymentHash payment_hash, + boolean outbound_payment + ); + MaybePreimageClaimableHTLC ( + ChannelId channel_id, + PublicKey counterparty_node_id, + u64 amount_satoshis, + u32 expiry_height, + PaymentHash payment_hash + ); + CounterpartyRevokedOutputClaimable ( + ChannelId channel_id, + PublicKey counterparty_node_id, + u64 amount_satoshis + ); +}; + +enum BalanceSource { + "HolderForceClosed", + "CounterpartyForceClosed", + "CoopClose", + "Htlc", }; [Enum] diff --git a/src/balance.rs b/src/balance.rs index 1f061cded..c43386d80 100644 --- a/src/balance.rs +++ b/src/balance.rs @@ -5,10 +5,12 @@ // http://opensource.org/licenses/MIT>, at your option. You may not use this file except in // accordance with one or both of these licenses. -use crate::sweep::value_satoshis_from_descriptor; +use crate::sweep::value_from_descriptor; use lightning::chain::channelmonitor::Balance as LdkBalance; -use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage}; +use lightning::chain::channelmonitor::BalanceSource; +use lightning::ln::types::ChannelId; +use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning::util::sweep::{OutputSpendStatus, TrackedSpendableOutput}; use bitcoin::secp256k1::PublicKey; @@ -80,6 +82,49 @@ pub enum LightningBalance { /// The amount available to claim, in satoshis, excluding the on-chain fees which will be /// required to do so. amount_satoshis: u64, + /// The transaction fee we pay for the closing commitment transaction. This amount is not + /// included in the `amount_satoshis` value. + /// + /// Note that if this channel is inbound (and thus our counterparty pays the commitment + /// transaction fee) this value will be zero. 
For channels created prior to LDK Node 0.4 + /// the channel is always treated as outbound (and thus this value is never zero). + transaction_fee_satoshis: u64, + /// The amount of millisatoshis which has been burned to fees from HTLCs which are outbound + /// from us and are related to a payment which was sent by us. This is the sum of the + /// millisatoshis part of all HTLCs which are otherwise represented by + /// [`LightningBalance::MaybeTimeoutClaimableHTLC`] with their + /// [`LightningBalance::MaybeTimeoutClaimableHTLC::outbound_payment`] flag set, as well as + /// any dust HTLCs which would otherwise be represented the same. + /// + /// This amount (rounded up to a whole satoshi value) will not be included in `amount_satoshis`. + outbound_payment_htlc_rounded_msat: u64, + /// The amount of millisatoshis which has been burned to fees from HTLCs which are outbound + /// from us and are related to a forwarded HTLC. This is the sum of the millisatoshis part + /// of all HTLCs which are otherwise represented by + /// [`LightningBalance::MaybeTimeoutClaimableHTLC`] with their + /// [`LightningBalance::MaybeTimeoutClaimableHTLC::outbound_payment`] flag *not* set, as + /// well as any dust HTLCs which would otherwise be represented the same. + /// + /// This amount (rounded up to a whole satoshi value) will not be included in `amount_satoshis`. + outbound_forwarded_htlc_rounded_msat: u64, + /// The amount of millisatoshis which has been burned to fees from HTLCs which are inbound + /// to us and for which we know the preimage. This is the sum of the millisatoshis part of + /// all HTLCs which would be represented by [`LightningBalance::ContentiousClaimable`] on + /// channel close, but whose current value is included in `amount_satoshis`, as well as any + /// dust HTLCs which would otherwise be represented the same. + /// + /// This amount (rounded up to a whole satoshi value) will not be included in the counterparty's + /// `amount_satoshis`. + inbound_claiming_htlc_rounded_msat: u64, + /// The amount of millisatoshis which has been burned to fees from HTLCs which are inbound + /// to us and for which we do not know the preimage. This is the sum of the millisatoshis + /// part of all HTLCs which would be represented by + /// [`LightningBalance::MaybePreimageClaimableHTLC`] on channel close, as well as any dust + /// HTLCs which would otherwise be represented the same. + /// + /// This amount (rounded up to a whole satoshi value) will not be included in the + /// counterparty's `amount_satoshis`. + inbound_htlc_rounded_msat: u64, }, /// The channel has been closed, and the given balance is ours but awaiting confirmations until /// we consider it spendable. @@ -96,6 +141,8 @@ pub enum LightningBalance { /// /// [`Event::SpendableOutputs`]: lightning::events::Event::SpendableOutputs confirmation_height: u32, + /// Whether this balance is a result of cooperative close, a force-close, or an HTLC. + source: BalanceSource, }, /// The channel has been closed, and the given balance should be ours but awaiting spending /// transaction confirmation. If the spending transaction does not confirm in time, it is @@ -136,6 +183,8 @@ pub enum LightningBalance { claimable_height: u32, /// The payment hash whose preimage our counterparty needs to claim this HTLC. payment_hash: PaymentHash, + /// Indicates whether this HTLC represents a payment which was sent outbound from us. 
+ outbound_payment: bool, }, /// HTLCs which we received from our counterparty which are claimable with a preimage which we /// do not currently have. This will only be claimable if we receive the preimage from the node @@ -174,16 +223,33 @@ impl LightningBalance { channel_id: ChannelId, counterparty_node_id: PublicKey, balance: LdkBalance, ) -> Self { match balance { - LdkBalance::ClaimableOnChannelClose { amount_satoshis } => { - Self::ClaimableOnChannelClose { channel_id, counterparty_node_id, amount_satoshis } + LdkBalance::ClaimableOnChannelClose { + amount_satoshis, + transaction_fee_satoshis, + outbound_payment_htlc_rounded_msat, + outbound_forwarded_htlc_rounded_msat, + inbound_claiming_htlc_rounded_msat, + inbound_htlc_rounded_msat, + } => Self::ClaimableOnChannelClose { + channel_id, + counterparty_node_id, + amount_satoshis, + transaction_fee_satoshis, + outbound_payment_htlc_rounded_msat, + outbound_forwarded_htlc_rounded_msat, + inbound_claiming_htlc_rounded_msat, + inbound_htlc_rounded_msat, }, - LdkBalance::ClaimableAwaitingConfirmations { amount_satoshis, confirmation_height } => { - Self::ClaimableAwaitingConfirmations { - channel_id, - counterparty_node_id, - amount_satoshis, - confirmation_height, - } + LdkBalance::ClaimableAwaitingConfirmations { + amount_satoshis, + confirmation_height, + source, + } => Self::ClaimableAwaitingConfirmations { + channel_id, + counterparty_node_id, + amount_satoshis, + confirmation_height, + source, }, LdkBalance::ContentiousClaimable { amount_satoshis, @@ -202,12 +268,14 @@ impl LightningBalance { amount_satoshis, claimable_height, payment_hash, + outbound_payment, } => Self::MaybeTimeoutClaimableHTLC { channel_id, counterparty_node_id, amount_satoshis, claimable_height, payment_hash, + outbound_payment, }, LdkBalance::MaybePreimageClaimableHTLC { amount_satoshis, @@ -278,7 +346,7 @@ impl PendingSweepBalance { match output_info.status { OutputSpendStatus::PendingInitialBroadcast { .. } => { let channel_id = output_info.channel_id; - let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + let amount_satoshis = value_from_descriptor(&output_info.descriptor).to_sat(); Self::PendingBroadcast { channel_id, amount_satoshis } }, OutputSpendStatus::PendingFirstConfirmation { @@ -287,8 +355,8 @@ impl PendingSweepBalance { .. } => { let channel_id = output_info.channel_id; - let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); - let latest_spending_txid = latest_spending_tx.txid(); + let amount_satoshis = value_from_descriptor(&output_info.descriptor).to_sat(); + let latest_spending_txid = latest_spending_tx.compute_txid(); Self::BroadcastAwaitingConfirmation { channel_id, latest_broadcast_height, @@ -303,8 +371,8 @@ impl PendingSweepBalance { .. } => { let channel_id = output_info.channel_id; - let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); - let latest_spending_txid = latest_spending_tx.txid(); + let amount_satoshis = value_from_descriptor(&output_info.descriptor).to_sat(); + let latest_spending_txid = latest_spending_tx.compute_txid(); Self::AwaitingThresholdConfirmations { channel_id, latest_spending_txid, diff --git a/src/builder.rs b/src/builder.rs index 9faf97714..f6b201c54 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -6,8 +6,8 @@ // accordance with one or both of these licenses. 
use crate::config::{ - default_user_config, Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, - DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS, DEFAULT_ESPLORA_SERVER_URL, WALLET_KEYS_SEED_LEN, + default_user_config, Config, DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS, DEFAULT_ESPLORA_SERVER_URL, + WALLET_KEYS_SEED_LEN, }; use crate::connection::ConnectionManager; use crate::event::EventQueue; @@ -15,6 +15,8 @@ use crate::fee_estimator::OnchainFeeEstimator; use crate::gossip::GossipSource; use crate::io; use crate::io::sqlite_store::SqliteStore; +#[cfg(any(vss, vss_test))] +use crate::io::vss_store::VssStore; use crate::liquidity::LiquiditySource; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::message_handler::NodeCustomMessageHandler; @@ -25,10 +27,12 @@ use crate::types::{ ChainMonitor, ChannelManager, DynStore, GossipSync, Graph, KeysManager, MessageRouter, OnionMessenger, PeerManager, }; +use crate::wallet::persist::KVStoreWalletPersister; use crate::wallet::Wallet; use crate::{LogLevel, Node}; use lightning::chain::{chainmonitor, BestBlock, Watch}; +use lightning::io::Cursor; use lightning::ln::channelmanager::{self, ChainParameters, ChannelManagerReadArgs}; use lightning::ln::msgs::{RoutingMessageHandler, SocketAddress}; use lightning::ln::peer_handler::{IgnoringMessageHandler, MessageHandler}; @@ -53,12 +57,9 @@ use lightning_transaction_sync::EsploraSyncClient; use lightning_liquidity::lsps2::client::LSPS2ClientConfig; use lightning_liquidity::{LiquidityClientConfig, LiquidityManager}; -#[cfg(any(vss, vss_test))] -use crate::io::vss_store::VssStore; -use bdk::bitcoin::secp256k1::Secp256k1; -use bdk::blockchain::esplora::EsploraBlockchain; -use bdk::database::SqliteDatabase; -use bdk::template::Bip84; +use bdk_wallet::template::Bip84; +use bdk_wallet::KeychainKind; +use bdk_wallet::Wallet as BdkWallet; use bip39::Mnemonic; @@ -71,7 +72,6 @@ use std::convert::TryInto; use std::default::Default; use std::fmt; use std::fs; -use std::io::Cursor; use std::path::PathBuf; use std::sync::atomic::AtomicBool; use std::sync::{Arc, Mutex, RwLock}; @@ -357,6 +357,8 @@ impl NodeBuilder { /// previously configured. 
#[cfg(any(vss, vss_test))] pub fn build_with_vss_store(&self, url: String, store_id: String) -> Result { + use bitcoin::key::Secp256k1; + let logger = setup_logger(&self.config)?; let seed_bytes = seed_bytes_from_config( @@ -366,14 +368,13 @@ impl NodeBuilder { )?; let config = Arc::new(self.config.clone()); - let xprv = bitcoin::bip32::ExtendedPrivKey::new_master(config.network.into(), &seed_bytes) - .map_err(|e| { - log_error!(logger, "Failed to derive master secret: {}", e); - BuildError::InvalidSeedBytes - })?; + let xprv = bitcoin::bip32::Xpriv::new_master(config.network, &seed_bytes).map_err(|e| { + log_error!(logger, "Failed to derive master secret: {}", e); + BuildError::InvalidSeedBytes + })?; let vss_xprv = xprv - .ckd_priv(&Secp256k1::new(), ChildNumber::Hardened { index: 877 }) + .derive_priv(&Secp256k1::new(), &[ChildNumber::Hardened { index: 877 }]) .map_err(|e| { log_error!(logger, "Failed to derive VSS secret: {}", e); BuildError::KVStoreSetupFailed @@ -555,38 +556,37 @@ fn build_with_store_internal( logger: Arc, kv_store: Arc, ) -> Result { // Initialize the on-chain wallet and chain access - let xprv = bitcoin::bip32::ExtendedPrivKey::new_master(config.network.into(), &seed_bytes) - .map_err(|e| { - log_error!(logger, "Failed to derive master secret: {}", e); - BuildError::InvalidSeedBytes - })?; - - let wallet_name = bdk::wallet::wallet_name_from_descriptor( - Bip84(xprv, bdk::KeychainKind::External), - Some(Bip84(xprv, bdk::KeychainKind::Internal)), - config.network.into(), - &Secp256k1::new(), - ) - .map_err(|e| { - log_error!(logger, "Failed to derive wallet name: {}", e); - BuildError::WalletSetupFailed + let xprv = bitcoin::bip32::Xpriv::new_master(config.network, &seed_bytes).map_err(|e| { + log_error!(logger, "Failed to derive master secret: {}", e); + BuildError::InvalidSeedBytes })?; - let database_path = format!("{}/bdk_wallet_{}.sqlite", config.storage_dir_path, wallet_name); - let database = SqliteDatabase::new(database_path); - - let bdk_wallet = bdk::Wallet::new( - Bip84(xprv, bdk::KeychainKind::External), - Some(Bip84(xprv, bdk::KeychainKind::Internal)), - config.network.into(), - database, - ) - .map_err(|e| { - log_error!(logger, "Failed to set up wallet: {}", e); - BuildError::WalletSetupFailed - })?; + let descriptor = Bip84(xprv, KeychainKind::External); + let change_descriptor = Bip84(xprv, KeychainKind::Internal); + let mut wallet_persister = + KVStoreWalletPersister::new(Arc::clone(&kv_store), Arc::clone(&logger)); + let wallet_opt = BdkWallet::load() + .descriptor(KeychainKind::External, Some(descriptor.clone())) + .descriptor(KeychainKind::Internal, Some(change_descriptor.clone())) + .extract_keys() + .check_network(config.network) + .load_wallet(&mut wallet_persister) + .map_err(|e| { + log_error!(logger, "Failed to set up wallet: {}", e); + BuildError::WalletSetupFailed + })?; + let bdk_wallet = match wallet_opt { + Some(wallet) => wallet, + None => BdkWallet::create(descriptor, change_descriptor) + .network(config.network) + .create_wallet(&mut wallet_persister) + .map_err(|e| { + log_error!(logger, "Failed to set up wallet: {}", e); + BuildError::WalletSetupFailed + })?, + }; - let (blockchain, tx_sync, tx_broadcaster, fee_estimator) = match chain_data_source_config { + let (esplora_client, tx_sync, tx_broadcaster, fee_estimator) = match chain_data_source_config { Some(ChainDataSourceConfig::Esplora(server_url)) => { let mut client_builder = esplora_client::Builder::new(&server_url.clone()); client_builder = 
client_builder.timeout(DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS); @@ -595,8 +595,6 @@ fn build_with_store_internal( esplora_client.clone(), Arc::clone(&logger), )); - let blockchain = EsploraBlockchain::from_client(esplora_client, BDK_CLIENT_STOP_GAP) - .with_concurrency(BDK_CLIENT_CONCURRENCY); let tx_broadcaster = Arc::new(TransactionBroadcaster::new( tx_sync.client().clone(), Arc::clone(&logger), @@ -606,15 +604,18 @@ fn build_with_store_internal( Arc::clone(&config), Arc::clone(&logger), )); - (blockchain, tx_sync, tx_broadcaster, fee_estimator) + (esplora_client, tx_sync, tx_broadcaster, fee_estimator) }, None => { // Default to Esplora client. let server_url = DEFAULT_ESPLORA_SERVER_URL.to_string(); - let tx_sync = Arc::new(EsploraSyncClient::new(server_url, Arc::clone(&logger))); - let blockchain = - EsploraBlockchain::from_client(tx_sync.client().clone(), BDK_CLIENT_STOP_GAP) - .with_concurrency(BDK_CLIENT_CONCURRENCY); + let mut client_builder = esplora_client::Builder::new(&server_url); + client_builder = client_builder.timeout(DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS); + let esplora_client = client_builder.build_async().unwrap(); + let tx_sync = Arc::new(EsploraSyncClient::from_client( + esplora_client.clone(), + Arc::clone(&logger), + )); let tx_broadcaster = Arc::new(TransactionBroadcaster::new( tx_sync.client().clone(), Arc::clone(&logger), @@ -624,14 +625,15 @@ fn build_with_store_internal( Arc::clone(&config), Arc::clone(&logger), )); - (blockchain, tx_sync, tx_broadcaster, fee_estimator) + (esplora_client, tx_sync, tx_broadcaster, fee_estimator) }, }; let runtime = Arc::new(RwLock::new(None)); let wallet = Arc::new(Wallet::new( - blockchain, bdk_wallet, + wallet_persister, + esplora_client, Arc::clone(&tx_broadcaster), Arc::clone(&fee_estimator), Arc::clone(&logger), @@ -711,7 +713,7 @@ fn build_with_store_internal( ) { Ok(monitors) => monitors, Err(e) => { - if e.kind() == std::io::ErrorKind::NotFound { + if e.kind() == lightning::io::ErrorKind::NotFound { Vec::new() } else { log_error!(logger, "Failed to read channel monitors: {}", e.to_string()); @@ -764,7 +766,7 @@ fn build_with_store_internal( } else { // We're starting a fresh node. 
let genesis_block_hash = - bitcoin::blockdata::constants::genesis_block(config.network.into()).block_hash(); + bitcoin::blockdata::constants::genesis_block(config.network).block_hash(); let chain_params = ChainParameters { network: config.network.into(), @@ -808,6 +810,7 @@ fn build_with_store_internal( Arc::new(message_router), Arc::clone(&channel_manager), IgnoringMessageHandler {}, + IgnoringMessageHandler {}, )); let ephemeral_bytes: [u8; 32] = keys_manager.get_secure_random_bytes(); @@ -883,14 +886,14 @@ fn build_with_store_internal( chan_handler: Arc::clone(&channel_manager), route_handler: Arc::clone(&p2p_gossip_sync) as Arc, - onion_message_handler: onion_messenger, + onion_message_handler: Arc::clone(&onion_messenger), custom_message_handler, }, GossipSync::Rapid(_) => MessageHandler { chan_handler: Arc::clone(&channel_manager), route_handler: Arc::new(IgnoringMessageHandler {}) as Arc, - onion_message_handler: onion_messenger, + onion_message_handler: Arc::clone(&onion_messenger), custom_message_handler, }, GossipSync::None => { @@ -1018,6 +1021,7 @@ fn build_with_store_internal( chain_monitor, output_sweeper, peer_manager, + onion_messenger, connection_manager, keys_manager, network_graph, diff --git a/src/config.rs b/src/config.rs index 2ccfc2db9..b69e73ecf 100644 --- a/src/config.rs +++ b/src/config.rs @@ -32,7 +32,7 @@ const DEFAULT_ANCHOR_PER_CHANNEL_RESERVE_SATS: u64 = 25_000; pub(crate) const BDK_CLIENT_STOP_GAP: usize = 20; // The number of concurrent requests made against the API provider. -pub(crate) const BDK_CLIENT_CONCURRENCY: u8 = 4; +pub(crate) const BDK_CLIENT_CONCURRENCY: usize = 4; // The default Esplora server we're using. pub(crate) const DEFAULT_ESPLORA_SERVER_URL: &str = "https://blockstream.info/api"; @@ -297,7 +297,7 @@ pub(crate) fn default_user_config(config: &Config) -> UserConfig { if !may_announce_channel(config) { user_config.accept_forwards_to_priv_channels = false; - user_config.channel_handshake_config.announced_channel = false; + user_config.channel_handshake_config.announce_for_forwarding = false; user_config.channel_handshake_limits.force_announced_channel_preference = true; } diff --git a/src/error.rs b/src/error.rs index 807e1ca54..8caaaabdd 100644 --- a/src/error.rs +++ b/src/error.rs @@ -5,6 +5,11 @@ // http://opensource.org/licenses/MIT>, at your option. You may not use this file except in // accordance with one or both of these licenses. +use bdk_chain::bitcoin::psbt::ExtractTxError as BdkExtractTxError; +use bdk_chain::local_chain::CannotConnectError as BdkChainConnectionError; +use bdk_wallet::error::CreateTxError as BdkCreateTxError; +use bdk_wallet::signer::SignerError as BdkSignerError; + use std::fmt; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -48,8 +53,6 @@ pub enum Error { WalletOperationTimeout, /// A signing operation for transaction failed. OnchainTxSigningFailed, - /// A signing operation for message failed. - MessageSigningFailed, /// A transaction sync operation failed. TxSyncFailed, /// A transaction sync operation timed out. 
@@ -140,7 +143,6 @@ impl fmt::Display for Error { Self::WalletOperationFailed => write!(f, "Failed to conduct wallet operation."), Self::WalletOperationTimeout => write!(f, "A wallet operation timed out."), Self::OnchainTxSigningFailed => write!(f, "Failed to sign given transaction."), - Self::MessageSigningFailed => write!(f, "Failed to sign given message."), Self::TxSyncFailed => write!(f, "Failed to sync transactions."), Self::TxSyncTimeout => write!(f, "Syncing transactions timed out."), Self::GossipUpdateFailed => write!(f, "Failed to update gossip data."), @@ -187,12 +189,27 @@ impl fmt::Display for Error { impl std::error::Error for Error {} -impl From for Error { - fn from(e: bdk::Error) -> Self { - match e { - bdk::Error::Signer(_) => Self::OnchainTxSigningFailed, - _ => Self::WalletOperationFailed, - } +impl From for Error { + fn from(_: BdkSignerError) -> Self { + Self::OnchainTxSigningFailed + } +} + +impl From for Error { + fn from(_: BdkCreateTxError) -> Self { + Self::OnchainTxCreationFailed + } +} + +impl From for Error { + fn from(_: BdkExtractTxError) -> Self { + Self::OnchainTxCreationFailed + } +} + +impl From for Error { + fn from(_: BdkChainConnectionError) -> Self { + Self::WalletOperationFailed } } diff --git a/src/event.rs b/src/event.rs index 1f4b2e117..8d732d21a 100644 --- a/src/event.rs +++ b/src/event.rs @@ -27,11 +27,12 @@ use crate::io::{ use crate::logger::{log_debug, log_error, log_info, Logger}; use lightning::events::bump_transaction::BumpTransactionEvent; -use lightning::events::{ClosureReason, PaymentPurpose}; +use lightning::events::{ClosureReason, PaymentPurpose, ReplayEvent}; use lightning::events::{Event as LdkEvent, PaymentFailureReason}; use lightning::impl_writeable_tlv_based_enum; use lightning::ln::channelmanager::PaymentId; -use lightning::ln::{ChannelId, PaymentHash}; +use lightning::ln::types::ChannelId; +use lightning::ln::PaymentHash; use lightning::routing::gossip::NodeId; use lightning::util::errors::APIError; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; @@ -40,7 +41,7 @@ use lightning_liquidity::lsps2::utils::compute_opening_fee; use bitcoin::blockdata::locktime::absolute::LockTime; use bitcoin::secp256k1::PublicKey; -use bitcoin::OutPoint; +use bitcoin::{Amount, OutPoint}; use rand::{thread_rng, Rng}; @@ -74,7 +75,12 @@ pub enum Event { /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. payment_id: Option, /// The hash of the payment. - payment_hash: PaymentHash, + /// + /// This will be `None` if the payment failed before receiving an invoice when paying a + /// BOLT12 [`Offer`]. + /// + /// [`Offer`]: lightning::offers::offer::Offer + payment_hash: Option, /// The reason why the payment failed. /// /// This will be `None` for events serialized by LDK Node v0.2.1 and prior. 
@@ -159,8 +165,8 @@ impl_writeable_tlv_based_enum!(Event, (3, payment_id, option), }, (1, PaymentFailed) => { - (0, payment_hash, required), - (1, reason, option), + (0, payment_hash, option), + (1, reason, upgradable_option), (3, payment_id, option), }, (2, PaymentReceived) => { @@ -191,7 +197,7 @@ impl_writeable_tlv_based_enum!(Event, (2, payment_id, required), (4, claimable_amount_msat, required), (6, claim_deadline, option), - }; + } ); pub struct EventQueue @@ -394,7 +400,7 @@ where } } - pub async fn handle_event(&self, event: LdkEvent) { + pub async fn handle_event(&self, event: LdkEvent) -> Result<(), ReplayEvent> { match event { LdkEvent::FundingGenerationReady { temporary_channel_id, @@ -412,17 +418,18 @@ where let locktime = LockTime::from_height(cur_height).unwrap_or(LockTime::ZERO); // Sign the final funding transaction and broadcast it. + let channel_amount = Amount::from_sat(channel_value_satoshis); match self.wallet.create_funding_transaction( output_script, - channel_value_satoshis, + channel_amount, confirmation_target, locktime, ) { Ok(final_tx) => { // Give the funding transaction back to LDK for opening the channel. match self.channel_manager.funding_transaction_generated( - &temporary_channel_id, - &counterparty_node_id, + temporary_channel_id, + counterparty_node_id, final_tx, ) { Ok(()) => {}, @@ -452,6 +459,7 @@ where .force_close_without_broadcasting_txn( &temporary_channel_id, &counterparty_node_id, + "Failed to create funding transaction".to_string(), ) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to force close channel after funding generation failed: {:?}", e); @@ -462,6 +470,9 @@ where }, } }, + LdkEvent::FundingTxBroadcastSafe { .. } => { + debug_assert!(false, "We currently only support safe funding, so this event should never be emitted."); + }, LdkEvent::PaymentClaimable { payment_hash, purpose, @@ -491,7 +502,7 @@ where log_error!(self.logger, "Failed to access payment store: {}", e); panic!("Failed to access payment store"); }); - return; + return Ok(()); } if info.status == PaymentStatus::Succeeded @@ -513,7 +524,7 @@ where log_error!(self.logger, "Failed to access payment store: {}", e); panic!("Failed to access payment store"); }); - return; + return Ok(()); } let max_total_opening_fee_msat = match info.kind { @@ -552,7 +563,7 @@ where log_error!(self.logger, "Failed to access payment store: {}", e); panic!("Failed to access payment store"); }); - return; + return Ok(()); } // If this is known by the store but ChannelManager doesn't know the preimage, @@ -581,7 +592,7 @@ where ); panic!("Failed to push to event queue"); }); - return; + return Ok(()); } }, _ => {}, @@ -717,6 +728,7 @@ where receiver_node_id: _, htlcs: _, sender_intended_total_msat: _, + onion_fields: _, } => { let payment_id = PaymentId(payment_hash.0); log_info!( @@ -810,7 +822,7 @@ where id } else { debug_assert!(false, "payment_id should always be set."); - return; + return Ok(()); }; let update = PaymentDetailsUpdate { @@ -855,13 +867,13 @@ where LdkEvent::PaymentFailed { payment_id, payment_hash, reason, .. 
} => { log_info!( self.logger, - "Failed to send payment to payment hash {:?} due to {:?}.", - hex_utils::to_string(&payment_hash.0), + "Failed to send payment with ID {} due to {:?}.", + payment_id, reason ); let update = PaymentDetailsUpdate { - hash: Some(Some(payment_hash)), + hash: Some(payment_hash), status: Some(PaymentStatus::Failed), ..PaymentDetailsUpdate::new(payment_id) }; @@ -916,9 +928,15 @@ where funding_satoshis, channel_type, push_msat: _, + is_announced: _, + params: _, } => { let anchor_channel = channel_type.requires_anchors_zero_fee_htlc_tx(); + // TODO: We should use `is_announced` flag above and reject announced channels if + // we're not a forwarding node, once we add a 'forwarding mode' based on listening + // address / node alias being set. + if anchor_channel { if let Some(anchor_channels_config) = self.config.anchor_channels_config.as_ref() @@ -951,11 +969,12 @@ where .force_close_without_broadcasting_txn( &temporary_channel_id, &counterparty_node_id, + "Channel request rejected".to_string(), ) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to reject channel: {:?}", e) }); - return; + return Ok(()); } } else { log_error!( @@ -967,11 +986,12 @@ where .force_close_without_broadcasting_txn( &temporary_channel_id, &counterparty_node_id, + "Channel request rejected".to_string(), ) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to reject channel: {:?}", e) }); - return; + return Ok(()); } } @@ -1038,7 +1058,7 @@ where node.announcement_info .as_ref() .map_or("unnamed node".to_string(), |ann| { - format!("node {}", ann.alias) + format!("node {}", ann.alias()) }) }) }; @@ -1177,21 +1197,8 @@ where }, LdkEvent::DiscardFunding { .. } => {}, LdkEvent::HTLCIntercepted { .. } => {}, - LdkEvent::InvoiceRequestFailed { payment_id } => { - log_error!( - self.logger, - "Failed to request invoice for outbound BOLT12 payment {}", - payment_id - ); - let update = PaymentDetailsUpdate { - status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_id) - }; - self.payment_store.update(&update).unwrap_or_else(|e| { - log_error!(self.logger, "Failed to access payment store: {}", e); - panic!("Failed to access payment store"); - }); - return; + LdkEvent::InvoiceReceived { .. } => { + debug_assert!(false, "We currently don't handle BOLT12 invoices manually, so this event should never be emitted."); }, LdkEvent::ConnectionNeeded { node_id, addresses } => { let runtime_lock = self.runtime.read().unwrap(); @@ -1243,13 +1250,20 @@ where "Ignoring BumpTransactionEvent for channel {} due to trusted counterparty {}", channel_id, counterparty_node_id ); - return; + return Ok(()); } } self.bump_tx_event_handler.handle_event(&bte); }, + LdkEvent::OnionMessageIntercepted { .. } => { + debug_assert!(false, "We currently don't support onion message interception, so this event should never be emitted."); }, + LdkEvent::OnionMessagePeerConnected { ..
} => { + debug_assert!(false, "We currently don't support onion message interception, so this event should never be emitted."); + }, } + Ok(()) } } diff --git a/src/fee_estimator.rs b/src/fee_estimator.rs index 857106aa3..62b4b8882 100644 --- a/src/fee_estimator.rs +++ b/src/fee_estimator.rs @@ -13,10 +13,9 @@ use lightning::chain::chaininterface::ConfirmationTarget as LdkConfirmationTarge use lightning::chain::chaininterface::FeeEstimator as LdkFeeEstimator; use lightning::chain::chaininterface::FEERATE_FLOOR_SATS_PER_KW; -use bdk::FeeRate; +use bitcoin::FeeRate; use esplora_client::AsyncClient as EsploraClient; -use bitcoin::blockdata::weight::Weight; use bitcoin::Network; use std::collections::HashMap; @@ -90,7 +89,8 @@ where let confirmation_targets = vec![ ConfirmationTarget::OnchainPayment, ConfirmationTarget::ChannelFunding, - LdkConfirmationTarget::OnChainSweep.into(), + LdkConfirmationTarget::MaximumFeeEstimate.into(), + LdkConfirmationTarget::UrgentOnChainSweep.into(), LdkConfirmationTarget::MinAllowedAnchorChannelRemoteFee.into(), LdkConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee.into(), LdkConfirmationTarget::AnchorChannelFee.into(), @@ -104,7 +104,8 @@ where ConfirmationTarget::OnchainPayment => 6, ConfirmationTarget::ChannelFunding => 12, ConfirmationTarget::Lightning(ldk_target) => match ldk_target { - LdkConfirmationTarget::OnChainSweep => 6, + LdkConfirmationTarget::MaximumFeeEstimate => 1, + LdkConfirmationTarget::UrgentOnChainSweep => 6, LdkConfirmationTarget::MinAllowedAnchorChannelRemoteFee => 1008, LdkConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee => 144, LdkConfirmationTarget::AnchorChannelFee => 1008, @@ -114,7 +115,7 @@ where }, }; - let converted_estimates = + let converted_estimate_sat_vb = esplora_client::convert_fee_rate(num_blocks, estimates.clone()).map_err(|e| { log_error!( self.logger, @@ -125,7 +126,7 @@ where Error::FeerateEstimationUpdateFailed })?; - let fee_rate = FeeRate::from_sat_per_vb(converted_estimates); + let fee_rate = FeeRate::from_sat_per_kwu((converted_estimate_sat_vb * 250.0) as u64); // LDK 0.0.118 introduced changes to the `ConfirmationTarget` semantics that // require some post-estimation adjustments to the fee rates, which we do here. @@ -133,9 +134,8 @@ where ConfirmationTarget::Lightning( LdkConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee, ) => { - let slightly_less_than_background = - fee_rate.fee_wu(Weight::from_wu(1000)) - 250; - FeeRate::from_sat_per_kwu(slightly_less_than_background as f32) + let slightly_less_than_background = fee_rate.to_sat_per_kwu() - 250; + FeeRate::from_sat_per_kwu(slightly_less_than_background) }, _ => fee_rate, }; @@ -146,7 +146,7 @@ where self.logger, "Fee rate estimation updated for {:?}: {} sats/kwu", target, - adjusted_fee_rate.fee_wu(Weight::from_wu(1000)) + adjusted_fee_rate.to_sat_per_kwu(), ); } Ok(()) @@ -164,7 +164,8 @@ where ConfirmationTarget::OnchainPayment => 5000, ConfirmationTarget::ChannelFunding => 1000, ConfirmationTarget::Lightning(ldk_target) => match ldk_target { - LdkConfirmationTarget::OnChainSweep => 5000, + LdkConfirmationTarget::MaximumFeeEstimate => 8000, + LdkConfirmationTarget::UrgentOnChainSweep => 5000, LdkConfirmationTarget::MinAllowedAnchorChannelRemoteFee => { FEERATE_FLOOR_SATS_PER_KW }, @@ -179,17 +180,13 @@ where }; // We'll fall back on this, if we really don't have any other information. 
- let fallback_rate = FeeRate::from_sat_per_kwu(fallback_sats_kwu as f32); + let fallback_rate = FeeRate::from_sat_per_kwu(fallback_sats_kwu as u64); let estimate = *locked_fee_rate_cache.get(&confirmation_target).unwrap_or(&fallback_rate); // Currently we assume every transaction needs to at least be relayable, which is why we // enforce a lower bound of `FEERATE_FLOOR_SATS_PER_KW`. - let weight_units = Weight::from_wu(1000); - FeeRate::from_wu( - estimate.fee_wu(weight_units).max(FEERATE_FLOOR_SATS_PER_KW as u64), - weight_units, - ) + FeeRate::from_sat_per_kwu(estimate.to_sat_per_kwu().max(FEERATE_FLOOR_SATS_PER_KW as u64)) } } @@ -198,6 +195,6 @@ where L::Target: Logger, { fn get_est_sat_per_1000_weight(&self, confirmation_target: LdkConfirmationTarget) -> u32 { - self.estimate_fee_rate(confirmation_target.into()).fee_wu(Weight::from_wu(1000)) as u32 + self.estimate_fee_rate(confirmation_target.into()).to_sat_per_kwu() as u32 } } diff --git a/src/graph.rs b/src/graph.rs index 520be99db..3e4e58c88 100644 --- a/src/graph.rs +++ b/src/graph.rs @@ -165,8 +165,8 @@ pub struct NodeAnnouncementInfo { impl From for NodeAnnouncementInfo { fn from(value: lightning::routing::gossip::NodeAnnouncementInfo) -> Self { Self { - last_update: value.last_update, - alias: value.alias.to_string(), + last_update: value.last_update(), + alias: value.alias().to_string(), addresses: value.addresses().iter().cloned().collect(), } } diff --git a/src/io/mod.rs b/src/io/mod.rs index c65ab1d3b..22caff50f 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -42,3 +42,45 @@ pub(crate) const LATEST_RGS_SYNC_TIMESTAMP_KEY: &str = "latest_rgs_sync_timestam pub(crate) const LATEST_NODE_ANN_BCAST_TIMESTAMP_PRIMARY_NAMESPACE: &str = ""; pub(crate) const LATEST_NODE_ANN_BCAST_TIMESTAMP_SECONDARY_NAMESPACE: &str = ""; pub(crate) const LATEST_NODE_ANN_BCAST_TIMESTAMP_KEY: &str = "latest_node_ann_bcast_timestamp"; + +/// The BDK wallet's [`ChangeSet::descriptor`] will be persisted under this key. +/// +/// [`ChangeSet::descriptor`]: bdk_wallet::ChangeSet::descriptor +pub(crate) const BDK_WALLET_DESCRIPTOR_PRIMARY_NAMESPACE: &str = "bdk_wallet"; +pub(crate) const BDK_WALLET_DESCRIPTOR_SECONDARY_NAMESPACE: &str = ""; +pub(crate) const BDK_WALLET_DESCRIPTOR_KEY: &str = "descriptor"; + +/// The BDK wallet's [`ChangeSet::change_descriptor`] will be persisted under this key. +/// +/// [`ChangeSet::change_descriptor`]: bdk_wallet::ChangeSet::change_descriptor +pub(crate) const BDK_WALLET_CHANGE_DESCRIPTOR_PRIMARY_NAMESPACE: &str = "bdk_wallet"; +pub(crate) const BDK_WALLET_CHANGE_DESCRIPTOR_SECONDARY_NAMESPACE: &str = ""; +pub(crate) const BDK_WALLET_CHANGE_DESCRIPTOR_KEY: &str = "change_descriptor"; + +/// The BDK wallet's [`ChangeSet::network`] will be persisted under this key. +/// +/// [`ChangeSet::network`]: bdk_wallet::ChangeSet::network +pub(crate) const BDK_WALLET_NETWORK_PRIMARY_NAMESPACE: &str = "bdk_wallet"; +pub(crate) const BDK_WALLET_NETWORK_SECONDARY_NAMESPACE: &str = ""; +pub(crate) const BDK_WALLET_NETWORK_KEY: &str = "network"; + +/// The BDK wallet's [`ChangeSet::local_chain`] will be persisted under this key. +/// +/// [`ChangeSet::local_chain`]: bdk_wallet::ChangeSet::local_chain +pub(crate) const BDK_WALLET_LOCAL_CHAIN_PRIMARY_NAMESPACE: &str = "bdk_wallet"; +pub(crate) const BDK_WALLET_LOCAL_CHAIN_SECONDARY_NAMESPACE: &str = ""; +pub(crate) const BDK_WALLET_LOCAL_CHAIN_KEY: &str = "local_chain"; + +/// The BDK wallet's [`ChangeSet::tx_graph`] will be persisted under this key. 
+/// +/// [`ChangeSet::tx_graph`]: bdk_wallet::ChangeSet::tx_graph +pub(crate) const BDK_WALLET_TX_GRAPH_PRIMARY_NAMESPACE: &str = "bdk_wallet"; +pub(crate) const BDK_WALLET_TX_GRAPH_SECONDARY_NAMESPACE: &str = ""; +pub(crate) const BDK_WALLET_TX_GRAPH_KEY: &str = "tx_graph"; + +/// The BDK wallet's [`ChangeSet::indexer`] will be persisted under this key. +/// +/// [`ChangeSet::indexer`]: bdk_wallet::ChangeSet::indexer +pub(crate) const BDK_WALLET_INDEXER_PRIMARY_NAMESPACE: &str = "bdk_wallet"; +pub(crate) const BDK_WALLET_INDEXER_SECONDARY_NAMESPACE: &str = ""; +pub(crate) const BDK_WALLET_INDEXER_KEY: &str = "indexer"; diff --git a/src/io/sqlite_store/mod.rs b/src/io/sqlite_store/mod.rs index c1eac84b4..b72db5a2b 100644 --- a/src/io/sqlite_store/mod.rs +++ b/src/io/sqlite_store/mod.rs @@ -132,7 +132,7 @@ impl SqliteStore { impl KVStore for SqliteStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, - ) -> std::io::Result> { + ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "read")?; let locked_conn = self.connection.lock().unwrap(); @@ -142,7 +142,7 @@ impl KVStore for SqliteStore { let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| { let msg = format!("Failed to prepare statement: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?; let res = stmt @@ -162,7 +162,7 @@ impl KVStore for SqliteStore { PrintableString(secondary_namespace), PrintableString(key) ); - std::io::Error::new(std::io::ErrorKind::NotFound, msg) + io::Error::new(io::ErrorKind::NotFound, msg) }, e => { let msg = format!( @@ -172,7 +172,7 @@ impl KVStore for SqliteStore { PrintableString(key), e ); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) }, })?; Ok(res) @@ -180,7 +180,7 @@ impl KVStore for SqliteStore { fn write( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8], - ) -> std::io::Result<()> { + ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?; let locked_conn = self.connection.lock().unwrap(); @@ -192,7 +192,7 @@ impl KVStore for SqliteStore { let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| { let msg = format!("Failed to prepare statement: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?; stmt.execute(named_params! { @@ -210,13 +210,13 @@ impl KVStore for SqliteStore { PrintableString(key), e ); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) }) } fn remove( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool, - ) -> std::io::Result<()> { + ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?; let locked_conn = self.connection.lock().unwrap(); @@ -225,7 +225,7 @@ impl KVStore for SqliteStore { let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| { let msg = format!("Failed to prepare statement: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?; stmt.execute(named_params! 
{ @@ -241,14 +241,12 @@ impl KVStore for SqliteStore { PrintableString(key), e ); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?; Ok(()) } - fn list( - &self, primary_namespace: &str, secondary_namespace: &str, - ) -> std::io::Result> { + fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, None, "list")?; let locked_conn = self.connection.lock().unwrap(); @@ -259,7 +257,7 @@ impl KVStore for SqliteStore { ); let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| { let msg = format!("Failed to prepare statement: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?; let mut keys = Vec::new(); @@ -274,13 +272,13 @@ impl KVStore for SqliteStore { ) .map_err(|e| { let msg = format!("Failed to retrieve queried rows: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?; for k in rows_iter { keys.push(k.map_err(|e| { let msg = format!("Failed to retrieve queried rows: {}", e); - std::io::Error::new(std::io::ErrorKind::Other, msg) + io::Error::new(io::ErrorKind::Other, msg) })?); } diff --git a/src/io/test_utils.rs b/src/io/test_utils.rs index c4610b4f5..98b33fa5f 100644 --- a/src/io/test_utils.rs +++ b/src/io/test_utils.rs @@ -151,12 +151,13 @@ pub(crate) fn do_test_store(store_0: &K, store_1: &K) { .force_close_broadcasting_latest_txn( &nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), + "whoops".to_string(), ) .unwrap(); check_closed_event!( nodes[0], 1, - ClosureReason::HolderForceClosed, + ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000 ); diff --git a/src/io/utils.rs b/src/io/utils.rs index 29484273c..f6fd10b41 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -12,8 +12,11 @@ use crate::logger::{log_error, FilesystemLogger}; use crate::peer_store::PeerStore; use crate::sweep::DeprecatedSpendableOutputInfo; use crate::types::{Broadcaster, ChainSource, DynStore, FeeEstimator, KeysManager, Sweeper}; +use crate::wallet::ser::{ChangeSetDeserWrapper, ChangeSetSerWrapper}; use crate::{Error, EventQueue, PaymentDetails}; +use lightning::io::Cursor; +use lightning::ln::msgs::DecodeError; use lightning::routing::gossip::NetworkGraph; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringDecayParameters}; use lightning::util::logger::Logger; @@ -26,13 +29,21 @@ use lightning::util::persist::{ }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; use lightning::util::string::PrintableString; +use lightning::util::sweep::{OutputSpendStatus, OutputSweeper}; + +use bdk_chain::indexer::keychain_txout::ChangeSet as BdkIndexerChangeSet; +use bdk_chain::local_chain::ChangeSet as BdkLocalChainChangeSet; +use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey}; +use bdk_chain::tx_graph::ChangeSet as BdkTxGraphChangeSet; +use bdk_chain::ConfirmationBlockTime; +use bdk_wallet::ChangeSet as BdkWalletChangeSet; use bip39::Mnemonic; -use lightning::util::sweep::{OutputSpendStatus, OutputSweeper}; +use bitcoin::Network; use rand::{thread_rng, RngCore}; use std::fs; -use std::io::{Cursor, Write}; +use std::io::Write; use std::ops::Deref; use std::path::Path; use std::sync::Arc; @@ -518,6 +529,164 @@ pub(crate) fn check_namespace_key_validity( Ok(()) } +macro_rules! 
impl_read_write_change_set_type { + ( $read_name: ident, $write_name: ident, $change_set_type:ty, $primary_namespace: expr, $secondary_namespace: expr, $key: expr ) => { + pub(crate) fn $read_name( + kv_store: Arc, logger: L, + ) -> Result, std::io::Error> + where + L::Target: Logger, + { + let bytes = match kv_store.read($primary_namespace, $secondary_namespace, $key) { + Ok(bytes) => bytes, + Err(e) => { + if e.kind() == lightning::io::ErrorKind::NotFound { + return Ok(None); + } else { + log_error!( + logger, + "Reading data from key {}/{}/{} failed due to: {}", + $primary_namespace, + $secondary_namespace, + $key, + e + ); + return Err(e.into()); + } + }, + }; + + let mut reader = Cursor::new(bytes); + let res: Result, DecodeError> = + Readable::read(&mut reader); + match res { + Ok(res) => Ok(Some(res.0)), + Err(e) => { + log_error!(logger, "Failed to deserialize BDK wallet field: {}", e); + Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Failed to deserialize BDK wallet field", + )) + }, + } + } + + pub(crate) fn $write_name( + value: &$change_set_type, kv_store: Arc, logger: L, + ) -> Result<(), std::io::Error> + where + L::Target: Logger, + { + let data = ChangeSetSerWrapper(value).encode(); + kv_store.write($primary_namespace, $secondary_namespace, $key, &data).map_err(|e| { + log_error!( + logger, + "Writing data to key {}/{}/{} failed due to: {}", + $primary_namespace, + $secondary_namespace, + $key, + e + ); + e.into() + }) + } + }; +} + +impl_read_write_change_set_type!( + read_bdk_wallet_descriptor, + write_bdk_wallet_descriptor, + Descriptor, + BDK_WALLET_DESCRIPTOR_PRIMARY_NAMESPACE, + BDK_WALLET_DESCRIPTOR_SECONDARY_NAMESPACE, + BDK_WALLET_DESCRIPTOR_KEY +); + +impl_read_write_change_set_type!( + read_bdk_wallet_change_descriptor, + write_bdk_wallet_change_descriptor, + Descriptor, + BDK_WALLET_CHANGE_DESCRIPTOR_PRIMARY_NAMESPACE, + BDK_WALLET_CHANGE_DESCRIPTOR_SECONDARY_NAMESPACE, + BDK_WALLET_CHANGE_DESCRIPTOR_KEY +); + +impl_read_write_change_set_type!( + read_bdk_wallet_network, + write_bdk_wallet_network, + Network, + BDK_WALLET_NETWORK_PRIMARY_NAMESPACE, + BDK_WALLET_NETWORK_SECONDARY_NAMESPACE, + BDK_WALLET_NETWORK_KEY +); + +impl_read_write_change_set_type!( + read_bdk_wallet_local_chain, + write_bdk_wallet_local_chain, + BdkLocalChainChangeSet, + BDK_WALLET_LOCAL_CHAIN_PRIMARY_NAMESPACE, + BDK_WALLET_LOCAL_CHAIN_SECONDARY_NAMESPACE, + BDK_WALLET_LOCAL_CHAIN_KEY +); + +impl_read_write_change_set_type!( + read_bdk_wallet_tx_graph, + write_bdk_wallet_tx_graph, + BdkTxGraphChangeSet, + BDK_WALLET_TX_GRAPH_PRIMARY_NAMESPACE, + BDK_WALLET_TX_GRAPH_SECONDARY_NAMESPACE, + BDK_WALLET_TX_GRAPH_KEY +); + +impl_read_write_change_set_type!( + read_bdk_wallet_indexer, + write_bdk_wallet_indexer, + BdkIndexerChangeSet, + BDK_WALLET_INDEXER_PRIMARY_NAMESPACE, + BDK_WALLET_INDEXER_SECONDARY_NAMESPACE, + BDK_WALLET_INDEXER_KEY +); + +// Reads the full BdkWalletChangeSet or returns default fields +pub(crate) fn read_bdk_wallet_change_set( + kv_store: Arc, logger: Arc, +) -> Result, std::io::Error> { + let mut change_set = BdkWalletChangeSet::default(); + + // We require a descriptor and return `None` to signal creation of a new wallet otherwise. + if let Some(descriptor) = + read_bdk_wallet_descriptor(Arc::clone(&kv_store), Arc::clone(&logger))? + { + change_set.descriptor = Some(descriptor); + } else { + return Ok(None); + } + + // We require a change_descriptor and return `None` to signal creation of a new wallet otherwise. 
+ if let Some(change_descriptor) = + read_bdk_wallet_change_descriptor(Arc::clone(&kv_store), Arc::clone(&logger))? + { + change_set.change_descriptor = Some(change_descriptor); + } else { + return Ok(None); + } + + // We require a network and return `None` to signal creation of a new wallet otherwise. + if let Some(network) = read_bdk_wallet_network(Arc::clone(&kv_store), Arc::clone(&logger))? { + change_set.network = Some(network); + } else { + return Ok(None); + } + + read_bdk_wallet_local_chain(Arc::clone(&kv_store), Arc::clone(&logger))? + .map(|local_chain| change_set.local_chain = local_chain); + read_bdk_wallet_tx_graph(Arc::clone(&kv_store), Arc::clone(&logger))? + .map(|tx_graph| change_set.tx_graph = tx_graph); + read_bdk_wallet_indexer(Arc::clone(&kv_store), Arc::clone(&logger))? + .map(|indexer| change_set.indexer = indexer); + Ok(Some(change_set)) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index ba09b5988..474f7dbc7 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -5,9 +5,7 @@ // http://opensource.org/licenses/MIT>, at your option. You may not use this file except in // accordance with one or both of these licenses. -use io::Error; -use std::io; -use std::io::ErrorKind; +use lightning::io::{self, Error, ErrorKind}; #[cfg(test)] use std::panic::RefUnwindSafe; use std::time::Duration; @@ -139,7 +137,14 @@ impl KVStore for VssStore { })?; // unwrap safety: resp.value must be always present for a non-erroneous VSS response, otherwise // it is an API-violation which is converted to [`VssError::InternalServerError`] in [`VssClient`] - let storable = Storable::decode(&resp.value.unwrap().value[..])?; + let storable = Storable::decode(&resp.value.unwrap().value[..]).map_err(|e| { + let msg = format!( + "Failed to decode data read from key {}/{}/{}: {}", + primary_namespace, secondary_namespace, key, e + ); + Error::new(ErrorKind::Other, msg) + })?; + Ok(self.storable_builder.deconstruct(storable)?.0) } diff --git a/src/lib.rs b/src/lib.rs index 914dec4b1..4a7d081c5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -138,7 +138,7 @@ use payment::{ use peer_store::{PeerInfo, PeerStore}; use types::{ Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, DynStore, FeeEstimator, - Graph, KeysManager, PeerManager, Router, Scorer, Sweeper, Wallet, + Graph, KeysManager, OnionMessenger, PeerManager, Router, Scorer, Sweeper, Wallet, }; pub use types::{ChannelDetails, PeerDetails, UserChannelId}; @@ -146,7 +146,8 @@ use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; use lightning::chain::{BestBlock, Confirm}; use lightning::events::bump_transaction::Wallet as LdkWallet; -use lightning::ln::channelmanager::{ChannelShutdownState, PaymentId}; +use lightning::ln::channel_state::ChannelShutdownState; +use lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::SocketAddress; use lightning::routing::gossip::NodeAlias; @@ -186,6 +187,7 @@ pub struct Node { chain_monitor: Arc, output_sweeper: Arc, peer_manager: Arc, + onion_messenger: Arc, connection_manager: Arc>>, keys_manager: Arc, network_graph: Arc, @@ -279,49 +281,44 @@ impl Node { .config .onchain_wallet_sync_interval_secs .max(config::WALLET_SYNC_INTERVAL_MINIMUM_SECS); - std::thread::spawn(move || { - tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on( - async move { - let mut onchain_wallet_sync_interval = tokio::time::interval( - Duration::from_secs(onchain_wallet_sync_interval_secs), - ); - 
onchain_wallet_sync_interval - .set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); - loop { - tokio::select! { - _ = stop_sync.changed() => { + runtime.spawn(async move { + let mut onchain_wallet_sync_interval = + tokio::time::interval(Duration::from_secs(onchain_wallet_sync_interval_secs)); + onchain_wallet_sync_interval + .set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + loop { + tokio::select! { + _ = stop_sync.changed() => { + log_trace!( + sync_logger, + "Stopping background syncing on-chain wallet.", + ); + return; + } + _ = onchain_wallet_sync_interval.tick() => { + let now = Instant::now(); + match wallet.sync().await { + Ok(()) => { log_trace!( sync_logger, - "Stopping background syncing on-chain wallet.", - ); - return; + "Background sync of on-chain wallet finished in {}ms.", + now.elapsed().as_millis() + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_onchain_wallet_timestamp.write().unwrap() = unix_time_secs_opt; } - _ = onchain_wallet_sync_interval.tick() => { - let now = Instant::now(); - match wallet.sync().await { - Ok(()) => { - log_trace!( - sync_logger, - "Background sync of on-chain wallet finished in {}ms.", - now.elapsed().as_millis() - ); - let unix_time_secs_opt = - SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); - *sync_onchain_wallet_timestamp.write().unwrap() = unix_time_secs_opt; - } - Err(err) => { - log_error!( - sync_logger, - "Background sync of on-chain wallet failed: {}", - err - ) - } - } + Err(err) => { + log_error!( + sync_logger, + "Background sync of on-chain wallet failed: {}", + err + ) } } } - }, - ); + } + } }); let mut stop_fee_updates = self.stop_sender.subscribe(); @@ -636,7 +633,7 @@ impl Node { continue; } - if !bcast_cm.list_channels().iter().any(|chan| chan.is_public && chan.is_channel_ready) { + if !bcast_cm.list_channels().iter().any(|chan| chan.is_announced && chan.is_channel_ready) { // Skip if we don't have any public channels that are ready. continue; } @@ -730,6 +727,7 @@ impl Node { let background_chan_man = Arc::clone(&self.channel_manager); let background_gossip_sync = self.gossip_source.as_gossip_sync(); let background_peer_man = Arc::clone(&self.peer_manager); + let background_onion_messenger = Arc::clone(&self.onion_messenger); let background_logger = Arc::clone(&self.logger); let background_error_logger = Arc::clone(&self.logger); let background_scorer = Arc::clone(&self.scorer); @@ -762,6 +760,7 @@ impl Node { |e| background_event_handler.handle_event(e), background_chain_mon, background_chan_man, + Some(background_onion_messenger), background_gossip_sync, background_peer_man, background_logger, @@ -1188,7 +1187,7 @@ impl Node { fn open_channel_inner( &self, node_id: PublicKey, address: SocketAddress, channel_amount_sats: u64, push_to_counterparty_msat: Option, channel_config: Option, - announce_channel: bool, + announce_for_forwarding: bool, ) -> Result { let rt_lock = self.runtime.read().unwrap(); if rt_lock.is_none() { @@ -1250,12 +1249,12 @@ impl Node { } let mut user_config = default_user_config(&self.config); - user_config.channel_handshake_config.announced_channel = announce_channel; + user_config.channel_handshake_config.announce_for_forwarding = announce_for_forwarding; user_config.channel_config = (channel_config.unwrap_or_default()).clone().into(); // We set the max inflight to 100% for private channels. 
// FIXME: LDK will default to this behavior soon, too, at which point we should drop this // manual override. - if !announce_channel { + if !announce_for_forwarding { user_config .channel_handshake_config .max_inbound_htlc_value_in_flight_percent_of_channel = 100; @@ -1484,7 +1483,7 @@ impl Node { pub fn close_channel( &self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, ) -> Result<(), Error> { - self.close_channel_internal(user_channel_id, counterparty_node_id, false) + self.close_channel_internal(user_channel_id, counterparty_node_id, false, None) } /// Force-close a previously opened channel. @@ -1500,13 +1499,19 @@ impl Node { /// for more information). pub fn force_close_channel( &self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, + reason: Option, ) -> Result<(), Error> { - self.close_channel_internal(user_channel_id, counterparty_node_id, true) + self.close_channel_internal(user_channel_id, counterparty_node_id, true, reason) } fn close_channel_internal( &self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, force: bool, + force_close_reason: Option, ) -> Result<(), Error> { + debug_assert!( + force_close_reason.is_none() || force, + "Reason can only be set for force closures" + ); let open_channels = self.channel_manager.list_channels_with_counterparty(&counterparty_node_id); if let Some(channel_details) = @@ -1520,6 +1525,7 @@ impl Node { .force_close_without_broadcasting_txn( &channel_details.channel_id, &counterparty_node_id, + force_close_reason.unwrap_or_default(), ) .map_err(|e| { log_error!( @@ -1534,6 +1540,7 @@ impl Node { .force_close_broadcasting_latest_txn( &channel_details.channel_id, &counterparty_node_id, + force_close_reason.unwrap_or_default(), ) .map_err(|e| { log_error!(self.logger, "Failed to force-close channel: {:?}", e); @@ -1727,7 +1734,7 @@ impl Node { /// can be sure that the signature was generated by the caller. /// Signatures are EC recoverable, meaning that given the message and the /// signature the `PublicKey` of the signer can be extracted. - pub fn sign_message(&self, msg: &[u8]) -> Result { + pub fn sign_message(&self, msg: &[u8]) -> String { self.keys_manager.sign_message(msg) } diff --git a/src/logger.rs b/src/logger.rs index 2be20a165..19df24367 100644 --- a/src/logger.rs +++ b/src/logger.rs @@ -9,11 +9,11 @@ pub(crate) use lightning::util::logger::Logger; pub(crate) use lightning::{log_bytes, log_debug, log_error, log_info, log_trace}; use lightning::util::logger::{Level, Record}; -use lightning::util::ser::Writer; use chrono::Utc; use std::fs; +use std::io::Write; #[cfg(not(target_os = "windows"))] use std::os::unix::fs::symlink; use std::path::Path; diff --git a/src/message_handler.rs b/src/message_handler.rs index 18dfa8637..38999512e 100644 --- a/src/message_handler.rs +++ b/src/message_handler.rs @@ -99,4 +99,24 @@ where }, } } + + fn peer_connected( + &self, their_node_id: &PublicKey, msg: &lightning::ln::msgs::Init, inbound: bool, + ) -> Result<(), ()> { + match self { + Self::Ignoring => Ok(()), + Self::Liquidity { liquidity_source, .. } => { + liquidity_source.liquidity_manager().peer_connected(their_node_id, msg, inbound) + }, + } + } + + fn peer_disconnected(&self, their_node_id: &PublicKey) { + match self { + Self::Ignoring => {}, + Self::Liquidity { liquidity_source, .. 
} => { + liquidity_source.liquidity_manager().peer_disconnected(their_node_id) + }, + } + } } diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs index b7f72355b..708c127bd 100644 --- a/src/payment/bolt11.rs +++ b/src/payment/bolt11.rs @@ -23,10 +23,15 @@ use crate::peer_store::{PeerInfo, PeerStore}; use crate::types::{ChannelManager, KeysManager}; use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; +use lightning::ln::invoice_utils::{ + create_invoice_from_channelmanager_and_duration_since_epoch, + create_invoice_from_channelmanager_and_duration_since_epoch_with_payment_hash, +}; use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning::routing::router::{PaymentParameters, RouteParameters}; -use lightning_invoice::{payment, Bolt11Invoice, Currency}; +use lightning::ln::bolt11_payment; +use lightning_invoice::{Bolt11Invoice, Currency}; use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; @@ -88,7 +93,7 @@ impl Bolt11Payment { return Err(Error::NotRunning); } - let (payment_hash, recipient_onion, mut route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + let (payment_hash, recipient_onion, mut route_params) = bolt11_payment::payment_parameters_from_invoice(&invoice).map_err(|_| { log_error!(self.logger, "Failed to send payment due to the given invoice being \"zero-amount\". Please use send_using_amount instead."); Error::InvalidInvoice })?; @@ -471,7 +476,7 @@ impl Bolt11Payment { let invoice = { let invoice_res = if let Some(payment_hash) = manual_claim_payment_hash { - lightning_invoice::utils::create_invoice_from_channelmanager_and_duration_since_epoch_with_payment_hash( + create_invoice_from_channelmanager_and_duration_since_epoch_with_payment_hash( &self.channel_manager, keys_manager, Arc::clone(&self.logger), @@ -484,7 +489,7 @@ impl Bolt11Payment { None, ) } else { - lightning_invoice::utils::create_invoice_from_channelmanager_and_duration_since_epoch( + create_invoice_from_channelmanager_and_duration_since_epoch( &self.channel_manager, keys_manager, Arc::clone(&self.logger), @@ -696,7 +701,7 @@ impl Bolt11Payment { return Err(Error::NotRunning); } - let (_payment_hash, _recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + let (_payment_hash, _recipient_onion, route_params) = bolt11_payment::payment_parameters_from_invoice(&invoice).map_err(|_| { log_error!(self.logger, "Failed to send probes due to the given invoice being \"zero-amount\". Please use send_probes_using_amount instead."); Error::InvalidInvoice })?; @@ -738,12 +743,12 @@ impl Bolt11Payment { return Err(Error::InvalidAmount); } - payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + bolt11_payment::payment_parameters_from_invoice(&invoice).map_err(|_| { log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being \"zero-amount\"."); Error::InvalidInvoice })? } else { - payment::payment_parameters_from_zero_amount_invoice(&invoice, amount_msat).map_err(|_| { + bolt11_payment::payment_parameters_from_zero_amount_invoice(&invoice, amount_msat).map_err(|_| { log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being not \"zero-amount\"."); Error::InvalidInvoice })? 
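For context on the import changes above: with LDK 0.0.124 the BOLT11 helpers this module relies on moved from `lightning_invoice` into the `lightning` crate (`lightning::ln::bolt11_payment` and `lightning::ln::invoice_utils`), which is why the call sites are updated in this hunk. A minimal, illustrative sketch of the relocated parsing helper, independent of the patch (the invoice string and error handling are placeholders, not part of this change):

use std::str::FromStr;

use lightning::ln::bolt11_payment::payment_parameters_from_invoice;
use lightning_invoice::Bolt11Invoice;

// Parses a BOLT11 invoice and derives the parameters needed to route a payment,
// mirroring what `Bolt11Payment::send` does above. Fails for zero-amount invoices.
fn prepare_payment(invoice_str: &str) -> Result<(), String> {
	let invoice =
		Bolt11Invoice::from_str(invoice_str).map_err(|e| format!("invalid invoice: {:?}", e))?;
	let (payment_hash, _recipient_onion, route_params) =
		payment_parameters_from_invoice(&invoice)
			.map_err(|_| "zero-amount invoice; use the *_using_amount variant".to_string())?;
	println!("paying {:?} for {} msat", payment_hash, route_params.final_value_msat);
	Ok(())
}
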
diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs index 9ec7bde34..90024b7d3 100644 --- a/src/payment/bolt12.rs +++ b/src/payment/bolt12.rs @@ -112,7 +112,7 @@ impl Bolt12Payment { let payment = PaymentDetails::new( payment_id, kind, - Some(*offer_amount_msat), + Some(offer_amount_msat), PaymentDirection::Outbound, PaymentStatus::Pending, ); @@ -136,7 +136,7 @@ impl Bolt12Payment { let payment = PaymentDetails::new( payment_id, kind, - Some(*offer_amount_msat), + Some(offer_amount_msat), PaymentDirection::Outbound, PaymentStatus::Failed, ); @@ -172,7 +172,7 @@ impl Bolt12Payment { let max_total_routing_fee_msat = None; let offer_amount_msat = match offer.amount() { - Some(Amount::Bitcoin { amount_msats }) => *amount_msats, + Some(Amount::Bitcoin { amount_msats }) => amount_msats, Some(_) => { log_error!(self.logger, "Failed to send payment as the provided offer was denominated in an unsupported currency."); return Err(Error::UnsupportedCurrency); @@ -255,12 +255,19 @@ impl Bolt12Payment { /// Returns a payable offer that can be used to request and receive a payment of the amount /// given. pub fn receive( - &self, amount_msat: u64, description: &str, quantity: Option, + &self, amount_msat: u64, description: &str, expiry_secs: Option, quantity: Option, ) -> Result { - let offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { - log_error!(self.logger, "Failed to create offer builder: {:?}", e); - Error::OfferCreationFailed - })?; + let absolute_expiry = expiry_secs.map(|secs| { + (SystemTime::now() + Duration::from_secs(secs as u64)) + .duration_since(UNIX_EPOCH) + .unwrap() + }); + + let offer_builder = + self.channel_manager.create_offer_builder(absolute_expiry).map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; let mut offer = offer_builder.amount_msats(amount_msat).description(description.to_string()); @@ -284,11 +291,20 @@ impl Bolt12Payment { /// Returns a payable offer that can be used to request and receive a payment for which the /// amount is to be determined by the user, also known as a "zero-amount" offer. 
- pub fn receive_variable_amount(&self, description: &str) -> Result { - let offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { - log_error!(self.logger, "Failed to create offer builder: {:?}", e); - Error::OfferCreationFailed - })?; + pub fn receive_variable_amount( + &self, description: &str, expiry_secs: Option, + ) -> Result { + let absolute_expiry = expiry_secs.map(|secs| { + (SystemTime::now() + Duration::from_secs(secs as u64)) + .duration_since(UNIX_EPOCH) + .unwrap() + }); + + let offer_builder = + self.channel_manager.create_offer_builder(absolute_expiry).map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; let offer = offer_builder.description(description.to_string()).build().map_err(|e| { log_error!(self.logger, "Failed to create offer: {:?}", e); Error::OfferCreationFailed @@ -340,7 +356,7 @@ impl Bolt12Payment { rand::thread_rng().fill_bytes(&mut random_bytes); let payment_id = PaymentId(random_bytes); - let expiration = (SystemTime::now() + Duration::from_secs(expiry_secs as u64)) + let absolute_expiry = (SystemTime::now() + Duration::from_secs(expiry_secs as u64)) .duration_since(UNIX_EPOCH) .unwrap(); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); @@ -350,7 +366,7 @@ impl Bolt12Payment { .channel_manager .create_refund_builder( amount_msat, - expiration, + absolute_expiry, payment_id, retry_strategy, max_total_routing_fee_msat, diff --git a/src/payment/onchain.rs b/src/payment/onchain.rs index a3cc0d2f2..b43765a97 100644 --- a/src/payment/onchain.rs +++ b/src/payment/onchain.rs @@ -12,7 +12,7 @@ use crate::error::Error; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::types::{ChannelManager, Wallet}; -use bitcoin::{Address, Txid}; +use bitcoin::{Address, Amount, Txid}; use std::sync::{Arc, RwLock}; @@ -70,7 +70,9 @@ impl OnchainPayment { ); return Err(Error::InsufficientFunds); } - self.wallet.send_to_address(address, Some(amount_sats)) + + let amount = Amount::from_sat(amount_sats); + self.wallet.send_to_address(address, Some(amount)) } /// Send an on-chain payment to the given address, draining all the available funds. diff --git a/src/payment/store.rs b/src/payment/store.rs index 0cea18002..ee82544dc 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -150,7 +150,7 @@ pub enum PaymentDirection { impl_writeable_tlv_based_enum!(PaymentDirection, (0, Inbound) => {}, - (1, Outbound) => {}; + (1, Outbound) => {} ); /// Represents the current status of a payment. @@ -167,7 +167,7 @@ pub enum PaymentStatus { impl_writeable_tlv_based_enum!(PaymentStatus, (0, Pending) => {}, (2, Succeeded) => {}, - (4, Failed) => {}; + (4, Failed) => {} ); /// Represents the kind of a payment. @@ -293,7 +293,7 @@ impl_writeable_tlv_based_enum!(PaymentKind, (2, preimage, option), (3, quantity, option), (4, secret, option), - }; + } ); /// Limits applying to how much fee we allow an LSP to deduct from the payment amount. 
@@ -499,11 +499,11 @@ where #[cfg(test)] mod tests { use super::*; + use bitcoin::io::Cursor; use lightning::util::{ ser::Readable, test_utils::{TestLogger, TestStore}, }; - use std::io::Cursor; use std::sync::Arc; /// We refactored `PaymentDetails` to hold a payment id and moved some required fields into diff --git a/src/payment/unified_qr.rs b/src/payment/unified_qr.rs index 66488e232..88d372456 100644 --- a/src/payment/unified_qr.rs +++ b/src/payment/unified_qr.rs @@ -90,7 +90,8 @@ impl UnifiedQrPayment { let amount_msats = amount_sats * 1_000; - let bolt12_offer = match self.bolt12_payment.receive(amount_msats, description, None) { + let bolt12_offer = match self.bolt12_payment.receive(amount_msats, description, None, None) + { Ok(offer) => Some(offer), Err(e) => { log_error!(self.logger, "Failed to create offer: {}", e); diff --git a/src/sweep.rs b/src/sweep.rs index 5c1d62a20..ba10869b8 100644 --- a/src/sweep.rs +++ b/src/sweep.rs @@ -10,10 +10,10 @@ //! once sufficient time has passed for us to be confident any users completed the migration. use lightning::impl_writeable_tlv_based; -use lightning::ln::ChannelId; +use lightning::ln::types::ChannelId; use lightning::sign::SpendableOutputDescriptor; -use bitcoin::{BlockHash, Transaction}; +use bitcoin::{Amount, BlockHash, Transaction}; #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct DeprecatedSpendableOutputInfo { @@ -38,7 +38,7 @@ impl_writeable_tlv_based!(DeprecatedSpendableOutputInfo, { (14, confirmation_hash, option), }); -pub(crate) fn value_satoshis_from_descriptor(descriptor: &SpendableOutputDescriptor) -> u64 { +pub(crate) fn value_from_descriptor(descriptor: &SpendableOutputDescriptor) -> Amount { match &descriptor { SpendableOutputDescriptor::StaticOutput { output, .. } => output.value, SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, diff --git a/src/tx_broadcaster.rs b/src/tx_broadcaster.rs index 88415ba46..37bd616dc 100644 --- a/src/tx_broadcaster.rs +++ b/src/tx_broadcaster.rs @@ -47,6 +47,7 @@ where let mut receiver = self.queue_receiver.lock().await; while let Some(next_package) = receiver.recv().await { for tx in &next_package { + let txid = tx.compute_txid(); let timeout_fut = tokio::time::timeout( Duration::from_secs(TX_BROADCAST_TIMEOUT_SECS), self.esplora_client.broadcast(tx), @@ -54,11 +55,7 @@ where match timeout_fut.await { Ok(res) => match res { Ok(()) => { - log_trace!( - self.logger, - "Successfully broadcast transaction {}", - tx.txid() - ); + log_trace!(self.logger, "Successfully broadcast transaction {}", txid); }, Err(e) => match e { esplora_client::Error::Reqwest(err) => { @@ -85,7 +82,7 @@ where log_error!( self.logger, "Failed to broadcast transaction {}: {}", - tx.txid(), + txid, e ); log_trace!( @@ -100,7 +97,7 @@ where log_error!( self.logger, "Failed to broadcast transaction due to timeout {}: {}", - tx.txid(), + txid, e ); log_trace!( diff --git a/src/types.rs b/src/types.rs index 591b73b4d..5005d93a6 100644 --- a/src/types.rs +++ b/src/types.rs @@ -9,11 +9,11 @@ use crate::logger::FilesystemLogger; use crate::message_handler::NodeCustomMessageHandler; use lightning::chain::chainmonitor; -use lightning::ln::channelmanager::ChannelDetails as LdkChannelDetails; +use lightning::ln::channel_state::ChannelDetails as LdkChannelDetails; use lightning::ln::msgs::RoutingMessageHandler; use lightning::ln::msgs::SocketAddress; use lightning::ln::peer_handler::IgnoringMessageHandler; -use lightning::ln::ChannelId; +use lightning::ln::types::ChannelId; use 
lightning::routing::gossip; use lightning::routing::router::DefaultRouter; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters}; @@ -72,19 +72,11 @@ pub(crate) type Broadcaster = crate::tx_broadcaster::TransactionBroadcaster>; -pub(crate) type Wallet = crate::wallet::Wallet< - bdk::database::SqliteDatabase, - Arc, - Arc, - Arc, ->; +pub(crate) type Wallet = + crate::wallet::Wallet, Arc, Arc>; -pub(crate) type KeysManager = crate::wallet::WalletKeysManager< - bdk::database::SqliteDatabase, - Arc, - Arc, - Arc, ->; +pub(crate) type KeysManager = + crate::wallet::WalletKeysManager, Arc, Arc>; pub(crate) type Router = DefaultRouter< Arc, @@ -121,6 +113,7 @@ pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMesse Arc, Arc, IgnoringMessageHandler, + IgnoringMessageHandler, >; pub(crate) type MessageRouter = lightning::onion_message::messenger::DefaultMessageRouter< @@ -234,7 +227,7 @@ pub struct ChannelDetails { /// This is a strict superset of `is_channel_ready`. pub is_usable: bool, /// Returns `true` if this channel is (or will be) publicly-announced - pub is_public: bool, + pub is_announced: bool, /// The difference in the CLTV value between incoming HTLCs and an outbound HTLC forwarded over /// the channel. pub cltv_expiry_delta: Option, @@ -308,7 +301,7 @@ impl From for ChannelDetails { is_outbound: value.is_outbound, is_channel_ready: value.is_channel_ready, is_usable: value.is_usable, - is_public: value.is_public, + is_announced: value.is_announced, cltv_expiry_delta: value.config.map(|c| c.cltv_expiry_delta), counterparty_unspendable_punishment_reserve: value .counterparty diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 2a6ac8da3..a66bcddea 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -14,8 +14,9 @@ pub use crate::graph::{ChannelInfo, ChannelUpdateInfo, NodeAnnouncementInfo, Nod pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, PaymentStatus}; pub use crate::payment::{MaxTotalRoutingFeeLimit, QrPaymentResult, SendingParameters}; +pub use lightning::chain::channelmonitor::BalanceSource; pub use lightning::events::{ClosureReason, PaymentFailureReason}; -pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; +pub use lightning::ln::types::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; pub use lightning::offers::invoice::Bolt12Invoice; pub use lightning::offers::offer::{Offer, OfferId}; pub use lightning::offers::refund::Refund; diff --git a/src/wallet/mod.rs b/src/wallet/mod.rs index 6da08715c..b1c053f66 100644 --- a/src/wallet/mod.rs +++ b/src/wallet/mod.rs @@ -5,9 +5,11 @@ // http://opensource.org/licenses/MIT>, at your option. You may not use this file except in // accordance with one or both of these licenses. 
+use persist::KVStoreWalletPersister; + use crate::logger::{log_error, log_info, log_trace, Logger}; -use crate::config::BDK_WALLET_SYNC_TIMEOUT_SECS; +use crate::config::{BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, BDK_WALLET_SYNC_TIMEOUT_SECS}; use crate::fee_estimator::{ConfirmationTarget, FeeEstimator}; use crate::Error; @@ -22,77 +24,70 @@ use lightning::sign::{ }; use lightning::util::message_signing; +use lightning_invoice::RawBolt11Invoice; -use bdk::blockchain::EsploraBlockchain; -use bdk::database::BatchDatabase; -use bdk::wallet::AddressIndex; -use bdk::{Balance, SignOptions, SyncOptions}; +use bdk_chain::ChainPosition; +use bdk_esplora::EsploraAsyncExt; +use bdk_wallet::{KeychainKind, PersistedWallet, SignOptions}; -use bitcoin::address::{Payload, WitnessVersion}; -use bitcoin::bech32::u5; use bitcoin::blockdata::constants::WITNESS_SCALE_FACTOR; use bitcoin::blockdata::locktime::absolute::LockTime; -use bitcoin::hash_types::WPubkeyHash; use bitcoin::hashes::Hash; use bitcoin::key::XOnlyPublicKey; -use bitcoin::psbt::PartiallySignedTransaction; +use bitcoin::psbt::Psbt; use bitcoin::secp256k1::ecdh::SharedSecret; use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature}; use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey, Signing}; -use bitcoin::{ScriptBuf, Transaction, TxOut, Txid}; +use bitcoin::{ + Amount, ScriptBuf, Transaction, TxOut, Txid, WPubkeyHash, WitnessProgram, WitnessVersion, +}; + +use esplora_client::AsyncClient as EsploraAsyncClient; use std::ops::{Deref, DerefMut}; -use std::sync::{Arc, Mutex, RwLock}; +use std::sync::{Arc, Mutex}; use std::time::Duration; +pub(crate) mod persist; +pub(crate) mod ser; + enum WalletSyncStatus { Completed, InProgress { subscribers: tokio::sync::broadcast::Sender> }, } -pub(crate) struct Wallet +pub(crate) struct Wallet where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, { - // A BDK blockchain used for wallet sync. - blockchain: EsploraBlockchain, // A BDK on-chain wallet. - inner: Mutex>, - // A cache storing the most recently retrieved fee rate estimations. + inner: Mutex>, + persister: Mutex, + esplora_client: EsploraAsyncClient, broadcaster: B, fee_estimator: E, // A Mutex holding the current sync status. sync_status: Mutex, - // TODO: Drop this workaround after BDK 1.0 upgrade. 
- balance_cache: RwLock, logger: L, } -impl Wallet +impl Wallet where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, { pub(crate) fn new( - blockchain: EsploraBlockchain, wallet: bdk::Wallet, broadcaster: B, fee_estimator: E, - logger: L, + wallet: bdk_wallet::PersistedWallet, + wallet_persister: KVStoreWalletPersister, esplora_client: EsploraAsyncClient, + broadcaster: B, fee_estimator: E, logger: L, ) -> Self { - let start_balance = wallet.get_balance().unwrap_or(Balance { - immature: 0, - trusted_pending: 0, - untrusted_pending: 0, - confirmed: 0, - }); - let inner = Mutex::new(wallet); + let persister = Mutex::new(wallet_persister); let sync_status = Mutex::new(WalletSyncStatus::Completed); - let balance_cache = RwLock::new(start_balance); - Self { blockchain, inner, broadcaster, fee_estimator, sync_status, balance_cache, logger } + Self { inner, persister, esplora_client, broadcaster, fee_estimator, sync_status, logger } } pub(crate) async fn sync(&self) -> Result<(), Error> { @@ -106,41 +101,53 @@ where } let res = { - let wallet_lock = self.inner.lock().unwrap(); + let full_scan_request = self.inner.lock().unwrap().start_full_scan().build(); let wallet_sync_timeout_fut = tokio::time::timeout( Duration::from_secs(BDK_WALLET_SYNC_TIMEOUT_SECS), - wallet_lock.sync(&self.blockchain, SyncOptions { progress: None }), + self.esplora_client.full_scan( + full_scan_request, + BDK_CLIENT_STOP_GAP, + BDK_CLIENT_CONCURRENCY, + ), ); match wallet_sync_timeout_fut.await { Ok(res) => match res { - Ok(()) => { - // TODO: Drop this workaround after BDK 1.0 upgrade. - // Update balance cache after syncing. - if let Ok(balance) = wallet_lock.get_balance() { - *self.balance_cache.write().unwrap() = balance; - } - Ok(()) - }, - Err(e) => match e { - bdk::Error::Esplora(ref be) => match **be { - bdk::blockchain::esplora::EsploraError::Reqwest(_) => { + Ok(update) => { + let mut locked_wallet = self.inner.lock().unwrap(); + match locked_wallet.apply_update(update) { + Ok(()) => { + let mut locked_persister = self.persister.lock().unwrap(); + locked_wallet.persist(&mut locked_persister).map_err(|e| { + log_error!(self.logger, "Failed to persist wallet: {}", e); + Error::PersistenceFailed + })?; + + Ok(()) + }, + Err(e) => { log_error!( self.logger, - "Sync failed due to HTTP connection error: {}", + "Sync failed due to chain connection error: {}", e ); - Err(From::from(e)) - }, - _ => { - log_error!(self.logger, "Sync failed due to Esplora error: {}", e); - Err(From::from(e)) + Err(Error::WalletOperationFailed) }, + } + }, + Err(e) => match *e { + esplora_client::Error::Reqwest(he) => { + log_error!( + self.logger, + "Sync failed due to HTTP connection error: {}", + he + ); + Err(Error::WalletOperationFailed) }, _ => { - log_error!(self.logger, "Wallet sync error: {}", e); - Err(From::from(e)) + log_error!(self.logger, "Sync failed due to Esplora error: {}", e); + Err(Error::WalletOperationFailed) }, }, }, @@ -157,22 +164,22 @@ where } pub(crate) fn create_funding_transaction( - &self, output_script: ScriptBuf, value_sats: u64, confirmation_target: ConfirmationTarget, + &self, output_script: ScriptBuf, amount: Amount, confirmation_target: ConfirmationTarget, locktime: LockTime, ) -> Result { let fee_rate = self.fee_estimator.estimate_fee_rate(confirmation_target); - let locked_wallet = self.inner.lock().unwrap(); + let mut locked_wallet = self.inner.lock().unwrap(); let mut tx_builder = locked_wallet.build_tx(); tx_builder - .add_recipient(output_script, 
value_sats) + .add_recipient(output_script, amount) .fee_rate(fee_rate) .nlocktime(locktime) .enable_rbf(); let mut psbt = match tx_builder.finish() { - Ok((psbt, _)) => { + Ok(psbt) => { log_trace!(self.logger, "Created funding PSBT: {:?}", psbt); psbt }, @@ -194,39 +201,52 @@ where }, } - Ok(psbt.extract_tx()) + let mut locked_persister = self.persister.lock().unwrap(); + locked_wallet.persist(&mut locked_persister).map_err(|e| { + log_error!(self.logger, "Failed to persist wallet: {}", e); + Error::PersistenceFailed + })?; + + let tx = psbt.extract_tx().map_err(|e| { + log_error!(self.logger, "Failed to extract transaction: {}", e); + e + })?; + + Ok(tx) } pub(crate) fn get_new_address(&self) -> Result { - let address_info = self.inner.lock().unwrap().get_address(AddressIndex::New)?; + let mut locked_wallet = self.inner.lock().unwrap(); + let mut locked_persister = self.persister.lock().unwrap(); + + let address_info = locked_wallet.reveal_next_address(KeychainKind::External); + locked_wallet.persist(&mut locked_persister).map_err(|e| { + log_error!(self.logger, "Failed to persist wallet: {}", e); + Error::PersistenceFailed + })?; Ok(address_info.address) } fn get_new_internal_address(&self) -> Result { - let address_info = - self.inner.lock().unwrap().get_internal_address(AddressIndex::LastUnused)?; + let mut locked_wallet = self.inner.lock().unwrap(); + let mut locked_persister = self.persister.lock().unwrap(); + + let address_info = locked_wallet.next_unused_address(KeychainKind::Internal); + locked_wallet.persist(&mut locked_persister).map_err(|e| { + log_error!(self.logger, "Failed to persist wallet: {}", e); + Error::PersistenceFailed + })?; Ok(address_info.address) } pub(crate) fn get_balances( &self, total_anchor_channels_reserve_sats: u64, ) -> Result<(u64, u64), Error> { - // TODO: Drop this workaround after BDK 1.0 upgrade. - // We get the balance and update our cache if we can do so without blocking on the wallet - // Mutex. Otherwise, we return a cached value. - let balance = match self.inner.try_lock() { - Ok(wallet_lock) => { - // Update balance cache if we can. - let balance = wallet_lock.get_balance()?; - *self.balance_cache.write().unwrap() = balance.clone(); - balance - }, - Err(_) => self.balance_cache.read().unwrap().clone(), - }; + let balance = self.inner.lock().unwrap().balance(); let (total, spendable) = ( - balance.get_total(), - balance.get_spendable().saturating_sub(total_anchor_channels_reserve_sats), + balance.total().to_sat(), + balance.trusted_spendable().to_sat().saturating_sub(total_anchor_channels_reserve_sats), ); Ok((total, spendable)) @@ -243,18 +263,18 @@ where /// If `amount_msat_or_drain` is `None` the wallet will be drained, i.e., all available funds will be /// spent. 
pub(crate) fn send_to_address( - &self, address: &bitcoin::Address, amount_msat_or_drain: Option, + &self, address: &bitcoin::Address, amount_or_drain: Option, ) -> Result { let confirmation_target = ConfirmationTarget::OnchainPayment; let fee_rate = self.fee_estimator.estimate_fee_rate(confirmation_target); let tx = { - let locked_wallet = self.inner.lock().unwrap(); + let mut locked_wallet = self.inner.lock().unwrap(); let mut tx_builder = locked_wallet.build_tx(); - if let Some(amount_sats) = amount_msat_or_drain { + if let Some(amount) = amount_or_drain { tx_builder - .add_recipient(address.script_pubkey(), amount_sats) + .add_recipient(address.script_pubkey(), amount) .fee_rate(fee_rate) .enable_rbf(); } else { @@ -266,7 +286,7 @@ where } let mut psbt = match tx_builder.finish() { - Ok((psbt, _)) => { + Ok(psbt) => { log_trace!(self.logger, "Created PSBT: {:?}", psbt); psbt }, @@ -287,19 +307,29 @@ where return Err(err.into()); }, } - psbt.extract_tx() + + let mut locked_persister = self.persister.lock().unwrap(); + locked_wallet.persist(&mut locked_persister).map_err(|e| { + log_error!(self.logger, "Failed to persist wallet: {}", e); + Error::PersistenceFailed + })?; + + psbt.extract_tx().map_err(|e| { + log_error!(self.logger, "Failed to extract transaction: {}", e); + e + })? }; self.broadcaster.broadcast_transactions(&[&tx]); - let txid = tx.txid(); + let txid = tx.compute_txid(); - if let Some(amount_sats) = amount_msat_or_drain { + if let Some(amount) = amount_or_drain { log_info!( self.logger, "Created new transaction {} sending {}sats on-chain to address {}", txid, - amount_sats, + amount.to_sat(), address ); } else { @@ -368,9 +398,8 @@ where } } -impl WalletSource for Wallet +impl WalletSource for Wallet where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, @@ -378,67 +407,57 @@ where fn list_confirmed_utxos(&self) -> Result, ()> { let locked_wallet = self.inner.lock().unwrap(); let mut utxos = Vec::new(); - let confirmed_txs: Vec = locked_wallet - .list_transactions(false) - .map_err(|e| { - log_error!(self.logger, "Failed to retrieve transactions from wallet: {}", e); - })? - .into_iter() - .filter(|t| t.confirmation_time.is_some()) + let confirmed_txs: Vec = locked_wallet + .transactions() + .filter(|t| matches!(t.chain_position, ChainPosition::Confirmed(_))) + .map(|t| t.tx_node.txid) .collect(); - let unspent_confirmed_utxos = locked_wallet - .list_unspent() - .map_err(|e| { - log_error!( - self.logger, - "Failed to retrieve unspent transactions from wallet: {}", - e - ); - })? 
- .into_iter() - .filter(|u| confirmed_txs.iter().find(|t| t.txid == u.outpoint.txid).is_some()); + let unspent_confirmed_utxos = + locked_wallet.list_unspent().filter(|u| confirmed_txs.contains(&u.outpoint.txid)); for u in unspent_confirmed_utxos { - let payload = Payload::from_script(&u.txout.script_pubkey).map_err(|e| { - log_error!(self.logger, "Failed to retrieve script payload: {}", e); - })?; + let script_pubkey = u.txout.script_pubkey; + match script_pubkey.witness_version() { + Some(version @ WitnessVersion::V0) => { + let witness_program = WitnessProgram::new(version, script_pubkey.as_bytes()) + .map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; - match payload { - Payload::WitnessProgram(program) => match program.version() { - WitnessVersion::V0 if program.program().len() == 20 => { - let wpkh = - WPubkeyHash::from_slice(program.program().as_bytes()).map_err(|e| { - log_error!(self.logger, "Failed to retrieve script payload: {}", e); - })?; - let utxo = Utxo::new_v0_p2wpkh(u.outpoint, u.txout.value, &wpkh); - utxos.push(utxo); - }, - WitnessVersion::V1 => { - XOnlyPublicKey::from_slice(program.program().as_bytes()).map_err(|e| { + let wpkh = WPubkeyHash::from_slice(&witness_program.program().as_bytes()) + .map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + let utxo = Utxo::new_v0_p2wpkh(u.outpoint, u.txout.value, &wpkh); + utxos.push(utxo); + }, + Some(version @ WitnessVersion::V1) => { + let witness_program = WitnessProgram::new(version, script_pubkey.as_bytes()) + .map_err(|e| { log_error!(self.logger, "Failed to retrieve script payload: {}", e); })?; - let utxo = Utxo { - outpoint: u.outpoint, - output: TxOut { - value: u.txout.value, - script_pubkey: ScriptBuf::new_witness_program(&program), - }, - satisfaction_weight: 1 /* empty script_sig */ * WITNESS_SCALE_FACTOR as u64 + - 1 /* witness items */ + 1 /* schnorr sig len */ + 64, /* schnorr sig */ - }; - utxos.push(utxo); - }, - _ => { - log_error!( - self.logger, - "Unexpected witness version or length. Version: {}, Length: {}", - program.version(), - program.program().len() - ); - }, + XOnlyPublicKey::from_slice(&witness_program.program().as_bytes()).map_err( + |e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + }, + )?; + + let utxo = Utxo { + outpoint: u.outpoint, + output: TxOut { + value: u.txout.value, + script_pubkey: ScriptBuf::new_witness_program(&witness_program), + }, + satisfaction_weight: 1 /* empty script_sig */ * WITNESS_SCALE_FACTOR as u64 + + 1 /* witness items */ + 1 /* schnorr sig len */ + 64, /* schnorr sig */ + }; + utxos.push(utxo); }, - _ => { + Some(version) => { + log_error!(self.logger, "Unexpected witness version: {}", version,); + }, + None => { log_error!( self.logger, "Tried to use a non-witness script. This must never happen." 
@@ -452,16 +471,18 @@ where } fn get_change_script(&self) -> Result { - let locked_wallet = self.inner.lock().unwrap(); - let address_info = - locked_wallet.get_internal_address(AddressIndex::LastUnused).map_err(|e| { - log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); - })?; + let mut locked_wallet = self.inner.lock().unwrap(); + let mut locked_persister = self.persister.lock().unwrap(); + let address_info = locked_wallet.next_unused_address(KeychainKind::Internal); + locked_wallet.persist(&mut locked_persister).map_err(|e| { + log_error!(self.logger, "Failed to persist wallet: {}", e); + () + })?; Ok(address_info.address.script_pubkey()) } - fn sign_psbt(&self, mut psbt: PartiallySignedTransaction) -> Result { + fn sign_psbt(&self, mut psbt: Psbt) -> Result { let locked_wallet = self.inner.lock().unwrap(); // While BDK populates both `witness_utxo` and `non_witness_utxo` fields, LDK does not. As @@ -482,27 +503,30 @@ where }, } - Ok(psbt.extract_tx()) + let tx = psbt.extract_tx().map_err(|e| { + log_error!(self.logger, "Failed to extract transaction: {}", e); + () + })?; + + Ok(tx) } } /// Similar to [`KeysManager`], but overrides the destination and shutdown scripts so they are /// directly spendable by the BDK wallet. -pub(crate) struct WalletKeysManager +pub(crate) struct WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, { inner: KeysManager, - wallet: Arc>, + wallet: Arc>, logger: L, } -impl WalletKeysManager +impl WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, @@ -513,15 +537,14 @@ where /// `starting_time_nanos`. pub fn new( seed: &[u8; 32], starting_time_secs: u64, starting_time_nanos: u32, - wallet: Arc>, logger: L, + wallet: Arc>, logger: L, ) -> Self { let inner = KeysManager::new(seed, starting_time_secs, starting_time_nanos); Self { inner, wallet, logger } } - pub fn sign_message(&self, msg: &[u8]) -> Result { + pub fn sign_message(&self, msg: &[u8]) -> String { message_signing::sign(msg, &self.inner.get_node_secret_key()) - .or(Err(Error::MessageSigningFailed)) } pub fn get_node_secret_key(&self) -> SecretKey { @@ -533,9 +556,8 @@ where } } -impl NodeSigner for WalletKeysManager +impl NodeSigner for WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, @@ -555,9 +577,9 @@ where } fn sign_invoice( - &self, hrp_bytes: &[u8], invoice_data: &[u5], recipient: Recipient, + &self, invoice: &RawBolt11Invoice, recipient: Recipient, ) -> Result { - self.inner.sign_invoice(hrp_bytes, invoice_data, recipient) + self.inner.sign_invoice(invoice, recipient) } fn sign_gossip_message(&self, msg: UnsignedGossipMessage<'_>) -> Result { @@ -577,9 +599,8 @@ where } } -impl OutputSpender for WalletKeysManager +impl OutputSpender for WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, @@ -601,9 +622,8 @@ where } } -impl EntropySource for WalletKeysManager +impl EntropySource for WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, @@ -613,9 +633,8 @@ where } } -impl SignerProvider for WalletKeysManager +impl SignerProvider for WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, @@ -650,11 +669,10 @@ where log_error!(self.logger, "Failed to retrieve new address 
from wallet: {}", e); })?; - match address.payload { - Payload::WitnessProgram(program) => ShutdownScript::new_witness_program(&program) - .map_err(|e| { - log_error!(self.logger, "Invalid shutdown script: {:?}", e); - }), + match address.witness_program() { + Some(program) => ShutdownScript::new_witness_program(&program).map_err(|e| { + log_error!(self.logger, "Invalid shutdown script: {:?}", e); + }), _ => { log_error!( self.logger, @@ -666,9 +684,8 @@ where } } -impl ChangeDestinationSource for WalletKeysManager +impl ChangeDestinationSource for WalletKeysManager where - D: BatchDatabase, B::Target: BroadcasterInterface, E::Target: FeeEstimator, L::Target: Logger, diff --git a/src/wallet/persist.rs b/src/wallet/persist.rs new file mode 100644 index 000000000..06af541a2 --- /dev/null +++ b/src/wallet/persist.rs @@ -0,0 +1,187 @@ +// This file is Copyright its original authors, visible in version control history. +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license , at your option. You may not use this file except in +// accordance with one or both of these licenses. + +use crate::io::utils::{ + read_bdk_wallet_change_set, write_bdk_wallet_change_descriptor, write_bdk_wallet_descriptor, + write_bdk_wallet_indexer, write_bdk_wallet_local_chain, write_bdk_wallet_network, + write_bdk_wallet_tx_graph, +}; +use crate::logger::{log_error, FilesystemLogger}; +use crate::types::DynStore; + +use lightning::util::logger::Logger; + +use bdk_chain::Merge; +use bdk_wallet::{ChangeSet, WalletPersister}; + +use std::sync::Arc; +pub(crate) struct KVStoreWalletPersister { + latest_change_set: Option, + kv_store: Arc, + logger: Arc, +} + +impl KVStoreWalletPersister { + pub(crate) fn new(kv_store: Arc, logger: Arc) -> Self { + Self { latest_change_set: None, kv_store, logger } + } +} + +impl WalletPersister for KVStoreWalletPersister { + type Error = std::io::Error; + + fn initialize(persister: &mut Self) -> Result { + // Return immediately if we have already been initialized. + if let Some(latest_change_set) = persister.latest_change_set.as_ref() { + return Ok(latest_change_set.clone()); + } + + let change_set_opt = read_bdk_wallet_change_set( + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + + let change_set = match change_set_opt { + Some(persisted_change_set) => persisted_change_set, + None => { + // BDK docs state: "The implementation must return all data currently stored in the + // persister. If there is no data, return an empty changeset (using + // ChangeSet::default())." + ChangeSet::default() + }, + }; + persister.latest_change_set = Some(change_set.clone()); + Ok(change_set) + } + + fn persist(persister: &mut Self, change_set: &ChangeSet) -> Result<(), Self::Error> { + if change_set.is_empty() { + return Ok(()); + } + + // We're allowed to fail here if we're not initialized, BDK docs state: "This method can fail if the + // persister is not initialized." + let latest_change_set = persister.latest_change_set.as_mut().ok_or_else(|| { + std::io::Error::new( + std::io::ErrorKind::Other, + "Wallet must be initialized before calling persist", + ) + })?; + + // Check that we'd never accidentally override any persisted data if the change set doesn't + // match our descriptor/change_descriptor/network. 
+ if let Some(descriptor) = change_set.descriptor.as_ref() { + if latest_change_set.descriptor.is_some() + && latest_change_set.descriptor.as_ref() != Some(descriptor) + { + debug_assert!(false, "Wallet descriptor must never change"); + log_error!( + persister.logger, + "Wallet change set doesn't match persisted descriptor. This should never happen." + ); + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Wallet change set doesn't match persisted descriptor. This should never happen." + )); + } else { + latest_change_set.descriptor = Some(descriptor.clone()); + write_bdk_wallet_descriptor( + &descriptor, + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + } + } + + if let Some(change_descriptor) = change_set.change_descriptor.as_ref() { + if latest_change_set.change_descriptor.is_some() + && latest_change_set.change_descriptor.as_ref() != Some(change_descriptor) + { + debug_assert!(false, "Wallet change_descriptor must never change"); + log_error!( + persister.logger, + "Wallet change set doesn't match persisted change_descriptor. This should never happen." + ); + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Wallet change set doesn't match persisted change_descriptor. This should never happen." + )); + } else { + latest_change_set.change_descriptor = Some(change_descriptor.clone()); + write_bdk_wallet_change_descriptor( + &change_descriptor, + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + } + } + + if let Some(network) = change_set.network { + if latest_change_set.network.is_some() && latest_change_set.network != Some(network) { + debug_assert!(false, "Wallet network must never change"); + log_error!( + persister.logger, + "Wallet change set doesn't match persisted network. This should never happen." + ); + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Wallet change set doesn't match persisted network. This should never happen.", + )); + } else { + latest_change_set.network = Some(network); + write_bdk_wallet_network( + &network, + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + } + } + + debug_assert!( + latest_change_set.descriptor.is_some() + && latest_change_set.change_descriptor.is_some() + && latest_change_set.network.is_some(), + "descriptor, change_descriptor, and network are mandatory ChangeSet fields" + ); + + // Merge and persist the sub-changesets individually if necessary. + // + // According to the BDK team the individual sub-changesets can be persisted + // individually/non-atomically, "(h)owever, the localchain tip is used by block-by-block + // chain sources as a reference as to where to sync from, so I would persist that last", "I + // would write in this order: indexer, tx_graph, local_chain", which is why we follow this + // particular order. 
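+	// (Persisting the local_chain tip last means that, if we crash mid-persist, the stored tip
+	// never runs ahead of the tx_graph/indexer data already written, so the next sync starts no
+	// later than the data we actually have on disk.)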
+ if !change_set.indexer.is_empty() { + latest_change_set.indexer.merge(change_set.indexer.clone()); + write_bdk_wallet_indexer( + &latest_change_set.indexer, + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + } + + if !change_set.tx_graph.is_empty() { + latest_change_set.tx_graph.merge(change_set.tx_graph.clone()); + write_bdk_wallet_tx_graph( + &latest_change_set.tx_graph, + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + } + + if !change_set.local_chain.is_empty() { + latest_change_set.local_chain.merge(change_set.local_chain.clone()); + write_bdk_wallet_local_chain( + &latest_change_set.local_chain, + Arc::clone(&persister.kv_store), + Arc::clone(&persister.logger), + )?; + } + + Ok(()) + } +} diff --git a/src/wallet/ser.rs b/src/wallet/ser.rs new file mode 100644 index 000000000..2e33992a8 --- /dev/null +++ b/src/wallet/ser.rs @@ -0,0 +1,346 @@ +// This file is Copyright its original authors, visible in version control history. +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license , at your option. You may not use this file except in +// accordance with one or both of these licenses. + +use lightning::ln::msgs::DecodeError; +use lightning::util::ser::{BigSize, Readable, RequiredWrapper, Writeable, Writer}; +use lightning::{decode_tlv_stream, encode_tlv_stream, read_tlv_fields, write_tlv_fields}; + +use bdk_chain::bdk_core::{BlockId, ConfirmationBlockTime}; +use bdk_chain::indexer::keychain_txout::ChangeSet as BdkIndexerChangeSet; +use bdk_chain::local_chain::ChangeSet as BdkLocalChainChangeSet; +use bdk_chain::tx_graph::ChangeSet as BdkTxGraphChangeSet; +use bdk_chain::DescriptorId; + +use bdk_wallet::descriptor::Descriptor; +use bdk_wallet::keys::DescriptorPublicKey; + +use bitcoin::hashes::sha256::Hash as Sha256Hash; +use bitcoin::p2p::Magic; +use bitcoin::{BlockHash, Network, OutPoint, Transaction, TxOut, Txid}; + +use std::collections::{BTreeMap, BTreeSet}; +use std::str::FromStr; +use std::sync::Arc; + +const CHANGESET_SERIALIZATION_VERSION: u8 = 1; + +pub(crate) struct ChangeSetSerWrapper<'a, T>(pub &'a T); +pub(crate) struct ChangeSetDeserWrapper(pub T); + +impl<'a> Writeable for ChangeSetSerWrapper<'a, Descriptor> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + CHANGESET_SERIALIZATION_VERSION.write(writer)?; + + self.0.to_string().write(writer) + } +} + +impl Readable for ChangeSetDeserWrapper> { + fn read(reader: &mut R) -> Result { + let version: u8 = Readable::read(reader)?; + if version != CHANGESET_SERIALIZATION_VERSION { + return Err(DecodeError::UnknownVersion); + } + + let descriptor_str: String = Readable::read(reader)?; + let descriptor = Descriptor::::from_str(&descriptor_str) + .map_err(|_| DecodeError::InvalidValue)?; + Ok(Self(descriptor)) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, Network> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + CHANGESET_SERIALIZATION_VERSION.write(writer)?; + + self.0.magic().to_bytes().write(writer) + } +} + +impl Readable for ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + let version: u8 = Readable::read(reader)?; + if version != CHANGESET_SERIALIZATION_VERSION { + return Err(DecodeError::UnknownVersion); + } + + let buf: [u8; 4] = Readable::read(reader)?; + let magic = Magic::from_bytes(buf); + let network = Network::from_magic(magic).ok_or(DecodeError::InvalidValue)?; + Ok(Self(network)) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, 
BdkLocalChainChangeSet> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + CHANGESET_SERIALIZATION_VERSION.write(writer)?; + + encode_tlv_stream!(writer, { + (0, self.0.blocks, required), + }); + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + let version: u8 = Readable::read(reader)?; + if version != CHANGESET_SERIALIZATION_VERSION { + return Err(DecodeError::UnknownVersion); + } + + let mut blocks = RequiredWrapper(None); + decode_tlv_stream!(reader, { + (0, blocks, required), + }); + Ok(Self(BdkLocalChainChangeSet { blocks: blocks.0.unwrap() })) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, BdkTxGraphChangeSet> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + CHANGESET_SERIALIZATION_VERSION.write(writer)?; + + encode_tlv_stream!(writer, { + (0, ChangeSetSerWrapper(&self.0.txs), required), + (2, self.0.txouts, required), + (4, ChangeSetSerWrapper(&self.0.anchors), required), + (6, self.0.last_seen, required), + }); + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper> { + fn read(reader: &mut R) -> Result { + let version: u8 = Readable::read(reader)?; + if version != CHANGESET_SERIALIZATION_VERSION { + return Err(DecodeError::UnknownVersion); + } + + let mut txs: RequiredWrapper>>> = + RequiredWrapper(None); + let mut txouts: RequiredWrapper> = RequiredWrapper(None); + let mut anchors: RequiredWrapper< + ChangeSetDeserWrapper>, + > = RequiredWrapper(None); + let mut last_seen: RequiredWrapper> = RequiredWrapper(None); + + decode_tlv_stream!(reader, { + (0, txs, required), + (2, txouts, required), + (4, anchors, required), + (6, last_seen, required), + }); + + Ok(Self(BdkTxGraphChangeSet { + txs: txs.0.unwrap().0, + txouts: txouts.0.unwrap(), + anchors: anchors.0.unwrap().0, + last_seen: last_seen.0.unwrap(), + })) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, BTreeSet<(ConfirmationBlockTime, Txid)>> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + let len = BigSize(self.0.len() as u64); + len.write(writer)?; + for (time, txid) in self.0.iter() { + write_tlv_fields!(writer, { + (0, ChangeSetSerWrapper(time), required), + (2, txid, required), + }); + } + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper> { + fn read(reader: &mut R) -> Result { + let len: BigSize = Readable::read(reader)?; + let mut set = BTreeSet::new(); + for _ in 0..len.0 { + let mut time: RequiredWrapper> = + RequiredWrapper(None); + let mut txid: RequiredWrapper = RequiredWrapper(None); + read_tlv_fields!(reader, { + (0, time, required), + (2, txid, required), + }); + set.insert((time.0.unwrap().0, txid.0.unwrap())); + } + Ok(Self(set)) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, BTreeSet>> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + let len = BigSize(self.0.len() as u64); + len.write(writer)?; + for tx in self.0.iter() { + write_tlv_fields!(writer, { + (0, tx, required), + }); + } + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper>> { + fn read(reader: &mut R) -> Result { + let len: BigSize = Readable::read(reader)?; + let mut set = BTreeSet::new(); + for _ in 0..len.0 { + let mut tx: RequiredWrapper = RequiredWrapper(None); + read_tlv_fields!(reader, { + (0, tx, required), + }); + set.insert(Arc::new(tx.0.unwrap())); + } + Ok(Self(set)) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, ConfirmationBlockTime> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + 
encode_tlv_stream!(writer, { + (0, ChangeSetSerWrapper(&self.0.block_id), required), + (2, self.0.confirmation_time, required), + }); + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + let mut block_id: RequiredWrapper> = RequiredWrapper(None); + let mut confirmation_time: RequiredWrapper = RequiredWrapper(None); + + decode_tlv_stream!(reader, { + (0, block_id, required), + (2, confirmation_time, required), + }); + + Ok(Self(ConfirmationBlockTime { + block_id: block_id.0.unwrap().0, + confirmation_time: confirmation_time.0.unwrap(), + })) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, BlockId> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + encode_tlv_stream!(writer, { + (0, self.0.height, required), + (2, self.0.hash, required), + }); + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + let mut height: RequiredWrapper = RequiredWrapper(None); + let mut hash: RequiredWrapper = RequiredWrapper(None); + decode_tlv_stream!(reader, { + (0, height, required), + (2, hash, required), + }); + + Ok(Self(BlockId { height: height.0.unwrap(), hash: hash.0.unwrap() })) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, BdkIndexerChangeSet> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + CHANGESET_SERIALIZATION_VERSION.write(writer)?; + + encode_tlv_stream!(writer, { (0, ChangeSetSerWrapper(&self.0.last_revealed), required) }); + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + let version: u8 = Readable::read(reader)?; + if version != CHANGESET_SERIALIZATION_VERSION { + return Err(DecodeError::UnknownVersion); + } + + let mut last_revealed: RequiredWrapper>> = + RequiredWrapper(None); + + decode_tlv_stream!(reader, { (0, last_revealed, required) }); + + Ok(Self(BdkIndexerChangeSet { last_revealed: last_revealed.0.unwrap().0 })) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, BTreeMap> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + let len = BigSize(self.0.len() as u64); + len.write(writer)?; + for (descriptor_id, last_index) in self.0.iter() { + write_tlv_fields!(writer, { + (0, ChangeSetSerWrapper(descriptor_id), required), + (2, last_index, required), + }); + } + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper> { + fn read(reader: &mut R) -> Result { + let len: BigSize = Readable::read(reader)?; + let mut set = BTreeMap::new(); + for _ in 0..len.0 { + let mut descriptor_id: RequiredWrapper> = + RequiredWrapper(None); + let mut last_index: RequiredWrapper = RequiredWrapper(None); + read_tlv_fields!(reader, { + (0, descriptor_id, required), + (2, last_index, required), + }); + set.insert(descriptor_id.0.unwrap().0, last_index.0.unwrap()); + } + Ok(Self(set)) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, DescriptorId> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + encode_tlv_stream!(writer, { (0, ChangeSetSerWrapper(&self.0 .0), required) }); + Ok(()) + } +} + +impl Readable for ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + let mut hash: RequiredWrapper> = RequiredWrapper(None); + + decode_tlv_stream!(reader, { (0, hash, required) }); + + Ok(Self(DescriptorId(hash.0.unwrap().0))) + } +} + +impl<'a> Writeable for ChangeSetSerWrapper<'a, Sha256Hash> { + fn write(&self, writer: &mut W) -> Result<(), lightning::io::Error> { + writer.write_all(&self.0[..]) + } +} + +impl Readable for 
ChangeSetDeserWrapper { + fn read(reader: &mut R) -> Result { + use bitcoin::hashes::Hash; + + let buf: [u8; 32] = Readable::read(reader)?; + Ok(Self(Sha256Hash::from_slice(&buf[..]).unwrap())) + } +} diff --git a/tests/common/mod.rs b/tests/common/mod.rs index f8a9eae7a..a7cd87323 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -759,7 +759,7 @@ pub(crate) fn do_channel_full_cycle( println!("\nB close_channel (force: {})", force_close); if force_close { std::thread::sleep(Duration::from_secs(1)); - node_a.force_close_channel(&user_channel_id, node_b.node_id()).unwrap(); + node_a.force_close_channel(&user_channel_id, node_b.node_id(), None).unwrap(); } else { node_a.close_channel(&user_channel_id, node_b.node_id()).unwrap(); } @@ -913,7 +913,7 @@ impl TestSyncStore { fn do_list( &self, primary_namespace: &str, secondary_namespace: &str, - ) -> std::io::Result> { + ) -> lightning::io::Result> { let fs_res = self.fs_store.list(primary_namespace, secondary_namespace); let sqlite_res = self.sqlite_store.list(primary_namespace, secondary_namespace); let test_res = self.test_store.list(primary_namespace, secondary_namespace); @@ -944,7 +944,7 @@ impl TestSyncStore { impl KVStore for TestSyncStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, - ) -> std::io::Result> { + ) -> lightning::io::Result> { let _guard = self.serializer.read().unwrap(); let fs_res = self.fs_store.read(primary_namespace, secondary_namespace, key); @@ -969,7 +969,7 @@ impl KVStore for TestSyncStore { fn write( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8], - ) -> std::io::Result<()> { + ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); let fs_res = self.fs_store.write(primary_namespace, secondary_namespace, key, buf); let sqlite_res = self.sqlite_store.write(primary_namespace, secondary_namespace, key, buf); @@ -996,7 +996,7 @@ impl KVStore for TestSyncStore { fn remove( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, - ) -> std::io::Result<()> { + ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); let fs_res = self.fs_store.remove(primary_namespace, secondary_namespace, key, lazy); let sqlite_res = @@ -1024,7 +1024,7 @@ impl KVStore for TestSyncStore { fn list( &self, primary_namespace: &str, secondary_namespace: &str, - ) -> std::io::Result> { + ) -> lightning::io::Result> { let _guard = self.serializer.read().unwrap(); self.do_list(primary_namespace, secondary_namespace) } diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 907e89084..6d33e80c6 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -312,7 +312,7 @@ fn sign_verify_msg() { // Tests arbitrary message signing and later verification let msg = "OK computer".as_bytes(); - let sig = node.sign_message(msg).unwrap(); + let sig = node.sign_message(msg); let pkey = node.node_id(); assert!(node.verify_signature(msg, sig.as_str(), &pkey)); } @@ -437,7 +437,8 @@ fn simple_bolt12_send_receive() { std::thread::sleep(std::time::Duration::from_secs(1)); let expected_amount_msat = 100_000_000; - let offer = node_b.bolt12_payment().receive(expected_amount_msat, "asdf", Some(1)).unwrap(); + let offer = + node_b.bolt12_payment().receive(expected_amount_msat, "asdf", None, Some(1)).unwrap(); let expected_quantity = Some(1); let expected_payer_note = Some("Test".to_string()); let payment_id = node_a @@ -491,7 +492,7 @@ fn 
simple_bolt12_send_receive() { let offer_amount_msat = 100_000_000; let less_than_offer_amount = offer_amount_msat - 10_000; let expected_amount_msat = offer_amount_msat + 10_000; - let offer = node_b.bolt12_payment().receive(offer_amount_msat, "asdf", Some(1)).unwrap(); + let offer = node_b.bolt12_payment().receive(offer_amount_msat, "asdf", None, Some(1)).unwrap(); let expected_quantity = Some(1); let expected_payer_note = Some("Test".to_string()); assert!(node_a @@ -642,7 +643,7 @@ fn generate_bip21_uri() { match uqr_payment.clone() { Ok(ref uri) => { println!("Generated URI: {}", uri); - assert!(uri.contains("BITCOIN:")); + assert!(uri.contains("bitcoin:")); assert!(uri.contains("lightning=")); assert!(uri.contains("lno=")); },
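
Taken together, the test updates above track three public API changes: `Bolt12Payment::receive` now takes an optional expiry before the quantity, `Node::force_close_channel` takes an optional reason, and `Node::sign_message` is infallible. A minimal usage sketch under those assumptions (a fully built `node` is presumed, and the force-close reason type is assumed to be an optional `String`):

use ldk_node::Node;

fn exercise_updated_api(node: &Node) {
	// `sign_message` now returns the signature directly instead of a `Result`.
	let msg = b"OK computer";
	let sig = node.sign_message(msg);
	assert!(node.verify_signature(msg, sig.as_str(), &node.node_id()));

	// BOLT12 offers take an optional expiry (in seconds) before the optional quantity.
	let offer = node
		.bolt12_payment()
		.receive(100_000_000, "asdf", None, Some(1))
		.expect("offer creation should succeed");
	println!("created offer {}", offer);

	// Force closes now carry an optional, user-provided reason (type assumed to be an
	// `Option<String>` here); passing `None` preserves the previous behavior:
	// node.force_close_channel(&user_channel_id, counterparty_node_id, None).unwrap();
}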