diff --git a/documentation/api/api-explorer.yml b/documentation/api/api-explorer.yml index 55e4b504b..c017b615d 100644 --- a/documentation/api/api-explorer.yml +++ b/documentation/api/api-explorer.yml @@ -275,13 +275,13 @@ components: totalBalance: type: string description: >- - The total value held in unspent outputs owned by the given address - (includes funds held in storage deposit). - sigLockedBalance: + The total value held in unspent outputs that is unlockable by the given address or currently timelocked. + Does not include funds held in storage deposit. + availableBalance: type: string description: >- - The sum of value held in unspent outputs owned by the given address - that are signature locked ("trivially unlockable"). + The total value held in unspent outputs that is immediately unlockable at ledgerIndex by the given address. + Does not include funds held in storage deposit. ledgerIndex: type: integer description: The ledger index for which the balance calculation was performed. @@ -585,7 +585,7 @@ components: balance-example: value: totalBalance: 100000 - sigLockedBalance: 99900 + availableBalance: 99900 ledgerIndex: 500000 ledger-updates-address-example: value: diff --git a/src/analytics/ledger/active_addresses.rs b/src/analytics/ledger/active_addresses.rs index b0fc50581..c16eb2247 100644 --- a/src/analytics/ledger/active_addresses.rs +++ b/src/analytics/ledger/active_addresses.rs @@ -43,7 +43,7 @@ impl IntervalAnalytics for AddressActivityMeasurement { impl Analytics for AddressActivityAnalytics { type Measurement = AddressActivityMeasurement; - fn handle_transaction(&mut self, consumed: &[LedgerSpent], created: &[LedgerOutput], _ctx: &dyn AnalyticsContext) { + fn handle_transaction(&mut self, consumed: &[LedgerSpent], created: &[LedgerOutput], ctx: &dyn AnalyticsContext) { for output in consumed { if let Some(a) = output.owning_address() { self.addresses.insert(*a); @@ -51,7 +51,7 @@ impl Analytics for AddressActivityAnalytics { } for output in created { - if let Some(a) = output.owning_address() { + if let Some(a) = output.output.owning_address(ctx.at().milestone_timestamp) { self.addresses.insert(*a); } } diff --git a/src/analytics/ledger/address_balance.rs b/src/analytics/ledger/address_balance.rs index afc4a41b2..a80ec4218 100644 --- a/src/analytics/ledger/address_balance.rs +++ b/src/analytics/ledger/address_balance.rs @@ -4,7 +4,10 @@ use std::collections::HashMap; use super::*; -use crate::model::utxo::{Address, TokenAmount}; +use crate::model::{ + payload::milestone::MilestoneTimestamp, + utxo::{Address, TokenAmount}, +}; #[derive(Debug)] pub(crate) struct AddressBalanceMeasurement { @@ -29,10 +32,13 @@ pub(crate) struct AddressBalancesAnalytics { impl AddressBalancesAnalytics { /// Initialize the analytics by reading the current ledger state. 
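The active-address analytic now resolves the owner of each created output against the handled milestone's timestamp rather than the output's stored address alone. Reduced to its core, the measurement is a distinct-address count over the consumed and created outputs of a transaction. A minimal, runnable sketch of that idea, using simplified stand-in types rather than the crate's `LedgerSpent`/`LedgerOutput`:

use std::collections::HashSet;

// Simplified stand-in for an output whose owner has already been resolved.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Address(u8);

struct Output {
    address: Address,
}

// Distinct addresses touched by one transaction: owners of consumed outputs
// plus owners of created outputs.
fn active_addresses(consumed: &[Output], created: &[Output]) -> usize {
    let mut addresses = HashSet::new();
    for output in consumed.iter().chain(created) {
        addresses.insert(output.address);
    }
    addresses.len()
}

fn main() {
    let consumed = [Output { address: Address(1) }];
    let created = [Output { address: Address(1) }, Output { address: Address(2) }];
    assert_eq!(active_addresses(&consumed, &created), 2);
}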
- pub(crate) fn init<'a>(unspent_outputs: impl IntoIterator) -> Self { + pub(crate) fn init<'a>( + unspent_outputs: impl IntoIterator, + milestone_timestamp: MilestoneTimestamp, + ) -> Self { let mut balances = HashMap::new(); for output in unspent_outputs { - if let Some(&a) = output.owning_address() { + if let Some(&a) = output.output.owning_address(milestone_timestamp) { *balances.entry(a).or_default() += output.amount(); } } @@ -43,7 +49,7 @@ impl AddressBalancesAnalytics { impl Analytics for AddressBalancesAnalytics { type Measurement = AddressBalanceMeasurement; - fn handle_transaction(&mut self, consumed: &[LedgerSpent], created: &[LedgerOutput], _ctx: &dyn AnalyticsContext) { + fn handle_transaction(&mut self, consumed: &[LedgerSpent], created: &[LedgerOutput], ctx: &dyn AnalyticsContext) { for output in consumed { if let Some(a) = output.owning_address() { // All inputs should be present in `addresses`. If not, we skip it's value. @@ -57,7 +63,7 @@ impl Analytics for AddressBalancesAnalytics { } for output in created { - if let Some(&a) = output.owning_address() { + if let Some(&a) = output.output.owning_address(ctx.at().milestone_timestamp) { // All inputs should be present in `addresses`. If not, we skip it's value. *self.balances.entry(a).or_default() += output.amount(); } diff --git a/src/analytics/ledger/base_token.rs b/src/analytics/ledger/base_token.rs index c3504cb3e..95aeef998 100644 --- a/src/analytics/ledger/base_token.rs +++ b/src/analytics/ledger/base_token.rs @@ -19,14 +19,14 @@ pub(crate) struct BaseTokenActivityMeasurement { impl Analytics for BaseTokenActivityMeasurement { type Measurement = Self; - fn handle_transaction(&mut self, consumed: &[LedgerSpent], created: &[LedgerOutput], _ctx: &dyn AnalyticsContext) { + fn handle_transaction(&mut self, consumed: &[LedgerSpent], created: &[LedgerOutput], ctx: &dyn AnalyticsContext) { // The idea behind the following code is that we keep track of the deltas that are applied to each account that // is represented by an address. let mut balance_deltas: HashMap<&Address, i128> = HashMap::new(); // We first gather all tokens that have been moved to an individual address. 
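The balance analytic applies every transaction to a running map from address to balance: consumed outputs debit their owner, created outputs credit theirs. A rough, self-contained sketch of that bookkeeping with simplified stand-in types; how unknown entries and emptied balances are treated here is an illustrative assumption, not the crate's exact behaviour:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Address(u8);

struct Output {
    address: Address,
    amount: u64,
}

#[derive(Default)]
struct Balances(HashMap<Address, u64>);

impl Balances {
    fn handle_transaction(&mut self, consumed: &[Output], created: &[Output]) {
        for output in consumed {
            // Debit the owner; if the entry is unknown, skip its value.
            if let Some(balance) = self.0.get_mut(&output.address) {
                *balance = balance.saturating_sub(output.amount);
            }
        }
        for output in created {
            // Credit the owner, creating the entry on first sight.
            *self.0.entry(output.address).or_default() += output.amount;
        }
    }
}

fn main() {
    let mut balances = Balances::default();
    balances.handle_transaction(&[], &[Output { address: Address(1), amount: 100 }]);
    balances.handle_transaction(
        &[Output { address: Address(1), amount: 100 }],
        &[Output { address: Address(2), amount: 100 }],
    );
    assert_eq!(balances.0.get(&Address(1)), Some(&0));
    assert_eq!(balances.0.get(&Address(2)), Some(&100));
}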
for output in created { - if let Some(address) = output.owning_address() { + if let Some(address) = output.output.owning_address(ctx.at().milestone_timestamp) { *balance_deltas.entry(address).or_default() += output.amount().0 as i128; } } diff --git a/src/analytics/mod.rs b/src/analytics/mod.rs index 2f2a64308..7af647d6b 100644 --- a/src/analytics/mod.rs +++ b/src/analytics/mod.rs @@ -23,7 +23,7 @@ use crate::{ model::{ ledger::{LedgerOutput, LedgerSpent}, metadata::LedgerInclusionState, - payload::{Payload, TransactionEssence}, + payload::{milestone::MilestoneTimestamp, Payload, TransactionEssence}, protocol::ProtocolParameters, tangle::{MilestoneIndex, MilestoneIndexTimestamp}, utxo::Input, @@ -152,9 +152,12 @@ impl Analytic { choice: &AnalyticsChoice, protocol_params: &ProtocolParameters, unspent_outputs: impl IntoIterator, + milestone_timestamp: MilestoneTimestamp, ) -> Self { Self(match choice { - AnalyticsChoice::AddressBalance => Box::new(AddressBalancesAnalytics::init(unspent_outputs)) as _, + AnalyticsChoice::AddressBalance => { + Box::new(AddressBalancesAnalytics::init(unspent_outputs, milestone_timestamp)) as _ + } AnalyticsChoice::BaseTokenActivity => Box::::default() as _, AnalyticsChoice::BlockActivity => Box::::default() as _, AnalyticsChoice::ActiveAddresses => Box::::default() as _, @@ -396,7 +399,7 @@ mod test { ledger::{LedgerOutput, LedgerSpent}, metadata::BlockMetadata, node::NodeConfiguration, - payload::{MilestoneId, MilestonePayload}, + payload::{milestone::MilestoneTimestamp, MilestoneId, MilestonePayload}, protocol::ProtocolParameters, tangle::{MilestoneIndex, MilestoneIndexTimestamp}, }, @@ -444,10 +447,11 @@ mod test { fn init<'a>( protocol_params: ProtocolParameters, unspent_outputs: impl IntoIterator + Copy, + milestone_timestamp: MilestoneTimestamp, ) -> Self { Self { active_addresses: Default::default(), - address_balance: AddressBalancesAnalytics::init(unspent_outputs), + address_balance: AddressBalancesAnalytics::init(unspent_outputs, milestone_timestamp), base_tokens: Default::default(), ledger_outputs: LedgerOutputMeasurement::init(unspent_outputs), ledger_size: LedgerSizeAnalytics::init(protocol_params, unspent_outputs), diff --git a/src/bin/inx-chronicle/api/explorer/responses.rs b/src/bin/inx-chronicle/api/explorer/responses.rs index e2198f455..ca519aa8a 100644 --- a/src/bin/inx-chronicle/api/explorer/responses.rs +++ b/src/bin/inx-chronicle/api/explorer/responses.rs @@ -78,7 +78,7 @@ impl From for LedgerUpdateByMilestoneDto { #[serde(rename_all = "camelCase")] pub struct BalanceResponse { pub total_balance: String, - pub sig_locked_balance: String, + pub available_balance: String, pub ledger_index: MilestoneIndex, } diff --git a/src/bin/inx-chronicle/api/explorer/routes.rs b/src/bin/inx-chronicle/api/explorer/routes.rs index 76eb721af..2cb3283b8 100644 --- a/src/bin/inx-chronicle/api/explorer/routes.rs +++ b/src/bin/inx-chronicle/api/explorer/routes.rs @@ -155,22 +155,22 @@ async fn ledger_updates_by_milestone( } async fn balance(database: Extension, Path(address): Path) -> ApiResult { - let ledger_index = database + let ledger_ms = database .collection::() - .get_ledger_index() + .get_newest_milestone() .await? .ok_or(MissingError::NoResults)?; let address = Address::from_str(&address).map_err(RequestError::from)?; let res = database .collection::() - .get_address_balance(address, ledger_index) + .get_address_balance(address, ledger_ms) .await? 
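The base-token activity analytic works on signed per-address deltas: each created output adds to the receiving address, each consumed output subtracts from the spending one, and an i128 comfortably holds sums of u64 amounts in either direction. A small, runnable sketch of just the delta-gathering step (simplified types; what the measurement later derives from the deltas is not shown here):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Address(u8);

struct Output {
    address: Address,
    amount: u64,
}

// Net change per address caused by one transaction.
fn balance_deltas(consumed: &[Output], created: &[Output]) -> HashMap<Address, i128> {
    let mut deltas: HashMap<Address, i128> = HashMap::new();
    for output in created {
        *deltas.entry(output.address).or_default() += output.amount as i128;
    }
    for output in consumed {
        *deltas.entry(output.address).or_default() -= output.amount as i128;
    }
    deltas
}

fn main() {
    let consumed = [Output { address: Address(1), amount: 50 }];
    let created = [
        Output { address: Address(2), amount: 30 },
        Output { address: Address(1), amount: 20 },
    ];
    let deltas = balance_deltas(&consumed, &created);
    assert_eq!(deltas[&Address(1)], -30);
    assert_eq!(deltas[&Address(2)], 30);
}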
.ok_or(MissingError::NoResults)?; Ok(BalanceResponse { total_balance: res.total_balance, - sig_locked_balance: res.sig_locked_balance, - ledger_index, + available_balance: res.available_balance, + ledger_index: ledger_ms.milestone_index, }) } diff --git a/src/bin/inx-chronicle/cli/analytics.rs b/src/bin/inx-chronicle/cli/analytics.rs index 3e29efea0..b2bf03d04 100644 --- a/src/bin/inx-chronicle/cli/analytics.rs +++ b/src/bin/inx-chronicle/cli/analytics.rs @@ -264,7 +264,14 @@ pub async fn fill_analytics( let analytics = analytics_choices .iter() - .map(|choice| Analytic::init(choice, &milestone.protocol_params, &ledger_state)) + .map(|choice| { + Analytic::init( + choice, + &milestone.protocol_params, + &ledger_state, + milestone.at.milestone_timestamp, + ) + }) .collect::>(); state = Some(AnalyticsState { analytics, diff --git a/src/bin/inx-chronicle/inx/influx/analytics.rs b/src/bin/inx-chronicle/inx/influx/analytics.rs index 02f76be58..8e837ed43 100644 --- a/src/bin/inx-chronicle/inx/influx/analytics.rs +++ b/src/bin/inx-chronicle/inx/influx/analytics.rs @@ -72,7 +72,14 @@ impl InxWorker { let analytics = analytics_choices .iter() - .map(|choice| Analytic::init(choice, &milestone.protocol_params, &ledger_state)) + .map(|choice| { + Analytic::init( + choice, + &milestone.protocol_params, + &ledger_state, + milestone.at.milestone_timestamp, + ) + }) .collect::>(); *state = Some(AnalyticsState { analytics, diff --git a/src/bin/inx-chronicle/migrations/migrate_2.rs b/src/bin/inx-chronicle/migrations/migrate_2.rs new file mode 100644 index 000000000..6875d5977 --- /dev/null +++ b/src/bin/inx-chronicle/migrations/migrate_2.rs @@ -0,0 +1,122 @@ +// Copyright 2023 IOTA Stiftung +// SPDX-License-Identifier: Apache-2.0 + +use async_trait::async_trait; +use chronicle::{ + db::{ + mongodb::collections::{LedgerUpdateCollection, OutputCollection}, + MongoDb, MongoDbCollection, MongoDbCollectionExt, + }, + model::{ + ledger::{LedgerOutput, LedgerSpent, RentStructureBytes}, + metadata::OutputMetadata, + utxo::{Output, OutputId}, + }, +}; +use futures::{prelude::stream::TryStreamExt, StreamExt}; +use mongodb::bson::doc; +use serde::Deserialize; +use tokio::{task::JoinSet, try_join}; + +use super::Migration; + +const INSERT_BATCH_SIZE: usize = 1000; + +pub struct Migrate; + +#[async_trait] +impl Migration for Migrate { + const ID: usize = 2; + const APP_VERSION: &'static str = "1.0.0-rc.3"; + const DATE: time::Date = time::macros::date!(2024 - 01 - 12); + + async fn migrate(db: &MongoDb) -> eyre::Result<()> { + db.collection::() + .collection() + .drop(None) + .await?; + + let outputs_stream = db + .collection::() + .find::(doc! {}, None) + .await?; + let mut batched_stream = outputs_stream.try_chunks(INSERT_BATCH_SIZE); + + let mut tasks = JoinSet::new(); + + while let Some(batch) = batched_stream.next().await { + let batch = batch?; + while tasks.len() >= 100 { + if let Some(res) = tasks.join_next().await { + res??; + } + } + let db = db.clone(); + tasks.spawn(async move { + let consumed = batch.iter().filter_map(Option::::from).collect::>(); + let created = batch.into_iter().map(LedgerOutput::from).collect::>(); + try_join! 
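The migration streams output documents in batches and keeps a bounded number of insert tasks in flight through a `JoinSet`, waiting for a slot before spawning the next batch. A runnable sketch of that back-pressure pattern in isolation; it assumes a `tokio` dependency with the `macros` and `rt-multi-thread` features, and `process_batch` is a hypothetical stand-in for the per-batch database writes:

use tokio::task::JoinSet;

const BATCH_SIZE: usize = 3;
const MAX_IN_FLIGHT: usize = 2;

// Stand-in for the real per-batch work (the `try_join!` of collection inserts).
async fn process_batch(batch: Vec<u32>) -> Result<usize, String> {
    Ok(batch.len())
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let items: Vec<u32> = (0..10).collect();
    let mut tasks = JoinSet::new();
    let mut processed = 0;

    for batch in items.chunks(BATCH_SIZE).map(|chunk| chunk.to_vec()) {
        // Back-pressure: wait for a running task to finish before adding more.
        while tasks.len() >= MAX_IN_FLIGHT {
            if let Some(res) = tasks.join_next().await {
                processed += res.map_err(|e| e.to_string())??;
            }
        }
        tasks.spawn(process_batch(batch));
    }
    // Drain whatever is still running.
    while let Some(res) = tasks.join_next().await {
        processed += res.map_err(|e| e.to_string())??;
    }
    assert_eq!(processed, 10);
    Ok(())
}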
{ + async { + db.collection::() + .insert_unspent_ledger_updates(&created) + .await + }, + async { + db.collection::().update_spent_outputs(&consumed).await + }, + async { + db.collection::().insert_spent_ledger_updates(&consumed).await + } + } + .and(Ok(())) + }); + } + + while let Some(res) = tasks.join_next().await { + res??; + } + + Ok(()) + } +} + +#[derive(Deserialize)] +pub struct OutputDocument { + #[serde(rename = "_id")] + output_id: OutputId, + output: Output, + metadata: OutputMetadata, + details: OutputDetails, +} + +#[derive(Deserialize)] +struct OutputDetails { + rent_structure: RentStructureBytes, +} + +impl From for LedgerOutput { + fn from(value: OutputDocument) -> Self { + Self { + output_id: value.output_id, + block_id: value.metadata.block_id, + booked: value.metadata.booked, + output: value.output, + rent_structure: value.details.rent_structure, + } + } +} + +impl From<&OutputDocument> for Option { + fn from(value: &OutputDocument) -> Self { + value.metadata.spent_metadata.map(|spent_metadata| LedgerSpent { + spent_metadata, + output: LedgerOutput { + output_id: value.output_id, + block_id: value.metadata.block_id, + booked: value.metadata.booked, + output: value.output.clone(), + rent_structure: value.details.rent_structure, + }, + }) + } +} diff --git a/src/bin/inx-chronicle/migrations/mod.rs b/src/bin/inx-chronicle/migrations/mod.rs index b15940316..0c268adbc 100644 --- a/src/bin/inx-chronicle/migrations/mod.rs +++ b/src/bin/inx-chronicle/migrations/mod.rs @@ -12,8 +12,9 @@ use eyre::bail; pub mod migrate_0; pub mod migrate_1; +pub mod migrate_2; -pub type LatestMigration = migrate_1::Migrate; +pub type LatestMigration = migrate_2::Migrate; /// The list of migrations, in order. const MIGRATIONS: &[&'static dyn DynMigration] = &[ @@ -21,6 +22,7 @@ const MIGRATIONS: &[&'static dyn DynMigration] = &[ // list. &migrate_0::Migrate, &migrate_1::Migrate, + &migrate_2::Migrate, ]; fn build_migrations(migrations: &[&'static dyn DynMigration]) -> HashMap, &'static dyn DynMigration> { diff --git a/src/db/mongodb/collections/ledger_update.rs b/src/db/mongodb/collections/ledger_update.rs index 791269da4..5bac08b63 100644 --- a/src/db/mongodb/collections/ledger_update.rs +++ b/src/db/mongodb/collections/ledger_update.rs @@ -110,23 +110,18 @@ impl LedgerUpdateCollection { I: IntoIterator, I::IntoIter: Send + Sync, { - let ledger_updates = outputs.into_iter().filter_map( - |LedgerSpent { - output: LedgerOutput { output_id, output, .. }, - spent_metadata, - }| { - // Ledger updates - output.owning_address().map(|&address| LedgerUpdateDocument { - _id: Id { - milestone_index: spent_metadata.spent.milestone_index, - output_id: *output_id, - is_spent: true, - }, - address, - milestone_timestamp: spent_metadata.spent.milestone_timestamp, - }) - }, - ); + let ledger_updates = outputs.into_iter().filter_map(|output| { + // Ledger updates + output.owning_address().map(|&address| LedgerUpdateDocument { + _id: Id { + milestone_index: output.spent_metadata.spent.milestone_index, + output_id: output.output_id(), + is_spent: true, + }, + address, + milestone_timestamp: output.spent_metadata.spent.milestone_timestamp, + }) + }); self.insert_many_ignore_duplicates(ledger_updates, InsertManyOptions::builder().ordered(false).build()) .await?; @@ -140,25 +135,18 @@ impl LedgerUpdateCollection { I: IntoIterator, I::IntoIter: Send + Sync, { - let ledger_updates = outputs.into_iter().filter_map( - |LedgerOutput { - output_id, - booked, - output, - .. 
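Rebuilding the ledger-update collection hinges on two conversions from the stored document: every document yields a `LedgerOutput`, while only documents carrying spent metadata yield a `LedgerSpent`, which is why the spent side goes through `filter_map`. A compact sketch of that conversion pattern with hypothetical, much-simplified document and ledger types:

// Simplified stand-ins; the real types carry block IDs, booking metadata,
// rent structure, and so on.
struct OutputDocument {
    output_id: u32,
    amount: u64,
    spent_in: Option<u32>, // milestone index, present only for spent outputs
}

struct LedgerOutput {
    output_id: u32,
    amount: u64,
}

struct LedgerSpent {
    output: LedgerOutput,
    spent_in: u32,
}

impl From<OutputDocument> for LedgerOutput {
    fn from(doc: OutputDocument) -> Self {
        Self { output_id: doc.output_id, amount: doc.amount }
    }
}

impl From<&OutputDocument> for Option<LedgerSpent> {
    fn from(doc: &OutputDocument) -> Self {
        doc.spent_in.map(|spent_in| LedgerSpent {
            output: LedgerOutput { output_id: doc.output_id, amount: doc.amount },
            spent_in,
        })
    }
}

fn main() {
    let batch = vec![
        OutputDocument { output_id: 1, amount: 10, spent_in: Some(7) },
        OutputDocument { output_id: 2, amount: 20, spent_in: None },
    ];
    // Same shape as the migration: spent entries are a filtered subset,
    // created entries exist for every document.
    let consumed: Vec<LedgerSpent> = batch.iter().filter_map(Option::<LedgerSpent>::from).collect();
    let created: Vec<LedgerOutput> = batch.into_iter().map(LedgerOutput::from).collect();
    assert_eq!(consumed.len(), 1);
    assert_eq!(consumed[0].output.output_id, 1);
    assert_eq!(consumed[0].spent_in, 7);
    assert_eq!(created.len(), 2);
    assert_eq!(created[1].amount, 20);
}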
- }| { - // Ledger updates - output.owning_address().map(|&address| LedgerUpdateDocument { - _id: Id { - milestone_index: booked.milestone_index, - output_id: *output_id, - is_spent: false, - }, - address, - milestone_timestamp: booked.milestone_timestamp, - }) - }, - ); + let ledger_updates = outputs.into_iter().filter_map(|output| { + // Ledger updates + output.owning_address().map(|&address| LedgerUpdateDocument { + _id: Id { + milestone_index: output.booked.milestone_index, + output_id: output.output_id, + is_spent: false, + }, + address, + milestone_timestamp: output.booked.milestone_timestamp, + }) + }); self.insert_many_ignore_duplicates(ledger_updates, InsertManyOptions::builder().ordered(false).build()) .await?; diff --git a/src/db/mongodb/collections/outputs/mod.rs b/src/db/mongodb/collections/outputs/mod.rs index 8ee3b6ab3..bc017d057 100644 --- a/src/db/mongodb/collections/outputs/mod.rs +++ b/src/db/mongodb/collections/outputs/mod.rs @@ -98,7 +98,7 @@ struct OutputDetails { impl From<&LedgerOutput> for OutputDocument { fn from(rec: &LedgerOutput) -> Self { - let address = rec.output.owning_address().copied(); + let address = rec.owning_address().copied(); let is_trivial_unlock = rec.output.is_trivial_unlock(); Self { @@ -141,6 +141,8 @@ impl From<&LedgerOutput> for OutputDocument { impl From<&LedgerSpent> for OutputDocument { fn from(rec: &LedgerSpent) -> Self { let mut res = Self::from(&rec.output); + // Update the address as the spending may have changed it + res.details.address = rec.owning_address().copied(); res.metadata.spent_metadata.replace(rec.spent_metadata); res } @@ -166,7 +168,7 @@ pub struct OutputWithMetadataResult { #[allow(missing_docs)] pub struct BalanceResult { pub total_balance: String, - pub sig_locked_balance: String, + pub available_balance: String, } #[derive(Clone, Debug, Default, Deserialize)] @@ -420,27 +422,80 @@ impl OutputCollection { pub async fn get_address_balance( &self, address: Address, - ledger_index: MilestoneIndex, + ledger_ms: MilestoneIndexTimestamp, ) -> Result, Error> { self .aggregate( [ // Look at all (at ledger index o'clock) unspent output documents for the given address. doc! { "$match": { - "details.address": &address, - "metadata.booked.milestone_index": { "$lte": ledger_index }, - "metadata.spent_metadata.spent.milestone_index": { "$not": { "$lte": ledger_index } } + "$or": [ + { "details.address": &address }, + { + "output.expiration_unlock_condition": { "$exists": true }, + "output.expiration_unlock_condition.return_address": &address + } + ], + "metadata.booked.milestone_index": { "$lte": ledger_ms.milestone_index }, + "metadata.spent_metadata.spent.milestone_index": { "$not": { "$lte": ledger_ms.milestone_index } } } }, + doc! { "$set": { "output_amount": { "$subtract": [ + { "$toDecimal": "$output.amount" }, + { "$ifNull": [{ "$toDecimal": "$output.storage_deposit_return_unlock_condition.amount" }, 0 ] }, + ] } } }, doc! 
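Before grouping, the pipeline replaces each output's raw amount with the portion that actually counts toward a balance: the amount minus anything a storage deposit return unlock condition obliges the unlocker to send back to the depositor. The same arithmetic in plain Rust, as an illustrative helper rather than anything taken from the crate:

/// Amount credited to the holding address: raw amount minus any storage
/// deposit that must be returned on unlock.
fn countable_amount(amount: u64, storage_deposit_return: Option<u64>) -> u64 {
    amount - storage_deposit_return.unwrap_or(0)
}

fn main() {
    // No storage deposit return: the full amount counts.
    assert_eq!(countable_amount(1_000, None), 1_000);
    // 800 of the 1_000 goes back to the depositor on unlock, so only 200
    // counts for the receiving address.
    assert_eq!(countable_amount(1_000, Some(800)), 200);
}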
{ "$group": { "_id": null, - "total_balance": { "$sum": { "$toDecimal": "$output.amount" } }, - "sig_locked_balance": { "$sum": { - "$cond": [ { "$eq": [ "$details.is_trivial_unlock", true] }, { "$toDecimal": "$output.amount" }, 0 ] + "total_balance": { "$sum": { + "$cond": [ + // If this output is trivially unlocked by this address + { "$eq": [ "$details.address", &address ] }, + { "$cond": [ + // And the output has no expiration or is not expired + { "$or": [ + { "$lte": [ "$output.expiration_unlock_condition", null ] }, + { "$gt": [ "$output.expiration_unlock_condition.timestamp", ledger_ms.milestone_timestamp ] } + ] }, + { "$toDecimal": "$output_amount" }, 0 + ] }, + // Otherwise, if this output has expiring funds that will be returned to this address + { "$cond": [ + // And the output is expired + { "$lte": [ "$output.expiration_unlock_condition.timestamp", ledger_ms.milestone_timestamp ] }, + { "$toDecimal": "$output_amount" }, 0 + ] } + ] + } }, + "available_balance": { "$sum": { + "$cond": [ + // If this output is trivially unlocked by this address + { "$eq": [ "$details.address", &address ] }, + { "$cond": [ + { "$and": [ + // And the output has no expiration or is not expired + { "$or": [ + { "$lte": [ "$output.expiration_unlock_condition", null ] }, + { "$gt": [ "$output.expiration_unlock_condition.timestamp", ledger_ms.milestone_timestamp ] } + ] }, + // and has no timelock or is past the lock period + { "$or": [ + { "$lte": [ "$output.timelock_unlock_condition", null ] }, + { "$lte": [ "$output.timelock_unlock_condition.timestamp", ledger_ms.milestone_timestamp ] } + ] } + ] }, + { "$toDecimal": "$output_amount" }, 0 + ] }, + // Otherwise, if this output has expiring funds that will be returned to this address + { "$cond": [ + // And the output is expired + { "$lte": [ "$output.expiration_unlock_condition.timestamp", ledger_ms.milestone_timestamp ] }, + { "$toDecimal": "$output_amount" }, 0 + ] } + ] } }, } }, doc! { "$project": { "total_balance": { "$toString": "$total_balance" }, - "sig_locked_balance": { "$toString": "$sig_locked_balance" }, + "available_balance": { "$toString": "$available_balance" }, } }, ], None, @@ -491,7 +546,7 @@ impl OutputCollection { } } - /// Get the address activity in a date + /// Get the address activity in a date range pub async fn get_address_activity_count_in_range( &self, start_date: time::Date, diff --git a/src/model/block/payload/transaction/output/ledger.rs b/src/model/block/payload/transaction/output/ledger.rs index 6c1e6a69b..97339474b 100644 --- a/src/model/block/payload/transaction/output/ledger.rs +++ b/src/model/block/payload/transaction/output/ledger.rs @@ -30,7 +30,7 @@ impl LedgerOutput { } pub fn owning_address(&self) -> Option<&Address> { - self.output.owning_address() + self.output.owning_address(None) } } @@ -53,7 +53,9 @@ impl LedgerSpent { } pub fn owning_address(&self) -> Option<&Address> { - self.output.owning_address() + self.output + .output + .owning_address(self.spent_metadata.spent.milestone_timestamp) } } /// The different number of bytes that are used for computing the rent cost. 
diff --git a/src/model/block/payload/transaction/output/mod.rs b/src/model/block/payload/transaction/output/mod.rs index 4e1797371..7cfa3c03d 100644 --- a/src/model/block/payload/transaction/output/mod.rs +++ b/src/model/block/payload/transaction/output/mod.rs @@ -34,7 +34,9 @@ pub use self::{ treasury::TreasuryOutput, }; use crate::model::{ - bytify, payload::TransactionId, stringify, ProtocolParameters, TryFromWithContext, TryIntoWithContext, + bytify, + payload::{milestone::MilestoneTimestamp, TransactionId}, + stringify, ProtocolParameters, TryFromWithContext, TryIntoWithContext, }; /// The amount of tokens associated with an output. @@ -142,13 +144,33 @@ pub enum Output { impl Output { /// Returns the [`Address`] that is in control of the output. - pub fn owning_address(&self) -> Option<&Address> { + /// The `milestone_timestamp` is used to determine which address currently owns the output if it contains an + /// [`ExpirationUnlockCondition`](self::unlock_condition::ExpirationUnlockCondition) + pub fn owning_address(&self, milestone_timestamp: impl Into>) -> Option<&Address> { Some(match self { Self::Treasury(_) => return None, Self::Basic(BasicOutput { address_unlock_condition, + expiration_unlock_condition, .. - }) => &address_unlock_condition.address, + }) + | Self::Nft(NftOutput { + address_unlock_condition, + expiration_unlock_condition, + .. + }) => { + if let (Some(spent_timestamp), Some(expiration_unlock_condition)) = + (milestone_timestamp.into(), expiration_unlock_condition) + { + if spent_timestamp >= expiration_unlock_condition.timestamp { + &expiration_unlock_condition.return_address + } else { + &address_unlock_condition.address + } + } else { + &address_unlock_condition.address + } + } Self::Alias(AliasOutput { state_controller_address_unlock_condition, .. @@ -157,10 +179,6 @@ impl Output { immutable_alias_address_unlock_condition, .. }) => &immutable_alias_address_unlock_condition.address, - Self::Nft(NftOutput { - address_unlock_condition, - .. - }) => &address_unlock_condition.address, }) } diff --git a/src/model/block/payload/transaction/output/unlock_condition/expiration.rs b/src/model/block/payload/transaction/output/unlock_condition/expiration.rs index 248322d80..5b73e19b3 100644 --- a/src/model/block/payload/transaction/output/unlock_condition/expiration.rs +++ b/src/model/block/payload/transaction/output/unlock_condition/expiration.rs @@ -14,8 +14,8 @@ use crate::model::{tangle::MilestoneTimestamp, utxo::Address}; /// After or at the unix time, only Return Address can unlock it. #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct ExpirationUnlockCondition { - return_address: Address, - timestamp: MilestoneTimestamp, + pub(crate) return_address: Address, + pub(crate) timestamp: MilestoneTimestamp, } impl> From for ExpirationUnlockCondition {
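For the core semantic change underneath all of the above: `owning_address` now takes an optional milestone timestamp, and for Basic and NFT outputs with an expiration unlock condition ownership switches to the return address once that timestamp is reached (unspent outputs are still resolved with `None`, i.e. by the deposit address). A self-contained sketch of just that rule, with simplified stand-in types rather than the crate's `Output` enum:

#[derive(Clone, Copy)]
struct MilestoneTimestamp(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Address(u8);

struct Expiration {
    return_address: Address,
    timestamp: MilestoneTimestamp,
}

struct BasicOutput {
    address: Address,
    expiration: Option<Expiration>,
}

impl BasicOutput {
    // With no timestamp (e.g. an unspent output), the deposit address owns the
    // output; at or after the expiration timestamp, the return address does.
    fn owning_address(&self, at: Option<MilestoneTimestamp>) -> &Address {
        match (at, &self.expiration) {
            (Some(now), Some(exp)) if now.0 >= exp.timestamp.0 => &exp.return_address,
            _ => &self.address,
        }
    }
}

fn main() {
    let output = BasicOutput {
        address: Address(1),
        expiration: Some(Expiration {
            return_address: Address(2),
            timestamp: MilestoneTimestamp(100),
        }),
    };
    assert_eq!(*output.owning_address(None), Address(1));
    assert_eq!(*output.owning_address(Some(MilestoneTimestamp(99))), Address(1));
    assert_eq!(*output.owning_address(Some(MilestoneTimestamp(100))), Address(2));
}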