diff --git a/crates/iroha/benches/tps/utils.rs b/crates/iroha/benches/tps/utils.rs index 08a95111946..e6937bb693c 100644 --- a/crates/iroha/benches/tps/utils.rs +++ b/crates/iroha/benches/tps/utils.rs @@ -110,15 +110,14 @@ impl Config { .view(); let mut blocks = state_view.all_blocks(NonZeroUsize::new(blocks_out_of_measure as usize + 1).unwrap()); - let (txs_accepted, txs_rejected) = (0..self.blocks) + let (txs_rejected, txs_accepted) = (0..self.blocks) .map(|_| { let block = blocks .next() .expect("The block is not yet in state. Need more sleep?"); - ( - block.transactions().filter(|tx| tx.error.is_none()).count(), - block.transactions().filter(|tx| tx.error.is_some()).count(), - ) + + let rejected = block.errors().count(); + (rejected, block.transactions().count() - rejected) }) .fold((0, 0), |acc, pair| (acc.0 + pair.0, acc.1 + pair.1)); #[allow(clippy::float_arithmetic, clippy::cast_precision_loss)] diff --git a/crates/iroha/tests/integration/tx_history.rs b/crates/iroha/tests/integration/tx_history.rs index 1b20be0054f..8c2b85f14a6 100644 --- a/crates/iroha/tests/integration/tx_history.rs +++ b/crates/iroha/tests/integration/tx_history.rs @@ -50,7 +50,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> let transactions = client .query(transaction::all()) - .filter_with(|tx| tx.transaction.value.authority.eq(account_id.clone())) + .filter_with(|tx| tx.value.authority.eq(account_id.clone())) .with_pagination(Pagination { limit: Some(nonzero!(50_u64)), offset: 1, @@ -59,15 +59,11 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> assert_eq!(transactions.len(), 50); let mut prev_creation_time = core::time::Duration::from_millis(0); - transactions - .iter() - .map(AsRef::as_ref) - .map(AsRef::as_ref) - .for_each(|tx| { - assert_eq!(tx.authority(), &account_id); - //check sorted - assert!(tx.creation_time() >= prev_creation_time); - prev_creation_time = tx.creation_time(); - }); + transactions.iter().map(AsRef::as_ref).for_each(|tx| { + assert_eq!(tx.authority(), &account_id); + //check sorted + assert!(tx.creation_time() >= prev_creation_time); + prev_creation_time = tx.creation_time(); + }); Ok(()) } diff --git a/crates/iroha_core/benches/blocks/common.rs b/crates/iroha_core/benches/blocks/common.rs index fa9aafa6fca..a1c9aa553af 100644 --- a/crates/iroha_core/benches/blocks/common.rs +++ b/crates/iroha_core/benches/blocks/common.rs @@ -58,9 +58,7 @@ pub fn create_block( .unwrap(); // Verify that transactions are valid - for tx in block.as_ref().transactions() { - assert_eq!(tx.error, None); - } + assert_eq!(block.as_ref().errors().count(), 0); block } diff --git a/crates/iroha_core/src/block.rs b/crates/iroha_core/src/block.rs index e2f473b8d51..b9b9a4dc31a 100644 --- a/crates/iroha_core/src/block.rs +++ b/crates/iroha_core/src/block.rs @@ -11,7 +11,7 @@ use iroha_data_model::{ block::*, events::prelude::*, peer::PeerId, - transaction::{error::TransactionRejectionReason, prelude::*}, + transaction::{error::TransactionRejectionReason, SignedTransaction}, }; use thiserror::Error; @@ -562,7 +562,7 @@ mod valid { if block.transactions().any(|tx| { state .transactions() - .get(&tx.as_ref().hash()) + .get(&tx.hash()) // In case of soft-fork transaction is check if it was added at the same height as candidate block .is_some_and(|height| height.get() < expected_block_height) }) { @@ -585,7 +585,6 @@ mod valid { let errors = block .transactions() - .map(AsRef::as_ref) // FIXME: Redundant clone .cloned() .enumerate() @@ -838,7 +837,7 
@@ mod valid { let transactions = block.payload().transactions.as_slice(); for transaction in transactions { - if transaction.value.authority() != genesis_account { + if transaction.authority() != genesis_account { return Err(InvalidGenesisError::UnexpectedAuthority); } } @@ -1074,15 +1073,16 @@ mod event { fn produce_events(&self) -> impl Iterator { let block_height = self.as_ref().header().height; - let tx_events = self.as_ref().transactions().map(move |tx| { - let status = tx.error.as_ref().map_or_else( + let block = self.as_ref(); + let tx_events = block.transactions().enumerate().map(move |(idx, tx)| { + let status = block.error(idx).map_or_else( || TransactionStatus::Approved, - |error| TransactionStatus::Rejected(error.clone()), + |error| TransactionStatus::Rejected(Box::new(error.clone())), ); TransactionEvent { block_height: Some(block_height), - hash: tx.as_ref().hash(), + hash: tx.hash(), status, } }); @@ -1195,23 +1195,8 @@ mod tests { .categorize(&mut state_block) .unpack(|_| {}); - // The first transaction should be confirmed - assert!(valid_block - .as_ref() - .transactions() - .next() - .unwrap() - .error - .is_none()); - - // The second transaction should be rejected - assert!(valid_block - .as_ref() - .transactions() - .nth(1) - .unwrap() - .error - .is_some()); + // The 1st transaction should be confirmed and the 2nd rejected + assert_eq!(*valid_block.as_ref().errors().next().unwrap().0, 1); } #[tokio::test] @@ -1277,23 +1262,10 @@ mod tests { .categorize(&mut state_block) .unpack(|_| {}); - // The first transaction should fail - assert!(valid_block - .as_ref() - .transactions() - .next() - .unwrap() - .error - .is_some()); - - // The third transaction should succeed - assert!(valid_block - .as_ref() - .transactions() - .nth(2) - .unwrap() - .error - .is_none()); + // The 1st transaction should fail and 2nd succeed + let mut errors = valid_block.as_ref().errors(); + assert_eq!(0, *errors.next().unwrap().0); + assert!(errors.next().is_none()); } #[tokio::test] @@ -1343,27 +1315,17 @@ mod tests { .categorize(&mut state_block) .unpack(|_| {}); - // The first transaction should be rejected - assert!( - valid_block - .as_ref() - .transactions() - .next() - .unwrap() - .error - .is_some(), + let mut errors = valid_block.as_ref().errors(); + // The 1st transaction should be rejected + assert_eq!( + 0, + *errors.next().unwrap().0, "The first transaction should be rejected, as it contains `Fail`." ); // The second transaction should be accepted assert!( - valid_block - .as_ref() - .transactions() - .nth(1) - .unwrap() - .error - .is_none(), + errors.next().is_none(), "The second transaction should be accepted." 
); } diff --git a/crates/iroha_core/src/metrics.rs b/crates/iroha_core/src/metrics.rs index b377cd1986a..93f8ead17c0 100644 --- a/crates/iroha_core/src/metrics.rs +++ b/crates/iroha_core/src/metrics.rs @@ -75,15 +75,8 @@ impl MetricsReporter { break; }; block_index += 1; - let mut block_txs_accepted = 0; - let mut block_txs_rejected = 0; - for tx in block.transactions() { - if tx.error.is_none() { - block_txs_accepted += 1; - } else { - block_txs_rejected += 1; - } - } + let block_txs_rejected = block.errors().count() as u64; + let block_txs_accepted = block.transactions().count() as u64 - block_txs_rejected; self.metrics .txs diff --git a/crates/iroha_core/src/smartcontracts/isi/query.rs b/crates/iroha_core/src/smartcontracts/isi/query.rs index 60a6cc101e0..69707310402 100644 --- a/crates/iroha_core/src/smartcontracts/isi/query.rs +++ b/crates/iroha_core/src/smartcontracts/isi/query.rs @@ -6,9 +6,9 @@ use eyre::Result; use iroha_data_model::{ prelude::*, query::{ - error::QueryExecutionFail as Error, parameters::QueryParams, QueryBox, QueryOutputBatchBox, - QueryRequest, QueryRequestWithAuthority, QueryResponse, SingularQueryBox, - SingularQueryOutputBox, + error::QueryExecutionFail as Error, parameters::QueryParams, CommittedTransaction, + QueryBox, QueryOutputBatchBox, QueryRequest, QueryRequestWithAuthority, QueryResponse, + SingularQueryBox, SingularQueryOutputBox, }, }; @@ -68,7 +68,7 @@ impl SortableQueryOutput for RoleId { } } -impl SortableQueryOutput for TransactionQueryOutput { +impl SortableQueryOutput for CommittedTransaction { fn get_metadata_sorting_key(&self, _key: &Name) -> Option { None } @@ -571,15 +571,11 @@ mod tests { assert_eq!(txs.len() as u64, num_blocks * 2); assert_eq!( - txs.iter() - .filter(|txn| txn.transaction.error.is_some()) - .count() as u64, + txs.iter().filter(|txn| txn.error.is_some()).count() as u64, num_blocks ); assert_eq!( - txs.iter() - .filter(|txn| txn.transaction.error.is_none()) - .count() as u64, + txs.iter().filter(|txn| txn.error.is_none()).count() as u64, num_blocks ); @@ -632,9 +628,7 @@ mod tests { let not_found = FindTransactions::new() .execute( - TransactionQueryOutputPredicateBox::build(|tx| { - tx.transaction.value.hash.eq(wrong_hash) - }), + CommittedTransactionPredicateBox::build(|tx| tx.value.hash.eq(wrong_hash)), &state_view, ) .expect("Query execution should not fail") @@ -643,8 +637,8 @@ mod tests { let found_accepted = FindTransactions::new() .execute( - TransactionQueryOutputPredicateBox::build(|tx| { - tx.transaction.value.hash.eq(va_tx.as_ref().hash()) + CommittedTransactionPredicateBox::build(|tx| { + tx.value.hash.eq(va_tx.as_ref().hash()) }), &state_view, ) @@ -652,11 +646,8 @@ mod tests { .next() .expect("Query should return a transaction"); - if found_accepted.transaction.error.is_none() { - assert_eq!( - va_tx.as_ref().hash(), - found_accepted.as_ref().as_ref().hash() - ) + if found_accepted.error.is_none() { + assert_eq!(va_tx.as_ref().hash(), found_accepted.as_ref().hash()) } Ok(()) } diff --git a/crates/iroha_core/src/smartcontracts/isi/tx.rs b/crates/iroha_core/src/smartcontracts/isi/tx.rs index e12fd93073d..7567764c63a 100644 --- a/crates/iroha_core/src/smartcontracts/isi/tx.rs +++ b/crates/iroha_core/src/smartcontracts/isi/tx.rs @@ -9,12 +9,10 @@ use iroha_data_model::{ prelude::*, query::{ error::QueryExecutionFail, - predicate::{ - predicate_atoms::block::TransactionQueryOutputPredicateBox, CompoundPredicate, - }, - TransactionQueryOutput, + predicate::{predicate_atoms::block::CommittedTransactionPredicateBox, 
CompoundPredicate}, + CommittedTransaction, }, - transaction::CommittedTransaction, + transaction::error::TransactionRejectionReason, }; use iroha_telemetry::metrics; use nonzero_ext::nonzero; @@ -51,12 +49,15 @@ impl BlockTransactionRef { self.0.hash() } - fn value(&self) -> CommittedTransaction { - self.0 - .transactions() - .nth(self.1) - .expect("The transaction is not found") - .clone() + fn value(&self) -> (SignedTransaction, Option) { + ( + self.0 + .transactions() + .nth(self.1) + .expect("INTERNAL BUG: The transaction is not found") + .clone(), + self.0.error(self.1).cloned(), + ) } } @@ -64,15 +65,20 @@ impl ValidQuery for FindTransactions { #[metrics(+"find_transactions")] fn execute( self, - filter: CompoundPredicate, + filter: CompoundPredicate, state_ro: &impl StateReadOnly, ) -> Result, QueryExecutionFail> { Ok(state_ro .all_blocks(nonzero!(1_usize)) .flat_map(BlockTransactionIter::new) - .map(|tx| TransactionQueryOutput { - block_hash: tx.block_hash(), - transaction: tx.value(), + .map(|tx| { + let (value, error) = tx.value(); + + CommittedTransaction { + block_hash: tx.block_hash(), + value, + error, + } }) .filter(move |tx| filter.applies(tx))) } diff --git a/crates/iroha_core/src/state.rs b/crates/iroha_core/src/state.rs index 6eb0c7e1fa1..05146d89429 100644 --- a/crates/iroha_core/src/state.rs +++ b/crates/iroha_core/src/state.rs @@ -1381,15 +1381,14 @@ impl<'state> StateBlock<'state> { /// # Errors /// Fails if transaction instruction execution fails fn execute_transactions(&mut self, block: &CommittedBlock) -> Result<()> { + let block = block.as_ref(); + // TODO: Should this block panic instead? - for tx in block.as_ref().transactions() { - if tx.error.is_none() { + for (idx, tx) in block.transactions().enumerate() { + if block.error(idx).is_none() { // Execute every tx in it's own transaction let mut transaction = self.transaction(); - transaction.process_executable( - tx.as_ref().instructions(), - tx.as_ref().authority().clone(), - )?; + transaction.process_executable(tx.instructions(), tx.authority().clone())?; transaction.apply(); } } @@ -1421,7 +1420,6 @@ impl<'state> StateBlock<'state> { block .as_ref() .transactions() - .map(|tx| &tx.value) .map(SignedTransaction::hash) .for_each(|tx_hash| { self.transactions.insert(tx_hash, block_height); diff --git a/crates/iroha_core/src/sumeragi/main_loop.rs b/crates/iroha_core/src/sumeragi/main_loop.rs index 2cdf071d362..b33f35275f7 100644 --- a/crates/iroha_core/src/sumeragi/main_loop.rs +++ b/crates/iroha_core/src/sumeragi/main_loop.rs @@ -134,7 +134,7 @@ impl Sumeragi { .map_err(|recv_error| { assert!( recv_error != mpsc::TryRecvError::Disconnected, - "INTERNAL ERROR: Sumeragi control message pump disconnected" + "INTERNAL BUG: Sumeragi control message pump disconnected" ) }) { @@ -173,7 +173,7 @@ impl Sumeragi { .map_err(|recv_error| { assert!( recv_error != mpsc::TryRecvError::Disconnected, - "INTERNAL ERROR: Sumeragi message pump disconnected" + "INTERNAL BUG: Sumeragi message pump disconnected" ) }) .ok()?; @@ -259,7 +259,7 @@ impl Sumeragi { } }; - if block.as_ref().transactions().any(|tx| tx.error.is_some()) { + if block.as_ref().errors().next().is_some() { error!( peer_id=%self.peer_id, role=%self.role(), @@ -311,7 +311,7 @@ impl Sumeragi { .expect("Genesis invalid"); assert!( - !genesis.as_ref().transactions().any(|tx| tx.error.is_some()), + genesis.as_ref().errors().next().is_none(), "Genesis contains invalid transactions" ); @@ -393,8 +393,6 @@ impl Sumeragi { genesis_account: &AccountId, 
existing_voting_block: &mut Option, ) -> Option> { - assert!(!block.header().is_genesis()); - ValidBlock::validate_keep_voting_block( block, topology, @@ -546,21 +544,21 @@ impl Sumeragi { .is_consensus_required() .expect("INTERNAL BUG: Consensus required for validating peer"); - if let Some(mut v_block) = + if let Some(mut valid_block) = self.validate_block(block, state, topology, genesis_account, voting_block) { - v_block.block.sign(&self.key_pair, topology); + valid_block.block.sign(&self.key_pair, topology); - let msg = BlockSigned::from(&v_block.block); + let msg = BlockSigned::from(&valid_block.block); self.broadcast_packet_to(msg, [topology.proxy_tail()]); info!( peer_id=%self.peer_id, role=%self.role(), - block=%v_block.block.as_ref().hash(), + block=%valid_block.block.as_ref().hash(), "Voted for the block" ); - *voting_block = Some(v_block); + *voting_block = Some(valid_block); } } (BlockMessage::BlockCreated(BlockCreated { block }), Role::ObservingPeer) => { @@ -576,24 +574,24 @@ impl Sumeragi { .is_consensus_required() .expect("INTERNAL BUG: Consensus required for observing peer"); - if let Some(mut v_block) = + if let Some(mut valid_block) = self.validate_block(block, state, topology, genesis_account, voting_block) { if view_change_index >= 1 { - v_block.block.sign(&self.key_pair, topology); + valid_block.block.sign(&self.key_pair, topology); - let msg = BlockSigned::from(&v_block.block); + let msg = BlockSigned::from(&valid_block.block); self.broadcast_packet_to(msg, [topology.proxy_tail()]); info!( peer_id=%self.peer_id, role=%self.role(), - block=%v_block.block.as_ref().hash(), + block=%valid_block.block.as_ref().hash(), "Voted for the block" ); } - *voting_block = Some(v_block); + *voting_block = Some(valid_block); } } (BlockMessage::BlockCreated(BlockCreated { block }), Role::ProxyTail) => { diff --git a/crates/iroha_data_model/src/block.rs b/crates/iroha_data_model/src/block.rs index 4f060e0be67..df84387130d 100644 --- a/crates/iroha_data_model/src/block.rs +++ b/crates/iroha_data_model/src/block.rs @@ -3,8 +3,10 @@ //! `Block`s are organised into a linear sequence over time (also known as the block chain). #[cfg(not(feature = "std"))] -use alloc::{boxed::Box, format, string::String, vec::Vec}; +use alloc::{boxed::Box, collections::BTreeMap, format, string::String, vec::Vec}; use core::{fmt::Display, time::Duration}; +#[cfg(feature = "std")] +use std::collections::BTreeMap; use derive_more::Display; use iroha_crypto::{HashOf, MerkleTree, SignatureOf}; @@ -16,7 +18,7 @@ use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; pub use self::model::*; -use crate::transaction::prelude::*; +use crate::transaction::{error::TransactionRejectionReason, prelude::*}; #[model] mod model { @@ -72,7 +74,7 @@ mod model { /// Block header pub header: BlockHeader, /// array of transactions, which successfully passed validation and consensus step. - pub transactions: Vec<CommittedTransaction>, + pub transactions: Vec<SignedTransaction>, } /// Signature of a block @@ -108,6 +110,12 @@ mod model { pub(super) signatures: Vec, /// Block payload pub(super) payload: BlockPayload, + /// Rejection reasons for rejected transactions, keyed by transaction index + /// + /// # Warning + /// + /// Transaction errors are not part of the block hash or protected by the block signature.
+ pub(super) errors: BTreeMap<u64, TransactionRejectionReason>, } } @@ -158,20 +166,13 @@ impl SignedBlock { header: BlockHeader, transactions: impl IntoIterator, ) -> SignedBlock { - let transactions = transactions - .into_iter() - .map(|tx| CommittedTransaction { - value: tx, - error: None, - }) - .collect(); - SignedBlockV1 { signatures: vec![signature], payload: BlockPayload { header, - transactions, + transactions: transactions.into_iter().collect(), }, + errors: BTreeMap::new(), } .into() } @@ -184,13 +185,20 @@ impl SignedBlock { ) -> &mut Self { let SignedBlock::V1(block) = self; - for (tx, error) in errors { - block.payload.transactions[tx].error = Some(Box::new(error)); - } + block.errors = errors + .into_iter() + .map(|(idx, error)| (idx as u64, error)) + .collect(); self } + /// Return the error for the transaction at the given index, if any + pub fn error(&self, tx: usize) -> Option<&TransactionRejectionReason> { + let SignedBlock::V1(block) = self; + block.errors.get(&(tx as u64)) + } + /// Block payload. Used for tests #[cfg(feature = "transparent_api")] pub fn payload(&self) -> &BlockPayload { @@ -205,13 +213,6 @@ impl SignedBlock { &block.payload.header } - /// Block transactions - #[inline] - pub fn transactions(&self) -> impl ExactSizeIterator<Item = &CommittedTransaction> { - let SignedBlock::V1(block) = self; - block.payload.transactions.iter() - } - /// Signatures of peers which approved this block. #[inline] pub fn signatures( @@ -221,6 +222,23 @@ impl SignedBlock { block.signatures.iter() } + /// Block transactions + #[inline] + pub fn transactions(&self) -> impl ExactSizeIterator<Item = &SignedTransaction> { + let SignedBlock::V1(block) = self; + block.payload.transactions.iter() + } + + /// Rejection reasons for rejected transactions, keyed by transaction index + /// + /// # Warning + /// + /// Transaction errors are not part of the block hash or protected by the block signature.
+ pub fn errors(&self) -> impl ExactSizeIterator { + let SignedBlock::V1(block) = self; + block.errors.iter() + } + /// Calculate block hash #[inline] pub fn hash(&self) -> HashOf { @@ -316,13 +334,6 @@ impl SignedBlock { creation_time_ms, view_change_index: 0, }; - let transactions = transactions - .into_iter() - .map(|transaction| CommittedTransaction { - value: transaction, - error: None, - }) - .collect(); let signature = BlockSignature(0, SignatureOf::new(private_key, &header)); let payload = BlockPayload { @@ -333,17 +344,18 @@ impl SignedBlock { SignedBlockV1 { signatures: vec![signature], payload, + errors: BTreeMap::new(), } .into() } #[cfg(feature = "std")] - fn get_genesis_block_creation_time(genesis_transactions: &[SignedTransaction]) -> u64 { + fn get_genesis_block_creation_time(transactions: &[SignedTransaction]) -> u64 { use std::time::SystemTime; - let latest_txn_time = genesis_transactions + let latest_txn_time = transactions .iter() - .map(super::transaction::SignedTransaction::creation_time) + .map(SignedTransaction::creation_time) .max() .expect("INTERNAL BUG: Block empty"); let now = SystemTime::now() @@ -380,12 +392,13 @@ mod candidate { struct SignedBlockCandidate { signatures: Vec, payload: BlockPayload, + errors: BTreeMap, } #[derive(Decode, Deserialize)] struct BlockPayloadCandidate { header: BlockHeader, - transactions: Vec, + transactions: Vec, } impl BlockPayloadCandidate { @@ -408,7 +421,7 @@ mod candidate { let expected_txs_hash = self .transactions .iter() - .map(|value| value.as_ref().hash()) + .map(SignedTransaction::hash) .collect::>() .hash() .ok_or("Block is empty")?; @@ -418,7 +431,7 @@ mod candidate { } self.transactions.iter().try_for_each(|tx| { - if tx.value.creation_time() >= self.header.creation_time() { + if tx.creation_time() >= self.header.creation_time() { return Err("Transaction creation time is ahead of block creation time"); } @@ -443,6 +456,7 @@ mod candidate { Ok(SignedBlockV1 { signatures: self.signatures, payload: self.payload, + errors: self.errors, }) } @@ -473,14 +487,14 @@ mod candidate { #[cfg(not(target_family = "wasm"))] fn validate_genesis(&self) -> Result<(), &'static str> { - use crate::isi::InstructionBox; - let transactions = self.payload.transactions.as_slice(); + + if !self.errors.is_empty() { + return Err("Genesis transaction must not contain errors"); + } + for transaction in transactions { - if transaction.error.is_some() { - return Err("Genesis transaction must not contain errors"); - } - let Executable::Instructions(_) = transaction.value.instructions() else { + let Executable::Instructions(_) = transaction.instructions() else { return Err("Genesis transaction must contain instructions"); }; } @@ -489,11 +503,11 @@ mod candidate { return Err("Genesis block must contain at least one transaction"); }; let Executable::Instructions(instructions_executor) = - transaction_executor.value.instructions() + transaction_executor.instructions() else { return Err("Genesis transaction must contain instructions"); }; - let [InstructionBox::Upgrade(_)] = instructions_executor.as_ref() else { + let [crate::isi::InstructionBox::Upgrade(_)] = instructions_executor.as_ref() else { return Err( "First transaction must contain single `Upgrade` instruction to set executor", ); diff --git a/crates/iroha_data_model/src/query/mod.rs b/crates/iroha_data_model/src/query/mod.rs index 18693869359..82f0711e833 100644 --- a/crates/iroha_data_model/src/query/mod.rs +++ b/crates/iroha_data_model/src/query/mod.rs @@ -31,7 +31,7 @@ use crate::{ 
permission::Permission, role::{Role, RoleId}, seal::Sealed, - transaction::{CommittedTransaction, SignedTransaction}, + transaction::{error::TransactionRejectionReason, SignedTransaction}, trigger::{Trigger, TriggerId}, }; @@ -118,7 +118,7 @@ mod model { Role(Vec), Parameter(Vec), Permission(Vec), - Transaction(Vec), + Transaction(Vec), Peer(Vec), RoleId(Vec), TriggerId(Vec), @@ -153,7 +153,7 @@ mod model { Json(JsonValue), Trigger(crate::trigger::Trigger), Parameters(Parameters), - Transaction(TransactionQueryOutput), + Transaction(CommittedTransaction), BlockHeader(BlockHeader), } @@ -240,12 +240,14 @@ mod model { )] #[getset(get = "pub")] #[ffi_type] - pub struct TransactionQueryOutput { + pub struct CommittedTransaction { /// The hash of the block to which `tx` belongs to pub block_hash: HashOf, /// Transaction #[getset(skip)] - pub transaction: CommittedTransaction, + pub value: SignedTransaction, + /// Reason of rejection, if any + pub error: Option, } } @@ -563,7 +565,7 @@ impl_iter_queries! { FindPeers => crate::peer::Peer, FindActiveTriggerIds => crate::trigger::TriggerId, FindTriggers => crate::trigger::Trigger, - FindTransactions => TransactionQueryOutput, + FindTransactions => CommittedTransaction, FindAccountsWithAsset => crate::account::Account, FindBlockHeaders => crate::block::BlockHeader, FindBlocks => SignedBlock, @@ -580,9 +582,9 @@ impl_singular_queries! { FindExecutorDataModel => crate::executor::ExecutorDataModel, } -impl AsRef for TransactionQueryOutput { - fn as_ref(&self) -> &CommittedTransaction { - &self.transaction +impl AsRef for CommittedTransaction { + fn as_ref(&self) -> &SignedTransaction { + &self.value } } @@ -1097,6 +1099,6 @@ pub mod prelude { account::prelude::*, asset::prelude::*, block::prelude::*, builder::prelude::*, domain::prelude::*, executor::prelude::*, parameters::prelude::*, peer::prelude::*, permission::prelude::*, predicate::prelude::*, role::prelude::*, transaction::prelude::*, - trigger::prelude::*, QueryBox, QueryRequest, SingularQueryBox, TransactionQueryOutput, + trigger::prelude::*, CommittedTransaction, QueryBox, QueryRequest, SingularQueryBox, }; } diff --git a/crates/iroha_data_model/src/query/predicate/predicate_atoms/block.rs b/crates/iroha_data_model/src/query/predicate/predicate_atoms/block.rs index c1d7ecb24b3..0ee4e0289e7 100644 --- a/crates/iroha_data_model/src/query/predicate/predicate_atoms/block.rs +++ b/crates/iroha_data_model/src/query/predicate/predicate_atoms/block.rs @@ -1,7 +1,7 @@ //! This module contains predicates for block-related objects, mirroring [`crate::block`]. 
#[cfg(not(feature = "std"))] -use alloc::{boxed::Box, format, string::String, vec::Vec}; +use alloc::{format, string::String, vec::Vec}; use iroha_crypto::HashOf; use iroha_schema::IntoSchema; @@ -19,9 +19,9 @@ use crate::{ projectors::BaseProjector, AstPredicate, CompoundPredicate, EvaluatePredicate, HasPredicateBox, HasPrototype, }, - TransactionQueryOutput, + CommittedTransaction, }, - transaction::{CommittedTransaction, SignedTransaction}, + transaction::SignedTransaction, }; /// A predicate that can be applied to a [`HashOf`] @@ -128,21 +128,22 @@ pub enum TransactionErrorPredicateBox { IsSome, } -impl_predicate_box!(Option>: TransactionErrorPredicateBox); +impl_predicate_box!(Option: TransactionErrorPredicateBox); -impl EvaluatePredicate>> for TransactionErrorPredicateBox { - fn applies(&self, input: &Option>) -> bool { +impl EvaluatePredicate> for TransactionErrorPredicateBox { + fn applies(&self, input: &Option) -> bool { match self { TransactionErrorPredicateBox::IsSome => input.is_some(), } } } -/// A predicate that can be applied to a [`CommittedTransaction`] +/// A predicate that can be applied to a [`CommittedTransaction`]. #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] pub enum CommittedTransactionPredicateBox { - // projections - /// Checks if a predicate applies to the signed transaction inside. + /// Checks if a predicate applies to the hash of the block the transaction was included in. + BlockHash(BlockHashPredicateBox), + /// Checks if a predicate applies to the committed transaction inside. Value(SignedTransactionPredicateBox), /// Checks if a predicate applies to the error of the transaction. Error(TransactionErrorPredicateBox), @@ -153,35 +154,13 @@ impl_predicate_box!(CommittedTransaction: CommittedTransactionPredicateBox); impl EvaluatePredicate for CommittedTransactionPredicateBox { fn applies(&self, input: &CommittedTransaction) -> bool { match self { - CommittedTransactionPredicateBox::Value(signed_transaction) => { - signed_transaction.applies(&input.value) - } - CommittedTransactionPredicateBox::Error(error) => error.applies(&input.error), - } - } -} - -/// A predicate that can be applied to a [`TransactionQueryOutput`]. -#[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] -pub enum TransactionQueryOutputPredicateBox { - // projections - /// Checks if a predicate applies to the committed transaction inside. - Transaction(CommittedTransactionPredicateBox), - /// Checks if a predicate applies to the hash of the block the transaction was included in. 
- BlockHash(BlockHashPredicateBox), -} - -impl_predicate_box!(TransactionQueryOutput: TransactionQueryOutputPredicateBox); - -impl EvaluatePredicate for TransactionQueryOutputPredicateBox { - fn applies(&self, input: &TransactionQueryOutput) -> bool { - match self { - TransactionQueryOutputPredicateBox::Transaction(committed_transaction) => { - committed_transaction.applies(&input.transaction) - } - TransactionQueryOutputPredicateBox::BlockHash(block_hash) => { + CommittedTransactionPredicateBox::BlockHash(block_hash) => { block_hash.applies(&input.block_hash) } + CommittedTransactionPredicateBox::Value(committed_transaction) => { + committed_transaction.applies(&input.value) + } + CommittedTransactionPredicateBox::Error(error) => error.applies(&input.error), } } } @@ -191,7 +170,7 @@ pub mod prelude { pub use super::{ BlockHashPredicateBox, BlockHeaderPredicateBox, CommittedTransactionPredicateBox, SignedBlockPredicateBox, SignedTransactionPredicateBox, TransactionErrorPredicateBox, - TransactionHashPredicateBox, TransactionQueryOutputPredicateBox, + TransactionHashPredicateBox, }; } @@ -203,7 +182,7 @@ mod test { account::AccountId, prelude::{ AccountIdPredicateBox, BlockHeaderPredicateBox, CompoundPredicate, - SignedBlockPredicateBox, TransactionQueryOutputPredicateBox, + SignedBlockPredicateBox, }, query::predicate::predicate_atoms::block::{ BlockHashPredicateBox, CommittedTransactionPredicateBox, SignedTransactionPredicateBox, @@ -219,37 +198,32 @@ mod test { .parse() .unwrap(); - let predicate = TransactionQueryOutputPredicateBox::build(|tx| { + let predicate = CommittedTransactionPredicateBox::build(|tx| { tx.block_hash.eq(HashOf::from_untyped_unchecked(hash)) - & tx.transaction.error.is_some() - & tx.transaction.value.authority.eq(account_id.clone()) - & tx.transaction - .value - .hash - .eq(HashOf::from_untyped_unchecked(hash)) + & tx.value.authority.eq(account_id.clone()) + & tx.value.hash.eq(HashOf::from_untyped_unchecked(hash)) + & tx.error.is_some() }); assert_eq!( predicate, CompoundPredicate::And(vec![ - CompoundPredicate::Atom(TransactionQueryOutputPredicateBox::BlockHash( + CompoundPredicate::Atom(CommittedTransactionPredicateBox::BlockHash( BlockHashPredicateBox::Equals(HashOf::from_untyped_unchecked(hash)) )), - CompoundPredicate::Atom(TransactionQueryOutputPredicateBox::Transaction( - CommittedTransactionPredicateBox::Error(TransactionErrorPredicateBox::IsSome) - )), - CompoundPredicate::Atom(TransactionQueryOutputPredicateBox::Transaction( - CommittedTransactionPredicateBox::Value( - SignedTransactionPredicateBox::Authority(AccountIdPredicateBox::Equals( - account_id.clone() - )) - ) + CompoundPredicate::Atom(CommittedTransactionPredicateBox::Value( + SignedTransactionPredicateBox::Authority(AccountIdPredicateBox::Equals( + account_id.clone() + )) )), - CompoundPredicate::Atom(TransactionQueryOutputPredicateBox::Transaction( - CommittedTransactionPredicateBox::Value(SignedTransactionPredicateBox::Hash( - TransactionHashPredicateBox::Equals(HashOf::from_untyped_unchecked(hash)) + CompoundPredicate::Atom(CommittedTransactionPredicateBox::Value( + SignedTransactionPredicateBox::Hash(TransactionHashPredicateBox::Equals( + HashOf::from_untyped_unchecked(hash) )) )), + CompoundPredicate::Atom(CommittedTransactionPredicateBox::Error( + TransactionErrorPredicateBox::IsSome + )), ]) ); } diff --git a/crates/iroha_data_model/src/query/predicate/projectors.rs b/crates/iroha_data_model/src/query/predicate/projectors.rs index 4fe3d194f8d..45ea67cba26 100644 --- 
a/crates/iroha_data_model/src/query/predicate/projectors.rs +++ b/crates/iroha_data_model/src/query/predicate/projectors.rs @@ -8,9 +8,7 @@ use core::marker::PhantomData; use super::{AstPredicate, CompoundPredicate}; use crate::{ - prelude::{ - BlockHeaderPredicateBox, SignedBlockPredicateBox, TransactionQueryOutputPredicateBox, - }, + prelude::{BlockHeaderPredicateBox, SignedBlockPredicateBox}, query::predicate::{ predicate_atoms::{ account::{AccountIdPredicateBox, AccountPredicateBox}, @@ -214,9 +212,6 @@ proj!(SignedTransactionHashProjector(SignedTransactionHashProjection): Transacti proj!(SignedTransactionAuthorityProjector(SignedTransactionAuthorityProjection): AccountIdPredicateBox => SignedTransactionPredicateBox::Authority); // projections on CommittedTransaction +proj!(CommittedTransactionBlockHashProjector(CommittedTransactionBlockHashProjection): BlockHashPredicateBox => CommittedTransactionPredicateBox::BlockHash); proj!(CommittedTransactionValueProjector(CommittedTransactionValueProjection): SignedTransactionPredicateBox => CommittedTransactionPredicateBox::Value); proj!(CommittedTransactionErrorProjector(CommittedTransactionErrorProjection): TransactionErrorPredicateBox => CommittedTransactionPredicateBox::Error); - -// projections on TransactionQueryOutput -proj!(TransactionQueryOutputTransactionProjector(TransactionQueryOutputTransactionProjection): CommittedTransactionPredicateBox => TransactionQueryOutputPredicateBox::Transaction); -proj!(TransactionQueryOutputBlockHashProjector(TransactionQueryOutputBlockHashProjection): BlockHashPredicateBox => TransactionQueryOutputPredicateBox::BlockHash); diff --git a/crates/iroha_data_model/src/query/predicate/prototypes/block.rs b/crates/iroha_data_model/src/query/predicate/prototypes/block.rs index 236d927446b..f72e59a0211 100644 --- a/crates/iroha_data_model/src/query/predicate/prototypes/block.rs +++ b/crates/iroha_data_model/src/query/predicate/prototypes/block.rs @@ -12,14 +12,14 @@ use crate::{ predicate_atoms::block::{ BlockHashPredicateBox, BlockHeaderPredicateBox, CommittedTransactionPredicateBox, SignedBlockPredicateBox, SignedTransactionPredicateBox, TransactionErrorPredicateBox, - TransactionHashPredicateBox, TransactionQueryOutputPredicateBox, + TransactionHashPredicateBox, }, predicate_combinators::NotAstPredicate, projectors::{ - BlockHeaderHashProjector, CommittedTransactionErrorProjector, - CommittedTransactionValueProjector, ObjectProjector, SignedBlockHeaderProjector, - SignedTransactionAuthorityProjector, SignedTransactionHashProjector, - TransactionQueryOutputBlockHashProjector, TransactionQueryOutputTransactionProjector, + BlockHeaderHashProjector, CommittedTransactionBlockHashProjector, + CommittedTransactionErrorProjector, CommittedTransactionValueProjector, + ObjectProjector, SignedBlockHeaderProjector, SignedTransactionAuthorityProjector, + SignedTransactionHashProjector, }, prototypes::account::AccountIdPrototype, AstPredicate, HasPrototype, @@ -123,25 +123,15 @@ where } } -/// A prototype of [`crate::transaction::CommittedTransaction`] -#[derive(Default, Copy, Clone)] +/// A prototype of [`crate::query::CommittedTransaction`] for predicate construction. 
+#[derive(Default, Clone)] pub struct CommittedTransactionPrototype { - /// Build a predicate on the signed transaction inside + /// Build a predicate on the block hash inside + pub block_hash: BlockHashPrototype>, + /// Build a predicate on the transaction inside pub value: SignedTransactionPrototype>, /// Build a predicate on the transaction error pub error: TransactionErrorPrototype>, } impl_prototype!(CommittedTransactionPrototype: CommittedTransactionPredicateBox); - -/// A prototype of [`crate::query::TransactionQueryOutput`] for predicate construction. -#[derive(Default, Copy, Clone)] -pub struct TransactionQueryOutputPrototype { - /// Build a predicate on the transaction inside - pub transaction: - CommittedTransactionPrototype>, - /// Build a predicate on the block hash inside - pub block_hash: BlockHashPrototype>, -} - -impl_prototype!(TransactionQueryOutputPrototype: TransactionQueryOutputPredicateBox); diff --git a/crates/iroha_data_model/src/transaction.rs b/crates/iroha_data_model/src/transaction.rs index c050310de39..1df809d685e 100644 --- a/crates/iroha_data_model/src/transaction.rs +++ b/crates/iroha_data_model/src/transaction.rs @@ -29,7 +29,6 @@ use crate::{ #[model] mod model { - use getset::Getters; use iroha_primitives::const_vec::ConstVec; use super::*; @@ -152,32 +151,6 @@ mod model { /// Payload of the transaction. pub(super) payload: TransactionPayload, } - - /// Transaction Value used in Instructions and Queries - #[derive( - Debug, - PartialOrd, - Ord, - Getters, - Clone, - PartialEq, - Eq, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type] - #[getset(get = "pub")] - pub struct CommittedTransaction { - /// Committed transaction - #[getset(skip)] - pub value: SignedTransaction, - /// Reason of rejection - // NOTE: Using `Box` reduces memory use by 10% - pub error: Option>, - } } impl FromIterator for Executable { @@ -309,12 +282,6 @@ impl SignedTransactionV1 { } } -impl AsRef for CommittedTransaction { - fn as_ref(&self) -> &SignedTransaction { - &self.value - } -} - impl TransactionSignature { /// Signature itself pub fn payload(&self) -> &Signature { @@ -748,9 +715,7 @@ mod http { pub mod prelude { #[cfg(feature = "http")] pub use super::http::TransactionBuilder; - pub use super::{ - error::prelude::*, CommittedTransaction, Executable, SignedTransaction, WasmSmartContract, - }; + pub use super::{error::prelude::*, Executable, SignedTransaction, WasmSmartContract}; } #[cfg(test)] diff --git a/crates/iroha_genesis/src/lib.rs b/crates/iroha_genesis/src/lib.rs index 70649c91ebd..9dca8a4ae5b 100644 --- a/crates/iroha_genesis/src/lib.rs +++ b/crates/iroha_genesis/src/lib.rs @@ -423,7 +423,7 @@ mod tests { // First transaction { let transaction = transactions[0]; - let instructions = transaction.value.instructions(); + let instructions = transaction.instructions(); let Executable::Instructions(instructions) = instructions else { panic!("Expected instructions"); }; @@ -434,7 +434,7 @@ mod tests { // Second transaction let transaction = transactions[1]; - let instructions = transaction.value.instructions(); + let instructions = transaction.instructions(); let Executable::Instructions(instructions) = instructions else { panic!("Expected instructions"); }; diff --git a/crates/iroha_schema_derive/src/lib.rs b/crates/iroha_schema_derive/src/lib.rs index 8c1aa43e35d..0dd0c21a59a 100644 --- a/crates/iroha_schema_derive/src/lib.rs +++ b/crates/iroha_schema_derive/src/lib.rs @@ -12,7 +12,7 @@ use proc_macro2::{Span, TokenStream}; use quote::{quote, 
ToTokens}; use syn::parse_quote; -fn add_bounds_to_all_generic_parameters(generics: &mut syn::Generics, bound: syn::Path) { +fn add_bounds_to_all_generic_parameters(generics: &mut syn::Generics, bound: &syn::Path) { let generic_type_parameters = generics .type_params() .map(|ty_param| ty_param.ident.clone()) @@ -50,7 +50,7 @@ fn impl_type_id(input: &mut syn::DeriveInput) -> TokenStream { // Unlike IntoSchema, `TypeId` bounds are required only on the generic type parameters, as in the standard "dumb" algorithm // The schema of the fields are irrelevant here, as we only need the names of the parameters - add_bounds_to_all_generic_parameters(&mut input.generics, parse_quote!(iroha_schema::TypeId)); + add_bounds_to_all_generic_parameters(&mut input.generics, &parse_quote!(iroha_schema::TypeId)); let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let type_id_body = trait_body(name, &input.generics, true); @@ -217,19 +217,19 @@ pub fn schema_derive(input: TokenStream) -> TokenStream { // first of all, `IntoSchema` impls are required for all generic type parameters to be able to call `type_name` on them add_bounds_to_all_generic_parameters( &mut input.generics, - parse_quote!(iroha_schema::IntoSchema), + &parse_quote!(iroha_schema::IntoSchema), ); // add trait bounds on field types using the same algorithm that parity scale codec uses - emitter.handle(trait_bounds::add( + trait_bounds::add( &input.ident, &mut input.generics, &input.data, - syn::parse_quote!(iroha_schema::IntoSchema), + &syn::parse_quote!(iroha_schema::IntoSchema), None, false, &syn::parse_quote!(iroha_schema), - )); + ); let impl_type_id = impl_type_id(&mut syn::parse2(original_input).unwrap()); diff --git a/crates/iroha_schema_derive/src/trait_bounds.rs b/crates/iroha_schema_derive/src/trait_bounds.rs index fe9548568a1..eebc2fe31ab 100644 --- a/crates/iroha_schema_derive/src/trait_bounds.rs +++ b/crates/iroha_schema_derive/src/trait_bounds.rs @@ -1,6 +1,6 @@ -//! Algorithm for generating trait bounds in IntoSchema derive +//! Algorithm for generating trait bounds in `IntoSchema` derive //! -//! Based on https://github.com/paritytech/parity-scale-codec/blob/2c61d4ab70dfa157556430546441cd2deb5031f2/derive/src/trait_bounds.rs +//! Based on use std::iter; @@ -8,7 +8,7 @@ use proc_macro2::Ident; use syn::{ parse_quote, visit::{self, Visit}, - Generics, Result, Type, TypePath, + Generics, Type, TypePath, }; use crate::{IntoSchemaData, IntoSchemaField}; @@ -119,11 +119,11 @@ pub fn add( generics: &mut Generics, data: &IntoSchemaData, // custom_trait_bound: Option>, - codec_bound: syn::Path, - codec_skip_bound: Option, + codec_bound: &syn::Path, + codec_skip_bound: Option<&syn::Path>, dumb_trait_bounds: bool, crate_path: &syn::Path, -) -> Result<()> { +) { let skip_type_params = Vec::::new(); // NOTE: not implementing custom trait bounds for now // can be implemented later if needed @@ -144,13 +144,13 @@ pub fn add( .map(|tp| tp.ident.clone()) .collect::>(); if ty_params.is_empty() { - return Ok(()); + return; } let codec_types = - get_types_to_add_trait_bound(input_ident, data, &ty_params, dumb_trait_bounds)?; + get_types_to_add_trait_bound(input_ident, data, &ty_params, dumb_trait_bounds); - let compact_types = collect_types(data, |t| t.codec_attrs.compact)? 
+ let compact_types = collect_types(data, |t| t.codec_attrs.compact) .into_iter() // Only add a bound if the type uses a generic .filter(|ty| type_contain_idents(ty, &ty_params)) @@ -158,7 +158,7 @@ pub fn add( let skip_types = if codec_skip_bound.is_some() { let needs_default_bound = |f: &IntoSchemaField| f.codec_attrs.skip; - collect_types(data, needs_default_bound)? + collect_types(data, needs_default_bound) .into_iter() // Only add a bound if the type uses a generic .filter(|ty| type_contain_idents(ty, &ty_params)) @@ -170,27 +170,24 @@ pub fn add( if !codec_types.is_empty() || !compact_types.is_empty() || !skip_types.is_empty() { let where_clause = generics.make_where_clause(); - codec_types.into_iter().for_each(|ty| { + for ty in codec_types { where_clause .predicates .push(parse_quote!(#ty : #codec_bound)) - }); + } - compact_types.into_iter().for_each(|ty| { + for ty in compact_types { where_clause .predicates .push(parse_quote!(#crate_path::Compact<#ty> : #codec_bound)) - }); + } - skip_types.into_iter().for_each(|ty| { - let codec_skip_bound = codec_skip_bound.as_ref(); + for ty in skip_types { where_clause .predicates .push(parse_quote!(#ty : #codec_skip_bound)) - }); + } } - - Ok(()) } /// Returns all types that must be added to the where clause with the respective trait bound. @@ -199,16 +196,16 @@ fn get_types_to_add_trait_bound( data: &IntoSchemaData, ty_params: &[Ident], dumb_trait_bound: bool, -) -> Result> { +) -> Vec { if dumb_trait_bound { - Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect()) + ty_params.iter().map(|t| parse_quote!( #t )).collect() } else { let needs_codec_bound = |f: &IntoSchemaField| { !f.codec_attrs.compact - && true // utils::get_encoded_as_type(f).is_none() + // utils::get_encoded_as_type(f).is_none() && !f.codec_attrs.skip }; - let res = collect_types(data, needs_codec_bound)? + collect_types(data, needs_codec_bound) .into_iter() // Only add a bound if the type uses a generic .filter(|ty| type_contain_idents(ty, ty_params)) @@ -226,16 +223,11 @@ fn get_types_to_add_trait_bound( // Remove all remaining types that start/contain the input ident to not have them in the // where clause. 
.filter(|ty| !type_or_sub_type_path_starts_with_ident(ty, input_ident)) - .collect(); - - Ok(res) + .collect() } } -fn collect_types( - data: &IntoSchemaData, - type_filter: fn(&IntoSchemaField) -> bool, -) -> Result> { +fn collect_types(data: &IntoSchemaData, type_filter: fn(&IntoSchemaField) -> bool) -> Vec { let types = match *data { IntoSchemaData::Struct(ref data) => data .fields @@ -258,5 +250,5 @@ fn collect_types( .collect(), }; - Ok(types) + types } diff --git a/crates/iroha_schema_gen/src/lib.rs b/crates/iroha_schema_gen/src/lib.rs index a373e870c7d..63d992b136a 100644 --- a/crates/iroha_schema_gen/src/lib.rs +++ b/crates/iroha_schema_gen/src/lib.rs @@ -130,6 +130,11 @@ types!( AssetType, AssetValue, AssetValuePredicateBox, + BTreeMap, + BTreeMap, + BTreeMap, + BTreeSet, + BTreeSet, BlockEvent, BlockEventFilter, BlockHashPredicateBox, @@ -147,23 +152,19 @@ types!( Box>, Box>, Box>, + Box>, Box>, Box>, Box>, Box>, Box>, Box>, - Box>, Box>, Box>, Box, - BTreeMap, - BTreeMap, - BTreeSet, - BTreeSet, - BurnBox, Burn, Burn, + BurnBox, ChainId, CommittedTransaction, CommittedTransactionPredicateBox, @@ -171,21 +172,21 @@ types!( CompoundPredicate, CompoundPredicate, CompoundPredicate, + CompoundPredicate, CompoundPredicate, CompoundPredicate, CompoundPredicate, CompoundPredicate, CompoundPredicate, CompoundPredicate, - CompoundPredicate, CompoundPredicate, CompoundPredicate, ConfigurationEvent, ConfigurationEventFilter, ConfigurationEventSet, ConstString, - ConstVec, ConstVec, + ConstVec, CustomInstruction, CustomParameter, CustomParameterId, @@ -217,37 +218,37 @@ types!( ExecutorUpgrade, FetchSize, FindAccountMetadata, - FindAccountsWithAsset, FindAccounts, + FindAccountsWithAsset, FindActiveTriggerIds, + FindAssetDefinitionMetadata, + FindAssetMetadata, + FindAssetQuantityById, FindAssets, FindAssetsDefinitions, FindBlockHeaders, FindBlocks, + FindDomainMetadata, FindDomains, + FindError, + FindExecutorDataModel, FindParameters, FindPeers, + FindPermissionsByAccountId, FindRoleIds, FindRoles, - FindTransactions, - FindTriggers, - FindAssetDefinitionMetadata, - FindAssetMetadata, - FindAssetQuantityById, - FindDomainMetadata, - FindError, - FindExecutorDataModel, - FindPermissionsByAccountId, FindRolesByAccountId, + FindTransactions, FindTriggerMetadata, + FindTriggers, ForwardCursor, - GrantBox, Grant, Grant, Grant, + GrantBox, Hash, - HashOf>, HashOf, + HashOf>, HashOf, IdBox, InstructionBox, @@ -259,26 +260,6 @@ types!( IpfsPath, Ipv4Addr, Ipv6Addr, - QueryBox, - QueryOutput, - QueryOutputBatchBox, - QueryParams, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithFilter, - QueryWithParams, JsonValue, Level, Log, @@ -291,11 +272,11 @@ types!( MetadataChanged, MetadataChanged, MetadataPredicateBox, - MintabilityError, - Mintable, - MintBox, Mint, Mint, + MintBox, + MintabilityError, + Mintable, Mismatch, Name, NewAccount, @@ -322,7 +303,7 @@ types!( Option, Option, Option, - Option>, + Option, Option, Option, Option, @@ -344,31 +325,51 @@ types!( PipelineEventFilterBox, PublicKey, PublicKeyPredicateBox, + QueryBox, QueryExecutionFail, + QueryOutput, + QueryOutputBatchBox, + QueryParams, QueryRequest, QueryRequestWithAuthority, QueryResponse, QuerySignature, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + 
QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithFilter, + QueryWithParams, Register, Register, Register, - RegisterBox, Register, Register, Register, Register, + RegisterBox, RemoveKeyValue, RemoveKeyValue, RemoveKeyValue, - RemoveKeyValueBox, RemoveKeyValue, RemoveKeyValue, + RemoveKeyValueBox, Repeats, RepetitionError, - RevokeBox, Revoke, Revoke, Revoke, + RevokeBox, Role, RoleEvent, RoleEventFilter, @@ -380,9 +381,9 @@ types!( SetKeyValue, SetKeyValue, SetKeyValue, - SetKeyValueBox, SetKeyValue, SetKeyValue, + SetKeyValueBox, SetParameter, Signature, SignatureOf, @@ -421,8 +422,6 @@ types!( TransactionParameter, TransactionParameters, TransactionPayload, - TransactionQueryOutput, - TransactionQueryOutputPredicateBox, TransactionRejectionReason, TransactionSignature, TransactionStatus, @@ -441,17 +440,17 @@ types!( TriggerEventSet, TriggerId, TriggerIdPredicateBox, - TriggerPredicateBox, TriggerNumberOfExecutionsChanged, + TriggerPredicateBox, TypeError, Unregister, Unregister, Unregister, - UnregisterBox, Unregister, Unregister, Unregister, Unregister, + UnregisterBox, Upgrade, ValidationFail, Vec, @@ -464,13 +463,13 @@ types!( Vec>, Vec>, Vec>, + Vec>, Vec>, Vec>, Vec>, Vec>, Vec>, Vec>, - Vec>, Vec>, Vec>, Vec, @@ -483,7 +482,7 @@ types!( Vec, Vec, Vec, - Vec, + Vec, Vec, Vec, Vec, @@ -537,9 +536,9 @@ pub mod complete_data_model { error::{FindError, QueryExecutionFail}, parameters::{ForwardCursor, QueryParams}, predicate::CompoundPredicate, - QueryOutput, QueryOutputBatchBox, QueryRequestWithAuthority, QueryResponse, - QuerySignature, QueryWithFilter, QueryWithParams, SignedQuery, SignedQueryV1, - SingularQueryOutputBox, + CommittedTransaction, QueryOutput, QueryOutputBatchBox, QueryRequestWithAuthority, + QueryResponse, QuerySignature, QueryWithFilter, QueryWithParams, SignedQuery, + SignedQueryV1, SingularQueryOutputBox, }, transaction::{ error::TransactionLimitError, SignedTransactionV1, TransactionPayload, diff --git a/crates/iroha_telemetry_derive/src/lib.rs b/crates/iroha_telemetry_derive/src/lib.rs index d5febea0894..b2c63305ac2 100644 --- a/crates/iroha_telemetry_derive/src/lib.rs +++ b/crates/iroha_telemetry_derive/src/lib.rs @@ -82,14 +82,14 @@ impl Parse for MetricSpecs { } struct MetricSpec { + metric_name: LitStr, #[cfg(feature = "metric-instrumentation")] timing: bool, - metric_name: LitStr, } impl Parse for MetricSpec { fn parse(input: syn::parse::ParseStream) -> syn::Result { - let _timing = ::parse(input).is_ok(); + let timing = ::parse(input).is_ok(); let metric_name_lit = syn::Lit::parse(input)?; let metric_name = match metric_name_lit { @@ -112,7 +112,7 @@ impl Parse for MetricSpec { Ok(Self { metric_name, #[cfg(feature = "metric-instrumentation")] - timing: _timing, + timing, }) } } @@ -191,7 +191,7 @@ pub fn metrics(attr: TokenStream, item: TokenStream) -> TokenStream { fn impl_metrics( emitter: &mut Emitter, - #[cfg_attr(not(feature = "metric-instrumentation"), allow(unused))] specs: MetricSpecs, + #[cfg_attr(not(feature = "metric-instrumentation"), expect(unused))] specs: MetricSpecs, func: &syn::ItemFn, ) -> TokenStream { let syn::ItemFn { diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index a1f460792dd..4fc396d7e1f 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -720,7 +720,7 @@ }, { "name": "transactions", - "type": "Vec" + "type": "Vec" } ] }, @@ 
-985,6 +985,10 @@ "ChainId": "String", "CommittedTransaction": { "Struct": [ + { + "name": "block_hash", + "type": "HashOf" + }, { "name": "value", "type": "SignedTransaction" @@ -998,13 +1002,18 @@ "CommittedTransactionPredicateBox": { "Enum": [ { - "tag": "Value", + "tag": "BlockHash", "discriminant": 0, + "type": "BlockHashPredicateBox" + }, + { + "tag": "Value", + "discriminant": 1, "type": "SignedTransactionPredicateBox" }, { "tag": "Error", - "discriminant": 1, + "discriminant": 2, "type": "TransactionErrorPredicateBox" } ] @@ -1111,6 +1120,30 @@ } ] }, + "CompoundPredicate": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "CommittedTransactionPredicateBox" + }, + { + "tag": "Not", + "discriminant": 1, + "type": "CompoundPredicate" + }, + { + "tag": "And", + "discriminant": 2, + "type": "Vec>" + }, + { + "tag": "Or", + "discriminant": 3, + "type": "Vec>" + } + ] + }, "CompoundPredicate": { "Enum": [ { @@ -1255,30 +1288,6 @@ } ] }, - "CompoundPredicate": { - "Enum": [ - { - "tag": "Atom", - "discriminant": 0, - "type": "TransactionQueryOutputPredicateBox" - }, - { - "tag": "Not", - "discriminant": 1, - "type": "CompoundPredicate" - }, - { - "tag": "And", - "discriminant": 2, - "type": "Vec>" - }, - { - "tag": "Or", - "discriminant": 3, - "type": "Vec>" - } - ] - }, "CompoundPredicate": { "Enum": [ { @@ -3003,7 +3012,7 @@ { "tag": "FindTransactions", "discriminant": 12, - "type": "QueryWithFilter" + "type": "QueryWithFilter" }, { "tag": "FindBlocks", @@ -3111,7 +3120,7 @@ { "tag": "Transaction", "discriminant": 7, - "type": "Vec" + "type": "Vec" }, { "tag": "Peer", @@ -3363,7 +3372,7 @@ } ] }, - "QueryWithFilter": { + "QueryWithFilter": { "Struct": [ { "name": "query", @@ -3371,7 +3380,7 @@ }, { "name": "predicate", - "type": "CompoundPredicate" + "type": "CompoundPredicate" } ] }, @@ -3961,6 +3970,10 @@ { "name": "payload", "type": "BlockPayload" + }, + { + "name": "errors", + "type": "SortedMap" } ] }, @@ -4094,7 +4107,7 @@ { "tag": "Transaction", "discriminant": 5, - "type": "TransactionQueryOutput" + "type": "CommittedTransaction" }, { "tag": "BlockHeader", @@ -4196,6 +4209,12 @@ "value": "JsonValue" } }, + "SortedMap": { + "Map": { + "key": "u64", + "value": "TransactionRejectionReason" + } + }, "SortedVec": { "Vec": "Permission" }, @@ -4406,32 +4425,6 @@ } ] }, - "TransactionQueryOutput": { - "Struct": [ - { - "name": "block_hash", - "type": "HashOf" - }, - { - "name": "transaction", - "type": "CommittedTransaction" - } - ] - }, - "TransactionQueryOutputPredicateBox": { - "Enum": [ - { - "tag": "Transaction", - "discriminant": 0, - "type": "CommittedTransactionPredicateBox" - }, - { - "tag": "BlockHash", - "discriminant": 1, - "type": "BlockHashPredicateBox" - } - ] - }, "TransactionRejectionReason": { "Enum": [ { @@ -4921,6 +4914,9 @@ "Vec>": { "Vec": "CompoundPredicate" }, + "Vec>": { + "Vec": "CompoundPredicate" + }, "Vec>": { "Vec": "CompoundPredicate" }, @@ -4939,9 +4935,6 @@ "Vec>": { "Vec": "CompoundPredicate" }, - "Vec>": { - "Vec": "CompoundPredicate" - }, "Vec>": { "Vec": "CompoundPredicate" }, @@ -4978,8 +4971,8 @@ "Vec": { "Vec": "SignedBlock" }, - "Vec": { - "Vec": "TransactionQueryOutput" + "Vec": { + "Vec": "SignedTransaction" }, "Vec": { "Vec": "Trigger"