diff --git a/Cargo.toml b/Cargo.toml index 429f77b..55d454d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" [dependencies] base16 = { version = "0.2.1", default-features = false, features = ["alloc"] } blake2b_simd = { version = "1.0.2", default-features = false } -casper-types = "4.0.1" +casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "release-2.0.0-rc4" } ed25519-dalek = { version = "2.0.0", default-features = false, features = [ "alloc", "zeroize", @@ -17,6 +17,7 @@ k256 = { version = "0.13.1", default-features = false, features = [ "sha256", ] } serde = { version = "1.0.195", default-features = false, features = ["derive"] } +serde-map-to-array = "1" time = { version = "0.3.31", default-features = false, features = [ "serde", "formatting", @@ -25,10 +26,11 @@ time = { version = "0.3.31", default-features = false, features = [ [dev-dependencies] bincode = "1.3.3" -casper-hashing = "3.0.0" -casper-execution-engine = "7.0.1" -casper-types = { version = "4.0.1", features = ["gens"] } -casper-node = "1.5.6" +# casper-hashing = "3.0.0" +casper-execution-engine = { git = "https://github.com/casper-network/casper-node.git", branch = "release-2.0.0-rc4" } +casper-types = { git = "https://github.com/casper-network/casper-node.git", branch = "release-2.0.0-rc4", features = ["gens"] } +casper-node = { git = "https://github.com/casper-network/casper-node.git", branch = "release-2.0.0-rc4" } +casper-storage = { git = "https://github.com/casper-network/casper-node.git", branch = "release-2.0.0-rc4" } hex = "0.4.3" once_cell = "1.19.0" serde_json = "1.0.111" diff --git a/rust-toolchain.toml b/rust-toolchain.toml index c1f5c7b..73cb934 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.75.0" +channel = "stable" components = ["rustfmt", "clippy"] diff --git a/src/block.rs b/src/block.rs index 6bf8176..6b9fdec 100644 --- a/src/block.rs +++ b/src/block.rs @@ -6,18 +6,17 @@ use casper_types::{crypto::gens::public_key_arb, SecretKey}; use proptest::prelude::*; use casper_types::{ - bytesrepr::{FromBytes, ToBytes}, - EraId, PublicKey, Signature, + bytesrepr::{self, FromBytes, ToBytes}, + EraId, PublicKey, RewardedSignatures, Signature, TransactionHash, }; use super::{ - block_header::{BlockHash, BlockHeader}, + block_header::{BlockHash, BlockHeaderV1}, crypto::{verify, SignatureVerificationError}, hash::Digest, }; -#[derive(Clone, Debug, PartialOrd, Ord, Eq, PartialEq)] -#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] +#[derive(Clone, Debug, PartialOrd, Ord, Eq, PartialEq, serde::Serialize, serde::Deserialize)] // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/block.rs#L1324-L1332 pub struct BlockSignatures { block_hash: BlockHash, @@ -102,7 +101,7 @@ impl Arbitrary for BlockSignatures { #[derive(Debug, Clone, PartialEq, Eq)] // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/block.rs#L1184-L1188 pub struct BlockHeaderWithSignatures { - block_header: BlockHeader, + block_header: BlockHeaderV1, block_signatures: BlockSignatures, } @@ -120,12 +119,12 @@ pub enum BlockHeaderWithSignaturesConstructionError { impl BlockHeaderWithSignatures { pub fn new( - block_header: BlockHeader, + block_header: BlockHeaderV1, block_signatures: BlockSignatures, ) -> Result { if block_header.era_id() != block_signatures.era_id() { return 
Err(BlockHeaderWithSignaturesConstructionError::InvalidEraId { - header_era_id: block_header.era_id().clone(), + header_era_id: block_header.era_id(), signatures_era_id: block_signatures.era_id(), }); } @@ -144,7 +143,7 @@ impl BlockHeaderWithSignatures { }) } - pub fn block_header(&self) -> &BlockHeader { + pub fn block_header(&self) -> &BlockHeaderV1 { &self.block_header } @@ -159,10 +158,10 @@ impl Arbitrary for BlockHeaderWithSignatures { type Strategy = BoxedStrategy; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (any::(), any::()) + (any::(), any::()) .prop_map(|(block_header, mut block_signatures)| { block_signatures.block_hash = block_header.block_hash(); - block_signatures.era_id = block_header.era_id().clone(); + block_signatures.era_id = block_header.era_id(); BlockHeaderWithSignatures { block_header, block_signatures, @@ -177,15 +176,15 @@ impl Arbitrary for BlockHeaderWithSignatures { )] #[cfg_attr(test, derive(proptest_derive::Arbitrary))] // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/deploy/deploy_hash.rs#L32 -pub struct DeployHash(Digest); +pub struct DeployHash(pub(crate) Digest); // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/deploy/deploy_hash.rs#L89-L101 impl ToBytes for DeployHash { - fn write_bytes(&self, writer: &mut Vec) -> Result<(), casper_types::bytesrepr::Error> { + fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { self.0.write_bytes(writer) } - fn to_bytes(&self) -> Result, casper_types::bytesrepr::Error> { + fn to_bytes(&self) -> Result, bytesrepr::Error> { self.0.to_bytes() } @@ -196,26 +195,120 @@ impl ToBytes for DeployHash { // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/deploy/deploy_hash.rs#L103-L107 impl FromBytes for DeployHash { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), casper_types::bytesrepr::Error> { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { Digest::from_bytes(bytes).map(|(inner, remainder)| (DeployHash(inner), remainder)) } } #[derive(Clone, PartialEq, Eq, Debug, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] +#[allow(clippy::large_enum_variant)] +pub enum BlockBody { + /// The legacy, initial version of the body portion of a block. + #[serde(rename = "Version1")] + V1(BlockBodyV1), + /// The version 2 of the body portion of a block, which includes the + /// `past_finality_signatures`. + #[serde(rename = "Version2")] + V2(BlockBodyV2), +} + +/// Tag for block body v1. +pub const BLOCK_BODY_V1_TAG: u8 = 0; +/// Tag for block body v2. 
+pub const BLOCK_BODY_V2_TAG: u8 = 1; + +impl BlockBody { + pub fn hash(&self) -> Digest { + match self { + BlockBody::V1(v1) => v1.hash(), + BlockBody::V2(v2) => v2.hash(), + } + } +} + +impl From for BlockBody { + fn from(block_body: BlockBodyV1) -> Self { + BlockBody::V1(block_body) + } +} + +impl From for BlockBody { + fn from(block_body: BlockBodyV2) -> Self { + BlockBody::V2(block_body) + } +} + +impl ToBytes for BlockBody { + fn to_bytes(&self) -> Result, bytesrepr::Error> { + let mut buffer = bytesrepr::allocate_buffer(self)?; + match self { + BlockBody::V1(v1) => { + buffer.insert(0, BLOCK_BODY_V1_TAG); + buffer.extend(v1.to_bytes()?); + } + BlockBody::V2(v2) => { + buffer.insert(0, BLOCK_BODY_V2_TAG); + buffer.extend(v2.to_bytes()?); + } + } + Ok(buffer) + } + + fn serialized_length(&self) -> usize { + 1 + match self { + BlockBody::V1(v1) => v1.serialized_length(), + BlockBody::V2(v2) => v2.serialized_length(), + } + } +} + +impl FromBytes for BlockBody { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { + let (tag, remainder) = u8::from_bytes(bytes)?; + match tag { + BLOCK_BODY_V1_TAG => { + let (body, remainder): (BlockBodyV1, _) = FromBytes::from_bytes(remainder)?; + Ok((Self::V1(body), remainder)) + } + BLOCK_BODY_V2_TAG => { + let (body, remainder): (BlockBodyV2, _) = FromBytes::from_bytes(remainder)?; + Ok((Self::V2(body), remainder)) + } + _ => Err(bytesrepr::Error::Formatting), + } + } +} + +#[cfg(test)] +impl Arbitrary for BlockBody { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + prop_oneof![ + any::().prop_map(BlockBody::V1), + any::().prop_map(BlockBody::V2), + ] + .boxed() + } +} + +#[derive(Clone, PartialEq, Eq, Debug, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] +// See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/block.rs#L1204C14-L1204C15 // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/block.rs#L1204C14-L1204C15 -pub struct BlockBody { +pub struct BlockBodyV1 { proposer: PublicKey, deploy_hashes: Vec, transfer_hashes: Vec, } -impl BlockBody { +impl BlockBodyV1 { pub fn new( proposer: PublicKey, deploy_hashes: Vec, transfer_hashes: Vec, ) -> Self { - BlockBody { + BlockBodyV1 { proposer, deploy_hashes, transfer_hashes, @@ -240,7 +333,7 @@ impl BlockBody { } #[cfg(test)] -impl Arbitrary for BlockBody { +impl Arbitrary for BlockBodyV1 { type Parameters = (); type Strategy = BoxedStrategy; @@ -250,7 +343,7 @@ impl Arbitrary for BlockBody { prop::collection::vec(any::(), 0..5), prop::collection::vec(any::(), 0..5), ) - .prop_map(|(proposer, deploy_hashes, transfer_hashes)| BlockBody { + .prop_map(|(proposer, deploy_hashes, transfer_hashes)| BlockBodyV1 { proposer, deploy_hashes, transfer_hashes, @@ -260,9 +353,9 @@ impl Arbitrary for BlockBody { } // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/block.rs#L1292-L1306 -impl ToBytes for BlockBody { - fn to_bytes(&self) -> Result, casper_types::bytesrepr::Error> { - let mut buffer = casper_types::bytesrepr::allocate_buffer(self)?; +impl ToBytes for BlockBodyV1 { + fn to_bytes(&self) -> Result, bytesrepr::Error> { + let mut buffer = bytesrepr::allocate_buffer(self)?; buffer.extend(self.proposer.to_bytes()?); buffer.extend(self.deploy_hashes.to_bytes()?); buffer.extend(self.transfer_hashes.to_bytes()?); @@ -277,12 
+370,12 @@ impl ToBytes for BlockBody { } // See: https://github.com/casper-network/casper-node/blob/edc4b45ea05526ba6dd7971da09e27754a37a230/node/src/types/block.rs#L1308-L1321 -impl FromBytes for BlockBody { - fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), casper_types::bytesrepr::Error> { +impl FromBytes for BlockBodyV1 { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { let (proposer, bytes) = PublicKey::from_bytes(bytes)?; let (deploy_hashes, bytes) = Vec::::from_bytes(bytes)?; let (transfer_hashes, bytes) = Vec::::from_bytes(bytes)?; - let body = BlockBody { + let body = BlockBodyV1 { proposer, deploy_hashes, transfer_hashes, @@ -291,12 +384,141 @@ impl FromBytes for BlockBody { } } +/// The body portion of a block. Version 2. +#[derive(Clone, PartialEq, Eq, Debug, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] +pub struct BlockBodyV2 { + /// Map of transactions mapping categories to a list of transaction hashes. + transactions: BTreeMap>, + /// List of identifiers for finality signatures for a particular past block. + rewarded_signatures: RewardedSignatures, +} + +impl BlockBodyV2 { + pub fn new( + transactions: BTreeMap>, + rewarded_signatures: RewardedSignatures, + ) -> Self { + BlockBodyV2 { + transactions, + rewarded_signatures, + } + } + + pub fn transactions(&self) -> &BTreeMap> { + &self.transactions + } + + pub fn rewarded_signatures(&self) -> &RewardedSignatures { + &self.rewarded_signatures + } + + pub fn hash(&self) -> Digest { + Digest::hash(&self.to_bytes().unwrap()) + } +} + +impl ToBytes for BlockBodyV2 { + fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { + self.transactions.write_bytes(writer)?; + self.rewarded_signatures.write_bytes(writer)?; + Ok(()) + } + + fn to_bytes(&self) -> Result, bytesrepr::Error> { + let mut buffer = bytesrepr::allocate_buffer(self)?; + self.write_bytes(&mut buffer)?; + Ok(buffer) + } + + fn serialized_length(&self) -> usize { + self.transactions.serialized_length() + self.rewarded_signatures.serialized_length() + } +} + +impl FromBytes for BlockBodyV2 { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { + let (transactions, bytes) = FromBytes::from_bytes(bytes)?; + let (rewarded_signatures, bytes) = RewardedSignatures::from_bytes(bytes)?; + let body = BlockBodyV2 { + transactions, + rewarded_signatures, + }; + Ok((body, bytes)) + } +} + +#[cfg(test)] +impl Arbitrary for BlockBodyV2 { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + fn transaction_category_arb() -> impl Strategy { + use casper_types::TransactionCategory; + + prop_oneof![ + Just(TransactionCategory::Mint as u8), + Just(TransactionCategory::Auction as u8), + Just(TransactionCategory::InstallUpgrade as u8), + Just(TransactionCategory::Large as u8), + Just(TransactionCategory::Medium as u8), + Just(TransactionCategory::Small as u8), + ] + } + + ( + prop::collection::btree_map( + transaction_category_arb(), + prop::collection::vec( + prop_oneof!( + any::() + .prop_map(|hash| TransactionHash::from_raw(hash.0.into())), + any::<[u8; crate::hash::DIGEST_LENGTH]>() + .prop_map(TransactionHash::from_raw), + ), + 0..5, + ), + 0..5, + ), + // validator set + prop::collection::btree_set(public_key_arb(), 0..10), + // indices of validators who signed + prop::collection::vec(any::(), 0..10), + ) + .prop_map(|(transactions, validator_set, signer_indices)| { + let validator_set: Vec<_> = validator_set.into_iter().collect(); + + 
// prop::Index.get panics if the collection is empty + use alloc::collections::BTreeSet; + let signing_validators: BTreeSet<_> = if validator_set.is_empty() { + BTreeSet::new() + } else { + signer_indices + .into_iter() + .map(|index| index.get(&validator_set)) + .cloned() + .collect() + }; + + let rewarded_signatures = RewardedSignatures::new([ + casper_types::SingleBlockRewardedSignatures::from_validator_set( + &signing_validators, + &validator_set, + ), + ]); + + BlockBodyV2::new(transactions, rewarded_signatures) + }) + .boxed() + } +} + // Data structure reflecting the JSON representation of a block's body. // See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/node/src/types/block.rs#L2268-L2277 #[derive(Clone, PartialEq, Eq, Debug)] pub struct Block { block_header_with_signatures: BlockHeaderWithSignatures, - body: BlockBody, + body: BlockBodyV1, } #[derive(Debug)] @@ -310,7 +532,7 @@ pub enum BlockConstructionError { impl Block { pub fn new( block_header_with_signatures: BlockHeaderWithSignatures, - body: BlockBody, + body: BlockBodyV1, ) -> Result { let header_block_hash = block_header_with_signatures.block_header().body_hash(); let body_hash = body.hash(); @@ -330,7 +552,7 @@ impl Block { &self.block_header_with_signatures } - pub fn body(&self) -> &BlockBody { + pub fn body(&self) -> &BlockBodyV1 { &self.body } } @@ -341,7 +563,7 @@ impl Arbitrary for Block { type Strategy = BoxedStrategy; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (any::(), any::()) + (any::(), any::()) .prop_map(|(header, body)| Block { block_header_with_signatures: header, body, @@ -361,12 +583,12 @@ mod test { use crate::{block_header::BlockHash, crypto::sign, hash::DIGEST_LENGTH}; - use super::{BlockBody, BlockSignatures, DeployHash}; + use super::{BlockBody, BlockBodyV1, BlockBodyV2, BlockSignatures, DeployHash}; #[proptest] fn serde_json_block_signatures_round_trip(block_signatures: BlockSignatures) { let serialized_block_signatures = serde_json::to_string(&block_signatures).unwrap(); - let casper_types_block_signatures: casper_node::types::BlockSignatures = + let casper_types_block_signatures: casper_types::BlockSignaturesV1 = serde_json::from_str(&serialized_block_signatures).unwrap(); let serialized_casper_types_block_signatures = serde_json::to_string(&casper_types_block_signatures).unwrap(); @@ -382,7 +604,7 @@ mod test { #[proptest] fn bincode_block_signatures_round_trip(block_signatures: BlockSignatures) { let serialized_block_signatures = bincode::serialize(&block_signatures).unwrap(); - let casper_types_block_signatures: casper_node::types::BlockSignatures = + let casper_types_block_signatures: casper_types::BlockSignaturesV1 = bincode::deserialize(&serialized_block_signatures).unwrap(); let serialized_casper_types_block_signatures = bincode::serialize(&casper_types_block_signatures).unwrap(); @@ -437,7 +659,7 @@ mod test { #[proptest] fn serde_json_deploy_hash_round_trip_casper_node(deploy_hash: DeployHash) { let serialized_deploy_hash = serde_json::to_string(&deploy_hash).unwrap(); - let casper_node_deploy_hash: casper_node::types::DeployHash = + let casper_node_deploy_hash: casper_types::DeployHash = serde_json::from_str(&serialized_deploy_hash).unwrap(); let serialized_casper_node_deploy_hash = serde_json::to_string(&casper_node_deploy_hash).unwrap(); @@ -453,7 +675,7 @@ mod test { #[proptest] fn bincode_deploy_hash_round_trip_casper_node(deploy_hash: DeployHash) { let serialized_deploy_hash = 
bincode::serialize(&deploy_hash).unwrap(); - let casper_node_deploy_hash: casper_node::types::DeployHash = + let casper_node_deploy_hash: casper_types::DeployHash = bincode::deserialize(&serialized_deploy_hash).unwrap(); let serialized_casper_node_deploy_hash = bincode::serialize(&casper_node_deploy_hash).unwrap(); @@ -469,7 +691,7 @@ mod test { let casper_types_deploy_hash: casper_types::DeployHash = casper_types::bytesrepr::deserialize(serialized_deploy_hash.clone()).unwrap(); let serialized_casper_types_deploy_hash = - casper_types::bytesrepr::serialize(&casper_types_deploy_hash).unwrap(); + casper_types::bytesrepr::serialize(casper_types_deploy_hash).unwrap(); assert_eq!(serialized_deploy_hash, serialized_casper_types_deploy_hash); let deserialized_deploy_hash: DeployHash = casper_types::bytesrepr::deserialize(serialized_casper_types_deploy_hash.clone()) @@ -480,10 +702,10 @@ mod test { #[proptest] fn bytesrepr_deploy_hash_round_trip_casper_node(deploy_hash: DeployHash) { let serialized_deploy_hash = deploy_hash.to_bytes().unwrap(); - let casper_node_deploy_hash: casper_node::types::DeployHash = + let casper_node_deploy_hash: casper_types::DeployHash = casper_types::bytesrepr::deserialize(serialized_deploy_hash.clone()).unwrap(); let serialized_casper_node_deploy_hash = - casper_types::bytesrepr::serialize(&casper_node_deploy_hash).unwrap(); + casper_types::bytesrepr::serialize(casper_node_deploy_hash).unwrap(); assert_eq!(serialized_deploy_hash, serialized_casper_node_deploy_hash); let deserialized_deploy_hash: DeployHash = casper_types::bytesrepr::deserialize(serialized_casper_node_deploy_hash.clone()) @@ -491,10 +713,34 @@ mod test { assert_eq!(deploy_hash, deserialized_deploy_hash); } + #[proptest] + fn serde_json_block_body_v1_round_trip(block_body: BlockBodyV1) { + let serialized_block_body = serde_json::to_string(&block_body).unwrap(); + let casper_node_block_body: casper_types::BlockBodyV1 = + serde_json::from_str(&serialized_block_body).unwrap(); + let serialized_node_block_body = serde_json::to_string(&casper_node_block_body).unwrap(); + assert_eq!(serialized_block_body, serialized_node_block_body); + let deserialized_block_body: BlockBodyV1 = + serde_json::from_str(&serialized_node_block_body).unwrap(); + assert_eq!(block_body, deserialized_block_body); + } + + #[proptest] + fn serde_json_block_body_v2_round_trip(block_body: BlockBodyV2) { + let serialized_block_body = serde_json::to_string(&block_body).unwrap(); + let casper_node_block_body: casper_types::BlockBodyV2 = + serde_json::from_str(&serialized_block_body).unwrap(); + let serialized_node_block_body = serde_json::to_string(&casper_node_block_body).unwrap(); + assert_eq!(serialized_block_body, serialized_node_block_body); + let deserialized_block_body: BlockBodyV2 = + serde_json::from_str(&serialized_node_block_body).unwrap(); + assert_eq!(block_body, deserialized_block_body); + } + #[proptest] fn serde_json_block_body_round_trip(block_body: BlockBody) { let serialized_block_body = serde_json::to_string(&block_body).unwrap(); - let casper_node_block_body: casper_node::types::BlockBody = + let casper_node_block_body: casper_types::BlockBody = serde_json::from_str(&serialized_block_body).unwrap(); let serialized_node_block_body = serde_json::to_string(&casper_node_block_body).unwrap(); assert_eq!(serialized_block_body, serialized_node_block_body); @@ -503,10 +749,36 @@ mod test { assert_eq!(block_body, deserialized_block_body); } + #[proptest] + fn bincode_block_body_v1_round_trip(block_body: BlockBodyV1) { + let 
serialized_block_body = bincode::serialize(&block_body).unwrap(); + let casper_node_block_body: casper_types::BlockBodyV1 = + bincode::deserialize(&serialized_block_body).unwrap(); + let serialized_casper_node_block_body = + bincode::serialize(&casper_node_block_body).unwrap(); + assert_eq!(serialized_block_body, serialized_casper_node_block_body); + let deserialized_block_body: BlockBodyV1 = + bincode::deserialize(&serialized_casper_node_block_body).unwrap(); + assert_eq!(block_body, deserialized_block_body); + } + + #[proptest] + fn bincode_block_body_v2_round_trip(block_body: BlockBodyV2) { + let serialized_block_body = bincode::serialize(&block_body).unwrap(); + let casper_node_block_body: casper_types::BlockBodyV2 = + bincode::deserialize(&serialized_block_body).unwrap(); + let serialized_casper_node_block_body = + bincode::serialize(&casper_node_block_body).unwrap(); + assert_eq!(serialized_block_body, serialized_casper_node_block_body); + let deserialized_block_body: BlockBodyV2 = + bincode::deserialize(&serialized_casper_node_block_body).unwrap(); + assert_eq!(block_body, deserialized_block_body); + } + #[proptest] fn bincode_block_body_round_trip(block_body: BlockBody) { let serialized_block_body = bincode::serialize(&block_body).unwrap(); - let casper_node_block_body: casper_node::types::BlockBody = + let casper_node_block_body: casper_types::BlockBody = bincode::deserialize(&serialized_block_body).unwrap(); let serialized_casper_node_block_body = bincode::serialize(&casper_node_block_body).unwrap(); @@ -516,10 +788,38 @@ mod test { assert_eq!(block_body, deserialized_block_body); } + #[proptest] + fn bytesrepr_block_body_v1_round_trip(block_body: BlockBodyV1) { + let serialized_block_body = block_body.to_bytes().unwrap(); + let casper_node_block_body: casper_types::BlockBodyV1 = + casper_types::bytesrepr::deserialize(serialized_block_body.clone()).unwrap(); + let serialized_casper_node_block_body = + casper_types::bytesrepr::serialize(&casper_node_block_body).unwrap(); + assert_eq!(serialized_block_body, serialized_casper_node_block_body); + let deserialized_block_body: BlockBodyV1 = + casper_types::bytesrepr::deserialize(serialized_casper_node_block_body.clone()) + .unwrap(); + assert_eq!(block_body, deserialized_block_body); + } + + #[proptest] + fn bytesrepr_block_body_v2_round_trip(block_body: BlockBodyV2) { + let serialized_block_body = block_body.to_bytes().unwrap(); + let casper_node_block_body: casper_types::BlockBodyV2 = + casper_types::bytesrepr::deserialize(serialized_block_body.clone()).unwrap(); + let serialized_casper_node_block_body = + casper_types::bytesrepr::serialize(&casper_node_block_body).unwrap(); + assert_eq!(serialized_block_body, serialized_casper_node_block_body); + let deserialized_block_body: BlockBodyV2 = + casper_types::bytesrepr::deserialize(serialized_casper_node_block_body.clone()) + .unwrap(); + assert_eq!(block_body, deserialized_block_body); + } + #[proptest] fn bytesrepr_block_body_round_trip(block_body: BlockBody) { let serialized_block_body = block_body.to_bytes().unwrap(); - let casper_node_block_body: casper_node::types::BlockBody = + let casper_node_block_body: casper_types::BlockBody = casper_types::bytesrepr::deserialize(serialized_block_body.clone()).unwrap(); let serialized_casper_node_block_body = casper_types::bytesrepr::serialize(&casper_node_block_body).unwrap(); @@ -534,9 +834,12 @@ mod test { fn block_body_hash_agree(block_body: BlockBody) { let block_body_hash = block_body.hash(); let serialized_block_body = 
block_body.to_bytes().unwrap(); - let casper_node_block_body: casper_node::types::BlockBody = + let casper_node_block_body: casper_types::BlockBody = casper_types::bytesrepr::deserialize(serialized_block_body).unwrap(); - let casper_node_block_body_hash = casper_node_block_body.hash(); + let casper_node_block_body_hash = match casper_node_block_body { + casper_types::BlockBody::V1(v1) => v1.hash(), + casper_types::BlockBody::V2(v2) => v2.hash(), + }; assert_eq!( block_body_hash.as_ref(), casper_node_block_body_hash.as_ref() diff --git a/src/block_header.rs b/src/block_header.rs index 7f83bf3..1ae7680 100644 --- a/src/block_header.rs +++ b/src/block_header.rs @@ -3,11 +3,13 @@ use alloc::{string::String, vec::Vec}; #[cfg(test)] use proptest::{arbitrary::Arbitrary, prelude::*}; -use casper_types::bytesrepr::{FromBytes, ToBytes}; -use casper_types::{EraId, ProtocolVersion}; +use casper_types::bytesrepr::{self, FromBytes, ToBytes}; +use casper_types::{EraId, ProtocolVersion, PublicKey}; use time::OffsetDateTime; -use super::consensus::EraEnd; +use crate::consensus::EraEndV2; + +use super::consensus::EraEndV1; use super::hash::Digest; use super::hash::DIGEST_LENGTH; @@ -131,36 +133,202 @@ impl FromBytes for Timestamp { } } +#[derive(Clone, Debug, Eq, PartialEq)] +#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] +pub enum BlockHeader { + /// The legacy, initial version of the header portion of a block. + #[cfg_attr(test, serde(rename = "Version1"))] + V1(BlockHeaderV1), + /// The version 2 of the header portion of a block. + #[cfg_attr(test, serde(rename = "Version2"))] + V2(BlockHeaderV2), +} + +/// Tag for block header v1. +pub const BLOCK_HEADER_V1_TAG: u8 = 0; +/// Tag for block header v2. +pub const BLOCK_HEADER_V2_TAG: u8 = 1; + +impl BlockHeader { + /// Returns the hash of this block header. + pub fn block_hash(&self) -> BlockHash { + match self { + BlockHeader::V1(v1) => v1.block_hash(), + BlockHeader::V2(v2) => v2.block_hash(), + } + } + + /// Returns the parent block's hash. + pub fn parent_hash(&self) -> &BlockHash { + match self { + BlockHeader::V1(v1) => v1.parent_hash(), + BlockHeader::V2(v2) => v2.parent_hash(), + } + } + + /// Returns the root hash of global state after the deploys in this block have been executed. + pub fn state_root_hash(&self) -> &Digest { + match self { + BlockHeader::V1(v1) => v1.state_root_hash(), + BlockHeader::V2(v2) => v2.state_root_hash(), + } + } + + /// Returns the hash of the block's body. + pub fn body_hash(&self) -> &Digest { + match self { + BlockHeader::V1(v1) => v1.body_hash(), + BlockHeader::V2(v2) => v2.body_hash(), + } + } + + /// Returns a random bit needed for initializing a future era. + pub fn random_bit(&self) -> bool { + match self { + BlockHeader::V1(v1) => v1.random_bit(), + BlockHeader::V2(v2) => v2.random_bit(), + } + } + + /// Returns a seed needed for initializing a future era. + pub fn accumulated_seed(&self) -> &Digest { + match self { + BlockHeader::V1(v1) => v1.accumulated_seed(), + BlockHeader::V2(v2) => v2.accumulated_seed(), + } + } + + /// Returns the timestamp from when the block was proposed. + pub fn timestamp(&self) -> Timestamp { + match self { + BlockHeader::V1(v1) => v1.timestamp(), + BlockHeader::V2(v2) => v2.timestamp(), + } + } + + /// Returns the era ID in which this block was created. + pub fn era_id(&self) -> EraId { + match self { + BlockHeader::V1(v1) => v1.era_id(), + BlockHeader::V2(v2) => v2.era_id(), + } + } + + /// Returns the height of this block, i.e. the number of ancestors. 
+ pub fn height(&self) -> u64 { + match self { + BlockHeader::V1(v1) => v1.height(), + BlockHeader::V2(v2) => v2.height(), + } + } + + /// Returns the protocol version of the network from when this block was created. + pub fn protocol_version(&self) -> ProtocolVersion { + match self { + BlockHeader::V1(v1) => v1.protocol_version(), + BlockHeader::V2(v2) => v2.protocol_version(), + } + } +} + +impl From for BlockHeader { + fn from(header: BlockHeaderV1) -> Self { + BlockHeader::V1(header) + } +} + +impl From for BlockHeader { + fn from(header: BlockHeaderV2) -> Self { + BlockHeader::V2(header) + } +} + +impl ToBytes for BlockHeader { + fn to_bytes(&self) -> Result, bytesrepr::Error> { + let mut buffer = bytesrepr::allocate_buffer(self)?; + match self { + BlockHeader::V1(v1) => { + buffer.insert(0, BLOCK_HEADER_V1_TAG); + buffer.extend(v1.to_bytes()?); + } + BlockHeader::V2(v2) => { + buffer.insert(0, BLOCK_HEADER_V2_TAG); + buffer.extend(v2.to_bytes()?); + } + } + Ok(buffer) + } + + fn serialized_length(&self) -> usize { + 1 + match self { + BlockHeader::V1(v1) => v1.serialized_length(), + BlockHeader::V2(v2) => v2.serialized_length(), + } + } +} + +impl FromBytes for BlockHeader { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { + let (tag, remainder) = u8::from_bytes(bytes)?; + match tag { + BLOCK_HEADER_V1_TAG => { + let (header, remainder): (BlockHeaderV1, _) = FromBytes::from_bytes(remainder)?; + Ok((Self::V1(header), remainder)) + } + BLOCK_HEADER_V2_TAG => { + let (header, remainder): (BlockHeaderV2, _) = FromBytes::from_bytes(remainder)?; + Ok((Self::V2(header), remainder)) + } + _ => Err(bytesrepr::Error::Formatting), + } + } +} + +#[cfg(test)] +impl Arbitrary for BlockHeader { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + prop_oneof![ + any::().prop_map(BlockHeader::V1), + any::().prop_map(BlockHeader::V2), + ] + .boxed() + } +} + #[derive(Clone, Eq, PartialEq, Debug)] #[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] // See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/node/src/types/block.rs#L813-L828 -pub struct BlockHeader { +pub struct BlockHeaderV1 { parent_hash: BlockHash, state_root_hash: Digest, body_hash: Digest, random_bit: bool, accumulated_seed: Digest, - era_end: Option, + era_end: Option, timestamp: Timestamp, era_id: EraId, height: u64, protocol_version: ProtocolVersion, } -impl BlockHeader { +impl BlockHeaderV1 { + #[allow(clippy::too_many_arguments)] pub fn new( parent_hash: BlockHash, state_root_hash: Digest, body_hash: Digest, random_bit: bool, accumulated_seed: Digest, - era_end: Option, + era_end: Option, timestamp: Timestamp, era_id: EraId, height: u64, protocol_version: ProtocolVersion, ) -> Self { - BlockHeader { + BlockHeaderV1 { parent_hash, state_root_hash, body_hash, @@ -194,7 +362,7 @@ impl BlockHeader { &self.accumulated_seed } - pub fn era_end(&self) -> Option<&EraEnd> { + pub fn era_end(&self) -> Option<&EraEndV1> { self.era_end.as_ref() } @@ -220,23 +388,24 @@ impl BlockHeader { } #[cfg(test)] -impl Arbitrary for BlockHeader { +fn arb_protocolversion() -> impl Strategy { + (0..=255u32, 0..=255u32, 0..=255u32) + .prop_map(|(major, minor, patch)| ProtocolVersion::from_parts(major, minor, patch)) +} + +#[cfg(test)] +impl Arbitrary for BlockHeaderV1 { type Parameters = (); type Strategy = BoxedStrategy; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - fn 
arb_protocolversion() -> impl Strategy { - (0..=255u32, 0..=255u32, 0..=255u32) - .prop_map(|(major, minor, patch)| ProtocolVersion::from_parts(major, minor, patch)) - } - ( any::(), any::(), any::(), any::(), any::(), - any::>(), + any::>(), any::(), any::(), // EraId any::(), // height @@ -256,7 +425,7 @@ impl Arbitrary for BlockHeader { protocol_version, )| { let era_id = EraId::from(era_id); - BlockHeader { + BlockHeaderV1 { parent_hash, state_root_hash, body_hash, @@ -274,7 +443,7 @@ impl Arbitrary for BlockHeader { } } -impl ToBytes for BlockHeader { +impl ToBytes for BlockHeaderV1 { fn to_bytes(&self) -> Result, casper_types::bytesrepr::Error> { let mut buffer = casper_types::bytesrepr::allocate_buffer(self)?; buffer.extend(self.parent_hash.to_bytes()?); @@ -304,19 +473,19 @@ impl ToBytes for BlockHeader { } } -impl FromBytes for BlockHeader { +impl FromBytes for BlockHeaderV1 { fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), casper_types::bytesrepr::Error> { let (parent_hash, remainder) = BlockHash::from_bytes(bytes)?; let (state_root_hash, remainder) = Digest::from_bytes(remainder)?; let (body_hash, remainder) = Digest::from_bytes(remainder)?; let (random_bit, remainder) = bool::from_bytes(remainder)?; let (accumulated_seed, remainder) = Digest::from_bytes(remainder)?; - let (era_end, remainder) = Option::::from_bytes(remainder)?; + let (era_end, remainder) = Option::::from_bytes(remainder)?; let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; let (era_id, remainder) = EraId::from_bytes(remainder)?; let (height, remainder) = u64::from_bytes(remainder)?; let (protocol_version, remainder) = ProtocolVersion::from_bytes(remainder)?; - let block_header = BlockHeader { + let block_header = BlockHeaderV1 { parent_hash, state_root_hash, body_hash, @@ -332,6 +501,264 @@ impl FromBytes for BlockHeader { } } +/// The header portion of a block. +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] +pub struct BlockHeaderV2 { + /// The parent block's hash. + pub parent_hash: BlockHash, + /// The root hash of global state after the deploys in this block have been executed. + pub state_root_hash: Digest, + /// The hash of the block's body. + pub body_hash: Digest, + /// A random bit needed for initializing a future era. + pub random_bit: bool, + /// A seed needed for initializing a future era. + pub accumulated_seed: Digest, + /// The `EraEnd` of a block if it is a switch block. + pub era_end: Option, + /// The timestamp from when the block was proposed. + pub timestamp: Timestamp, + /// The era ID in which this block was created. + pub era_id: EraId, + /// The height of this block, i.e. the number of ancestors. + pub height: u64, + /// The protocol version of the network from when this block was created. + pub protocol_version: ProtocolVersion, + /// The public key of the validator which proposed the block. + pub proposer: PublicKey, + /// The gas price of the era + pub current_gas_price: u8, + /// The most recent switch block hash. + pub last_switch_block_hash: Option, +} + +impl BlockHeaderV2 { + /// Returns the hash of this block header. + pub fn block_hash(&self) -> BlockHash { + self.compute_block_hash() + } + + /// Returns the parent block's hash. + pub fn parent_hash(&self) -> &BlockHash { + &self.parent_hash + } + + /// Returns the root hash of global state after the deploys in this block have been executed. 
+ pub fn state_root_hash(&self) -> &Digest { + &self.state_root_hash + } + + /// Returns the hash of the block's body. + pub fn body_hash(&self) -> &Digest { + &self.body_hash + } + + /// Returns a random bit needed for initializing a future era. + pub fn random_bit(&self) -> bool { + self.random_bit + } + + /// Returns a seed needed for initializing a future era. + pub fn accumulated_seed(&self) -> &Digest { + &self.accumulated_seed + } + + /// Returns the `EraEnd` of a block if it is a switch block. + pub fn era_end(&self) -> Option<&EraEndV2> { + self.era_end.as_ref() + } + + /// Returns the timestamp from when the block was proposed. + pub fn timestamp(&self) -> Timestamp { + self.timestamp + } + + /// Returns the era ID in which this block was created. + pub fn era_id(&self) -> EraId { + self.era_id + } + + /// Returns the era ID in which the next block would be created (i.e. this block's era ID, or + /// its successor if this is a switch block). + pub fn next_block_era_id(&self) -> EraId { + if self.era_end.is_some() { + self.era_id.successor() + } else { + self.era_id + } + } + + /// Returns the height of this block, i.e. the number of ancestors. + pub fn height(&self) -> u64 { + self.height + } + + /// Returns the protocol version of the network from when this block was created. + pub fn protocol_version(&self) -> ProtocolVersion { + self.protocol_version + } + + /// Returns `true` if this block is the last one in the current era. + pub fn is_switch_block(&self) -> bool { + self.era_end.is_some() + } + + /// Returns the public key of the validator which proposed the block. + pub fn proposer(&self) -> &PublicKey { + &self.proposer + } + + /// Returns `true` if this block is the Genesis block, i.e. has height 0 and era 0. + pub fn is_genesis(&self) -> bool { + self.era_id().is_genesis() && self.height() == 0 + } + + /// Returns the gas price for the given block. 
+ pub fn current_gas_price(&self) -> u8 { + self.current_gas_price + } + + pub(crate) fn compute_block_hash(&self) -> BlockHash { + let serialized_header = self + .to_bytes() + .unwrap_or_else(|error| panic!("should serialize block header: {}", error)); + BlockHash::from(Digest::hash(&serialized_header)) + } +} + +impl ToBytes for BlockHeaderV2 { + fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { + self.parent_hash.write_bytes(writer)?; + self.state_root_hash.write_bytes(writer)?; + self.body_hash.write_bytes(writer)?; + self.random_bit.write_bytes(writer)?; + self.accumulated_seed.write_bytes(writer)?; + self.era_end.write_bytes(writer)?; + self.timestamp.write_bytes(writer)?; + self.era_id.write_bytes(writer)?; + self.height.write_bytes(writer)?; + self.protocol_version.write_bytes(writer)?; + self.proposer.write_bytes(writer)?; + self.current_gas_price.write_bytes(writer)?; + self.last_switch_block_hash.write_bytes(writer) + } + + fn to_bytes(&self) -> Result, bytesrepr::Error> { + let mut buffer = bytesrepr::allocate_buffer(self)?; + self.write_bytes(&mut buffer)?; + Ok(buffer) + } + + fn serialized_length(&self) -> usize { + self.parent_hash.serialized_length() + + self.state_root_hash.serialized_length() + + self.body_hash.serialized_length() + + self.random_bit.serialized_length() + + self.accumulated_seed.serialized_length() + + self.era_end.serialized_length() + + self.timestamp.serialized_length() + + self.era_id.serialized_length() + + self.height.serialized_length() + + self.protocol_version.serialized_length() + + self.proposer.serialized_length() + + self.current_gas_price.serialized_length() + + self.last_switch_block_hash.serialized_length() + } +} + +impl FromBytes for BlockHeaderV2 { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { + let (parent_hash, remainder) = BlockHash::from_bytes(bytes)?; + let (state_root_hash, remainder) = Digest::from_bytes(remainder)?; + let (body_hash, remainder) = Digest::from_bytes(remainder)?; + let (random_bit, remainder) = bool::from_bytes(remainder)?; + let (accumulated_seed, remainder) = Digest::from_bytes(remainder)?; + let (era_end, remainder) = Option::from_bytes(remainder)?; + let (timestamp, remainder) = Timestamp::from_bytes(remainder)?; + let (era_id, remainder) = EraId::from_bytes(remainder)?; + let (height, remainder) = u64::from_bytes(remainder)?; + let (protocol_version, remainder) = ProtocolVersion::from_bytes(remainder)?; + let (proposer, remainder) = PublicKey::from_bytes(remainder)?; + let (current_gas_price, remainder) = u8::from_bytes(remainder)?; + let (last_switch_block_hash, remainder) = Option::from_bytes(remainder)?; + let block_header = BlockHeaderV2 { + parent_hash, + state_root_hash, + body_hash, + random_bit, + accumulated_seed, + era_end, + timestamp, + era_id, + height, + protocol_version, + proposer, + current_gas_price, + last_switch_block_hash, + }; + Ok((block_header, remainder)) + } +} + +#[cfg(test)] +impl Arbitrary for BlockHeaderV2 { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + ( + // this tuple is needed because prop_map only supports tuples of arity <= 12 + (any::(), any::()), + any::(), + any::(), + any::(), + any::>(), + any::(), + any::(), + any::(), + arb_protocolversion(), + casper_types::crypto::gens::public_key_arb(), + 0..=255u8, + any::>(), + ) + .prop_map( + |( + (parent_hash, state_root_hash), + body_hash, + random_bit, + accumulated_seed, + era_end, + 
timestamp, + era_id, + height, + protocol_version, + proposer, + current_gas_price, + last_switch_block_hash, + )| { + let era_id = EraId::from(era_id); + BlockHeaderV2 { + parent_hash, + state_root_hash, + body_hash, + random_bit, + accumulated_seed, + era_end, + timestamp, + era_id, + height, + protocol_version, + proposer, + current_gas_price, + last_switch_block_hash, + } + }, + ) + .boxed() + } +} + #[cfg(test)] mod test { extern crate std; @@ -340,7 +767,7 @@ mod test { use casper_types::bytesrepr::{deserialize_from_slice, ToBytes}; use test_strategy::proptest; - use super::{BlockHeader, Timestamp}; + use super::{BlockHeaderV1, Timestamp}; #[proptest] fn serde_json_timestamp_round_trip(timestamp: Timestamp) { @@ -381,46 +808,95 @@ mod test { } #[proptest] - fn serde_json_block_header_round_trip(block_header: BlockHeader) { + fn serde_json_block_header_v1_round_trip(block_header: BlockHeaderV1) { + let serialized_block_header = serde_json::to_string(&block_header).unwrap(); + let casper_node_block_header: casper_types::BlockHeaderV1 = + serde_json::from_str(&serialized_block_header).unwrap(); + let serialized_casper_node_block_header = + serde_json::to_string(&casper_node_block_header).unwrap(); + let deserialized_block_header: BlockHeaderV1 = + serde_json::from_str(&serialized_casper_node_block_header).unwrap(); + assert_eq!(block_header, deserialized_block_header); + } + + #[proptest] + fn serde_json_block_header_round_trip(block_header: super::BlockHeader) { let serialized_block_header = serde_json::to_string(&block_header).unwrap(); - let casper_node_block_header: casper_node::types::BlockHeader = + let casper_node_block_header: casper_types::BlockHeader = serde_json::from_str(&serialized_block_header).unwrap(); let serialized_casper_node_block_header = serde_json::to_string(&casper_node_block_header).unwrap(); - let deserialized_block_header: BlockHeader = + let deserialized_block_header: super::BlockHeader = serde_json::from_str(&serialized_casper_node_block_header).unwrap(); assert_eq!(block_header, deserialized_block_header); } #[proptest] - fn bincode_block_header_round_trip(block_header: BlockHeader) { + fn bincode_block_header_v1_round_trip(block_header: BlockHeaderV1) { let serialized_block_header = bincode::serialize(&block_header).unwrap(); - let casper_node_block_header: casper_node::types::BlockHeader = + let casper_node_block_header: casper_types::BlockHeaderV1 = bincode::deserialize(&serialized_block_header).unwrap(); let serialized_casper_node_block_header = bincode::serialize(&casper_node_block_header).unwrap(); assert_eq!(serialized_block_header, serialized_casper_node_block_header); - let deserialized_block_header: BlockHeader = + let deserialized_block_header: BlockHeaderV1 = bincode::deserialize(&serialized_casper_node_block_header).unwrap(); assert_eq!(block_header, deserialized_block_header); } #[proptest] - fn bytesrepr_block_header_round_trip(block_header: BlockHeader) { + fn bincode_block_header_round_trip(block_header: super::BlockHeader) { + let serialized_block_header = bincode::serialize(&block_header).unwrap(); + let casper_node_block_header: casper_types::BlockHeader = + bincode::deserialize(&serialized_block_header).unwrap(); + let serialized_casper_node_block_header = + bincode::serialize(&casper_node_block_header).unwrap(); + assert_eq!(serialized_block_header, serialized_casper_node_block_header); + let deserialized_block_header: super::BlockHeader = + bincode::deserialize(&serialized_casper_node_block_header).unwrap(); + assert_eq!(block_header, 
deserialized_block_header); + } + + #[proptest] + fn bytesrepr_block_header_v1_round_trip(block_header: BlockHeaderV1) { let serialized_block_header = block_header.to_bytes().unwrap(); - let casper_node_block_header: casper_node::types::BlockHeader = + let casper_node_block_header: casper_types::BlockHeaderV1 = deserialize_from_slice(&serialized_block_header).unwrap(); let serialized_casper_node_block_header = casper_node_block_header.to_bytes().unwrap(); assert_eq!(serialized_block_header, serialized_casper_node_block_header); - let deserialized_block_header: BlockHeader = + let deserialized_block_header: BlockHeaderV1 = deserialize_from_slice(&serialized_casper_node_block_header).unwrap(); assert_eq!(block_header, deserialized_block_header) } #[proptest] - fn block_header_hash_agree(block_header: BlockHeader) { - let casper_node_block_header: casper_node::types::BlockHeader = - deserialize_from_slice(&block_header.to_bytes().unwrap()).unwrap(); + fn bytesrepr_block_header_round_trip(block_header: super::BlockHeader) { + let serialized_block_header = block_header.to_bytes().unwrap(); + let casper_node_block_header: casper_types::BlockHeader = + deserialize_from_slice(&serialized_block_header).unwrap(); + let serialized_casper_node_block_header = casper_node_block_header.to_bytes().unwrap(); + assert_eq!(serialized_block_header, serialized_casper_node_block_header); + let deserialized_block_header: super::BlockHeader = + deserialize_from_slice(&serialized_casper_node_block_header).unwrap(); + assert_eq!(block_header, deserialized_block_header) + } + + #[proptest] + fn block_header_v1_hash_agree(block_header: BlockHeaderV1) { + let casper_node_block_header: casper_types::BlockHeaderV1 = + deserialize_from_slice(block_header.to_bytes().unwrap()).unwrap(); + let block_hash = block_header.block_hash(); + let casper_block_hash = casper_node_block_header.block_hash(); + assert_eq!( + <[u8; 32]>::from(block_hash).to_vec(), + casper_block_hash.as_ref().to_owned() + ); + } + + #[proptest] + fn block_header_hash_agree(block_header: super::BlockHeader) { + let casper_node_block_header: casper_types::BlockHeader = + deserialize_from_slice(block_header.to_bytes().unwrap()).unwrap(); let block_hash = block_header.block_hash(); let casper_block_hash = casper_node_block_header.block_hash(); assert_eq!( diff --git a/src/consensus.rs b/src/consensus.rs index 297aa44..e02eaac 100644 --- a/src/consensus.rs +++ b/src/consensus.rs @@ -4,15 +4,23 @@ use alloc::{collections::BTreeMap, vec::Vec}; use proptest::prelude::*; use casper_types::{ - bytesrepr::{FromBytes, ToBytes}, + bytesrepr::{self, FromBytes, ToBytes}, PublicKey, U512, }; +#[cfg(test)] +use serde_map_to_array::BTreeMapToArray; +use serde_map_to_array::KeyValueLabels; + #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] #[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] // See https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/node/src/components/consensus/consensus_protocol.rs#L105-L115 pub struct EraReport { pub(crate) equivocators: Vec, + #[cfg_attr( + test, + serde(with = "BTreeMapToArray::") + )] pub(crate) rewards: BTreeMap, pub(crate) inactive_validators: Vec, } @@ -54,7 +62,8 @@ impl Arbitrary for EraReport { proptest::collection::btree_map( casper_types::crypto::gens::public_key_arb(), any::(), - 0..5, + // Must have at least one reward or deserialization will fail. 
+ 1..5, ), proptest::collection::vec(casper_types::crypto::gens::public_key_arb(), 0..5), ) @@ -103,17 +112,21 @@ impl FromBytes for EraReport { #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] #[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] // See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/node/src/types/block.rs#L748-L753 -pub struct EraEnd { +pub struct EraEndV1 { pub(crate) era_report: EraReport, + #[cfg_attr( + test, + serde(with = "BTreeMapToArray::") + )] pub(crate) next_era_validator_weights: BTreeMap, } -impl EraEnd { +impl EraEndV1 { pub fn new( era_report: EraReport, next_era_validator_weights: BTreeMap, ) -> Self { - EraEnd { + EraEndV1 { era_report, next_era_validator_weights, } @@ -128,8 +141,15 @@ impl EraEnd { } } +pub struct NextEraValidatorLabels; + +impl KeyValueLabels for NextEraValidatorLabels { + const KEY: &'static str = "validator"; + const VALUE: &'static str = "weight"; +} + #[cfg(test)] -impl Arbitrary for EraEnd { +impl Arbitrary for EraEndV1 { type Parameters = (); type Strategy = BoxedStrategy; @@ -139,10 +159,11 @@ impl Arbitrary for EraEnd { proptest::collection::btree_map( casper_types::crypto::gens::public_key_arb(), any::().prop_map(U512::from), - 0..5, + // Must have at least one validator or deserialization will fail. + 1..5, ), ) - .prop_map(|(era_report, next_era_validator_weights)| EraEnd { + .prop_map(|(era_report, next_era_validator_weights)| EraEndV1 { era_report, next_era_validator_weights, }) @@ -151,7 +172,7 @@ impl Arbitrary for EraEnd { } // See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/node/src/types/block.rs#L774-L785 -impl ToBytes for EraEnd { +impl ToBytes for EraEndV1 { fn to_bytes(&self) -> Result, casper_types::bytesrepr::Error> { let mut buffer = casper_types::bytesrepr::allocate_buffer(self)?; buffer.extend(self.era_report.to_bytes()?); @@ -165,11 +186,11 @@ impl ToBytes for EraEnd { } // See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/node/src/types/block.rs#L787-L797 -impl FromBytes for EraEnd { +impl FromBytes for EraEndV1 { fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), casper_types::bytesrepr::Error> { let (era_report, bytes) = EraReport::from_bytes(bytes)?; let (next_era_validator_weights, bytes) = BTreeMap::::from_bytes(bytes)?; - let era_end = EraEnd { + let era_end = EraEndV1 { era_report, next_era_validator_weights, }; @@ -177,6 +198,133 @@ impl FromBytes for EraEnd { } } +pub struct EraRewardsLabels; + +impl KeyValueLabels for EraRewardsLabels { + const KEY: &'static str = "validator"; + const VALUE: &'static str = "amount"; +} + +/// Information related to the end of an era, and validator weights for the following era. +#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] +#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))] +pub struct EraEndV2 { + /// The set of equivocators. + pub equivocators: Vec, + /// Validators that haven't produced any unit during the era. + pub inactive_validators: Vec, + /// The validators for the upcoming era and their respective weights. + #[cfg_attr( + test, + serde(with = "BTreeMapToArray::") + )] + pub next_era_validator_weights: BTreeMap, + /// The rewards distributed to the validators. 
+ pub rewards: BTreeMap>, + pub next_era_gas_price: u8, +} + +impl ToBytes for EraEndV2 { + fn write_bytes(&self, writer: &mut Vec) -> Result<(), bytesrepr::Error> { + let EraEndV2 { + equivocators, + inactive_validators, + next_era_validator_weights, + rewards, + next_era_gas_price, + } = self; + + equivocators.write_bytes(writer)?; + inactive_validators.write_bytes(writer)?; + next_era_validator_weights.write_bytes(writer)?; + rewards.write_bytes(writer)?; + next_era_gas_price.write_bytes(writer)?; + + Ok(()) + } + + fn to_bytes(&self) -> Result, bytesrepr::Error> { + let mut buffer = bytesrepr::allocate_buffer(self)?; + self.write_bytes(&mut buffer)?; + Ok(buffer) + } + + fn serialized_length(&self) -> usize { + let EraEndV2 { + equivocators, + inactive_validators, + next_era_validator_weights, + rewards, + next_era_gas_price, + } = self; + + equivocators.serialized_length() + + inactive_validators.serialized_length() + + next_era_validator_weights.serialized_length() + + rewards.serialized_length() + + next_era_gas_price.serialized_length() + } +} + +impl FromBytes for EraEndV2 { + fn from_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), bytesrepr::Error> { + let (equivocators, bytes) = Vec::from_bytes(bytes)?; + let (inactive_validators, bytes) = Vec::from_bytes(bytes)?; + let (next_era_validator_weights, bytes) = BTreeMap::from_bytes(bytes)?; + let (rewards, bytes) = BTreeMap::from_bytes(bytes)?; + let (next_era_gas_price, bytes) = u8::from_bytes(bytes)?; + let era_end = EraEndV2 { + equivocators, + inactive_validators, + next_era_validator_weights, + rewards, + next_era_gas_price, + }; + + Ok((era_end, bytes)) + } +} + +#[cfg(test)] +impl Arbitrary for EraEndV2 { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + ( + proptest::collection::vec(casper_types::crypto::gens::public_key_arb(), 0..5), + proptest::collection::vec(casper_types::crypto::gens::public_key_arb(), 0..5), + proptest::collection::btree_map( + casper_types::crypto::gens::public_key_arb(), + any::().prop_map(U512::from), + 1..5, + ), + proptest::collection::btree_map( + casper_types::crypto::gens::public_key_arb(), + proptest::collection::vec(any::().prop_map(U512::from), 1..5), + 1..5, + ), + any::(), + ) + .prop_map( + |( + equivocators, + inactive_validators, + next_era_validator_weights, + rewards, + next_era_gas_price, + )| EraEndV2 { + equivocators, + inactive_validators, + next_era_validator_weights, + rewards, + next_era_gas_price, + }, + ) + .boxed() + } +} + #[cfg(test)] mod test { extern crate std; @@ -184,12 +332,19 @@ mod test { use casper_types::bytesrepr::{deserialize_from_slice, ToBytes}; use test_strategy::proptest; - use super::EraEnd; + use super::EraEndV1; + + #[proptest] + fn bytesrepr_era_end_v1_round_trip(era_end: EraEndV1) { + let serialized = era_end.to_bytes().unwrap(); + let deserialized: EraEndV1 = deserialize_from_slice(&serialized).unwrap(); + assert_eq!(era_end, deserialized) + } #[proptest] - fn bytesrepr_era_end_round_trip(era_end: EraEnd) { + fn bytesrepr_era_end_v2_round_trip(era_end: super::EraEndV2) { let serialized = era_end.to_bytes().unwrap(); - let deserialized: EraEnd = deserialize_from_slice(&serialized).unwrap(); + let deserialized: super::EraEndV2 = deserialize_from_slice(&serialized).unwrap(); assert_eq!(era_end, deserialized) } } diff --git a/src/crypto.rs b/src/crypto.rs index 460949b..8a79e3c 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -76,9 +76,8 @@ mod test { fn arbitrary_with(_args: 
Self::Parameters) -> Self::Strategy { prop_oneof![ - any::<[u8; CasperPublicKey::ED25519_LENGTH]>().prop_map(|bytes| { - RealSecretKey::Ed25519(Ed25519SecretKey::try_from(bytes).unwrap()) - }), + any::<[u8; CasperPublicKey::ED25519_LENGTH]>() + .prop_map(|bytes| { RealSecretKey::Ed25519(Ed25519SecretKey::from(bytes)) }), any::<[u8; CasperSecretKey::SECP256K1_LENGTH]>() .prop_filter("Cannot make a secret key from [0u8; 32]", |bytes| bytes != &[0u8; CasperSecretKey::SECP256K1_LENGTH]) @@ -107,15 +106,13 @@ mod test { fn sign_and_verify(real_secret_key: RealSecretKey, message: Vec) { let casper_secret_key = CasperSecretKey::from(real_secret_key); let signature = sign(&casper_secret_key, &message); - assert_eq!( - verify( - &CasperPublicKey::from(&casper_secret_key), - &message, - &signature - ) - .unwrap(), - () + + verify( + &CasperPublicKey::from(&casper_secret_key), + &message, + &signature, ) + .unwrap(); } #[proptest] @@ -136,16 +133,13 @@ mod test { CasperSecretKey::ed25519_from_bytes([0u8; CasperSecretKey::ED25519_LENGTH]).unwrap(); let message = "this shouldn't work for the good public key"; let bad_signature = sign(&bad_secret_key, message); - assert_eq!( - verify( - &CasperPublicKey::from(&bad_secret_key), - message, - &bad_signature - ) - .unwrap(), - (), - "Bad secret key should be able to verify its own signature" - ); + verify( + &CasperPublicKey::from(&bad_secret_key), + message, + &bad_signature, + ) + .unwrap_or_else(|_| panic!("Bad secret key should be able to verify its own signature")); + let good_public_key = CasperPublicKey::from( &CasperSecretKey::ed25519_from_bytes([1u8; CasperSecretKey::ED25519_LENGTH]).unwrap(), ); @@ -163,16 +157,14 @@ mod test { .unwrap(); let message = "this shouldn't work for the good public key"; let bad_signature = sign(&bad_secret_key, message); - assert_eq!( - verify( - &CasperPublicKey::from(&bad_secret_key), - message, - &bad_signature - ) - .unwrap(), - (), - "Bad secret key should be able to verify its own signature" - ); + + verify( + &CasperPublicKey::from(&bad_secret_key), + message, + &bad_signature, + ) + .unwrap_or_else(|_| panic!("Bad secret key should be able to verify its own signature")); + let good_public_key = CasperPublicKey::from( &CasperSecretKey::secp256k1_from_bytes([2u8; CasperSecretKey::SECP256K1_LENGTH]) .unwrap(), @@ -197,12 +189,12 @@ mod test { fn should_not_verify_different_signature_schemes() { let message = "should not work because the signatures are different types"; let secret_bytes = [1u8; 32]; - let ed25519_secret_key = CasperSecretKey::ed25519_from_bytes(&secret_bytes).unwrap(); + let ed25519_secret_key = CasperSecretKey::ed25519_from_bytes(secret_bytes).unwrap(); let secp256k1_public_key = - CasperPublicKey::from(&CasperSecretKey::secp256k1_from_bytes(&secret_bytes).unwrap()); - let ed25519_signature = sign(&ed25519_secret_key, &message); + CasperPublicKey::from(&CasperSecretKey::secp256k1_from_bytes(secret_bytes).unwrap()); + let ed25519_signature = sign(&ed25519_secret_key, message); assert!( - verify(&secp256k1_public_key, &message, &ed25519_signature).is_err(), + verify(&secp256k1_public_key, message, &ed25519_signature).is_err(), "should not verify different types of public keys and signatures" ) } diff --git a/src/hash.rs b/src/hash.rs index 9c1bc2c..439fe21 100644 --- a/src/hash.rs +++ b/src/hash.rs @@ -8,7 +8,8 @@ const CHUNK_SIZE_BYTES: usize = 8 * 1024 * 1024; const CHUNK_DATA_ZEROED: [u8; CHUNK_SIZE_BYTES] = [0u8; CHUNK_SIZE_BYTES]; #[derive(Clone, Default, Ord, PartialOrd, Eq, PartialEq, Hash, 
Debug)] -// See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/hashing/src/lib.rs#L48 +// For 1.* See: https://github.com/casper-network/casper-node/blob/8ca9001dabba0dae95f92ad8c54eddd163200b5d/hashing/src/lib.rs#L48 +// For casper 2.0 rc4 see https://github.com/casper-network/casper-node/blob/4e2ddf485e5cec830f9ff402b052f5f55801eb54/types/src/digest.rs#L54 #[cfg_attr(test, derive(proptest_derive::Arbitrary))] pub struct Digest([u8; DIGEST_LENGTH]); @@ -130,14 +131,12 @@ impl serde::Serialize for Digest { impl<'de> serde::Deserialize<'de> for Digest { fn deserialize>(deserializer: D) -> Result { - let bytes: Vec; - - if deserializer.is_human_readable() { + let bytes: Vec = if deserializer.is_human_readable() { let hex_string = String::deserialize(deserializer)?; - bytes = base16::decode(hex_string.as_bytes()).map_err(serde::de::Error::custom)?; + base16::decode(hex_string.as_bytes()).map_err(serde::de::Error::custom)? } else { - bytes = >::deserialize(deserializer)?; - } + >::deserialize(deserializer)? + }; let data = <[u8; DIGEST_LENGTH]>::try_from(bytes.as_ref()).map_err(serde::de::Error::custom)?; @@ -158,7 +157,7 @@ mod test { #[proptest] fn serde_json_digest_round_trip(timestamp: Digest) { let serialized_digest = serde_json::to_string(×tamp).unwrap(); - let casper_hashing_digest: casper_hashing::Digest = + let casper_hashing_digest: casper_types::Digest = serde_json::from_str(&serialized_digest).unwrap(); let serialized_casper_hashing_digest = serde_json::to_string(&casper_hashing_digest).unwrap(); @@ -171,7 +170,7 @@ mod test { #[proptest] fn bincode_timestamp_round_trip(timestamp: Digest) { let serialized_timestamp = bincode::serialize(×tamp).unwrap(); - let casper_types_timestamp: casper_hashing::Digest = + let casper_types_timestamp: casper_types::Digest = bincode::deserialize(&serialized_timestamp).unwrap(); let serialized_casper_types_timestamp = bincode::serialize(&casper_types_timestamp).unwrap(); @@ -184,7 +183,7 @@ mod test { #[proptest] fn bytesrepr_digest_round_trip(digest: Digest) { let serialized_digest = digest.to_bytes().unwrap(); - let casper_hashing_digest: casper_hashing::Digest = + let casper_hashing_digest: casper_types::Digest = deserialize_from_slice(&serialized_digest).unwrap(); let serialized_casper_hashing_digest = casper_hashing_digest.to_bytes().unwrap(); let deserialized_digest: Digest = @@ -195,7 +194,7 @@ mod test { #[proptest] fn hashing_agrees_with_casper_hashing(data: Vec) { let digest = Digest::hash(&data); - let casper_digest = casper_hashing::Digest::hash(&data); + let casper_digest = casper_types::Digest::hash(&data); assert_eq!( <[u8; DIGEST_LENGTH]>::from(digest), <[u8; DIGEST_LENGTH]>::from(casper_digest) diff --git a/src/json_compatibility.rs b/src/json_compatibility.rs index faa9954..4ae6757 100644 --- a/src/json_compatibility.rs +++ b/src/json_compatibility.rs @@ -7,11 +7,11 @@ use serde::{Deserialize, Serialize}; use crate::{ block::{ - Block, BlockBody, BlockConstructionError, BlockHeaderWithSignatures, + Block, BlockBodyV1, BlockConstructionError, BlockHeaderWithSignatures, BlockHeaderWithSignaturesConstructionError, BlockSignatures, }, - block_header::{BlockHash, BlockHeader, Timestamp}, - consensus::{EraEnd, EraReport}, + block_header::{BlockHash, BlockHeaderV1, Timestamp}, + consensus::{EraEndV1, EraReport}, crypto::SignatureVerificationError, hash::Digest, }; @@ -82,10 +82,7 @@ impl From for JsonEraReport { } = era_report; let rewards = rewards .into_iter() - .map(|(validator, amount)| 
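// ---------------------------------------------------------------------------
// `Digest`'s serde code above picks its wire format with `is_human_readable()`:
// hex strings for JSON, raw bytes for bincode, with round trips pinned against
// `casper_types::Digest`. This is a self-contained sketch of that dual-format
// pattern for a 32-byte newtype; the `hex` crate is used here purely for
// brevity (the real impl uses base16), and serde_json is assumed for the test.
use serde::{Deserialize, Deserializer, Serialize, Serializer};

#[derive(Debug, PartialEq)]
struct Hash32([u8; 32]);

impl Serialize for Hash32 {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        if serializer.is_human_readable() {
            // JSON and friends: lowercase hex string.
            serializer.serialize_str(&hex::encode(self.0))
        } else {
            // Binary formats: a plain byte sequence.
            self.0.to_vec().serialize(serializer)
        }
    }
}

impl<'de> Deserialize<'de> for Hash32 {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let bytes: Vec<u8> = if deserializer.is_human_readable() {
            let hex_string = String::deserialize(deserializer)?;
            hex::decode(hex_string).map_err(serde::de::Error::custom)?
        } else {
            Vec::<u8>::deserialize(deserializer)?
        };
        let data = <[u8; 32]>::try_from(bytes.as_slice()).map_err(serde::de::Error::custom)?;
        Ok(Hash32(data))
    }
}

#[test]
fn hash32_json_round_trip() {
    let digest = Hash32([0xab; 32]);
    let json = serde_json::to_string(&digest).unwrap(); // a 64-character hex string
    let round_tripped: Hash32 = serde_json::from_str(&json).unwrap();
    assert_eq!(digest, round_tripped);
}
// ---------------------------------------------------------------------------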
Reward { - validator: validator, - amount: amount, - }) + .map(|(validator, amount)| Reward { validator, amount }) .collect(); JsonEraReport { equivocators, @@ -131,8 +128,8 @@ impl JsonEraEnd { } } -impl From for JsonEraEnd { - fn from(era_end: EraEnd) -> Self { +impl From for JsonEraEnd { + fn from(era_end: EraEndV1) -> Self { let era_report = JsonEraReport::from(era_end.era_report); let next_era_validator_weights = era_end .next_era_validator_weights @@ -149,7 +146,7 @@ impl From for JsonEraEnd { } } -impl From for EraEnd { +impl From for EraEndV1 { fn from(json_data: JsonEraEnd) -> Self { let era_report = EraReport::from(json_data.era_report); let next_era_validator_weights = json_data @@ -157,7 +154,7 @@ impl From for EraEnd { .iter() .map(|validator_weight| (validator_weight.validator.clone(), validator_weight.weight)) .collect(); - EraEnd { + EraEndV1 { era_report, next_era_validator_weights, } @@ -222,8 +219,8 @@ impl JsonBlockHeader { } } -impl From for JsonBlockHeader { - fn from(block_header: BlockHeader) -> Self { +impl From for JsonBlockHeader { + fn from(block_header: BlockHeaderV1) -> Self { JsonBlockHeader { parent_hash: block_header.parent_hash().clone(), state_root_hash: block_header.state_root_hash().clone(), @@ -239,7 +236,7 @@ impl From for JsonBlockHeader { } } -impl From for BlockHeader { +impl From for BlockHeaderV1 { fn from(block_header: JsonBlockHeader) -> Self { let JsonBlockHeader { parent_hash, @@ -253,8 +250,8 @@ impl From for BlockHeader { height, protocol_version, } = block_header; - let era_end = era_end.map(EraEnd::from); - BlockHeader::new( + let era_end = era_end.map(EraEndV1::from); + BlockHeaderV1::new( parent_hash, state_root_hash, body_hash, @@ -308,7 +305,7 @@ impl From for (PublicKey, Signature) { pub struct JsonBlock { hash: BlockHash, header: JsonBlockHeader, - body: BlockBody, + body: BlockBodyV1, proofs: Vec, } @@ -321,7 +318,7 @@ impl JsonBlock { &self.header } - pub fn body(&self) -> &BlockBody { + pub fn body(&self) -> &BlockBodyV1 { &self.body } @@ -342,8 +339,8 @@ impl From for JsonBlock { .block_header_with_signatures() .block_signatures() .proofs() - .into_iter() - .map(|(pubkey, signature)| JsonProof::from((pubkey.clone(), signature.clone()))) + .iter() + .map(|(pubkey, signature)| JsonProof::from((pubkey.clone(), *signature))) .collect(); JsonBlock { hash, @@ -379,7 +376,7 @@ impl TryFrom for Block { body, proofs, } = json_block; - let block_header = BlockHeader::from(header); + let block_header = BlockHeaderV1::from(header); let header_hash = block_header.block_hash(); if block_hash != header_hash { return Err(JsonBlockConversionError::InvalidBlockHash { @@ -405,7 +402,7 @@ impl TryFrom for Block { } let header = BlockHeaderWithSignatures::new(block_header, block_signatures) .map_err(JsonBlockConversionError::BlockHeaderWithSignaturesConstructionError)?; - Ok(Block::new(header, body).map_err(JsonBlockConversionError::BlockConstructionError)?) 
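// ---------------------------------------------------------------------------
// The json_compatibility changes above are mostly mechanical renames (`EraEnd`
// to `EraEndV1`, `BlockHeader` to `BlockHeaderV1`); the underlying idea is a
// pair of lossless `From` conversions between serde-friendly shapes (a list of
// `{ validator, amount }` entries) and the internal map-based types, checked by
// round-trip proptests. A minimal standalone sketch of that shape translation,
// using `String`/`u64` stand-ins rather than the real key and weight types:
use std::collections::BTreeMap;

#[derive(Debug, Clone, PartialEq)]
struct Reward {
    validator: String,
    amount: u64,
}

#[derive(Debug, Clone, PartialEq)]
struct JsonEraReport {
    rewards: Vec<Reward>,
}

#[derive(Debug, Clone, PartialEq)]
struct EraReport {
    rewards: BTreeMap<String, u64>,
}

impl From<EraReport> for JsonEraReport {
    fn from(era_report: EraReport) -> Self {
        // Flatten the map into named, JSON-friendly entries.
        let rewards = era_report
            .rewards
            .into_iter()
            .map(|(validator, amount)| Reward { validator, amount })
            .collect();
        JsonEraReport { rewards }
    }
}

impl From<JsonEraReport> for EraReport {
    fn from(json: JsonEraReport) -> Self {
        // Rebuild the map; validators are unique, so nothing is lost.
        let rewards = json
            .rewards
            .into_iter()
            .map(|reward| (reward.validator, reward.amount))
            .collect();
        EraReport { rewards }
    }
}

#[test]
fn era_report_shape_round_trip() {
    let original = EraReport {
        rewards: BTreeMap::from([("alice".to_string(), 10), ("bob".to_string(), 20)]),
    };
    let round_tripped = EraReport::from(JsonEraReport::from(original.clone()));
    assert_eq!(original, round_tripped);
}
// ---------------------------------------------------------------------------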
+ Block::new(header, body).map_err(JsonBlockConversionError::BlockConstructionError) } } @@ -416,8 +413,8 @@ mod test { use test_strategy::proptest; use crate::{ - block_header::BlockHeader, - consensus::{EraEnd, EraReport}, + block_header::BlockHeaderV1, + consensus::{EraEndV1, EraReport}, }; use super::{JsonBlockHeader, JsonEraEnd, JsonEraReport}; @@ -430,16 +427,16 @@ mod test { } #[proptest] - fn era_end_round_trip(era_end: EraEnd) { + fn era_end_round_trip(era_end: EraEndV1) { let json_era_end = JsonEraEnd::from(era_end.clone()); - let round_trip_era_end = EraEnd::from(json_era_end); + let round_trip_era_end = EraEndV1::from(json_era_end); assert_eq!(era_end, round_trip_era_end); } #[proptest] - fn block_header_round_trip(block_header: BlockHeader) { + fn block_header_round_trip(block_header: BlockHeaderV1) { let json_block_header = JsonBlockHeader::from(block_header.clone()); - let round_trip_block_header = BlockHeader::from(json_block_header); + let round_trip_block_header = BlockHeaderV1::from(json_block_header); assert_eq!(block_header, round_trip_block_header); } } diff --git a/src/kernel.rs b/src/kernel.rs index 1af6c6f..989888a 100644 --- a/src/kernel.rs +++ b/src/kernel.rs @@ -39,7 +39,6 @@ impl EraInfo { pub fn new(era_id: EraId, validator_weights: BTreeMap) -> Self { let total_weight = validator_weights .values() - .into_iter() .fold(U512::from(0), |acc, x| acc + x); Self { era_id, @@ -52,6 +51,7 @@ impl EraInfo { self.era_id } + #[allow(clippy::result_large_err)] pub fn validate( &self, block_header_with_signatures: &BlockHeaderWithSignatures, @@ -88,7 +88,7 @@ impl EraInfo { } Err(BlockSignaturesValidationError::InsufficientWeight { bad_signature_weight: block_signature_weight, - total_weight: self.total_weight.clone(), + total_weight: self.total_weight, }) } } @@ -140,6 +140,7 @@ impl LightClientKernel { } } + #[allow(clippy::result_large_err)] pub fn update( &mut self, block_header_with_signatures: &BlockHeaderWithSignatures, diff --git a/src/merkle_proof.rs b/src/merkle_proof.rs index 734451d..8f901ee 100644 --- a/src/merkle_proof.rs +++ b/src/merkle_proof.rs @@ -93,6 +93,12 @@ pub type PointerBlockArray = [PointerBlockValue; RADIX]; #[cfg_attr(test, derive(proptest_derive::Arbitrary))] pub struct PointerBlock(PointerBlockArray); +impl Default for PointerBlock { + fn default() -> Self { + Self::new() + } +} + impl PointerBlock { pub fn new() -> Self { PointerBlock([ @@ -460,7 +466,7 @@ impl Arbitrary for TrieMerkleProof { proptest::collection::vec(::arbitrary(), 1..=6), ) .prop_map(|(key, value, proof_steps)| { - TrieMerkleProof::new(key, value, proof_steps.into()) + TrieMerkleProof::new(key, value, proof_steps) }) .boxed() } @@ -535,11 +541,11 @@ impl<'a, 'b> QueryInfo<'a, 'b> { } pub fn key(&self) -> &'a Key { - &self.key + self.key } pub fn stored_value(&self) -> &'b StoredValue { - &self.stored_value + self.stored_value } } @@ -594,7 +600,7 @@ pub fn process_query_proofs<'a>( mod test { extern crate std; - use casper_execution_engine::storage::trie::merkle_proof::TrieMerkleProof as CasperTrieMerkleProof; + use casper_types::global_state::TrieMerkleProof as CasperTrieMerkleProof; use casper_types::{bytesrepr::ToBytes, Key, StoredValue}; use test_strategy::proptest; @@ -626,10 +632,14 @@ mod test { casper_types::bytesrepr::deserialize(serialized_trie_merkle_proof.clone()).unwrap(); let trie_merkle_root_bytes: [u8; DIGEST_LENGTH] = trie_merkle_proof.compute_state_hash().unwrap().into(); - let casper_trie_merkle_root_bytes: [u8; DIGEST_LENGTH] = casper_trie_merkle_proof 
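// ---------------------------------------------------------------------------
// `EraInfo::validate` above folds the validator weights into `total_weight` and
// then accumulates the weight behind the supplied finality signatures, failing
// with `InsufficientWeight` when not enough stake signed. This is a standalone
// sketch of that check with `u128` stand-ins for the real weight type; the
// strict "more than two thirds of total weight" threshold is an assumption made
// only for illustration, not necessarily the exact rule the kernel applies.
use std::collections::{BTreeMap, BTreeSet};

#[derive(Debug, PartialEq)]
enum WeightCheckError {
    UnknownValidator(String),
    InsufficientWeight { signed_weight: u128, total_weight: u128 },
}

fn check_finality_weight(
    validator_weights: &BTreeMap<String, u128>,
    signers: &BTreeSet<String>,
) -> Result<(), WeightCheckError> {
    let total_weight: u128 = validator_weights.values().sum();
    let mut signed_weight: u128 = 0;
    for signer in signers {
        match validator_weights.get(signer) {
            // A signature from a validator outside the era's set is fatal.
            None => return Err(WeightCheckError::UnknownValidator(signer.clone())),
            Some(weight) => signed_weight += weight,
        }
    }
    // Assumed rule: strictly more than two thirds of the total weight.
    if signed_weight * 3 > total_weight * 2 {
        Ok(())
    } else {
        Err(WeightCheckError::InsufficientWeight { signed_weight, total_weight })
    }
}

#[test]
fn two_of_three_equal_validators_is_not_enough() {
    let weights = BTreeMap::from([
        ("a".to_string(), 1u128),
        ("b".to_string(), 1u128),
        ("c".to_string(), 1u128),
    ]);
    let signers = BTreeSet::from(["a".to_string(), "b".to_string()]);
    assert!(matches!(
        check_finality_weight(&weights, &signers),
        Err(WeightCheckError::InsufficientWeight { .. })
    ));
}
// ---------------------------------------------------------------------------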
- .compute_state_hash() + + let casper_trie_merkle_root_bytes: [u8; DIGEST_LENGTH] = + casper_storage::global_state::trie_store::operations::compute_state_hash( + &casper_trie_merkle_proof, + ) .unwrap() .into(); + assert_eq!(trie_merkle_root_bytes, casper_trie_merkle_root_bytes); } } diff --git a/tests/integration.rs b/tests/integration.rs index 77afb95..37620b0 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; use casper_litmus::{ - block::{Block, BlockBody}, - block_header::BlockHeader, + block::{Block, BlockBodyV1}, + block_header::BlockHeaderV1, json_compatibility::{JsonBlock, JsonBlockHeader}, kernel::LightClientKernel, merkle_proof::{process_query_proofs, TrieMerkleProof}, @@ -50,33 +50,36 @@ fn first_entry_in_blocks_map_is_correct() { fn json_block_header_round_trip() { let json_block: JsonBlock = serde_json::from_str(include_str!("assets/blocks/block-0.json")).unwrap(); - let converted_block_header = BlockHeader::from(json_block.header().clone()); + let converted_block_header = BlockHeaderV1::from(json_block.header().clone()); let reconstituted_json_block_header = JsonBlockHeader::from(converted_block_header.clone()); assert_eq!(json_block.header(), &reconstituted_json_block_header); } #[test] fn parse_and_validate_hash_of_block() { - let casper_node_json_block: casper_node::types::JsonBlock = + let casper_node_json_block: serde_json::Value = serde_json::from_str(include_str!("mainnet/blocks/block-0.json")).unwrap(); - let casper_node_block_header = - casper_node::types::BlockHeader::from(casper_node_json_block.header.clone()); + + let casper_node_block: casper_types::BlockV1 = + serde_json::from_value(casper_node_json_block.clone()).unwrap(); + + let casper_node_block_header = casper_node_block.header(); assert_eq!( casper_node_block_header.block_hash().as_ref(), - casper_node_json_block.hash.as_ref(), + casper_node_block.hash().as_ref(), "Casper node block hash mismatch" ); let block_header_bytes = casper_node_block_header.to_bytes().unwrap(); - let deserialized_block_header: BlockHeader = + let deserialized_block_header: BlockHeaderV1 = deserialize_from_slice(&block_header_bytes).unwrap(); assert_eq!( deserialized_block_header.block_hash().as_ref(), - casper_node_json_block.hash.as_ref(), + casper_node_block.hash().as_ref(), "JSON block hash mismatch" ); let json_block: JsonBlock = serde_json::from_str(include_str!("mainnet/blocks/block-0.json")).unwrap(); - let converted_block_header = BlockHeader::from(json_block.header().clone()); + let converted_block_header = BlockHeaderV1::from(json_block.header().clone()); assert_eq!( deserialized_block_header, converted_block_header, "Block header mismatch" @@ -85,25 +88,28 @@ fn parse_and_validate_hash_of_block() { #[test] fn parse_and_validate_hash_of_block_body() { - let casper_node_json_block: casper_node::types::JsonBlock = + let casper_node_json_block: serde_json::Value = serde_json::from_str(include_str!("mainnet/blocks/block-0.json")).unwrap(); - let casper_node_block_body = - casper_node::types::BlockBody::from(casper_node_json_block.body.clone()); + let casper_node_block_body: casper_types::BlockBodyV1 = + serde_json::from_value(casper_node_json_block["body"].clone()).unwrap(); + let body_hash: casper_types::Digest = + serde_json::from_value(casper_node_json_block["header"]["body_hash"].clone()).unwrap(); + assert_eq!( casper_node_block_body.hash().as_ref(), - casper_node_json_block.header.body_hash.as_ref(), + body_hash.as_ref(), "Casper node block body hash mismatch" 
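// ---------------------------------------------------------------------------
// With `casper_node::types::JsonBlock` no longer available, the integration
// tests above parse the fixture into a `serde_json::Value` first and then lift
// individual pieces (the block, its body, the header's `body_hash`) into typed
// values via `serde_json::from_value`. A small standalone sketch of that
// pattern; the JSON shape and the `BodyHash` type here are illustrative only,
// not the real fixture schema.
use serde::Deserialize;

#[derive(Debug, Deserialize, PartialEq)]
struct BodyHash(String);

fn extract_body_hash(raw: &str) -> Result<BodyHash, serde_json::Error> {
    let value: serde_json::Value = serde_json::from_str(raw)?;
    // Indexing a `Value` with ["header"] etc. yields `Value::Null` for missing
    // keys, so the typed conversion below is where a malformed fixture fails.
    serde_json::from_value(value["header"]["body_hash"].clone())
}

#[test]
fn extracts_nested_field_from_fixture_like_json() {
    let raw = r#"{ "header": { "body_hash": "abc123" }, "body": {} }"#;
    assert_eq!(extract_body_hash(raw).unwrap(), BodyHash("abc123".to_string()));
}
// ---------------------------------------------------------------------------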
); let block_body_bytes = casper_node_block_body.to_bytes().unwrap(); - let deserialized_block_body: BlockBody = deserialize_from_slice(&block_body_bytes).unwrap(); + let deserialized_block_body: BlockBodyV1 = deserialize_from_slice(&block_body_bytes).unwrap(); assert_eq!( deserialized_block_body.hash().as_ref(), - casper_node_json_block.header.body_hash.as_ref(), + body_hash.as_ref(), "JSON block body hash mismatch" ); let json_block: JsonBlock = serde_json::from_str(include_str!("mainnet/blocks/block-0.json")).unwrap(); - let converted_block_body = BlockBody::from(json_block.body().clone()); + let converted_block_body = json_block.body().clone(); assert_eq!( deserialized_block_body, converted_block_body, "Block body mismatch" @@ -146,7 +152,7 @@ fn query_proofs() { if let casper_types::StoredValue::Account(account) = query_info.stored_value() { assert_eq!( "account-hash-c39d7a6202e5558ffbf327985c55a95f606db48115599a216987b73daf409076", - serde_json::to_value(&account.account_hash()) + serde_json::to_value(account.account_hash()) .expect("should convert to serde_json::Value") .as_str() .expect("should be a string"),
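// ---------------------------------------------------------------------------
// Each assertion above reduces to comparing 32-byte digests computed over the
// same `to_bytes()` serialization by two implementations. A hedged sketch of
// producing such a digest with `blake2b_simd` (assumed available in this
// workspace); whether any particular casper-node hash is exactly this plain,
// unchunked 32-byte BLAKE2b construction is not asserted here.
fn blake2b_32(data: &[u8]) -> [u8; 32] {
    let hash = blake2b_simd::Params::new()
        .hash_length(32) // 32-byte digests, matching DIGEST_LENGTH
        .hash(data);
    <[u8; 32]>::try_from(hash.as_bytes()).expect("hash_length(32) yields 32 bytes")
}

#[test]
fn identical_serializations_produce_identical_digests() {
    let serialized_a = vec![1u8, 2, 3];
    let serialized_b = vec![1u8, 2, 3];
    assert_eq!(blake2b_32(&serialized_a), blake2b_32(&serialized_b));
    assert_ne!(blake2b_32(&serialized_a), blake2b_32(&[4u8, 5, 6]));
}
// ---------------------------------------------------------------------------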