diff --git a/Cargo.lock b/Cargo.lock index 5748b5e898..3a093a4ec8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2021,7 +2021,16 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ - "bit-vec", + "bit-vec 0.6.3", +] + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec 0.8.0", ] [[package]] @@ -2030,6 +2039,12 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bit_field" version = "0.10.2" @@ -8565,8 +8580,10 @@ dependencies = [ "page_size", "parking_lot 0.12.3", "postcard", + "proptest", "reth-libmdbx", "roaring", + "rstest 0.18.2", "serde", "serde_json", "smallvec", @@ -9023,7 +9040,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" dependencies = [ "ascii-canvas", - "bit-set", + "bit-set 0.5.3", "ena", "itertools 0.11.0", "lalrpop-util", @@ -11535,12 +11552,12 @@ dependencies = [ [[package]] name = "proptest" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" +checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" dependencies = [ - "bit-set", - "bit-vec", + "bit-set 0.8.0", + "bit-vec 0.8.0", "bitflags 2.6.0", "lazy_static", "num-traits 0.2.19", diff --git a/crates/katana/rpc/rpc-types/src/trie.rs b/crates/katana/rpc/rpc-types/src/trie.rs index 58f797cc12..a92f0b4426 100644 --- a/crates/katana/rpc/rpc-types/src/trie.rs +++ b/crates/katana/rpc/rpc-types/src/trie.rs @@ -1,9 +1,10 @@ use std::ops::{Deref, DerefMut}; use katana_primitives::contract::StorageKey; +use katana_primitives::hash::StarkHash; use katana_primitives::{ContractAddress, Felt}; -use katana_trie::bonsai::BitSlice; -use katana_trie::{MultiProof, Path, ProofNode}; +use katana_trie::bitvec::view::BitView; +use katana_trie::{BitVec, MultiProof, Path, ProofNode}; use serde::{Deserialize, Serialize}; #[derive(Debug, Default, Serialize, Deserialize)] @@ -24,7 +25,7 @@ pub struct GlobalRoots { } /// Node in the Merkle-Patricia trie. -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum MerkleNode { /// Represents a path to the highest non-zero descendant node. @@ -47,6 +48,21 @@ pub enum MerkleNode { }, } +impl MerkleNode { + // Taken from `bonsai-trie`: https://github.com/madara-alliance/bonsai-trie/blob/bfc6ad47b3cb8b75b1326bf630ca16e581f194c5/src/trie/merkle_node.rs#L234-L248 + pub fn compute_hash<Hash: StarkHash>(&self) -> Felt { + match self { + Self::Binary { left, right } => Hash::hash(left, right), + Self::Edge { child, path, length } => { + let mut length_bytes = [0u8; 32]; + length_bytes[31] = *length; + let length = Felt::from_bytes_be(&length_bytes); + Hash::hash(child, path) + length + } + } + } +} + /// The response type for `starknet_getStorageProof` method. /// /// The requested storage proofs.
Note that if a requested leaf has the default value, the path to @@ -142,40 +158,63 @@ impl From<MerkleNode> for ProofNode { fn from(value: MerkleNode) -> Self { match value { MerkleNode::Binary { left, right } => Self::Binary { left, right }, - MerkleNode::Edge { path, child, .. } => Self::Edge { child, path: felt_to_path(path) }, + MerkleNode::Edge { path, child, length } => { + Self::Edge { child, path: felt_to_path(path, length) } + } } } } -fn felt_to_path(felt: Felt) -> Path { - Path(BitSlice::from_slice(&felt.to_bytes_be())[5..].to_bitvec()) +fn felt_to_path(felt: Felt, length: u8) -> Path { + let length = length as usize; + let mut bits = BitVec::new(); + + // This function converts a Felt to a Path by preserving leading zeros + // that are semantically important in the Merkle tree path representation. + // + // Example: + // For a path "0000100" (length=7): + // - As an integer/hex: 0x4 (leading zeros get truncated) + // - As a Path: [0,0,0,0,1,0,0] (leading zeros preserved) + // + // We need to preserve these leading zeros because in a Merkle tree path: + // - Each bit represents a direction (left=0, right=1) + // - The position/index of each bit matters for the path traversal + // - "0000100" and "100" would represent different paths in the tree + for bit in &felt.to_bits_be()[256 - length..] { + bits.push(*bit); + } + + Path(bits) } fn path_to_felt(path: Path) -> Felt { - let mut arr = [0u8; 32]; - let slice = &mut BitSlice::from_slice_mut(&mut arr)[5..]; - slice[..path.len()].copy_from_bitslice(&path); - Felt::from_bytes_be(&arr) + let mut bytes = [0u8; 32]; + bytes.view_bits_mut()[256 - path.len()..].copy_from_bitslice(&path); + Felt::from_bytes_be(&bytes) } #[cfg(test)] mod tests { - use katana_primitives::felt; + use katana_trie::BitVec; use super::*; - // This test is assuming that the `path` field in `MerkleNode::Edge` is already a valid trie - // path value.
+ // Test cases taken from `bonsai-trie` crate #[rstest::rstest] - #[case(felt!("0x1234567890abcdef"))] - #[case(felt!("0xdeadbeef"))] - #[case(Felt::MAX)] - #[case(Felt::ZERO)] - fn test_path_felt_roundtrip(#[case] path_in_felt: Felt) { - let initial_path = felt_to_path(path_in_felt); - - let converted_felt = path_to_felt(initial_path.clone()); - let path = felt_to_path(converted_felt); - assert_eq!(initial_path, path); + #[case(&[0b10101010, 0b10101010])] + #[case(&[])] + #[case(&[0b10101010])] + #[case(&[0b00000000])] + #[case(&[0b11111111])] + #[case(&[0b11111111, 0b00000000, 0b10101010, 0b10101010, 0b11111111, 0b00000000, 0b10101010, 0b10101010, 0b11111111, 0b00000000, 0b10101010, 0b10101010])] + fn path_felt_rt(#[case] input: &[u8]) { + let path = Path(BitVec::from_slice(input)); + + let converted_felt = path_to_felt(path.clone()); + let converted_path = felt_to_path(converted_felt, path.len() as u8); + + assert_eq!(path, converted_path); + assert_eq!(path.len(), converted_path.len()); } } diff --git a/crates/katana/rpc/rpc/Cargo.toml b/crates/katana/rpc/rpc/Cargo.toml index 3910bbd2c9..5d032a4a0b 100644 --- a/crates/katana/rpc/rpc/Cargo.toml +++ b/crates/katana/rpc/rpc/Cargo.toml @@ -7,11 +7,6 @@ repository.workspace = true version.workspace = true [dependencies] -anyhow.workspace = true -dojo-metrics.workspace = true -futures.workspace = true -http.workspace = true -jsonrpsee = { workspace = true, features = [ "server" ] } katana-core.workspace = true katana-executor.workspace = true katana-pool.workspace = true @@ -21,6 +16,12 @@ katana-rpc-api.workspace = true katana-rpc-types.workspace = true katana-rpc-types-builder.workspace = true katana-tasks.workspace = true + +anyhow.workspace = true +dojo-metrics.workspace = true +futures.workspace = true +http.workspace = true +jsonrpsee = { workspace = true, features = [ "server" ] } metrics.workspace = true serde_json.workspace = true starknet.workspace = true @@ -32,6 +33,11 @@ tracing.workspace = true url.workspace = true [dev-dependencies] +katana-cairo.workspace = true +katana-node.workspace = true +katana-rpc-api = { workspace = true, features = [ "client" ] } +katana-trie.workspace = true + alloy = { git = "https://github.com/alloy-rs/alloy", features = [ "contract", "network", "node-bindings", "provider-http", "providers", "signer-local" ] } alloy-primitives = { workspace = true, features = [ "serde" ] } assert_matches.workspace = true @@ -40,10 +46,6 @@ dojo-test-utils.workspace = true dojo-utils.workspace = true indexmap.workspace = true jsonrpsee = { workspace = true, features = [ "client" ] } -katana-cairo.workspace = true -katana-node.workspace = true -katana-rpc-api = { workspace = true, features = [ "client" ] } -katana-trie.workspace = true num-traits.workspace = true rand.workspace = true rstest.workspace = true diff --git a/crates/katana/rpc/rpc/tests/proofs.rs b/crates/katana/rpc/rpc/tests/proofs.rs index 07d3a35f86..ec2b4f2066 100644 --- a/crates/katana/rpc/rpc/tests/proofs.rs +++ b/crates/katana/rpc/rpc/tests/proofs.rs @@ -6,17 +6,15 @@ use jsonrpsee::http_client::HttpClientBuilder; use katana_node::config::rpc::DEFAULT_RPC_MAX_PROOF_KEYS; use katana_node::config::SequencingConfig; use katana_primitives::block::BlockIdOrTag; -use katana_primitives::class::ClassHash; -use katana_primitives::hash::StarkHash; -use katana_primitives::{hash, Felt}; +use katana_primitives::class::{ClassHash, CompiledClassHash}; +use katana_primitives::Felt; use katana_rpc_api::starknet::StarknetApiClient; -use 
katana_rpc_types::trie::GetStorageProofResponse; -use katana_trie::bitvec::view::AsBits; -use katana_trie::bonsai::BitVec; -use katana_trie::MultiProof; -use starknet::accounts::Account; +use katana_trie::{compute_classes_trie_value, ClassesMultiProof, MultiProof}; +use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; use starknet::core::types::BlockTag; -use starknet::macros::short_string; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::LocalWallet; mod common; @@ -74,14 +72,96 @@ async fn proofs_limit() { #[tokio::test] async fn classes_proofs() { - let sequencer = - TestSequencer::start(get_default_test_config(SequencingConfig::default())).await; + let cfg = get_default_test_config(SequencingConfig::default()); - let provider = sequencer.provider(); + let sequencer = TestSequencer::start(cfg).await; let account = sequencer.account(); - let path: PathBuf = PathBuf::from("tests/test_data/cairo1_contract.json"); - let (contract, compiled_class_hash) = common::prepare_contract_declaration_params(&path) + let (class_hash1, compiled_class_hash1) = + declare(&account, "tests/test_data/cairo1_contract.json").await; + let (class_hash2, compiled_class_hash2) = + declare(&account, "tests/test_data/cairo_l1_msg_contract.json").await; + let (class_hash3, compiled_class_hash3) = + declare(&account, "tests/test_data/test_sierra_contract.json").await; + + // We need to use the jsonrpsee client because `starknet-rs` doesn't yet support RPC 0.8.0 + let client = HttpClientBuilder::default().build(sequencer.url()).unwrap(); + + { + let class_hash = class_hash1; + let trie_entry = compute_classes_trie_value(compiled_class_hash1); + + let proofs = client + .get_storage_proof(BlockIdOrTag::Number(1), Some(vec![class_hash]), None, None) + .await + .expect("failed to get storage proof"); + + let results = ClassesMultiProof::from(MultiProof::from(proofs.classes_proof.nodes)) + .verify(proofs.global_roots.classes_tree_root, vec![class_hash]); + + assert_eq!(vec![trie_entry], results); + } + + { + let class_hash = class_hash2; + let trie_entry = compute_classes_trie_value(compiled_class_hash2); + + let proofs = client + .get_storage_proof(BlockIdOrTag::Number(2), Some(vec![class_hash]), None, None) + .await + .expect("failed to get storage proof"); + + let results = ClassesMultiProof::from(MultiProof::from(proofs.classes_proof.nodes)) + .verify(proofs.global_roots.classes_tree_root, vec![class_hash]); + + assert_eq!(vec![trie_entry], results); + } + + { + let class_hash = class_hash3; + let trie_entry = compute_classes_trie_value(compiled_class_hash3); + + let proofs = client + .get_storage_proof(BlockIdOrTag::Number(3), Some(vec![class_hash]), None, None) + .await + .expect("failed to get storage proof"); + + let results = ClassesMultiProof::from(MultiProof::from(proofs.classes_proof.nodes)) + .verify(proofs.global_roots.classes_tree_root, vec![class_hash]); + + assert_eq!(vec![trie_entry], results); + } + + { + let class_hashes = vec![class_hash1, class_hash2, class_hash3]; + let trie_entries = vec![ + compute_classes_trie_value(compiled_class_hash1), + compute_classes_trie_value(compiled_class_hash2), + compute_classes_trie_value(compiled_class_hash3), + ]; + + let proofs = client + .get_storage_proof( + BlockIdOrTag::Tag(BlockTag::Latest), + Some(class_hashes.clone()), + None, + None, + ) + .await + .expect("failed to get storage proof"); + + let results = ClassesMultiProof::from(MultiProof::from(proofs.classes_proof.nodes)) 
+ .verify(proofs.global_roots.classes_tree_root, class_hashes.clone()); + + assert_eq!(trie_entries, results); + } +} + +async fn declare( + account: &SingleOwnerAccount<JsonRpcClient<HttpTransport>, LocalWallet>, + path: impl Into<PathBuf>, +) -> (ClassHash, CompiledClassHash) { + let (contract, compiled_class_hash) = common::prepare_contract_declaration_params(&path.into()) .expect("failed to prepare class declaration params"); let class_hash = contract.class_hash(); @@ -91,29 +171,9 @@ async fn classes_proofs() { .await .expect("failed to send declare tx"); - dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider) + dojo_utils::TransactionWaiter::new(res.transaction_hash, account.provider()) .await .expect("failed to wait on tx"); - // We need to use the jsonrpsee client because `starknet-rs` doesn't yet support RPC 0.8.0 - let client = HttpClientBuilder::default().build(sequencer.url()).unwrap(); - - let GetStorageProofResponse { global_roots, classes_proof, .. } = client - .get_storage_proof(BlockIdOrTag::Tag(BlockTag::Latest), Some(vec![class_hash]), None, None) - .await - .expect("failed to get storage proof"); - - let key: BitVec = class_hash.to_bytes_be().as_bits()[5..].to_owned(); - let value = - hash::Poseidon::hash(&short_string!("CONTRACT_CLASS_LEAF_V0"), &compiled_class_hash); - - let classes_proof = MultiProof::from(classes_proof.nodes); - - // the returned data is the list of values corresponds to the [key] - let results = classes_proof - .verify_proof::(global_roots.classes_tree_root, [key], 251) - .collect::<Result<Vec<Felt>, _>>() - .expect("failed to verify proofs"); - - assert_eq!(vec![value], results); + (class_hash, compiled_class_hash) } diff --git a/crates/katana/rpc/rpc/tests/test_data/test_sierra_contract.json b/crates/katana/rpc/rpc/tests/test_data/test_sierra_contract.json new file mode 120000 index 0000000000..e92339e522 --- /dev/null +++ b/crates/katana/rpc/rpc/tests/test_data/test_sierra_contract.json @@ -0,0 +1 @@ +../../../../contracts/build/account_with_dummy_validate.sierra.json \ No newline at end of file diff --git a/crates/katana/storage/db/Cargo.toml b/crates/katana/storage/db/Cargo.toml index 8385b2dffe..47ff7f3464 100644 --- a/crates/katana/storage/db/Cargo.toml +++ b/crates/katana/storage/db/Cargo.toml @@ -34,6 +34,8 @@ rev = "b34b0d3" [dev-dependencies] arbitrary.workspace = true criterion.workspace = true +proptest = "1.6.0" +rstest.workspace = true starknet.workspace = true [features] diff --git a/crates/katana/storage/db/src/models/trie.rs b/crates/katana/storage/db/src/models/trie.rs index 3e3628919c..4f5cc49ec3 100644 --- a/crates/katana/storage/db/src/models/trie.rs +++ b/crates/katana/storage/db/src/models/trie.rs @@ -42,6 +42,23 @@ pub enum TrieDatabaseKeyType { TrieLog, } +#[derive(Debug, thiserror::Error)] +#[error("unknown trie key type: {0}")] +pub struct TrieDatabaseKeyTypeTryFromError(u8); + +impl TryFrom<u8> for TrieDatabaseKeyType { + type Error = TrieDatabaseKeyTypeTryFromError; + + fn try_from(value: u8) -> Result<Self, Self::Error> { + match value { + 0 => Ok(Self::Trie), + 1 => Ok(Self::Flat), + 2 => Ok(Self::TrieLog), + invalid => Err(TrieDatabaseKeyTypeTryFromError(invalid)), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] pub struct TrieDatabaseKey { pub r#type: TrieDatabaseKeyType, @@ -68,16 +85,11 @@ impl Decode for TrieDatabaseKey { if bytes.len() < 2 { // Need at least type and length bytes - panic!("emptyy buffer") + panic!("empty buffer") } - let r#type = match bytes[0] { - 0 => TrieDatabaseKeyType::Trie, - 1 => TrieDatabaseKeyType::Flat, - 2 =>
TrieDatabaseKeyType::TrieLog, - _ => panic!("Invalid trie database key type"), - }; - + let r#type = + TrieDatabaseKeyType::try_from(bytes[0]).expect("Invalid trie database key type"); let key_len = bytes[1] as usize; if bytes.len() < 2 + key_len { diff --git a/crates/katana/storage/db/src/trie/mod.rs b/crates/katana/storage/db/src/trie/mod.rs index de5aaa12e2..c8f741c33f 100644 --- a/crates/katana/storage/db/src/trie/mod.rs +++ b/crates/katana/storage/db/src/trie/mod.rs @@ -361,7 +361,6 @@ where // TODO: check if the snapshot exist fn transaction(&self, id: CommitId) -> Option<(CommitId, Self::Transaction<'_>)> { - dbg!("getting snapshot", id); Some((id, SnapshotTrieDb::new(self.tx.clone(), id))) } } @@ -379,3 +378,104 @@ fn to_db_key(key: &DatabaseKey<'_>) -> models::trie::TrieDatabaseKey { } } } + +#[cfg(test)] +mod tests { + use katana_primitives::hash::{Pedersen, StarkHash}; + use katana_primitives::{felt, hash}; + use katana_trie::{verify_proof, ClassesTrie, CommitId}; + use starknet::macros::short_string; + + use super::TrieDbMut; + use crate::abstraction::Database; + use crate::mdbx::test_utils; + use crate::tables; + use crate::trie::SnapshotTrieDb; + + #[test] + fn snapshot() { + let db = test_utils::create_test_db(); + let db_tx = db.tx_mut().expect("failed to get tx"); + + let mut trie = ClassesTrie::new(TrieDbMut::<tables::ClassesTrie>::new(&db_tx)); + + let root0 = { + let entries = [ + (felt!("0x9999"), felt!("0xdead")), + (felt!("0x5555"), felt!("0xbeef")), + (felt!("0x1337"), felt!("0xdeadbeef")), + ]; + + for (key, value) in entries { + trie.insert(key, value); + } + + trie.commit(0); + trie.root() + }; + + let root1 = { + let entries = [ + (felt!("0x6969"), felt!("0x80085")), + (felt!("0x3333"), felt!("0x420")), + (felt!("0x2222"), felt!("0x7171")), + ]; + + for (key, value) in entries { + trie.insert(key, value); + } + + trie.commit(1); + trie.root() + }; + + assert_ne!(root0, root1); + + { + let db = SnapshotTrieDb::<tables::ClassesTrie>::new(&db_tx, CommitId::new(0)); + let mut snapshot0 = ClassesTrie::new(db); + + let snapshot_root0 = snapshot0.root(); + assert_eq!(snapshot_root0, root0); + + let proofs0 = snapshot0.multiproof(vec![felt!("0x9999")]); + let verify_result0 = + verify_proof::<Pedersen>(&proofs0, snapshot_root0, vec![felt!("0x9999")]); + + let value = + hash::Poseidon::hash(&short_string!("CONTRACT_CLASS_LEAF_V0"), &felt!("0xdead")); + assert_eq!(vec![value], verify_result0); + } + + { + let commit = CommitId::new(1); + let mut snapshot1 = + ClassesTrie::new(SnapshotTrieDb::<tables::ClassesTrie>::new(&db_tx, commit)); + + let snapshot_root1 = snapshot1.root(); + assert_eq!(snapshot_root1, root1); + + let proofs1 = snapshot1.multiproof(vec![felt!("0x6969")]); + let verify_result1 = + verify_proof::<Pedersen>(&proofs1, snapshot_root1, vec![felt!("0x6969")]); + + let value = + hash::Poseidon::hash(&short_string!("CONTRACT_CLASS_LEAF_V0"), &felt!("0x80085")); + assert_eq!(vec![value], verify_result1); + } + + { + let root = trie.root(); + let proofs = trie.multiproof(vec![felt!("0x6969"), felt!("0x9999")]); + let result = + verify_proof::<Pedersen>(&proofs, root, vec![felt!("0x6969"), felt!("0x9999")]); + + let value0 = + hash::Poseidon::hash(&short_string!("CONTRACT_CLASS_LEAF_V0"), &felt!("0x80085")); + let value1 = + hash::Poseidon::hash(&short_string!("CONTRACT_CLASS_LEAF_V0"), &felt!("0xdead")); + + assert_eq!(vec![value0, value1], result); + } + } +} diff --git a/crates/katana/storage/db/src/trie/snapshot.rs b/crates/katana/storage/db/src/trie/snapshot.rs index b9adff8d72..a9012888f9 100644 --- a/crates/katana/storage/db/src/trie/snapshot.rs
+++ b/crates/katana/storage/db/src/trie/snapshot.rs @@ -121,3 +121,124 @@ where unimplemented!("modifying trie snapshot is not supported") } } + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use katana_primitives::felt; + use katana_trie::bonsai::DatabaseKey; + use katana_trie::{BonsaiPersistentDatabase, CommitId}; + use proptest::prelude::*; + use proptest::strategy; + + use super::*; + use crate::abstraction::{Database, DbTx}; + use crate::mdbx::test_utils; + use crate::models::trie::TrieDatabaseKeyType; + use crate::tables; + use crate::trie::{SnapshotTrieDb, TrieDbMut}; + + #[allow(unused)] + fn arb_db_key_type() -> BoxedStrategy<TrieDatabaseKeyType> { + prop_oneof![ + Just(TrieDatabaseKeyType::Trie), + Just(TrieDatabaseKeyType::Flat), + Just(TrieDatabaseKeyType::TrieLog), + ] + .boxed() + } + + #[derive(Debug)] + struct Case { + number: BlockNumber, + keyvalues: HashMap<(TrieDatabaseKeyType, [u8; 32]), [u8; 32]>, + } + + prop_compose! { + // This creates a strategy that generates random values but always with a hardcoded key + fn arb_keyvalues_with_fixed_key() ( + value in any::<[u8;32]>() + ) -> HashMap<(TrieDatabaseKeyType, [u8; 32]), [u8; 32]> { + let key = (TrieDatabaseKeyType::Trie, felt!("0x112345678921541231").to_bytes_be()); + HashMap::from_iter([(key, value)]) + } + } + + prop_compose! { + fn arb_keyvalues() ( + keyvalues in prop::collection::hash_map( + (arb_db_key_type(), any::<[u8;32]>()), + any::<[u8;32]>(), + 1..100 + ) + ) -> HashMap<(TrieDatabaseKeyType, [u8; 32]), [u8; 32]> { + keyvalues + } + } + + prop_compose! { + fn arb_block(count: u64, step: u64) ( + number in (count * step)..((count * step) + step), + keyvalues in arb_keyvalues_with_fixed_key() + ) -> Case { + Case { number, keyvalues } + } + } + + /// Strategy for generating a list of `count` blocks where each block number falls within a + /// range of `step` size. See [`arb_block`]. + fn arb_blocklist(step: u64, count: usize) -> impl strategy::Strategy<Value = Vec<Case>> { + let mut strats = Vec::with_capacity(count); + for i in 0..count { + strats.push(arb_block(i as u64, step)); + } + strategy::Strategy::prop_map(strats, move |strats| strats) + } + + proptest!
{ + #[test] + fn test_get_insert(blocks in arb_blocklist(10, 1000)) { + let db = test_utils::create_test_db(); + let tx = db.tx_mut().expect("failed to create rw tx"); + + for block in &blocks { + let mut trie = TrieDbMut::::new(&tx); + + // Insert key/value pairs + for ((r#type, key), value) in &block.keyvalues { + let db_key = match r#type { + TrieDatabaseKeyType::Trie => DatabaseKey::Trie(key.as_ref()), + TrieDatabaseKeyType::Flat => DatabaseKey::Flat(key.as_ref()), + TrieDatabaseKeyType::TrieLog => DatabaseKey::TrieLog(key.as_ref()), + }; + + trie.insert(&db_key, value.as_ref(), None).expect("failed to insert"); + } + + let snapshot_id = CommitId::from(block.number); + trie.snapshot(snapshot_id); + } + + tx.commit().expect("failed to commit tx"); + let tx = db.tx().expect("failed to create ro tx"); + + for block in &blocks { + let snapshot_id = CommitId::from(block.number); + let snapshot_db = SnapshotTrieDb::::new(&tx, snapshot_id); + + // Verify snapshots + for ((r#type, key), value) in &block.keyvalues { + let db_key = match r#type { + TrieDatabaseKeyType::Trie => DatabaseKey::Trie(key.as_ref()), + TrieDatabaseKeyType::Flat => DatabaseKey::Flat(key.as_ref()), + TrieDatabaseKeyType::TrieLog => DatabaseKey::TrieLog(key.as_ref()), + }; + + let result = snapshot_db.get(&db_key).unwrap(); + prop_assert_eq!(result.as_ref().map(|x| x.as_slice()), Some(value.as_slice())); + } + } + } + } +} diff --git a/crates/katana/trie/src/classes.rs b/crates/katana/trie/src/classes.rs index 9381686ca4..c74f3dffc1 100644 --- a/crates/katana/trie/src/classes.rs +++ b/crates/katana/trie/src/classes.rs @@ -8,11 +8,32 @@ use starknet_types_core::hash::{Poseidon, StarkHash}; use crate::id::CommitId; +#[derive(Debug)] +pub struct ClassesMultiProof(pub MultiProof); + +impl ClassesMultiProof { + // TODO: maybe perform results check in this method as well. 
make it accept the compiled class + // hashes + pub fn verify(&self, root: Felt, class_hashes: Vec<ClassHash>) -> Vec<Felt> { + crate::verify_proof::<Poseidon>(&self.0, root, class_hashes) + } +} + +impl From<MultiProof> for ClassesMultiProof { + fn from(value: MultiProof) -> Self { + Self(value) + } +} + #[derive(Debug)] pub struct ClassesTrie<DB: BonsaiDatabase> { trie: crate::BonsaiTrie<DB, Poseidon>, } +///////////////////////////////////////////////////// +// ClassesTrie implementations +///////////////////////////////////////////////////// + impl<DB: BonsaiDatabase> ClassesTrie<DB> { const BONSAI_IDENTIFIER: &'static [u8] = b"classes"; @@ -34,9 +55,7 @@ where DB: BonsaiDatabase + BonsaiPersistentDatabase<CommitId>, { pub fn insert(&mut self, hash: ClassHash, compiled_hash: CompiledClassHash) { - // https://docs.starknet.io/architecture-and-concepts/network-architecture/starknet-state/#classes_trie - const CONTRACT_CLASS_LEAF_V0: Felt = short_string!("CONTRACT_CLASS_LEAF_V0"); - let value = Poseidon::hash(&CONTRACT_CLASS_LEAF_V0, &compiled_hash); + let value = compute_classes_trie_value(compiled_hash); self.trie.insert(Self::BONSAI_IDENTIFIER, hash, value) } @@ -44,3 +63,9 @@ where self.trie.commit(block.into()) } } + +pub fn compute_classes_trie_value(compiled_class_hash: CompiledClassHash) -> Felt { + // https://docs.starknet.io/architecture-and-concepts/network-architecture/starknet-state/#classes_trie + const CONTRACT_CLASS_LEAF_V0: Felt = short_string!("CONTRACT_CLASS_LEAF_V0"); + Poseidon::hash(&CONTRACT_CLASS_LEAF_V0, &compiled_class_hash) +} diff --git a/crates/katana/trie/src/lib.rs b/crates/katana/trie/src/lib.rs index 11a85fafc8..e0af40b4bc 100644 --- a/crates/katana/trie/src/lib.rs +++ b/crates/katana/trie/src/lib.rs @@ -1,10 +1,7 @@ -use bitvec::array::BitArray; -use bitvec::order::Msb0; -use bitvec::vec::BitVec; use bitvec::view::AsBits; -pub use bonsai::{MultiProof, Path, ProofNode}; -pub use bonsai_trie::{BonsaiDatabase, BonsaiPersistentDatabase}; -use bonsai_trie::{BonsaiStorage, BonsaiStorageConfig}; +pub use bonsai::{BitVec, MultiProof, Path, ProofNode}; +use bonsai_trie::BonsaiStorage; +pub use bonsai_trie::{BonsaiDatabase, BonsaiPersistentDatabase, BonsaiStorageConfig}; use katana_primitives::class::ClassHash; use katana_primitives::Felt; use starknet_types_core::hash::{Pedersen, StarkHash}; @@ -15,7 +12,7 @@ mod contracts; mod id; mod storages; -pub use classes::ClassesTrie; +pub use classes::*; pub use contracts::ContractsTrie; pub use id::CommitId; pub use storages::StoragesTrie; @@ -26,7 +23,7 @@ pub use storages::StoragesTrie; /// having to handle how to transform the keys into the internal keys used by the trie. /// This struct is not meant to be used directly, and instead use the specific tries that have /// been derived from it, [`ClassesTrie`], [`ContractsTrie`], or [`StoragesTrie`]. -pub(crate) struct BonsaiTrie<DB, Hash> +pub struct BonsaiTrie<DB, Hash> where DB: BonsaiDatabase, Hash: StarkHash + Send + Sync, @@ -41,8 +38,14 @@ where { pub fn new(db: DB) -> Self { let config = BonsaiStorageConfig { - max_saved_trie_logs: Some(usize::MAX), - max_saved_snapshots: Some(usize::MAX), + // we have our own implementation of storing trie changes + max_saved_trie_logs: Some(0), + // in the bonsai-trie crate, this field seems to be used only by the rocksdb implementation, + // so it is unclear why it is part of the storage-agnostic configuration.
+ // + // this config should be used by our implementation of + // BonsaiPersistentDatabase::snapshot() + max_saved_snapshots: Some(64usize), snapshot_interval: 1, }; @@ -59,13 +62,8 @@ where self.storage.root_hash(id).expect("failed to get trie root") } - pub fn multiproof(&mut self, id: &[u8], mut keys: Vec<Felt>) -> MultiProof { - keys.sort(); - let keys = keys - .into_iter() - .map(|key| BitArray::new(key.to_bytes_be())) - .map(|hash| hash.as_bitslice()[5..].to_owned()); - + pub fn multiproof(&mut self, id: &[u8], keys: Vec<Felt>) -> MultiProof { + let keys = keys.into_iter().map(|key| key.to_bytes_be().as_bits()[5..].to_owned()); self.storage.get_multi_proof(id, keys).expect("failed to get multiproof") } } @@ -76,7 +74,7 @@ where Hash: StarkHash + Send + Sync, { pub fn insert(&mut self, id: &[u8], key: Felt, value: Felt) { - let key: BitVec<u8, Msb0> = key.to_bytes_be().as_bits()[5..].to_owned(); + let key: BitVec = key.to_bytes_be().as_bits()[5..].to_owned(); self.storage.insert(id, &key, &value).unwrap(); } @@ -132,6 +130,15 @@ pub fn compute_contract_state_hash( Pedersen::hash(&hash, &CONTRACT_STATE_HASH_VERSION) } +pub fn verify_proof<Hash: StarkHash + Send + Sync>( + proofs: &MultiProof, + root: Felt, + keys: Vec<Felt>, +) -> Vec<Felt> { + let keys = keys.into_iter().map(|f| f.to_bytes_be().as_bits()[5..].to_owned()); + proofs.verify_proof::<Hash>(root, keys, 251).collect::<Result<Vec<Felt>, _>>().unwrap() +} + #[cfg(test)] mod tests {
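A minimal, hypothetical sketch of how the helpers introduced by this diff are meant to compose on the client side. It simply mirrors the `classes_proofs` test above and is not part of the patch: it assumes an async context and pre-existing `client` (jsonrpsee HTTP client), `class_hash`, and `compiled_class_hash` bindings, exactly as set up in `tests/proofs.rs`.

    // Assumed to be in scope, as in tests/proofs.rs.
    use katana_primitives::block::BlockIdOrTag;
    use katana_rpc_api::starknet::StarknetApiClient;
    use katana_trie::{compute_classes_trie_value, ClassesMultiProof, MultiProof};
    use starknet::core::types::BlockTag;

    // Ask the node for a proof of the class hash against the latest classes trie root.
    let proofs = client
        .get_storage_proof(BlockIdOrTag::Tag(BlockTag::Latest), Some(vec![class_hash]), None, None)
        .await
        .expect("failed to get storage proof");

    // Rebuild the multiproof from the RPC response and verify it against the returned root.
    let verified = ClassesMultiProof::from(MultiProof::from(proofs.classes_proof.nodes))
        .verify(proofs.global_roots.classes_tree_root, vec![class_hash]);

    // Each verified leaf is the classes-trie value derived from the compiled class hash,
    // i.e. Poseidon(CONTRACT_CLASS_LEAF_V0, compiled_class_hash).
    assert_eq!(vec![compute_classes_trie_value(compiled_class_hash)], verified);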