diff --git a/Cargo.lock b/Cargo.lock
index 583b0ee602..5324d066b5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1906,6 +1906,7 @@ checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650"
 name = "dump_declared_classes"
 version = "0.0.5"
 dependencies = [
+ "clap",
  "papyrus_storage",
 ]
 
diff --git a/crates/dump_declared_classes/Cargo.toml b/crates/dump_declared_classes/Cargo.toml
index 20c6383af1..b53f5a9990 100644
--- a/crates/dump_declared_classes/Cargo.toml
+++ b/crates/dump_declared_classes/Cargo.toml
@@ -7,3 +7,4 @@ license-file.workspace = true
 
 [dependencies]
 papyrus_storage = { path = "../papyrus_storage", version = "0.0.5" }
+clap.workspace = true
diff --git a/crates/dump_declared_classes/src/main.rs b/crates/dump_declared_classes/src/main.rs
index bf50f226f0..396b6cd362 100644
--- a/crates/dump_declared_classes/src/main.rs
+++ b/crates/dump_declared_classes/src/main.rs
@@ -1,15 +1,67 @@
-use papyrus_storage::utils::dump_declared_classes_table_to_file;
+use clap::{Arg, Command};
+use papyrus_storage::utils::dump_declared_classes_table_by_block_range;
 
 /// This executable dumps the declared_classes table from the storage to a file.
-/// The file path can be passed as an argument, otherwise it will be dumped to
-/// "dump_declared_classes.json".
+
 fn main() {
-    let args = std::env::args().collect::<Vec<String>>();
-    let default_file_path = "dump_declared_classes.json".to_string();
-    let file_path = args.get(1).unwrap_or(&default_file_path);
+    let cli_params = get_cli_params();
+    match dump_declared_classes_table_by_block_range(
+        cli_params.start_block,
+        cli_params.end_block,
+        &cli_params.file_path,
+    ) {
+        Ok(_) => println!("Dumped declared_classes table to file: {}.", cli_params.file_path),
+        Err(e) => println!("Failed dumping declared_classes table with error: {}.", e),
+    }
+}
+
+struct CliParams {
+    start_block: u64,
+    end_block: u64,
+    file_path: String,
+}
+
+/// The start_block and end_block arguments are mandatory and define the block range to dump;
+/// start_block is inclusive and end_block is exclusive. The file_path argument is optional;
+/// if it is not given, the data will be dumped to "dump_declared_classes.json".
+fn get_cli_params() -> CliParams {
+    let matches = Command::new("Dump declared classes")
+        .arg(
+            Arg::new("file_path")
+                .short('f')
+                .long("file_path")
+                .default_value("dump_declared_classes.json")
+                .help("The file path to dump the declared classes table to."),
+        )
+        .arg(
+            Arg::new("start_block")
+                .short('s')
+                .long("start_block")
+                .required(true)
+                .help("The block number to start dumping from."),
+        )
+        .arg(
+            Arg::new("end_block")
+                .short('e')
+                .long("end_block")
+                .required(true)
+                .help("The block number to end dumping at."),
+        )
+        .get_matches();
 
-    match dump_declared_classes_table_to_file(file_path) {
-        Ok(_) => println!("Dumped declared_classes table to file: {}", file_path),
-        Err(e) => println!("Failed dumping declared_classes table with error: {}", e),
+    let file_path = matches.get_one::<String>("file_path").unwrap().as_str();
+    let start_block = matches
+        .get_one::<String>("start_block")
+        .expect("Failed parsing start_block")
+        .parse::<u64>()
+        .expect("Failed parsing start_block");
+    let end_block = matches
+        .get_one::<String>("end_block")
+        .expect("Failed parsing end_block")
+        .parse::<u64>()
+        .expect("Failed parsing end_block");
+    if start_block >= end_block {
+        panic!("start_block must be smaller than end_block");
     }
+    CliParams { start_block, end_block, file_path: file_path.to_string() }
 }
diff --git a/crates/papyrus_storage/src/lib.rs b/crates/papyrus_storage/src/lib.rs
index 25e9aa3b5b..47d5e7f97d 100644
--- a/crates/papyrus_storage/src/lib.rs
+++ b/crates/papyrus_storage/src/lib.rs
@@ -420,6 +420,11 @@ pub enum StorageError {
     IOError(#[from] std::io::Error),
     #[error(transparent)]
     SerdeError(#[from] serde_json::Error),
+    #[error(
+        "The block number {block} should not be greater than the compiled_class_marker \
+         {compiled_class_marker}."
+    )]
+    InvalidBlockNumber { block: BlockNumber, compiled_class_marker: BlockNumber },
 }
 
 /// A type alias that maps to std::result::Result.
diff --git a/crates/papyrus_storage/src/utils.rs b/crates/papyrus_storage/src/utils.rs
index 29d6e69206..13d316d596 100644
--- a/crates/papyrus_storage/src/utils.rs
+++ b/crates/papyrus_storage/src/utils.rs
@@ -3,14 +3,24 @@
 #[path = "utils_test.rs"]
 mod utils_test;
 
+use std::collections::HashMap;
 use std::fs::File;
 use std::io::{BufWriter, Write};
 
+use serde::Serialize;
+use starknet_api::block::BlockNumber;
+use starknet_api::core::{ClassHash, CompiledClassHash};
+use starknet_api::hash::StarkFelt;
+use starknet_api::state::{EntryPoint, EntryPointType};
+
+use crate::compiled_class::CasmStorageReader;
 use crate::db::serialization::StorageSerde;
 use crate::db::{DbIter, TableIdentifier, RO};
-use crate::{open_storage, StorageConfig, StorageResult, StorageTxn};
+use crate::state::StateStorageReader;
+use crate::{open_storage, StorageConfig, StorageError, StorageResult, StorageTxn};
 
 /// Dumps a table from the storage to a file in JSON format.
+#[allow(dead_code)]
 fn dump_table_to_file<K, V>(
     txn: &StorageTxn<'_, RO>,
     table_id: &TableIdentifier<K, V>,
@@ -38,11 +48,65 @@ where
     Ok(())
 }
 
-/// Dumps the declared_classes table from the storage to a file.
-pub fn dump_declared_classes_table_to_file(file_path: &str) -> StorageResult<()> {
+#[derive(Serialize)]
+struct DumpDeclaredClass {
+    class_hash: ClassHash,
+    compiled_class_hash: CompiledClassHash,
+    sierra_program: Vec<StarkFelt>,
+    entry_points_by_type: HashMap<EntryPointType, Vec<EntryPoint>>,
+}
+
+/// Dumps the declared classes in a given block range from the storage to a file.
+pub fn dump_declared_classes_table_by_block_range(
+    start_block: u64,
+    end_block: u64,
+    file_path: &str,
+) -> StorageResult<()> {
     let storage_config = StorageConfig::default();
     let (storage_reader, _) = open_storage(storage_config)?;
     let txn = storage_reader.begin_ro_txn()?;
-    dump_table_to_file(&txn, &txn.tables.declared_classes, file_path)?;
+    let compiled_class_marker = txn.get_compiled_class_marker()?;
+    if end_block > compiled_class_marker.0 {
+        return Err(StorageError::InvalidBlockNumber {
+            block: BlockNumber(end_block),
+            compiled_class_marker,
+        });
+    }
+    dump_declared_classes_table_by_block_range_internal(&txn, file_path, start_block, end_block)
+}
+
+fn dump_declared_classes_table_by_block_range_internal(
+    txn: &StorageTxn<'_, RO>,
+    file_path: &str,
+    start_block: u64,
+    end_block: u64,
+) -> StorageResult<()> {
+    let table_handle = txn.txn.open_table(&txn.tables.declared_classes)?;
+    let file = File::create(file_path)?;
+    let mut writer = BufWriter::new(file);
+    writer.write_all(b"[")?;
+    let mut first = true;
+    for block_number in start_block..end_block {
+        if let Some(thin_state_diff) = txn.get_state_diff(BlockNumber(block_number))? {
+            for (class_hash, compiled_class_hash) in thin_state_diff.declared_classes.iter() {
+                if let Some(contract_class) = table_handle.get(&txn.txn, class_hash)? {
+                    if !first {
+                        writer.write_all(b",")?;
+                    }
+                    serde_json::to_writer(
+                        &mut writer,
+                        &DumpDeclaredClass {
+                            class_hash: *class_hash,
+                            compiled_class_hash: *compiled_class_hash,
+                            sierra_program: contract_class.sierra_program.clone(),
+                            entry_points_by_type: contract_class.entry_point_by_type.clone(),
+                        },
+                    )?;
+                    first = false;
+                }
+            }
+        };
+    }
+    writer.write_all(b"]")?;
     Ok(())
 }
diff --git a/crates/papyrus_storage/src/utils_test.rs b/crates/papyrus_storage/src/utils_test.rs
index ed8666ce8e..881e4ce103 100644
--- a/crates/papyrus_storage/src/utils_test.rs
+++ b/crates/papyrus_storage/src/utils_test.rs
@@ -10,56 +10,67 @@ use starknet_api::state::{ContractClass, StateDiff};
 use super::dump_table_to_file;
 use crate::state::StateStorageWriter;
 use crate::test_utils::get_test_storage;
+use crate::utils::{dump_declared_classes_table_by_block_range_internal, DumpDeclaredClass};
 
 #[test]
-fn test_dump_table_to_file() {
+fn test_dump_declared_classes() {
     let file_path = "tmp_test_dump_declared_classes_table.json";
-    let declared_class1 = (
-        ClassHash(1u128.into()),
-        ContractClass {
-            sierra_program: vec![StarkFelt::ONE, StarkFelt::TWO],
-            entry_point_by_type: HashMap::new(),
-            abi: "".to_string(),
-        },
-    );
-    let declared_class2 = (
-        ClassHash(2u128.into()),
-        ContractClass {
-            sierra_program: vec![StarkFelt::THREE, StarkFelt::ZERO],
-            entry_point_by_type: HashMap::new(),
-            abi: "".to_string(),
-        },
-    );
     let compiled_class_hash = CompiledClassHash(StarkHash::default());
-    let declared_classes = vec![declared_class1.clone(), declared_class2.clone()];
-    let declared_classes_for_append_state = indexmap!(
-        declared_class1.0 =>
-        (compiled_class_hash, declared_class1.1),
-        declared_class2.0 =>
-        (compiled_class_hash, declared_class2.1),
-    );
-
+    let mut declared_classes = vec![];
+    let mut state_diffs = vec![];
     let ((reader, mut writer), _temp_dir) = get_test_storage();
-    let txn = writer.begin_rw_txn().unwrap();
-    txn.append_state_diff(
-        BlockNumber(0),
-        StateDiff {
+    for i in 0..5 {
+        let i_felt = StarkFelt::from_u128(i as u128);
+        declared_classes.push((
+            ClassHash(i_felt),
+            ContractClass {
+                sierra_program: vec![i_felt, i_felt],
+                entry_point_by_type: HashMap::new(),
+                abi: "".to_string(),
+            },
+        ));
+        state_diffs.push(StateDiff {
             deployed_contracts: indexmap!(),
             storage_diffs: indexmap!(),
-            declared_classes: declared_classes_for_append_state,
+            declared_classes: indexmap!(
+                declared_classes[i].0 =>
+                (compiled_class_hash, declared_classes[i].1.clone()),
+            ),
             deprecated_declared_classes: indexmap!(),
             nonces: indexmap!(),
             replaced_classes: indexmap!(),
-        },
-        indexmap!(),
-    )
-    .unwrap()
-    .commit()
-    .unwrap();
-
+        });
+        let txn = writer.begin_rw_txn().unwrap();
+        txn.append_state_diff(BlockNumber(i as u64), state_diffs[i].clone(), indexmap!())
+            .unwrap()
+            .commit()
+            .unwrap();
+    }
     let txn = reader.begin_ro_txn().unwrap();
+
+    // Test dump_table_to_file for the declared_classes table.
     dump_table_to_file(&txn, &txn.tables.declared_classes, file_path).unwrap();
     let file_content = fs::read_to_string(file_path).unwrap();
     let _ = fs::remove_file(file_path);
     assert_eq!(file_content, serde_json::to_string(&declared_classes).unwrap());
+
+    // Test dump_declared_classes_table_by_block_range.
+    dump_declared_classes_table_by_block_range_internal(&txn, file_path, 2, 4).unwrap();
+    let file_content = fs::read_to_string(file_path).unwrap();
+    let _ = fs::remove_file(file_path);
+    let expected_declared_classes = vec![
+        DumpDeclaredClass {
+            class_hash: declared_classes[2].0,
+            compiled_class_hash,
+            sierra_program: declared_classes[2].1.sierra_program.clone(),
+            entry_points_by_type: declared_classes[2].1.entry_point_by_type.clone(),
+        },
+        DumpDeclaredClass {
+            class_hash: declared_classes[3].0,
+            compiled_class_hash,
+            sierra_program: declared_classes[3].1.sierra_program.clone(),
+            entry_points_by_type: declared_classes[3].1.entry_point_by_type.clone(),
+        },
+    ];
+    assert_eq!(file_content, serde_json::to_string(&expected_declared_classes).unwrap());
 }