diff --git a/crates/papyrus_node/src/run.rs b/crates/papyrus_node/src/run.rs
index 6c1eae24de..e26b379759 100644
--- a/crates/papyrus_node/src/run.rs
+++ b/crates/papyrus_node/src/run.rs
@@ -25,7 +25,8 @@ use papyrus_p2p_sync::{Protocol, BUFFER_SIZE};
 use papyrus_protobuf::consensus::{ProposalPart, StreamMessage};
 #[cfg(feature = "rpc")]
 use papyrus_rpc::run_server;
-use papyrus_storage::{open_storage, update_storage_metrics, StorageReader, StorageWriter};
+use papyrus_storage::storage_metrics::update_storage_metrics;
+use papyrus_storage::{open_storage, StorageReader, StorageWriter};
 use papyrus_sync::sources::base_layer::{BaseLayerSourceError, EthereumBaseLayerSource};
 use papyrus_sync::sources::central::{CentralError, CentralSource, CentralSourceConfig};
 use papyrus_sync::sources::pending::PendingSource;
diff --git a/crates/papyrus_storage/Cargo.toml b/crates/papyrus_storage/Cargo.toml
index 21ee756462..925261d1f8 100644
--- a/crates/papyrus_storage/Cargo.toml
+++ b/crates/papyrus_storage/Cargo.toml
@@ -10,11 +10,6 @@ description = "A storage implementation for a Starknet node."
 document_calls = ["lazy_static"]
 testing = ["tempfile"]
 
-[[bin]]
-name = "dump_declared_classes"
-path = "src/bin/dump_declared_classes.rs"
-required-features = ["clap"]
-
 [[bin]]
 name = "storage_benchmark"
 path = "src/bin/storage_benchmark.rs"
diff --git a/crates/papyrus_storage/src/bin/README.md b/crates/papyrus_storage/src/bin/README.md
deleted file mode 100644
index 72496f4d19..0000000000
--- a/crates/papyrus_storage/src/bin/README.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# Dump Declared Classes Tool
-
-This tool allows you to dump the entire `declared_classes` table from Papyrus storage into a file.
-
-## Instructions
-
-1. **Run a Docker**
-   Please refer to the main [README](../../../../README.adoc#running-papyrus-with-docker) for instructions.
-
-   Note: use a released Docker image
-
-3. **View Running Docker Containers**
-
-   ```bash
-   docker ps
-   ```
-   You can also view the logs produced by the full node with:
-
-   ```bash
-   docker logs <docker_id>
-   ```
-
-4. **Sync the Full Node**
-
-   The full node sync could take a few hours/days. Once it's partially or fully synced, you can run the tool to dump the declared classes into a file.
-
-5. **Access the Docker Container**
-
-   ```bash
-   docker exec -ti <docker_id> sh
-   ```
-
-6. **Run the Tool**
-
-   ```bash
-   target/release/dump_declared_classes --start_block <start_block> --end_block <end_block> --chain_id <chain_id> [--file_path file_path]
-   ```
-
-   The default value for file_path is `dump_declared_classes.json`.
-
-
diff --git a/crates/papyrus_storage/src/bin/dump_declared_classes.rs b/crates/papyrus_storage/src/bin/dump_declared_classes.rs
deleted file mode 100644
index 80c43999ce..0000000000
--- a/crates/papyrus_storage/src/bin/dump_declared_classes.rs
+++ /dev/null
@@ -1,79 +0,0 @@
-use clap::{Arg, Command};
-use papyrus_storage::utils::dump_declared_classes_table_by_block_range;
-use starknet_api::core::ChainId;
-
-/// This executable dumps the declared_classes table from the storage to a file.
-fn main() {
-    let cli_params = get_cli_params();
-    match dump_declared_classes_table_by_block_range(
-        cli_params.start_block,
-        cli_params.end_block,
-        &cli_params.file_path,
-        &cli_params.chain_id,
-    ) {
-        Ok(_) => println!("Dumped declared_classes table to file: {} .", cli_params.file_path),
-        Err(e) => println!("Failed dumping declared_classes table with error: {}", e),
-    }
-}
-
-struct CliParams {
-    start_block: u64,
-    end_block: u64,
-    file_path: String,
-    chain_id: ChainId,
-}
-
-/// The start_block and end_block arguments are mandatory and define the block range to dump,
-/// start_block is inclusive and end_block is exclusive. The file_path is an optional parameter,
-/// otherwise the data will be dumped to "dump_declared_classes.json".
-fn get_cli_params() -> CliParams {
-    let matches = Command::new("Dump declared classes")
-        .arg(
-            Arg::new("file_path")
-                .short('f')
-                .long("file_path")
-                .default_value("dump_declared_classes.json")
-                .help("The file path to dump the declared classes table to."),
-        )
-        .arg(
-            Arg::new("start_block")
-                .short('s')
-                .long("start_block")
-                .required(true)
-                .help("The block number to start dumping from."),
-        )
-        .arg(
-            Arg::new("end_block")
-                .short('e')
-                .long("end_block")
-                .required(true)
-                .help("The block number to end dumping at."),
-        )
-        .arg(
-            Arg::new("chain_id")
-                .short('c')
-                .long("chain_id")
-                .required(true)
-                .help("The chain id SN_MAIN/SN_SEPOLIA, default value is SN_MAIN."),
-        )
-        .get_matches();
-
-    let file_path =
-        matches.get_one::<String>("file_path").expect("Failed parsing file_path").to_string();
-    let start_block = matches
-        .get_one::<String>("start_block")
-        .expect("Failed parsing start_block")
-        .parse::<u64>()
-        .expect("Failed parsing start_block");
-    let end_block = matches
-        .get_one::<String>("end_block")
-        .expect("Failed parsing end_block")
-        .parse::<u64>()
-        .expect("Failed parsing end_block");
-    if start_block >= end_block {
-        panic!("start_block must be smaller than end_block");
-    }
-    let chain_id =
-        matches.get_one::<String>("chain_id").expect("Failed parsing chain_id").to_string();
-    CliParams { start_block, end_block, file_path, chain_id: ChainId::Other(chain_id) }
-}
diff --git a/crates/papyrus_storage/src/lib.rs b/crates/papyrus_storage/src/lib.rs
index 7d7fa274f8..e932d0d0db 100644
--- a/crates/papyrus_storage/src/lib.rs
+++ b/crates/papyrus_storage/src/lib.rs
@@ -81,7 +81,7 @@ pub mod class;
 pub mod compiled_class;
 #[cfg(feature = "document_calls")]
 pub mod document_calls;
-pub mod utils;
+pub mod storage_metrics;
 // TODO(yair): Make the compression_utils module pub(crate) or extract it from the crate.
 #[doc(hidden)]
 pub mod compression_utils;
@@ -150,7 +150,6 @@ use crate::db::{
 use crate::header::StorageBlockHeader;
 use crate::mmap_file::MMapFileStats;
 use crate::state::data::IndexedDeprecatedContractClass;
-pub use crate::utils::update_storage_metrics;
 use crate::version::{VersionStorageReader, VersionStorageWriter};
 
 // For more details on the storage version, see the module documentation.
diff --git a/crates/papyrus_storage/src/storage_metrics.rs b/crates/papyrus_storage/src/storage_metrics.rs
new file mode 100644
index 0000000000..8e6fd114b6
--- /dev/null
+++ b/crates/papyrus_storage/src/storage_metrics.rs
@@ -0,0 +1,31 @@
+//! Module for storage metrics utilities.
+#[cfg(test)]
+#[path = "storage_metrics_test.rs"]
+mod storage_metrics_test;
+
+use metrics::{absolute_counter, gauge};
+use tracing::debug;
+
+use crate::{StorageReader, StorageResult};
+
+// TODO(dvir): add storage metrics names to this module.
+
+// TODO(dvir): consider adding storage size metrics.
+// TODO(dvir): relocate all the storage metrics in one module and export them (also in other
+// crates).
+/// Updates storage metrics about the state of the storage.
+#[allow(clippy::as_conversions)]
+pub fn update_storage_metrics(reader: &StorageReader) -> StorageResult<()> {
+    debug!("updating storage metrics");
+    gauge!("storage_free_pages_number", reader.db_reader.get_free_pages()? as f64);
+    let info = reader.db_reader.get_db_info()?;
+    absolute_counter!(
+        "storage_last_page_number",
+        u64::try_from(info.last_pgno()).expect("usize should fit in u64")
+    );
+    absolute_counter!(
+        "storage_last_transaction_index",
+        u64::try_from(info.last_txnid()).expect("usize should fit in u64")
+    );
+    Ok(())
+}
diff --git a/crates/papyrus_storage/src/storage_metrics_test.rs b/crates/papyrus_storage/src/storage_metrics_test.rs
new file mode 100644
index 0000000000..323d194746
--- /dev/null
+++ b/crates/papyrus_storage/src/storage_metrics_test.rs
@@ -0,0 +1,44 @@
+use metrics_exporter_prometheus::PrometheusBuilder;
+use papyrus_test_utils::prometheus_is_contained;
+use prometheus_parse::Value::{Counter, Gauge};
+
+use super::update_storage_metrics;
+use crate::test_utils::get_test_storage;
+
+#[test]
+fn update_storage_metrics_test() {
+    let ((reader, _writer), _temp_dir) = get_test_storage();
+    let handle = PrometheusBuilder::new().install_recorder().unwrap();
+
+    assert!(prometheus_is_contained(handle.render(), "storage_free_pages_number", &[]).is_none());
+    assert!(prometheus_is_contained(handle.render(), "storage_last_page_number", &[]).is_none());
+    assert!(
+        prometheus_is_contained(handle.render(), "storage_last_transaction_index", &[]).is_none()
+    );
+
+    update_storage_metrics(&reader).unwrap();
+
+    let Gauge(free_pages) =
+        prometheus_is_contained(handle.render(), "storage_free_pages_number", &[]).unwrap()
+    else {
+        panic!("storage_free_pages_number is not a Gauge")
+    };
+    // TODO(dvir): add an upper limit once the bug in the binding freelist function is fixed.
+    assert!(0f64 < free_pages);
+
+    let Counter(last_page) =
+        prometheus_is_contained(handle.render(), "storage_last_page_number", &[]).unwrap()
+    else {
+        panic!("storage_last_page_number is not a Counter")
+    };
+    assert!(0f64 < last_page);
+    assert!(last_page < 1000f64);
+
+    let Counter(last_transaction) =
+        prometheus_is_contained(handle.render(), "storage_last_transaction_index", &[]).unwrap()
+    else {
+        panic!("storage_last_transaction_index is not a Counter")
+    };
+    assert!(0f64 < last_transaction);
+    assert!(last_transaction < 100f64);
+}
diff --git a/crates/papyrus_storage/src/utils.rs b/crates/papyrus_storage/src/utils.rs
deleted file mode 100644
index 157c7cbb7b..0000000000
--- a/crates/papyrus_storage/src/utils.rs
+++ /dev/null
@@ -1,110 +0,0 @@
-//! module for external utils, such as dumping a storage table to a file
-#[cfg(test)]
-#[path = "utils_test.rs"]
-mod utils_test;
-
-use std::fs::File;
-use std::io::{BufWriter, Write};
-
-use metrics::{absolute_counter, gauge};
-use serde::Serialize;
-use starknet_api::block::BlockNumber;
-use starknet_api::core::{ChainId, ClassHash, CompiledClassHash};
-use starknet_api::rpc_transaction::EntryPointByType;
-use starknet_types_core::felt::Felt;
-use tracing::debug;
-
-use crate::compiled_class::CasmStorageReader;
-use crate::db::table_types::Table;
-use crate::db::RO;
-use crate::state::StateStorageReader;
-use crate::{open_storage, StorageConfig, StorageError, StorageReader, StorageResult, StorageTxn};
-
-#[derive(Serialize)]
-struct DumpDeclaredClass {
-    class_hash: ClassHash,
-    compiled_class_hash: CompiledClassHash,
-    sierra_program: Vec<Felt>,
-    contract_class_version: String,
-    entry_points_by_type: EntryPointByType,
-}
-
-/// Dumps the declared_classes at a given block range from the storage to a file.
-pub fn dump_declared_classes_table_by_block_range(
-    start_block: u64,
-    end_block: u64,
-    file_path: &str,
-    chain_id: &ChainId,
-) -> StorageResult<()> {
-    let mut storage_config = StorageConfig::default();
-    storage_config.db_config.chain_id = chain_id.clone();
-    let (storage_reader, _) = open_storage(storage_config)?;
-    let txn = storage_reader.begin_ro_txn()?;
-    let compiled_class_marker = txn.get_compiled_class_marker()?;
-    if end_block > compiled_class_marker.0 {
-        return Err(StorageError::InvalidBlockNumber {
-            block: BlockNumber(end_block),
-            compiled_class_marker,
-        });
-    }
-    dump_declared_classes_table_by_block_range_internal(&txn, file_path, start_block, end_block)
-}
-
-fn dump_declared_classes_table_by_block_range_internal(
-    txn: &StorageTxn<'_, RO>,
-    file_path: &str,
-    start_block: u64,
-    end_block: u64,
-) -> StorageResult<()> {
-    let table_handle = txn.txn.open_table(&txn.tables.declared_classes)?;
-    let file = File::create(file_path)?;
-    let mut writer = BufWriter::new(file);
-    writer.write_all(b"[")?;
-    let mut first = true;
-    for block_number in start_block..end_block {
-        if let Some(thin_state_diff) = txn.get_state_diff(BlockNumber(block_number))? {
-            for (class_hash, compiled_class_hash) in thin_state_diff.declared_classes.iter() {
-                if let Some(contract_class_location) = table_handle.get(&txn.txn, class_hash)? {
-                    let contract_class =
-                        txn.file_handlers.get_contract_class_unchecked(contract_class_location)?;
-                    if !first {
-                        writer.write_all(b",")?;
-                    }
-                    serde_json::to_writer(
-                        &mut writer,
-                        &DumpDeclaredClass {
-                            class_hash: *class_hash,
-                            compiled_class_hash: *compiled_class_hash,
-                            sierra_program: contract_class.sierra_program.clone(),
-                            contract_class_version: contract_class.contract_class_version.clone(),
-                            entry_points_by_type: contract_class.entry_points_by_type.clone(),
-                        },
-                    )?;
-                    first = false;
-                }
-            }
-        };
-    }
-    writer.write_all(b"]")?;
-    Ok(())
-}
-
-// TODO(dvir): consider adding storage size metrics.
-// TODO(dvir): relocate all the storage metrics in one module and export them (also in other
-// crates).
-/// Updates storage metrics about the state of the storage.
-#[allow(clippy::as_conversions)]
-pub fn update_storage_metrics(reader: &StorageReader) -> StorageResult<()> {
-    debug!("updating storage metrics");
-    gauge!("storage_free_pages_number", reader.db_reader.get_free_pages()? as f64);
-    let info = reader.db_reader.get_db_info()?;
-    absolute_counter!(
-        "storage_last_page_number",
-        u64::try_from(info.last_pgno()).expect("usize should fit in u64")
-    );
-    absolute_counter!(
-        "storage_last_transaction_index",
-        u64::try_from(info.last_txnid()).expect("usize should fit in u64")
-    );
-    Ok(())
-}
diff --git a/crates/papyrus_storage/src/utils_test.rs b/crates/papyrus_storage/src/utils_test.rs
deleted file mode 100644
index 8f69a6570b..0000000000
--- a/crates/papyrus_storage/src/utils_test.rs
+++ /dev/null
@@ -1,120 +0,0 @@
-use std::fs;
-
-use indexmap::indexmap;
-use metrics_exporter_prometheus::PrometheusBuilder;
-use papyrus_test_utils::prometheus_is_contained;
-use pretty_assertions::assert_eq;
-use prometheus_parse::Value::{Counter, Gauge};
-use starknet_api::block::BlockNumber;
-use starknet_api::core::{ClassHash, CompiledClassHash};
-use starknet_api::hash::StarkHash;
-use starknet_api::rpc_transaction::EntryPointByType;
-use starknet_api::state::{SierraContractClass, ThinStateDiff};
-use starknet_types_core::felt::Felt;
-
-use super::update_storage_metrics;
-use crate::class::ClassStorageWriter;
-use crate::state::StateStorageWriter;
-use crate::test_utils::get_test_storage;
-use crate::utils::{dump_declared_classes_table_by_block_range_internal, DumpDeclaredClass};
-
-// TODO(yael): fix dump_table_to_file.
-#[test]
-fn test_dump_declared_classes() {
-    let file_path = "tmp_test_dump_declared_classes_table.json";
-    let compiled_class_hash = CompiledClassHash(StarkHash::default());
-    let mut declared_classes = vec![];
-    let mut state_diffs = vec![];
-    let ((reader, mut writer), _temp_dir) = get_test_storage();
-    for i in 0..5 {
-        let i_felt = Felt::from(u128::try_from(i).expect("usize should fit in u128"));
-        declared_classes.push((
-            ClassHash(i_felt),
-            SierraContractClass {
-                sierra_program: vec![i_felt, i_felt],
-                contract_class_version: "0.1.0".to_string(),
-                entry_points_by_type: EntryPointByType::default(),
-                abi: "".to_string(),
-            },
-        ));
-        state_diffs.push(ThinStateDiff {
-            deployed_contracts: indexmap!(),
-            storage_diffs: indexmap!(),
-            declared_classes: indexmap!(
-                declared_classes[i].0 => compiled_class_hash
-            ),
-            deprecated_declared_classes: vec![],
-            nonces: indexmap!(),
-            replaced_classes: indexmap!(),
-        });
-        let block_number = BlockNumber(u64::try_from(i).expect("usize should fit in u64"));
-        let txn = writer.begin_rw_txn().unwrap();
-        txn.append_state_diff(block_number, state_diffs[i].clone())
-            .unwrap()
-            .append_classes(block_number, &[(declared_classes[i].0, &declared_classes[i].1)], &[])
-            .unwrap()
-            .commit()
-            .unwrap();
-    }
-    let txn = reader.begin_ro_txn().unwrap();
-
-    // Test dump_declared_classes_table_by_block_range
-    dump_declared_classes_table_by_block_range_internal(&txn, file_path, 2, 4).unwrap();
-    let file_content = fs::read_to_string(file_path).unwrap();
-    let _ = fs::remove_file(file_path);
-    let expected_declared_classes = vec![
-        DumpDeclaredClass {
-            class_hash: declared_classes[2].0,
-            compiled_class_hash,
-            sierra_program: declared_classes[2].1.sierra_program.clone(),
-            contract_class_version: declared_classes[2].1.contract_class_version.clone(),
-            entry_points_by_type: declared_classes[2].1.entry_points_by_type.clone(),
-        },
-        DumpDeclaredClass {
-            class_hash: declared_classes[3].0,
-            compiled_class_hash,
-            sierra_program: declared_classes[3].1.sierra_program.clone(),
-            contract_class_version: declared_classes[3].1.contract_class_version.clone(),
-            entry_points_by_type: declared_classes[3].1.entry_points_by_type.clone(),
-        },
-    ];
-    assert_eq!(file_content, serde_json::to_string(&expected_declared_classes).unwrap());
-}
-
-#[test]
-fn update_storage_metrics_test() {
-    let ((reader, _writer), _temp_dir) = get_test_storage();
-    let handle = PrometheusBuilder::new().install_recorder().unwrap();
-
-    assert!(prometheus_is_contained(handle.render(), "storage_free_pages_number", &[]).is_none());
-    assert!(prometheus_is_contained(handle.render(), "storage_last_page_number", &[]).is_none());
-    assert!(
-        prometheus_is_contained(handle.render(), "storage_last_transaction_index", &[]).is_none()
-    );
-
-    update_storage_metrics(&reader).unwrap();
-
-    let Gauge(free_pages) =
-        prometheus_is_contained(handle.render(), "storage_free_pages_number", &[]).unwrap()
-    else {
-        panic!("storage_free_pages_number is not a Gauge")
-    };
-    // TODO(dvir): add an upper limit when the bug in the binding freelist function will be fixed.
-    assert!(0f64 < free_pages);
-
-    let Counter(last_page) =
-        prometheus_is_contained(handle.render(), "storage_last_page_number", &[]).unwrap()
-    else {
-        panic!("storage_last_page_number is not a Counter")
-    };
-    assert!(0f64 < last_page);
-    assert!(last_page < 1000f64);
-
-    let Counter(last_transaction) =
-        prometheus_is_contained(handle.render(), "storage_last_transaction_index", &[]).unwrap()
-    else {
-        panic!("storage_last_transaction_index is not a Counter")
-    };
-    assert!(0f64 < last_transaction);
-    assert!(last_transaction < 100f64);
-}
diff --git a/papyrus_utilities.Dockerfile b/papyrus_utilities.Dockerfile
index f6a36f7963..d031008c9e 100644
--- a/papyrus_utilities.Dockerfile
+++ b/papyrus_utilities.Dockerfile
@@ -14,10 +14,6 @@ FROM builder AS utilities_builder
 # Build papyrus_load_test and copy its resources.
 RUN cargo build --target x86_64-unknown-linux-musl --release --package papyrus_load_test --bin papyrus_load_test
 
-# Build dump_declared_classes.
-RUN cargo build --target x86_64-unknown-linux-musl --release --package papyrus_storage --features "clap" \
-    --bin dump_declared_classes
-
 # Build storage_benchmark.
 RUN cargo build --target x86_64-unknown-linux-musl --release --package papyrus_storage \
     --features "clap statistical" --bin storage_benchmark
@@ -32,9 +28,6 @@ WORKDIR /app
 COPY --from=utilities_builder /app/target/x86_64-unknown-linux-musl/release/papyrus_load_test /app/target/release/papyrus_load_test
 COPY crates/papyrus_load_test/resources/ /app/crates/papyrus_load_test/resources
 
-# Copy the dump_declared_classes executable.
-COPY --from=utilities_builder /app/target/x86_64-unknown-linux-musl/release/dump_declared_classes /app/target/release/dump_declared_classes
-
 # Copy the storage_benchmark executable.
 COPY --from=utilities_builder /app/target/x86_64-unknown-linux-musl/release/storage_benchmark /app/target/release/storage_benchmark
 
@@ -45,6 +38,5 @@ ENTRYPOINT echo -e \
 "There is no default executable for this image. Run an executable using its name or path to it.\n\
 The available executables are:\n\
 - papyrus_load_test, performs a stress test on a node RPC gateway.\n\
-- dump_declared_classes, dumps the declared_classes table from the storage to a file.\n\
 - storage_benchmark, performs a benchmark on the storage.\n\
 For example, in a docker runtime: docker run --entrypoint papyrus_load_test "
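
For reviewers, a minimal sketch of how a caller drives `update_storage_metrics` after this change: the function is now imported from `papyrus_storage::storage_metrics` (as the `run.rs` hunk above does), and the recorder setup mirrors `storage_metrics_test.rs`. The `main` wiring and the fixed refresh interval below are illustrative assumptions, not part of this patch.

```rust
use std::thread;
use std::time::Duration;

use metrics_exporter_prometheus::PrometheusBuilder;
// New import path introduced by this patch.
use papyrus_storage::storage_metrics::update_storage_metrics;
use papyrus_storage::{open_storage, StorageConfig};

fn main() {
    // Install a global Prometheus recorder so the gauge!/absolute_counter!
    // macros inside update_storage_metrics have a sink (same setup as the test).
    let handle = PrometheusBuilder::new().install_recorder().expect("install recorder");

    // Open storage with the default config; a real caller would first set
    // db_config fields such as chain_id, as the removed utils.rs did.
    let (reader, _writer) = open_storage(StorageConfig::default()).expect("open storage");

    // Refresh the storage gauges/counters periodically; the iteration count
    // and 60s interval are assumptions for the sketch.
    for _ in 0..3 {
        update_storage_metrics(&reader).expect("update storage metrics");
        println!("{}", handle.render());
        thread::sleep(Duration::from_secs(60));
    }
}
```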