From d600d56f9e7a901cade5d8049a728181edb1fa36 Mon Sep 17 00:00:00 2001 From: Yael Doweck Date: Tue, 3 Oct 2023 11:41:32 +0300 Subject: [PATCH] feat(storage): add dump storage table to file utility --- crates/papyrus_storage/src/lib.rs | 5 ++ crates/papyrus_storage/src/utils.rs | 48 +++++++++++++++++ crates/papyrus_storage/src/utils_test.rs | 65 ++++++++++++++++++++++++ 3 files changed, 118 insertions(+) create mode 100644 crates/papyrus_storage/src/utils.rs create mode 100644 crates/papyrus_storage/src/utils_test.rs diff --git a/crates/papyrus_storage/src/lib.rs b/crates/papyrus_storage/src/lib.rs index 7c035e2d16..da24026105 100644 --- a/crates/papyrus_storage/src/lib.rs +++ b/crates/papyrus_storage/src/lib.rs @@ -52,6 +52,7 @@ pub mod base_layer; pub mod body; pub mod compiled_class; +pub mod utils; // TODO(yair): Make the compression_utils module pub(crate) or extract it from the crate. #[doc(hidden)] pub mod compression_utils; @@ -415,6 +416,10 @@ pub enum StorageError { CompiledClassReWrite { class_hash: ClassHash }, #[error("The table {table_name} is unused under the {storage_scope:?} storage scope.")] ScopeError { table_name: String, storage_scope: StorageScope }, + #[error(transparent)] + IOError(#[from] std::io::Error), + #[error(transparent)] + SerdeError(#[from] serde_json::Error), } /// A type alias that maps to std::result::Result. diff --git a/crates/papyrus_storage/src/utils.rs b/crates/papyrus_storage/src/utils.rs new file mode 100644 index 0000000000..a144aa489c --- /dev/null +++ b/crates/papyrus_storage/src/utils.rs @@ -0,0 +1,48 @@ +//! 
module for external utils, such as dumping a storage table to a file +#[cfg(test)] +#[path = "utils_test.rs"] +mod utils_test; + +use std::fs::File; +use std::io::{BufWriter, Write}; + +use crate::db::serialization::StorageSerde; +use crate::db::{DbIter, TableIdentifier, RO}; +use crate::{open_storage, StorageConfig, StorageResult, StorageTxn}; + +/// dumps a table from the storage to a file in JSON format +fn dump_table_to_file( + txn: &StorageTxn<'_, RO>, + table_id: &TableIdentifier, + file_path: &str, +) -> StorageResult<()> +where + K: StorageSerde + serde::Serialize, + V: StorageSerde + serde::Serialize, +{ + let table_handle = txn.txn.open_table(table_id)?; + let mut cursor = table_handle.cursor(&txn.txn)?; + let iter = DbIter::new(&mut cursor); + let file = File::create(file_path)?; + let mut writer = BufWriter::new(file); + writer.write_all(b"[")?; + let mut first = true; + for data in iter { + if !first { + writer.write_all(b",")?; + } + serde_json::to_writer(&mut writer, &data.unwrap())?; + first = false; + } + writer.write_all(b"]")?; + Ok(()) +} + +/// dumps the declared classes table from the storage to a file +pub fn dump_declared_classes_table_to_file(file_path: &str) -> StorageResult<()> { + let storage_config = StorageConfig::default(); + let (storage_reader, _) = open_storage(storage_config.clone())?; + let txn = storage_reader.begin_ro_txn()?; + dump_table_to_file(&txn, &txn.tables.declared_classes, file_path)?; + Ok(()) +} diff --git a/crates/papyrus_storage/src/utils_test.rs b/crates/papyrus_storage/src/utils_test.rs new file mode 100644 index 0000000000..b891c793be --- /dev/null +++ b/crates/papyrus_storage/src/utils_test.rs @@ -0,0 +1,65 @@ +use std::collections::HashMap; +use std::fs; + +use indexmap::indexmap; +use starknet_api::block::BlockNumber; +use starknet_api::core::{ClassHash, CompiledClassHash}; +use starknet_api::hash::{StarkFelt, StarkHash}; +use starknet_api::state::{ContractClass, StateDiff}; + +use super::dump_table_to_file; 
use crate::state::StateStorageWriter;
use crate::test_utils::get_test_storage;

// Round-trip test: append two declared classes to a fresh test storage via a
// state diff, dump the declared_classes table to a JSON file, and check the
// file content equals serializing the same (class_hash, contract_class) pairs.
#[test]
fn test_dump_table_to_file() {
    // Temporary output file in the working directory; removed again before the
    // final assert so it is cleaned up regardless of the comparison result.
    let file_path = "tmp_test_dump_declared_classes_table.json";
    let declared_class1 = (
        ClassHash(1u128.into()),
        ContractClass {
            sierra_program: vec![StarkFelt::ONE, StarkFelt::TWO],
            entry_point_by_type: HashMap::new(),
            abi: "".to_string(),
        },
    );
    let declared_class2 = (
        ClassHash(2u128.into()),
        ContractClass {
            sierra_program: vec![StarkFelt::THREE, StarkFelt::ZERO],
            entry_point_by_type: HashMap::new(),
            abi: "".to_string(),
        },
    );
    let compiled_class_hash = CompiledClassHash(StarkHash::default());
    // Expected dump content: the table rows as (key, value) pairs, in key order.
    let declared_classes = vec![declared_class1.clone(), declared_class2.clone()];
    // append_state_diff stores (compiled_class_hash, contract_class) per class.
    let declared_classes_for_append_state = indexmap!(
        declared_class1.0 =>
        (compiled_class_hash, declared_class1.1.clone()),
        declared_class2.0 =>
        (compiled_class_hash, declared_class2.1.clone()),
    );

    let ((reader, mut writer), _temp_dir) = get_test_storage();
    let txn = writer.begin_rw_txn().unwrap();
    // Write the two declared classes at block 0 and commit the transaction so
    // the read-only transaction below can see them.
    txn.append_state_diff(
        BlockNumber(0),
        StateDiff {
            deployed_contracts: indexmap!(),
            storage_diffs: indexmap!(),
            declared_classes: declared_classes_for_append_state,
            deprecated_declared_classes: indexmap!(),
            nonces: indexmap!(),
            replaced_classes: indexmap!(),
        },
        indexmap!(),
    )
    .unwrap()
    .commit()
    .unwrap();

    let txn = reader.begin_ro_txn().unwrap();
    dump_table_to_file(&txn, &txn.tables.declared_classes, file_path).unwrap();
    let file_content = fs::read_to_string(file_path).unwrap();
    // Remove the file before asserting so a failed assert doesn't leave it behind.
    let _ = fs::remove_file(file_path);
    assert_eq!(file_content, serde_json::to_string(&declared_classes).unwrap());
}