feat: dump_declared_classes can dump for a specific block range (#1283)
Yael-Starkware authored Oct 19, 2023
1 parent 7eccdf3 commit d116ae4
Showing 6 changed files with 185 additions and 50 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions crates/dump_declared_classes/Cargo.toml
@@ -7,3 +7,4 @@ license-file.workspace = true

[dependencies]
papyrus_storage = { path = "../papyrus_storage", version = "0.0.5" }
clap.workspace = true
71 changes: 62 additions & 9 deletions crates/dump_declared_classes/src/main.rs
@@ -1,15 +1,68 @@
use papyrus_storage::utils::dump_declared_classes_table_to_file;
use clap::{Arg, Command};
use papyrus_storage::utils::dump_declared_classes_table_by_block_range;

/// This executable dumps the declared_classes table from the storage to a file.
/// The file path can be passed as an argument, otherwise it will be dumped to
/// "dump_declared_classes.json".
fn main() {
let args = std::env::args().collect::<Vec<_>>();
let default_file_path = "dump_declared_classes.json".to_string();
let file_path = args.get(1).unwrap_or(&default_file_path);
let cli_params = get_cli_params();
match dump_declared_classes_table_by_block_range(
cli_params.start_block,
cli_params.end_block,
&cli_params.file_path,
) {
Ok(_) => println!("Dumped declared_classes table to file: {} .", cli_params.file_path),
Err(e) => println!("Failed dumping declared_classes table with error: {} .", e),
}
}

struct CliParams {
start_block: u64,
end_block: u64,
file_path: String,
}

/// The start_block and end_block arguments are mandatory and define the block range to dump,
/// start_block is inclusive and end_block is exclusive. The file_path is an optional parameter,
/// otherwise the data will be dumped to "dump_declared_classes.json".
fn get_cli_params() -> CliParams {
let matches = Command::new("Dump declared classes")
.arg(
Arg::new("file_path")
.short('f')
.long("file_path")
.default_value("dump_declared_classes.json")
.help("The file path to dump the declared classes table to."),
)
.arg(
Arg::new("start_block")
.short('s')
.long("start_block")
.required(true)
.help("The block number to start dumping from."),
)
.arg(
Arg::new("end_block")
.short('e')
.long("end_block")
.required(true)
.help("The block number to end dumping at."),
)
.get_matches();

match dump_declared_classes_table_to_file(file_path) {
Ok(_) => println!("Dumped declared_classes table to file: {}", file_path),
Err(e) => println!("Failed dumping declared_classes table with error: {}", e),
let file_path =
matches.get_one::<String>("file_path").expect("Failed parsing file_path").as_str();
let start_block = matches
.get_one::<String>("start_block")
.expect("Failed parsing start_block")
.parse::<u64>()
.expect("Failed parsing start_block");
let end_block = matches
.get_one::<String>("end_block")
.expect("Failed parsing end_block")
.parse::<u64>()
.expect("Failed parsing end_block");
if start_block >= end_block {
panic!("start_block must be smaller than end_block");
}
CliParams { start_block, end_block, file_path: file_path.to_string() }
}
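
Given the flags defined above, a typical invocation of the new binary would look roughly like `dump_declared_classes --start_block 10 --end_block 20 --file_path out.json`. The snippet below is a minimal sketch of calling the new library entry point directly instead of going through the CLI; the block range and output path are illustrative only, and it assumes papyrus_storage's default storage config points at an existing database.

use papyrus_storage::utils::dump_declared_classes_table_by_block_range;

fn main() {
    // Dump the classes declared in blocks [10, 20) to an illustrative output path.
    match dump_declared_classes_table_by_block_range(10, 20, "declared_classes_10_20.json") {
        Ok(()) => println!("Dump succeeded."),
        Err(e) => eprintln!("Dump failed: {e}"),
    }
}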
5 changes: 5 additions & 0 deletions crates/papyrus_storage/src/lib.rs
@@ -420,6 +420,11 @@ pub enum StorageError {
IOError(#[from] std::io::Error),
#[error(transparent)]
SerdeError(#[from] serde_json::Error),
#[error(
"The block number {block} should be smaller than the compiled_class_marker \
{compiled_class_marker}."
)]
InvalidBlockNumber { block: BlockNumber, compiled_class_marker: BlockNumber },
}

/// A type alias that maps to std::result::Result<T, StorageError>.
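
The new InvalidBlockNumber variant is returned when the requested end_block is past the compiled-class marker (see the check in utils.rs below). A minimal sketch of how a caller might match on it, assuming the dump function and error type are in scope, is:

use papyrus_storage::utils::dump_declared_classes_table_by_block_range;
use papyrus_storage::StorageError;

fn dump_or_report(start: u64, end: u64, path: &str) {
    match dump_declared_classes_table_by_block_range(start, end, path) {
        // The requested range reaches past the compiled_class_marker, so the data is not yet available.
        Err(StorageError::InvalidBlockNumber { block, compiled_class_marker }) => {
            eprintln!("block {block} exceeds the compiled_class_marker {compiled_class_marker}")
        }
        Err(e) => eprintln!("storage error: {e}"),
        Ok(()) => println!("dumped blocks [{start}, {end}) to {path}"),
    }
}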
72 changes: 68 additions & 4 deletions crates/papyrus_storage/src/utils.rs
@@ -3,14 +3,24 @@
#[path = "utils_test.rs"]
mod utils_test;

use std::collections::HashMap;
use std::fs::File;
use std::io::{BufWriter, Write};

use serde::Serialize;
use starknet_api::block::BlockNumber;
use starknet_api::core::{ClassHash, CompiledClassHash};
use starknet_api::hash::StarkFelt;
use starknet_api::state::{EntryPoint, EntryPointType};

use crate::compiled_class::CasmStorageReader;
use crate::db::serialization::StorageSerde;
use crate::db::{DbIter, TableIdentifier, RO};
use crate::{open_storage, StorageConfig, StorageResult, StorageTxn};
use crate::state::StateStorageReader;
use crate::{open_storage, StorageConfig, StorageError, StorageResult, StorageTxn};

/// Dumps a table from the storage to a file in JSON format.
#[allow(dead_code)]
fn dump_table_to_file<K, V>(
txn: &StorageTxn<'_, RO>,
table_id: &TableIdentifier<K, V>,
@@ -38,11 +48,65 @@ where
Ok(())
}

/// Dumps the declared_classes table from the storage to a file.
pub fn dump_declared_classes_table_to_file(file_path: &str) -> StorageResult<()> {
#[derive(Serialize)]
struct DumpDeclaredClass {
class_hash: ClassHash,
compiled_class_hash: CompiledClassHash,
sierra_program: Vec<StarkFelt>,
entry_points_by_type: HashMap<EntryPointType, Vec<EntryPoint>>,
}

/// Dumps the declared_classes at a given block range from the storage to a file.
pub fn dump_declared_classes_table_by_block_range(
start_block: u64,
end_block: u64,
file_path: &str,
) -> StorageResult<()> {
let storage_config = StorageConfig::default();
let (storage_reader, _) = open_storage(storage_config)?;
let txn = storage_reader.begin_ro_txn()?;
dump_table_to_file(&txn, &txn.tables.declared_classes, file_path)?;
let compiled_class_marker = txn.get_compiled_class_marker()?;
if end_block > compiled_class_marker.0 {
return Err(StorageError::InvalidBlockNumber {
block: BlockNumber(end_block),
compiled_class_marker,
});
}
dump_declared_classes_table_by_block_range_internal(&txn, file_path, start_block, end_block)
}

fn dump_declared_classes_table_by_block_range_internal(
txn: &StorageTxn<'_, RO>,
file_path: &str,
start_block: u64,
end_block: u64,
) -> StorageResult<()> {
let table_handle = txn.txn.open_table(&txn.tables.declared_classes)?;
let file = File::create(file_path)?;
let mut writer = BufWriter::new(file);
writer.write_all(b"[")?;
let mut first = true;
for block_number in start_block..end_block {
if let Some(thin_state_diff) = txn.get_state_diff(BlockNumber(block_number))? {
for (class_hash, compiled_class_hash) in thin_state_diff.declared_classes.iter() {
if let Some(contract_class) = table_handle.get(&txn.txn, class_hash)? {
if !first {
writer.write_all(b",")?;
}
serde_json::to_writer(
&mut writer,
&DumpDeclaredClass {
class_hash: *class_hash,
compiled_class_hash: *compiled_class_hash,
sierra_program: contract_class.sierra_program.clone(),
entry_points_by_type: contract_class.entry_point_by_type.clone(),
},
)?;
first = false;
}
}
};
}
writer.write_all(b"]")?;
Ok(())
}
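
The internal helper streams the JSON array instead of building it in memory: it writes "[", then each DumpDeclaredClass separated by commas, then "]". A stripped-down, standalone sketch of that pattern (with a generic serializable item type, purely for illustration) could look like this:

use std::fs::File;
use std::io::{BufWriter, Write};

use serde::Serialize;

// Write `items` as a JSON array without materializing the whole array as a string.
fn write_json_array<T: Serialize>(items: &[T], path: &str) -> std::io::Result<()> {
    let mut writer = BufWriter::new(File::create(path)?);
    writer.write_all(b"[")?;
    let mut first = true;
    for item in items {
        if !first {
            writer.write_all(b",")?;
        }
        serde_json::to_writer(&mut writer, item)
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
        first = false;
    }
    writer.write_all(b"]")?;
    writer.flush()?;
    Ok(())
}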
85 changes: 48 additions & 37 deletions crates/papyrus_storage/src/utils_test.rs
@@ -10,56 +10,67 @@ use starknet_api::state::{ContractClass, StateDiff};
use super::dump_table_to_file;
use crate::state::StateStorageWriter;
use crate::test_utils::get_test_storage;
use crate::utils::{dump_declared_classes_table_by_block_range_internal, DumpDeclaredClass};

#[test]
fn test_dump_table_to_file() {
fn test_dump_declared_classes() {
let file_path = "tmp_test_dump_declared_classes_table.json";
let declared_class1 = (
ClassHash(1u128.into()),
ContractClass {
sierra_program: vec![StarkFelt::ONE, StarkFelt::TWO],
entry_point_by_type: HashMap::new(),
abi: "".to_string(),
},
);
let declared_class2 = (
ClassHash(2u128.into()),
ContractClass {
sierra_program: vec![StarkFelt::THREE, StarkFelt::ZERO],
entry_point_by_type: HashMap::new(),
abi: "".to_string(),
},
);
let compiled_class_hash = CompiledClassHash(StarkHash::default());
let declared_classes = vec![declared_class1.clone(), declared_class2.clone()];
let declared_classes_for_append_state = indexmap!(
declared_class1.0 =>
(compiled_class_hash, declared_class1.1),
declared_class2.0 =>
(compiled_class_hash, declared_class2.1),
);

let mut declared_classes = vec![];
let mut state_diffs = vec![];
let ((reader, mut writer), _temp_dir) = get_test_storage();
let txn = writer.begin_rw_txn().unwrap();
txn.append_state_diff(
BlockNumber(0),
StateDiff {
for i in 0..5 {
let i_felt = StarkFelt::from_u128(i as u128);
declared_classes.push((
ClassHash(i_felt),
ContractClass {
sierra_program: vec![i_felt, i_felt],
entry_point_by_type: HashMap::new(),
abi: "".to_string(),
},
));
state_diffs.push(StateDiff {
deployed_contracts: indexmap!(),
storage_diffs: indexmap!(),
declared_classes: declared_classes_for_append_state,
declared_classes: indexmap!(
declared_classes[i].0 =>
(compiled_class_hash, declared_classes[i].1.clone()),
),
deprecated_declared_classes: indexmap!(),
nonces: indexmap!(),
replaced_classes: indexmap!(),
},
indexmap!(),
)
.unwrap()
.commit()
.unwrap();

});
let txn = writer.begin_rw_txn().unwrap();
txn.append_state_diff(BlockNumber(i as u64), state_diffs[i].clone(), indexmap!())
.unwrap()
.commit()
.unwrap();
}
let txn = reader.begin_ro_txn().unwrap();

// Test dump_table_to_file for the declared_classes table.
dump_table_to_file(&txn, &txn.tables.declared_classes, file_path).unwrap();
let file_content = fs::read_to_string(file_path).unwrap();
let _ = fs::remove_file(file_path);
assert_eq!(file_content, serde_json::to_string(&declared_classes).unwrap());

// Test dump_declared_classes_table_by_block_range
dump_declared_classes_table_by_block_range_internal(&txn, file_path, 2, 4).unwrap();
let file_content = fs::read_to_string(file_path).unwrap();
let _ = fs::remove_file(file_path);
let expected_declared_classes = vec![
DumpDeclaredClass {
class_hash: declared_classes[2].0,
compiled_class_hash,
sierra_program: declared_classes[2].1.sierra_program.clone(),
entry_points_by_type: declared_classes[2].1.entry_point_by_type.clone(),
},
DumpDeclaredClass {
class_hash: declared_classes[3].0,
compiled_class_hash,
sierra_program: declared_classes[3].1.sierra_program.clone(),
entry_points_by_type: declared_classes[3].1.entry_point_by_type.clone(),
},
];
assert_eq!(file_content, serde_json::to_string(&expected_declared_classes).unwrap());
}
