From 64fa43c6551bc5fe4eff2676723f066ccb4e12cd Mon Sep 17 00:00:00 2001 From: glihm Date: Mon, 8 Apr 2024 23:30:56 -0400 Subject: [PATCH 01/23] fix: add the transaction options to sozo migrate apply (#1802) * fix: add the transaction options to sozo migrate apply * Delete crates/sozo/ops/src/account.rs --- bin/sozo/src/commands/migrate.rs | 23 ++++++++++++++++++++--- crates/sozo/ops/src/migration/mod.rs | 3 ++- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index f21166692e..c142378802 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -2,6 +2,7 @@ use anyhow::{anyhow, Context, Result}; use clap::{Args, Subcommand}; use dojo_lang::compiler::MANIFESTS_DIR; use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; +use dojo_world::migration::TxConfig; use katana_rpc_api::starknet::RPC_SPEC_VERSION; use scarb::core::{Config, Workspace}; use sozo_ops::migration; @@ -14,6 +15,7 @@ use starknet::signers::LocalWallet; use super::options::account::AccountOptions; use super::options::starknet::StarknetOptions; +use super::options::transaction::TransactionOptions; use super::options::world::WorldOptions; #[derive(Debug, Args)] @@ -57,6 +59,9 @@ pub enum MigrateCommand { #[command(flatten)] account: AccountOptions, + + #[command(flatten)] + transaction: TransactionOptions, }, } @@ -150,10 +155,13 @@ impl MigrateArgs { ) .await?; - migration::migrate(&ws, world_address, chain_id, &account, name, true).await + migration::migrate(&ws, world_address, chain_id, &account, name, true, None) + .await }) } - MigrateCommand::Apply { mut name, world, starknet, account } => { + MigrateCommand::Apply { mut name, world, starknet, account, transaction } => { + let txn_config: Option = Some(transaction.into()); + if name.is_none() { if let Some(root_package) = ws.root_package() { name = Some(root_package.id.name.to_string()) @@ -171,7 +179,16 @@ impl MigrateArgs { ) .await?; - migration::migrate(&ws, world_address, chain_id, &account, name, false).await + migration::migrate( + &ws, + world_address, + chain_id, + &account, + name, + false, + txn_config, + ) + .await }) } } diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 9383298d73..d237be073f 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -60,6 +60,7 @@ pub async fn migrate( account: &SingleOwnerAccount, name: Option, dry_run: bool, + txn_config: Option, ) -> Result<()> where P: Provider + Sync + Send + 'static, @@ -118,7 +119,7 @@ where .await?; } else { // Migrate according to the diff. 
- match apply_diff(ws, account, None, &mut strategy).await { + match apply_diff(ws, account, txn_config, &mut strategy).await { Ok(migration_output) => { update_manifests_and_abis( ws, From f4c47ba7a0137e376556d3d7459f6b2569416dc4 Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Tue, 9 Apr 2024 07:28:38 -0400 Subject: [PATCH 02/23] Prepare release: v0.6.1-alpha.0 (#1803) --- Cargo.lock | 82 ++++++++++---------- Cargo.toml | 2 +- crates/katana/runner/runner-macro/Cargo.toml | 2 +- 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aff69d7463..bf3f39bd87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1275,7 +1275,7 @@ dependencies = [ [[package]] name = "benches" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "clap", @@ -2815,7 +2815,7 @@ dependencies = [ [[package]] name = "common" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "reqwest", @@ -3544,7 +3544,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dojo-bindgen" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "async-trait", "cainome 0.1.5", @@ -3560,15 +3560,15 @@ dependencies = [ [[package]] name = "dojo-core" -version = "0.6.0" +version = "0.6.1-alpha.0" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.6.0" +version = "0.6.1-alpha.0" [[package]] name = "dojo-lang" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3618,7 +3618,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3640,7 +3640,7 @@ dependencies = [ [[package]] name = "dojo-metrics" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "hyper", @@ -3658,7 +3658,7 @@ dependencies = [ [[package]] name = "dojo-test-utils" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "assert_fs", @@ -3692,7 +3692,7 @@ dependencies = [ [[package]] name = "dojo-types" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "crypto-bigint", "hex", @@ -3707,7 +3707,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "assert_fs", @@ -3743,7 +3743,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "cairo-lang-starknet", "camino", @@ -6618,7 +6618,7 @@ dependencies = [ [[package]] name = "katana" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "alloy-primitives", "anyhow", @@ -6645,7 +6645,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "bytes", "katana-primitives", @@ -6653,7 +6653,7 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "proc-macro2", "quote", @@ -6663,7 +6663,7 @@ dependencies = [ [[package]] name = "katana-core" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "alloy-contract", "alloy-network", @@ -6709,7 +6709,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "cairo-lang-starknet", @@ -6731,7 +6731,7 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "alloy-primitives", "anyhow", @@ -6759,7 +6759,7 @@ dependencies = [ [[package]] 
name = "katana-primitives" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "alloy-primitives", "anyhow", @@ -6785,7 +6785,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "auto_impl", @@ -6810,7 +6810,7 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "assert_matches", @@ -6846,7 +6846,7 @@ dependencies = [ [[package]] name = "katana-rpc-api" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "jsonrpsee 0.16.3", "katana-core", @@ -6857,7 +6857,7 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "alloy-primitives", "anyhow", @@ -6878,7 +6878,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "katana-executor", @@ -6890,7 +6890,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "chrono", @@ -6909,7 +6909,7 @@ dependencies = [ [[package]] name = "katana-tasks" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "futures", "rayon", @@ -10011,7 +10011,7 @@ checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09" [[package]] name = "runner-macro" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "quote", "syn 2.0.55", @@ -10238,7 +10238,7 @@ dependencies = [ [[package]] name = "saya" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "clap", @@ -10257,7 +10257,7 @@ dependencies = [ [[package]] name = "saya-core" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "async-trait", @@ -10293,7 +10293,7 @@ dependencies = [ [[package]] name = "saya-provider" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "alloy-primitives", "anyhow", @@ -11056,7 +11056,7 @@ dependencies = [ [[package]] name = "sozo" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "assert_fs", @@ -11107,7 +11107,7 @@ dependencies = [ [[package]] name = "sozo-ops" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "assert_fs", @@ -11156,7 +11156,7 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "starknet 0.9.0", @@ -12596,7 +12596,7 @@ dependencies = [ [[package]] name = "torii" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "async-trait", @@ -12641,7 +12641,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "async-trait", "camino", @@ -12669,7 +12669,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "async-trait", @@ -12706,7 +12706,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "async-graphql", @@ -12747,7 +12747,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "bytes", "crypto-bigint", @@ -12786,7 +12786,7 @@ dependencies = [ [[package]] name = "torii-relay" -version = "0.6.0" +version = "0.6.1-alpha.0" dependencies = [ "anyhow", "async-trait", @@ -12821,7 +12821,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.6.0" +version = "0.6.1-alpha.0" 
dependencies = [ "anyhow", "async-trait", @@ -13126,7 +13126,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.6.0" +version = "0.6.1-alpha.0" [[package]] name = "ucd-trie" diff --git a/Cargo.toml b/Cargo.toml index 92d4ea1ed7..2906f5cfba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.6.0" +version = "0.6.1-alpha.0" [profile.performance] codegen-units = 1 diff --git a/crates/katana/runner/runner-macro/Cargo.toml b/crates/katana/runner/runner-macro/Cargo.toml index 399f17ed2c..837fe7afb2 100644 --- a/crates/katana/runner/runner-macro/Cargo.toml +++ b/crates/katana/runner/runner-macro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "runner-macro" -version = "0.6.0" +version = "0.6.1-alpha.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 1fe14ea5aae135d2994a4ad57043e0544a03c9fb Mon Sep 17 00:00:00 2001 From: Larko <59736843+Larkooo@users.noreply.github.com> Date: Tue, 9 Apr 2024 11:28:12 -0400 Subject: [PATCH 03/23] fix(grpc): queries (#1779) * fix(grpc): queries * fmt * feat: entities test * refactor: disable tests until test rework * fmt * fix: tests --- Cargo.lock | 4 + crates/torii/grpc/Cargo.toml | 6 + crates/torii/grpc/src/server/mod.rs | 41 +++++-- .../grpc/src/server/tests/entities_test.rs | 105 ++++++++++++++++++ crates/torii/grpc/src/server/tests/mod.rs | 1 + 5 files changed, 145 insertions(+), 12 deletions(-) create mode 100644 crates/torii/grpc/src/server/tests/entities_test.rs create mode 100644 crates/torii/grpc/src/server/tests/mod.rs diff --git a/Cargo.lock b/Cargo.lock index bf3f39bd87..86b07d5fe3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12751,7 +12751,9 @@ version = "0.6.1-alpha.0" dependencies = [ "bytes", "crypto-bigint", + "dojo-test-utils", "dojo-types", + "dojo-world", "futures", "futures-util", "hex", @@ -12761,8 +12763,10 @@ dependencies = [ "prost 0.12.3", "rand", "rayon", + "scarb", "serde", "serde_json", + "sozo-ops", "sqlx", "starknet 0.9.0", "starknet-crypto 0.6.1", diff --git a/crates/torii/grpc/Cargo.toml b/crates/torii/grpc/Cargo.toml index 24987b5534..d0d19105d2 100644 --- a/crates/torii/grpc/Cargo.toml +++ b/crates/torii/grpc/Cargo.toml @@ -30,6 +30,12 @@ serde_json.workspace = true tower = "0.4.13" tracing.workspace = true +[dev-dependencies] +scarb.workspace = true +dojo-test-utils.workspace = true +sozo-ops.workspace = true +dojo-world = { path = "../../dojo-world", features = [ "contracts" ] } + [target.'cfg(target_arch = "wasm32")'.dependencies] tonic-web-wasm-client.workspace = true wasm-prost.workspace = true diff --git a/crates/torii/grpc/src/server/mod.rs b/crates/torii/grpc/src/server/mod.rs index 99b8918dfb..1914123e70 100644 --- a/crates/torii/grpc/src/server/mod.rs +++ b/crates/torii/grpc/src/server/mod.rs @@ -1,6 +1,9 @@ pub mod logger; pub mod subscriptions; +#[cfg(test)] +mod tests; + use std::future::Future; use std::net::SocketAddr; use std::pin::Pin; @@ -151,7 +154,7 @@ impl DojoWorld { row_events.iter().map(map_row_to_event).collect() } - async fn query_by_hashed_keys( + pub(crate) async fn query_by_hashed_keys( &self, table: &str, model_relation_table: &str, @@ -232,7 +235,7 @@ impl DojoWorld { Ok((entities, total_count)) } - async fn query_by_keys( + pub(crate) async fn query_by_keys( &self, table: &str, model_relation_table: &str, @@ -261,7 
+264,10 @@ impl DojoWorld { JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id WHERE {model_relation_table}.model_id = '{}' and {table}.keys LIKE ? "#, - get_selector_from_name(&keys_clause.model).map_err(ParseError::NonAsciiName)?, + format!( + "{:#x}", + get_selector_from_name(&keys_clause.model).map_err(ParseError::NonAsciiName)? + ), ); // total count of rows that matches keys_pattern without limit and offset @@ -275,17 +281,24 @@ impl DojoWorld { JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id WHERE {table}.keys LIKE ? GROUP BY {table}.id - HAVING model_ids REGEXP '(^|,){}(,|$)' + HAVING INSTR(model_ids, '{}') > 0 LIMIT 1 "#, - get_selector_from_name(&keys_clause.model).map_err(ParseError::NonAsciiName)?, + format!( + "{:#x}", + get_selector_from_name(&keys_clause.model).map_err(ParseError::NonAsciiName)? + ), ); let (models_str,): (String,) = sqlx::query_as(&models_query).bind(&keys_pattern).fetch_one(&self.pool).await?; + println!("models_str: {}", models_str); + let model_ids = models_str.split(',').collect::>(); let schemas = self.model_cache.schemas(model_ids).await?; + println!("schemas: {:?}", schemas); + // query to filter with limit and offset let entities_query = format!( "{} WHERE {table}.keys LIKE ? ORDER BY {table}.event_id DESC LIMIT ? OFFSET ?", @@ -307,7 +320,7 @@ impl DojoWorld { )) } - async fn events_by_keys( + pub(crate) async fn events_by_keys( &self, keys_clause: proto::types::EventKeysClause, limit: u32, @@ -344,7 +357,7 @@ impl DojoWorld { row_events.iter().map(map_row_to_event).collect() } - async fn query_by_member( + pub(crate) async fn query_by_member( &self, table: &str, model_relation_table: &str, @@ -381,10 +394,13 @@ impl DojoWorld { FROM {table} JOIN {model_relation_table} ON {table}.id = {model_relation_table}.entity_id GROUP BY {table}.id - HAVING model_ids REGEXP '(^|,){}(,|$)' + HAVING INSTR(model_ids, '{}') > 0 LIMIT 1 "#, - get_selector_from_name(&member_clause.model).map_err(ParseError::NonAsciiName)?, + format!( + "{:#x}", + get_selector_from_name(&member_clause.model).map_err(ParseError::NonAsciiName)? 
+ ), ); let (models_str,): (String,) = sqlx::query_as(&models_query).fetch_one(&self.pool).await?; @@ -423,7 +439,8 @@ impl DojoWorld { pub async fn model_metadata(&self, model: &str) -> Result { // selector - let model = get_selector_from_name(model).map_err(ParseError::NonAsciiName)?; + let model = + format!("{:#x}", get_selector_from_name(model).map_err(ParseError::NonAsciiName)?); let (name, class_hash, contract_address, packed_size, unpacked_size, layout): ( String, @@ -436,11 +453,11 @@ impl DojoWorld { "SELECT name, class_hash, contract_address, packed_size, unpacked_size, layout FROM \ models WHERE id = ?", ) - .bind(format!("{:#x}", model)) + .bind(&model) .fetch_one(&self.pool) .await?; - let schema = self.model_cache.schema(&format!("{:#x}", model)).await?; + let schema = self.model_cache.schema(&model).await?; let layout = hex::decode(&layout).unwrap(); Ok(proto::types::ModelMetadata { diff --git a/crates/torii/grpc/src/server/tests/entities_test.rs b/crates/torii/grpc/src/server/tests/entities_test.rs new file mode 100644 index 0000000000..5f16412b4d --- /dev/null +++ b/crates/torii/grpc/src/server/tests/entities_test.rs @@ -0,0 +1,105 @@ +use std::str::FromStr; +use std::sync::Arc; + +use dojo_test_utils::compiler::build_test_config; +use dojo_test_utils::migration::prepare_migration; +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, TestSequencer, +}; +use dojo_world::contracts::WorldContractReader; +use dojo_world::utils::TransactionWaiter; +use scarb::ops; +use sozo_ops::migration::execute_strategy; +use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions}; +use starknet::accounts::{Account, Call}; +use starknet::core::types::{BlockId, BlockTag}; +use starknet::core::utils::get_selector_from_name; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet_crypto::poseidon_hash_many; +use tokio::sync::broadcast; +use torii_core::engine::{Engine, EngineConfig, Processors}; +use torii_core::processors::register_model::RegisterModelProcessor; +use torii_core::processors::store_set_record::StoreSetRecordProcessor; +use torii_core::sql::Sql; + +use crate::server::DojoWorld; +use crate::types::schema::Entity; +use crate::types::KeysClause; + +#[tokio::test(flavor = "multi_thread")] +async fn test_entities_queries() { + let options = + SqliteConnectOptions::from_str("sqlite::memory:").unwrap().create_if_missing(true); + let pool = SqlitePoolOptions::new().max_connections(5).connect_with(options).await.unwrap(); + sqlx::migrate!("../migrations").run(&pool).await.unwrap(); + let base_path = "../../../examples/spawn-and-move"; + let target_path = format!("{}/target/dev", base_path); + let mut migration = prepare_migration(base_path.into(), target_path.into()).unwrap(); + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + let provider = Arc::new(JsonRpcClient::new(HttpTransport::new(sequencer.url()))); + let world = WorldContractReader::new(migration.world_address().unwrap(), &provider); + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + // spawn + let tx = account + .execute(vec![Call { + to: 
migration.contracts.first().unwrap().contract_address, + selector: get_selector_from_name("spawn").unwrap(), + calldata: vec![], + }]) + .send() + .await + .unwrap(); + + TransactionWaiter::new(tx.transaction_hash, &provider).await.unwrap(); + + let db = Sql::new(pool.clone(), migration.world_address().unwrap()).await.unwrap(); + + let (shutdown_tx, _) = broadcast::channel(1); + let mut engine = Engine::new( + world, + db.clone(), + &provider, + Processors { + event: vec![Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor)], + ..Processors::default() + }, + EngineConfig::default(), + shutdown_tx, + None, + ); + + let _ = engine.sync_to_head(0).await.unwrap(); + + let (_, receiver) = tokio::sync::mpsc::channel(1); + let grpc = + DojoWorld::new(db.pool, receiver, migration.world_address().unwrap(), provider.clone()); + + let entities = grpc + .query_by_keys( + "entities", + "entity_model", + KeysClause { model: "Moves".to_string(), keys: vec![account.address()] }.into(), + 1, + 0, + ) + .await + .unwrap() + .0; + + assert_eq!(entities.len(), 1); + + let entity: Entity = entities.get(0).unwrap().clone().try_into().unwrap(); + assert_eq!(entity.models.first().unwrap().name, "Position"); + assert_eq!(entity.models.get(1).unwrap().name, "Moves"); + assert_eq!(entity.hashed_keys, poseidon_hash_many(&[account.address()])); +} diff --git a/crates/torii/grpc/src/server/tests/mod.rs b/crates/torii/grpc/src/server/tests/mod.rs new file mode 100644 index 0000000000..4e714887c0 --- /dev/null +++ b/crates/torii/grpc/src/server/tests/mod.rs @@ -0,0 +1 @@ +mod entities_test; From 5164e588c74fc3b9132d100752f6cb30be00e0a3 Mon Sep 17 00:00:00 2001 From: Neo <128649481+neotheprogramist@users.noreply.github.com> Date: Tue, 9 Apr 2024 17:30:11 +0200 Subject: [PATCH 04/23] Saya New Inputs (#1757) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * new inputs and serialization * L2 -> L1 messages * L1 -> L2 messages * leftover * format * saya to new input * extracting nonce from transaction * typo for rust fmt * Update crates/saya/core/src/prover/program_input.rs Co-authored-by: glihm * Update crates/saya/core/src/prover/program_input.rs Co-authored-by: glihm * Update crates/saya/core/src/prover/program_input.rs Co-authored-by: glihm * Update crates/saya/core/src/prover/program_input.rs Co-authored-by: glihm * unused import * extracted recursive messages to function --------- Co-authored-by: Mateusz Zając Co-authored-by: Mateusz Zając <60236390+matzayonc@users.noreply.github.com> Co-authored-by: glihm --- crates/saya/README.md | 8 + crates/saya/core/src/lib.rs | 47 +++- crates/saya/core/src/prover/mod.rs | 2 + crates/saya/core/src/prover/program_input.rs | 225 +++++++++++++++++++ crates/saya/core/src/prover/state_diff.rs | 50 ++++- crates/saya/core/src/prover/stone_image.rs | 7 +- 6 files changed, 329 insertions(+), 10 deletions(-) create mode 100644 crates/saya/core/src/prover/program_input.rs diff --git a/crates/saya/README.md b/crates/saya/README.md index 112cc89d8a..5c0542d503 100644 --- a/crates/saya/README.md +++ b/crates/saya/README.md @@ -67,6 +67,14 @@ However, papyrus and blockifier which we depend on are still in `-dev` version, * cairo-lang (we should support `2.5` now) * scarb (breaking changes between 2.4 and 2.5 to be addresses, not required to only build saya and SNOS) +## Local Testing + +```bash +cargo run -r -p katana # Start an appchain +cargo run -r -p sozo -- build --manifest-path examples/spawn-and-move/Scarb.toml +cargo run -r -p sozo 
-- migrate --manifest-path examples/spawn-and-move/Scarb.toml # Make some transactions +cargo run -r --bin saya -- --rpc-url http://localhost:5050 # Run Saya +``` ## Additional documentation [Hackmd note](https://hackmd.io/@glihm/saya) diff --git a/crates/saya/core/src/lib.rs b/crates/saya/core/src/lib.rs index b36261df73..bf7d14bf2a 100644 --- a/crates/saya/core/src/lib.rs +++ b/crates/saya/core/src/lib.rs @@ -4,11 +4,13 @@ use std::sync::Arc; use futures::future::join; use katana_primitives::block::{BlockNumber, FinalityStatus, SealedBlock, SealedBlockWithStatus}; +use katana_primitives::transaction::Tx; use katana_primitives::FieldElement; use prover::ProverIdentifier; use saya_provider::rpc::JsonRpcProvider; use saya_provider::Provider as SayaProvider; use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWriteExt; use tracing::{error, info, trace}; use url::Url; use verifier::VerifierIdentifier; @@ -16,7 +18,7 @@ use verifier::VerifierIdentifier; use crate::blockchain::Blockchain; use crate::data_availability::{DataAvailabilityClient, DataAvailabilityConfig}; use crate::error::SayaResult; -use crate::prover::state_diff::ProvedStateDiff; +use crate::prover::{extract_messages, ProgramInput}; pub mod blockchain; pub mod data_availability; @@ -145,7 +147,7 @@ impl Saya { ) -> SayaResult<()> { trace!(target: LOG_TARGET, block_number = %block_number, "Processing block."); - let (block, prev_block, genesis_state_hash) = blocks; + let (block, prev_block, _genesis_state_hash) = blocks; let (state_updates, da_state_update) = self.provider.fetch_state_updates(block_number).await?; @@ -171,16 +173,49 @@ impl Saya { return Ok(()); } - let to_prove = ProvedStateDiff { - genesis_state_hash, - prev_state_hash: prev_block.header.header.state_root, + let transactions = block + .block + .body + .iter() + .filter_map(|t| match &t.transaction { + Tx::L1Handler(tx) => Some(tx), + _ => None, + }) + .collect::>(); + + let (message_to_starknet_segment, message_to_appchain_segment) = + extract_messages(&exec_infos, transactions); + + let new_program_input = ProgramInput { + prev_state_root: prev_block.header.header.state_root, + block_number: FieldElement::from(block_number), + block_hash: block.block.header.hash, + config_hash: FieldElement::from(0u64), + message_to_starknet_segment, + message_to_appchain_segment, state_updates: state_updates_to_prove, }; + println!("Program input: {}", new_program_input.serialize()?); + + // let to_prove = ProvedStateDiff { + // genesis_state_hash, + // prev_state_hash: prev_block.header.header.state_root, + // state_updates: state_updates_to_prove, + // }; + trace!(target: "saya_core", "Proving block {block_number}."); - let proof = prover::prove(to_prove.serialize(), self.config.prover).await?; + let proof = prover::prove(new_program_input.serialize()?, self.config.prover).await?; info!(target: "saya_core", block_number, "Block proven."); + // save proof to file + tokio::fs::File::create(format!("proof_{}.json", block_number)) + .await + .unwrap() + .write_all(proof.as_bytes()) + .await + .unwrap(); + trace!(target: "saya_core", "Verifying block {block_number}."); let transaction_hash = verifier::verify(proof, self.config.verifier).await?; info!(target: "saya_core", block_number, transaction_hash, "Block verified."); diff --git a/crates/saya/core/src/prover/mod.rs b/crates/saya/core/src/prover/mod.rs index 1dee9a1353..3214a7f4ae 100644 --- a/crates/saya/core/src/prover/mod.rs +++ b/crates/saya/core/src/prover/mod.rs @@ -6,11 +6,13 @@ use std::str::FromStr; use 
anyhow::bail; use async_trait::async_trait; +mod program_input; mod serializer; pub mod state_diff; mod stone_image; mod vec252; +pub use program_input::*; use serde::{Deserialize, Serialize}; pub use serializer::parse_proof; pub use stone_image::*; diff --git a/crates/saya/core/src/prover/program_input.rs b/crates/saya/core/src/prover/program_input.rs new file mode 100644 index 0000000000..d39a7e7b64 --- /dev/null +++ b/crates/saya/core/src/prover/program_input.rs @@ -0,0 +1,225 @@ +use katana_primitives::contract::ContractAddress; +use katana_primitives::state::StateUpdates; +use katana_primitives::trace::{CallInfo, EntryPointType, TxExecInfo}; +use katana_primitives::transaction::L1HandlerTx; +use katana_primitives::utils::transaction::compute_l1_message_hash; +use starknet::core::types::FieldElement; + +use super::state_diff::state_updates_to_json_like; + +/// Based on https://github.com/cartridge-gg/piltover/blob/2be9d46f00c9c71e2217ab74341f77b09f034c81/src/snos_output.cairo#L19-L20 +/// With the new state root computed by the prover. +pub struct ProgramInput { + pub prev_state_root: FieldElement, + pub block_number: FieldElement, + pub block_hash: FieldElement, + pub config_hash: FieldElement, + pub message_to_starknet_segment: Vec, + pub message_to_appchain_segment: Vec, + pub state_updates: StateUpdates, +} + +fn get_messages_recursively(info: &CallInfo) -> Vec { + let mut messages = vec![]; + + // By default, `from_address` must correspond to the contract address that + // is sending the message. In the case of library calls, `code_address` is `None`, + // we then use the `caller_address` instead (which can also be an account). + let from_address = + if let Some(code_address) = info.code_address { code_address } else { info.caller_address }; + + messages.extend(info.l2_to_l1_messages.iter().map(|m| MessageToStarknet { + from_address, + to_address: ContractAddress::from(m.to_address), + payload: m.payload.clone(), + })); + + info.inner_calls.iter().for_each(|call| { + messages.extend(get_messages_recursively(call)); + }); + + messages +} + +pub fn extract_messages( + exec_infos: &Vec, + mut transactions: Vec<&L1HandlerTx>, +) -> (Vec, Vec) { + let message_to_starknet_segment = exec_infos + .iter() + .map(|t| t.execute_call_info.iter().chain(t.validate_call_info.iter()).chain(t.fee_transfer_call_info.iter())) // Take into account both validate and execute calls. + .flatten() + .map(get_messages_recursively) + .flatten() + .collect(); + + let message_to_appchain_segment = exec_infos + .iter() + .map(|t| t.execute_call_info.iter()) + .flatten() + .filter(|c| c.entry_point_type == EntryPointType::L1Handler) + .map(|c| { + let message_hash = + compute_l1_message_hash(*c.caller_address, *c.contract_address, &c.calldata[..]); + + // Matching execution to a transaction to extract nonce. + let matching = transactions + .iter() + .enumerate() + .find(|(_, &t)| { + t.message_hash == message_hash + && c.contract_address == t.contract_address + && c.calldata == t.calldata + }) + .expect(&format!( + "No matching transaction found for message hash: {}", + message_hash + )) + .0; + + // Removing, to have different nonces, even for the same message content. 
+ let removed = transactions.remove(matching); + + (c, removed) + }) + .map(|(c, t)| MessageToAppchain { + from_address: c.caller_address, + to_address: c.contract_address, + nonce: t.nonce, + selector: c.entry_point_selector, + payload: c.calldata.clone(), + }) + .collect(); + + (message_to_starknet_segment, message_to_appchain_segment) +} + +impl ProgramInput { + pub fn serialize(&self) -> anyhow::Result { + let message_to_starknet = self + .message_to_starknet_segment + .iter() + .map(MessageToStarknet::serialize) + .collect::>>()? + .into_iter() + .flatten() + .map(|e| format!("{}", e)) + .collect::>() + .join(","); + + let message_to_appchain = self + .message_to_appchain_segment + .iter() + .map(|m| m.serialize()) + .collect::>>()? + .into_iter() + .flatten() + .map(|e| format!("{}", e)) + .collect::>() + .join(","); + + let mut result = String::from('{'); + result.push_str(&format!(r#""prev_state_root":{},"#, self.prev_state_root)); + result.push_str(&format!(r#""block_number":{},"#, self.block_number)); + result.push_str(&format!(r#""block_hash":{},"#, self.block_hash)); + result.push_str(&format!(r#""config_hash":{},"#, self.config_hash)); + + result.push_str(&format!(r#""message_to_starknet_segment":[{}],"#, message_to_starknet)); + result.push_str(&format!(r#""message_to_appchain_segment":[{}],"#, message_to_appchain)); + + result.push_str(&state_updates_to_json_like(&self.state_updates)); + + result.push_str(&format!("{}", "}")); + + Ok(result) + } +} + +/// Based on https://github.com/cartridge-gg/piltover/blob/2be9d46f00c9c71e2217ab74341f77b09f034c81/src/messaging/output_process.cairo#L16 +pub struct MessageToStarknet { + pub from_address: ContractAddress, + pub to_address: ContractAddress, + pub payload: Vec, +} + +impl MessageToStarknet { + pub fn serialize(&self) -> anyhow::Result> { + let mut result = vec![*self.from_address, *self.to_address]; + result.push(FieldElement::try_from(self.payload.len())?); + result.extend(self.payload.iter().cloned()); + Ok(result) + } +} + +/// Based on https://github.com/cartridge-gg/piltover/blob/2be9d46f00c9c71e2217ab74341f77b09f034c81/src/messaging/output_process.cairo#L28 +pub struct MessageToAppchain { + pub from_address: ContractAddress, + pub to_address: ContractAddress, + pub nonce: FieldElement, + pub selector: FieldElement, + pub payload: Vec, +} + +impl MessageToAppchain { + pub fn serialize(&self) -> anyhow::Result> { + let mut result = vec![*self.from_address, *self.to_address, self.nonce, self.selector]; + result.push(FieldElement::try_from(self.payload.len())?); + result.extend(self.payload.iter().cloned()); + Ok(result) + } +} + +#[test] +fn test_program_input() -> anyhow::Result<()> { + use std::str::FromStr; + + let input = ProgramInput { + prev_state_root: FieldElement::from_str("101")?, + block_number: FieldElement::from_str("102")?, + block_hash: FieldElement::from_str("103")?, + config_hash: FieldElement::from_str("104")?, + message_to_starknet_segment: vec![MessageToStarknet { + from_address: ContractAddress::from(FieldElement::from_str("105")?), + to_address: ContractAddress::from(FieldElement::from_str("106")?), + payload: vec![FieldElement::from_str("107")?], + }], + message_to_appchain_segment: vec![MessageToAppchain { + from_address: ContractAddress::from(FieldElement::from_str("108")?), + to_address: ContractAddress::from(FieldElement::from_str("109")?), + nonce: FieldElement::from_str("110")?, + selector: FieldElement::from_str("111")?, + payload: vec![FieldElement::from_str("112")?], + }], + state_updates: 
StateUpdates { + nonce_updates: std::collections::HashMap::new(), + storage_updates: std::collections::HashMap::new(), + contract_updates: std::collections::HashMap::new(), + declared_classes: std::collections::HashMap::new(), + }, + }; + + let serialized = input.serialize().unwrap(); + + println!("Serialized: {}", serialized); + + pub const EXPECTED: &str = r#"{ + "prev_state_root": 101, + "block_number": 102, + "block_hash": 103, + "config_hash": 104, + "message_to_starknet_segment": [105,106,1,107], + "message_to_appchain_segment": [108,109,110,111,1,112], + "nonce_updates": {}, + "storage_updates": {}, + "contract_updates": {}, + "declared_classes": {} + }"#; + + let expected = EXPECTED.chars().filter(|c| !c.is_whitespace()).collect::(); + + println!("{}", expected); + + assert_eq!(serialized, expected); + + Ok(()) +} diff --git a/crates/saya/core/src/prover/state_diff.rs b/crates/saya/core/src/prover/state_diff.rs index d87c9090da..ad0572a322 100644 --- a/crates/saya/core/src/prover/state_diff.rs +++ b/crates/saya/core/src/prover/state_diff.rs @@ -54,7 +54,55 @@ pub const EXAMPLE_KATANA_DIFF: &str = r#"{ } }"#; -/// We need custom implentation because of dynamic keys in json +pub fn state_updates_to_json_like(state_updates: &StateUpdates) -> String { + let mut result = String::new(); + + result.push_str(&format!(r#""nonce_updates":{}"#, "{")); + let nonce_updates = state_updates + .nonce_updates + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k.0, v)) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", nonce_updates, "}")); + + result.push_str(&format!(r#","storage_updates":{}"#, "{")); + let storage_updates = state_updates + .storage_updates + .iter() + .map(|(k, v)| { + let storage = + v.iter().map(|(k, v)| format!(r#""{}":{}"#, k, v)).collect::>().join(","); + + format!(r#""{}":{{{}}}"#, k.0, storage) + }) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", storage_updates, "}")); + + result.push_str(&format!(r#","contract_updates":{}"#, "{")); + let contract_updates = state_updates + .contract_updates + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k.0, v)) + .collect::>() + .join(","); + result.push_str(&format!("{}{}", contract_updates, "}")); + + result.push_str(&format!(r#","declared_classes":{}"#, "{")); + let declared_classes = state_updates + .declared_classes + .iter() + .map(|(k, v)| format!(r#""{}":{}"#, k, v)) + .collect::>() + .join(","); + + result.push_str(&format!("{}{}", declared_classes, "}")); + + result +} + +/// We need custom implementation because of dynamic keys in json impl ProvedStateDiff { pub fn serialize(&self) -> String { let mut result = String::from('{'); diff --git a/crates/saya/core/src/prover/stone_image.rs b/crates/saya/core/src/prover/stone_image.rs index 714a12f733..c5ad5de5d9 100644 --- a/crates/saya/core/src/prover/stone_image.rs +++ b/crates/saya/core/src/prover/stone_image.rs @@ -5,6 +5,7 @@ use async_trait::async_trait; use tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}; use tokio::process::Command; use tokio::sync::OnceCell; +use tracing::warn; use super::{ProverClient, ProverIdentifier}; @@ -50,8 +51,8 @@ impl StoneProver { static STONE_PROVER: OnceCell<(anyhow::Result, anyhow::Result)> = OnceCell::const_new(); - let source = "neotheprogramist/state-diff-commitment"; - let verifier = "neotheprogramist/verifier:latest"; + let source = "piniom/state-diff-commitment"; + let verifier = "piniom/verifier:latest"; let result = STONE_PROVER .get_or_init(|| async { @@ -71,7 +72,7 @@ impl StoneProver 
{ if result.0.is_err() { bail!("Failed to pull prover"); } else if result.1.is_err() { - bail!("Failed to pull verifier"); + warn!("Failed to pull verifier"); } Ok(StoneProver(source.to_string())) From 364fe019a56d1e817fa80db2417bd09b768f7838 Mon Sep 17 00:00:00 2001 From: glihm Date: Tue, 9 Apr 2024 13:05:23 -0400 Subject: [PATCH 05/23] ci: bump codecov to latest version and CI fail on upload fail (#1805) * ci: bump codecov to latest version and CI fail on upload fail * ci: remove flag as API may be flaky on codecov side --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bb0c54609f..7f684cdea2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: cargo llvm-cov nextest --no-report -p katana cargo llvm-cov nextest --no-report -p katana --no-default-features --features sir cargo llvm-cov report --lcov --output-path lcov.info - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} files: lcov.info From 11e08817d699ecdf8500c275a152bcf77f5e2a72 Mon Sep 17 00:00:00 2001 From: Yun Date: Tue, 9 Apr 2024 17:58:13 -1000 Subject: [PATCH 06/23] Display transaction hash in log as hex str (#1809) --- crates/katana/core/src/pool.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/katana/core/src/pool.rs b/crates/katana/core/src/pool.rs index fd558f8a40..8214e91657 100644 --- a/crates/katana/core/src/pool.rs +++ b/crates/katana/core/src/pool.rs @@ -25,7 +25,7 @@ impl TransactionPool { let hash = transaction.hash; self.transactions.write().push(transaction); - info!(target: LOG_TARGET, hash = %hash, "Transaction received."); + info!(target: LOG_TARGET, hash = %format!("\"{hash:#x}\""), "Transaction received."); // notify listeners of new tx added to the pool self.notify_listener(hash) @@ -58,7 +58,7 @@ impl TransactionPool { if e.is_full() { warn!( target: LOG_TARGET, - hash = ?hash, + hash = ?format!("\"{hash:#x}\""), "Unable to send tx notification because channel is full." 
); true From 72578062d93ac12ff5f57c24c158b0a4b13bc6fd Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Thu, 11 Apr 2024 13:56:36 +0800 Subject: [PATCH 07/23] Move class artifacts out of `katana-primitives` (#1813) bcs the classes are used throughout other crates in `/katana` and not just in `/primitives`, so make sense to move it to the top dir --- .../{primitives => }/contracts/compiled/account.json | 0 .../compiled/argent_ArgentAccount_0.3.0.json | 0 .../compiled/argent_ArgentAccount_0.3.1.json | 0 .../contracts/compiled/cairo1_contract.json | 0 .../{primitives => }/contracts/compiled/erc20.json | 0 .../contracts/compiled/oz_account_080.json | 0 .../{primitives => }/contracts/compiled/test.json | 0 .../contracts/compiled/universal_deployer.json | 0 .../{primitives => }/contracts/messaging/README.md | 0 .../contracts/messaging/anvil.messaging.json | 0 .../contracts/messaging/cairo/.gitignore | 0 .../contracts/messaging/cairo/.tool-versions | 0 .../contracts/messaging/cairo/Makefile | 0 .../contracts/messaging/cairo/Scarb.lock | 0 .../contracts/messaging/cairo/Scarb.toml | 0 .../contracts/messaging/cairo/account_l2.json | 0 .../contracts/messaging/cairo/account_l3.json | 0 .../messaging/cairo/src/appchain_messaging.cairo | 0 .../contracts/messaging/cairo/src/contract_1.cairo | 0 .../messaging/cairo/src/contract_msg_l1.cairo | 0 .../messaging/cairo/src/contract_msg_starknet.cairo | 0 .../contracts/messaging/cairo/src/lib.cairo | 0 .../contracts/messaging/l3.messaging.json | 0 .../{primitives => }/contracts/messaging/run_e2e.sh | 0 .../contracts/messaging/solidity/.anvil.env | 0 .../contracts/messaging/solidity/.gitignore | 0 .../solidity/IStarknetMessagingLocal_ABI.json | 0 .../contracts/messaging/solidity/Makefile | 0 .../contracts/messaging/solidity/README.md | 0 .../contracts/messaging/solidity/foundry.toml | 0 .../solidity/lib/starknet/IStarknetMessaging.sol | 0 .../lib/starknet/IStarknetMessagingEvents.sol | 0 .../messaging/solidity/lib/starknet/NamedStorage.sol | 0 .../solidity/lib/starknet/StarknetMessaging.sol | 0 .../messaging/solidity/script/LocalTesting.s.sol | 0 .../contracts/messaging/solidity/src/Contract1.sol | 0 .../solidity/src/StarknetMessagingLocal.sol | 0 .../{primitives => }/contracts/test_contract.cairo | 0 .../contracts/universal_deployer.cairo | 0 crates/katana/core/src/service/messaging/ethereum.rs | 2 +- .../executor/src/implementation/blockifier/state.rs | 2 +- .../katana/executor/src/implementation/sir/state.rs | 2 +- crates/katana/executor/tests/fixtures/contract.json | 2 +- .../contracts/messaging/solidity/lib/forge-std | 1 - crates/katana/primitives/src/conversion/rpc.rs | 2 +- crates/katana/primitives/src/genesis/constant.rs | 8 ++++---- crates/katana/primitives/src/genesis/json.rs | 12 +++++++----- .../src/genesis/test-genesis-with-class.json | 4 ++-- .../katana/primitives/src/genesis/test-genesis.json | 6 +++--- 49 files changed, 21 insertions(+), 20 deletions(-) rename crates/katana/{primitives => }/contracts/compiled/account.json (100%) rename crates/katana/{primitives => }/contracts/compiled/argent_ArgentAccount_0.3.0.json (100%) rename crates/katana/{primitives => }/contracts/compiled/argent_ArgentAccount_0.3.1.json (100%) rename crates/katana/{primitives => }/contracts/compiled/cairo1_contract.json (100%) rename crates/katana/{primitives => }/contracts/compiled/erc20.json (100%) rename crates/katana/{primitives => }/contracts/compiled/oz_account_080.json (100%) rename crates/katana/{primitives => }/contracts/compiled/test.json (100%) rename 
crates/katana/{primitives => }/contracts/compiled/universal_deployer.json (100%) rename crates/katana/{primitives => }/contracts/messaging/README.md (100%) rename crates/katana/{primitives => }/contracts/messaging/anvil.messaging.json (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/.gitignore (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/.tool-versions (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/Makefile (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/Scarb.lock (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/Scarb.toml (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/account_l2.json (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/account_l3.json (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/src/appchain_messaging.cairo (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/src/contract_1.cairo (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/src/contract_msg_l1.cairo (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/src/contract_msg_starknet.cairo (100%) rename crates/katana/{primitives => }/contracts/messaging/cairo/src/lib.cairo (100%) rename crates/katana/{primitives => }/contracts/messaging/l3.messaging.json (100%) rename crates/katana/{primitives => }/contracts/messaging/run_e2e.sh (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/.anvil.env (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/.gitignore (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/Makefile (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/README.md (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/foundry.toml (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/lib/starknet/NamedStorage.sol (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/script/LocalTesting.s.sol (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/src/Contract1.sol (100%) rename crates/katana/{primitives => }/contracts/messaging/solidity/src/StarknetMessagingLocal.sol (100%) rename crates/katana/{primitives => }/contracts/test_contract.cairo (100%) rename crates/katana/{primitives => }/contracts/universal_deployer.cairo (100%) delete mode 160000 crates/katana/primitives/contracts/messaging/solidity/lib/forge-std diff --git a/crates/katana/primitives/contracts/compiled/account.json b/crates/katana/contracts/compiled/account.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/account.json rename to crates/katana/contracts/compiled/account.json diff --git a/crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.0.json b/crates/katana/contracts/compiled/argent_ArgentAccount_0.3.0.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.0.json rename to 
crates/katana/contracts/compiled/argent_ArgentAccount_0.3.0.json diff --git a/crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.1.json b/crates/katana/contracts/compiled/argent_ArgentAccount_0.3.1.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/argent_ArgentAccount_0.3.1.json rename to crates/katana/contracts/compiled/argent_ArgentAccount_0.3.1.json diff --git a/crates/katana/primitives/contracts/compiled/cairo1_contract.json b/crates/katana/contracts/compiled/cairo1_contract.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/cairo1_contract.json rename to crates/katana/contracts/compiled/cairo1_contract.json diff --git a/crates/katana/primitives/contracts/compiled/erc20.json b/crates/katana/contracts/compiled/erc20.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/erc20.json rename to crates/katana/contracts/compiled/erc20.json diff --git a/crates/katana/primitives/contracts/compiled/oz_account_080.json b/crates/katana/contracts/compiled/oz_account_080.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/oz_account_080.json rename to crates/katana/contracts/compiled/oz_account_080.json diff --git a/crates/katana/primitives/contracts/compiled/test.json b/crates/katana/contracts/compiled/test.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/test.json rename to crates/katana/contracts/compiled/test.json diff --git a/crates/katana/primitives/contracts/compiled/universal_deployer.json b/crates/katana/contracts/compiled/universal_deployer.json similarity index 100% rename from crates/katana/primitives/contracts/compiled/universal_deployer.json rename to crates/katana/contracts/compiled/universal_deployer.json diff --git a/crates/katana/primitives/contracts/messaging/README.md b/crates/katana/contracts/messaging/README.md similarity index 100% rename from crates/katana/primitives/contracts/messaging/README.md rename to crates/katana/contracts/messaging/README.md diff --git a/crates/katana/primitives/contracts/messaging/anvil.messaging.json b/crates/katana/contracts/messaging/anvil.messaging.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/anvil.messaging.json rename to crates/katana/contracts/messaging/anvil.messaging.json diff --git a/crates/katana/primitives/contracts/messaging/cairo/.gitignore b/crates/katana/contracts/messaging/cairo/.gitignore similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/.gitignore rename to crates/katana/contracts/messaging/cairo/.gitignore diff --git a/crates/katana/primitives/contracts/messaging/cairo/.tool-versions b/crates/katana/contracts/messaging/cairo/.tool-versions similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/.tool-versions rename to crates/katana/contracts/messaging/cairo/.tool-versions diff --git a/crates/katana/primitives/contracts/messaging/cairo/Makefile b/crates/katana/contracts/messaging/cairo/Makefile similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/Makefile rename to crates/katana/contracts/messaging/cairo/Makefile diff --git a/crates/katana/primitives/contracts/messaging/cairo/Scarb.lock b/crates/katana/contracts/messaging/cairo/Scarb.lock similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/Scarb.lock rename to crates/katana/contracts/messaging/cairo/Scarb.lock diff --git 
a/crates/katana/primitives/contracts/messaging/cairo/Scarb.toml b/crates/katana/contracts/messaging/cairo/Scarb.toml similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/Scarb.toml rename to crates/katana/contracts/messaging/cairo/Scarb.toml diff --git a/crates/katana/primitives/contracts/messaging/cairo/account_l2.json b/crates/katana/contracts/messaging/cairo/account_l2.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/account_l2.json rename to crates/katana/contracts/messaging/cairo/account_l2.json diff --git a/crates/katana/primitives/contracts/messaging/cairo/account_l3.json b/crates/katana/contracts/messaging/cairo/account_l3.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/account_l3.json rename to crates/katana/contracts/messaging/cairo/account_l3.json diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/appchain_messaging.cairo b/crates/katana/contracts/messaging/cairo/src/appchain_messaging.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/appchain_messaging.cairo rename to crates/katana/contracts/messaging/cairo/src/appchain_messaging.cairo diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/contract_1.cairo b/crates/katana/contracts/messaging/cairo/src/contract_1.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/contract_1.cairo rename to crates/katana/contracts/messaging/cairo/src/contract_1.cairo diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_l1.cairo b/crates/katana/contracts/messaging/cairo/src/contract_msg_l1.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_l1.cairo rename to crates/katana/contracts/messaging/cairo/src/contract_msg_l1.cairo diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_starknet.cairo b/crates/katana/contracts/messaging/cairo/src/contract_msg_starknet.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/contract_msg_starknet.cairo rename to crates/katana/contracts/messaging/cairo/src/contract_msg_starknet.cairo diff --git a/crates/katana/primitives/contracts/messaging/cairo/src/lib.cairo b/crates/katana/contracts/messaging/cairo/src/lib.cairo similarity index 100% rename from crates/katana/primitives/contracts/messaging/cairo/src/lib.cairo rename to crates/katana/contracts/messaging/cairo/src/lib.cairo diff --git a/crates/katana/primitives/contracts/messaging/l3.messaging.json b/crates/katana/contracts/messaging/l3.messaging.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/l3.messaging.json rename to crates/katana/contracts/messaging/l3.messaging.json diff --git a/crates/katana/primitives/contracts/messaging/run_e2e.sh b/crates/katana/contracts/messaging/run_e2e.sh similarity index 100% rename from crates/katana/primitives/contracts/messaging/run_e2e.sh rename to crates/katana/contracts/messaging/run_e2e.sh diff --git a/crates/katana/primitives/contracts/messaging/solidity/.anvil.env b/crates/katana/contracts/messaging/solidity/.anvil.env similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/.anvil.env rename to crates/katana/contracts/messaging/solidity/.anvil.env diff --git a/crates/katana/primitives/contracts/messaging/solidity/.gitignore 
b/crates/katana/contracts/messaging/solidity/.gitignore similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/.gitignore rename to crates/katana/contracts/messaging/solidity/.gitignore diff --git a/crates/katana/primitives/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json b/crates/katana/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json rename to crates/katana/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json diff --git a/crates/katana/primitives/contracts/messaging/solidity/Makefile b/crates/katana/contracts/messaging/solidity/Makefile similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/Makefile rename to crates/katana/contracts/messaging/solidity/Makefile diff --git a/crates/katana/primitives/contracts/messaging/solidity/README.md b/crates/katana/contracts/messaging/solidity/README.md similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/README.md rename to crates/katana/contracts/messaging/solidity/README.md diff --git a/crates/katana/primitives/contracts/messaging/solidity/foundry.toml b/crates/katana/contracts/messaging/solidity/foundry.toml similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/foundry.toml rename to crates/katana/contracts/messaging/solidity/foundry.toml diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessaging.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/IStarknetMessagingEvents.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/NamedStorage.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/NamedStorage.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/NamedStorage.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/NamedStorage.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol b/crates/katana/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol rename to crates/katana/contracts/messaging/solidity/lib/starknet/StarknetMessaging.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/script/LocalTesting.s.sol b/crates/katana/contracts/messaging/solidity/script/LocalTesting.s.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/script/LocalTesting.s.sol rename to crates/katana/contracts/messaging/solidity/script/LocalTesting.s.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/src/Contract1.sol 
b/crates/katana/contracts/messaging/solidity/src/Contract1.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/src/Contract1.sol rename to crates/katana/contracts/messaging/solidity/src/Contract1.sol diff --git a/crates/katana/primitives/contracts/messaging/solidity/src/StarknetMessagingLocal.sol b/crates/katana/contracts/messaging/solidity/src/StarknetMessagingLocal.sol similarity index 100% rename from crates/katana/primitives/contracts/messaging/solidity/src/StarknetMessagingLocal.sol rename to crates/katana/contracts/messaging/solidity/src/StarknetMessagingLocal.sol diff --git a/crates/katana/primitives/contracts/test_contract.cairo b/crates/katana/contracts/test_contract.cairo similarity index 100% rename from crates/katana/primitives/contracts/test_contract.cairo rename to crates/katana/contracts/test_contract.cairo diff --git a/crates/katana/primitives/contracts/universal_deployer.cairo b/crates/katana/contracts/universal_deployer.cairo similarity index 100% rename from crates/katana/primitives/contracts/universal_deployer.cairo rename to crates/katana/contracts/universal_deployer.cairo diff --git a/crates/katana/core/src/service/messaging/ethereum.rs b/crates/katana/core/src/service/messaging/ethereum.rs index 3268293f6d..743ac137c7 100644 --- a/crates/katana/core/src/service/messaging/ethereum.rs +++ b/crates/katana/core/src/service/messaging/ethereum.rs @@ -22,7 +22,7 @@ sol! { #[sol(rpc, rename_all = "snakecase")] #[derive(serde::Serialize, serde::Deserialize)] StarknetMessagingLocal, - "../primitives/contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json" + "../contracts/messaging/solidity/IStarknetMessagingLocal_ABI.json" } sol! { diff --git a/crates/katana/executor/src/implementation/blockifier/state.rs b/crates/katana/executor/src/implementation/blockifier/state.rs index 074e52dad2..79305b12e6 100644 --- a/crates/katana/executor/src/implementation/blockifier/state.rs +++ b/crates/katana/executor/src/implementation/blockifier/state.rs @@ -263,7 +263,7 @@ mod tests { use crate::StateProviderDb; fn new_sierra_class() -> (FlattenedSierraClass, CompiledClass) { - let json = include_str!("../../../../primitives/contracts/compiled/cairo1_contract.json"); + let json = include_str!("../../../../contracts/compiled/cairo1_contract.json"); let artifact = serde_json::from_str(json).unwrap(); let compiled_class = parse_compiled_class(artifact).unwrap(); let sierra_class = parse_sierra_class(json).unwrap().flatten().unwrap(); diff --git a/crates/katana/executor/src/implementation/sir/state.rs b/crates/katana/executor/src/implementation/sir/state.rs index 74bd309692..a7334c219e 100644 --- a/crates/katana/executor/src/implementation/sir/state.rs +++ b/crates/katana/executor/src/implementation/sir/state.rs @@ -299,7 +299,7 @@ mod tests { use crate::StateProviderDb; fn new_sierra_class() -> (FlattenedSierraClass, CompiledClass) { - let json = include_str!("../../../../primitives/contracts/compiled/cairo1_contract.json"); + let json = include_str!("../../../../contracts/compiled/cairo1_contract.json"); let artifact = serde_json::from_str(json).unwrap(); let compiled_class = parse_compiled_class(artifact).unwrap(); let sierra_class = parse_sierra_class(json).unwrap().flatten().unwrap(); diff --git a/crates/katana/executor/tests/fixtures/contract.json b/crates/katana/executor/tests/fixtures/contract.json index ce1957cd94..359796f532 120000 --- a/crates/katana/executor/tests/fixtures/contract.json +++ 
b/crates/katana/executor/tests/fixtures/contract.json @@ -1 +1 @@ -../../../primitives/contracts/compiled/oz_account_080.json \ No newline at end of file +../../../contracts/compiled/oz_account_080.json \ No newline at end of file diff --git a/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std b/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std deleted file mode 160000 index ae570fec08..0000000000 --- a/crates/katana/primitives/contracts/messaging/solidity/lib/forge-std +++ /dev/null @@ -1 +0,0 @@ -Subproject commit ae570fec082bfe1c1f45b0acca4a2b4f84d345ce diff --git a/crates/katana/primitives/src/conversion/rpc.rs b/crates/katana/primitives/src/conversion/rpc.rs index 41d9836743..2b27cd2b7e 100644 --- a/crates/katana/primitives/src/conversion/rpc.rs +++ b/crates/katana/primitives/src/conversion/rpc.rs @@ -280,7 +280,7 @@ mod tests { #[test] fn legacy_rpc_to_inner_and_back() { - let json = include_str!("../../contracts/compiled/account.json"); + let json = include_str!("../../../contracts/compiled/account.json"); let json = serde_json::from_str(json).unwrap(); let class: DeprecatedCompiledClass = parse_deprecated_compiled_class(json).unwrap(); diff --git a/crates/katana/primitives/src/genesis/constant.rs b/crates/katana/primitives/src/genesis/constant.rs index 16d8019b85..b5e8c555a3 100644 --- a/crates/katana/primitives/src/genesis/constant.rs +++ b/crates/katana/primitives/src/genesis/constant.rs @@ -126,14 +126,14 @@ lazy_static! { // Default fee token contract // pub static ref DEFAULT_LEGACY_ERC20_CONTRACT_CASM: CompiledContractClass = parse_compiled_class(include_str!("../../contracts/compiled/erc20.json")).unwrap(); - pub static ref DEFAULT_LEGACY_ERC20_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../contracts/compiled/erc20.json")); + pub static ref DEFAULT_LEGACY_ERC20_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../../contracts/compiled/erc20.json")); // Default universal deployer - pub static ref DEFAULT_LEGACY_UDC_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../contracts/compiled/universal_deployer.json")); + pub static ref DEFAULT_LEGACY_UDC_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../../contracts/compiled/universal_deployer.json")); // Default account contract - pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT: SierraClass = parse_sierra_class(include_str!("../../contracts/compiled/oz_account_080.json")).unwrap(); - pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../contracts/compiled/oz_account_080.json")); + pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT: SierraClass = parse_sierra_class(include_str!("../../../contracts/compiled/oz_account_080.json")).unwrap(); + pub static ref DEFAULT_OZ_ACCOUNT_CONTRACT_CASM: CompiledClass = read_compiled_class_artifact(include_str!("../../../contracts/compiled/oz_account_080.json")); } diff --git a/crates/katana/primitives/src/genesis/json.rs b/crates/katana/primitives/src/genesis/json.rs index ffa9f59a9e..a50bc4d54a 100644 --- a/crates/katana/primitives/src/genesis/json.rs +++ b/crates/katana/primitives/src/genesis/json.rs @@ -705,15 +705,16 @@ mod tests { vec![ GenesisClassJson { class_hash: Some(felt!("0x8")), - class: PathBuf::from("../../contracts/compiled/erc20.json").into(), + class: PathBuf::from("../../../contracts/compiled/erc20.json").into(), }, GenesisClassJson { class_hash: Some(felt!("0x80085")), - class: 
PathBuf::from("../../contracts/compiled/universal_deployer.json").into(), + class: PathBuf::from("../../../contracts/compiled/universal_deployer.json") + .into(), }, GenesisClassJson { class_hash: Some(felt!("0xa55")), - class: PathBuf::from("../../contracts/compiled/oz_account_080.json").into(), + class: PathBuf::from("../../../contracts/compiled/oz_account_080.json").into(), }, ] ); @@ -729,11 +730,12 @@ mod tests { vec![ GenesisClassJson { class_hash: Some(felt!("0x8")), - class: PathBuf::from("../../contracts/compiled/erc20.json").into(), + class: PathBuf::from("../../../contracts/compiled/erc20.json").into(), }, GenesisClassJson { class_hash: Some(felt!("0x80085")), - class: PathBuf::from("../../contracts/compiled/universal_deployer.json").into(), + class: PathBuf::from("../../../contracts/compiled/universal_deployer.json") + .into(), }, GenesisClassJson { class_hash: Some(felt!("0xa55")), diff --git a/crates/katana/primitives/src/genesis/test-genesis-with-class.json b/crates/katana/primitives/src/genesis/test-genesis-with-class.json index 5a6f151d6e..177ff094f0 100644 --- a/crates/katana/primitives/src/genesis/test-genesis-with-class.json +++ b/crates/katana/primitives/src/genesis/test-genesis-with-class.json @@ -57,11 +57,11 @@ }, "classes": [ { - "class": "../../contracts/compiled/erc20.json", + "class": "../../../contracts/compiled/erc20.json", "classHash": "0x8" }, { - "class": "../../contracts/compiled/universal_deployer.json", + "class": "../../../contracts/compiled/universal_deployer.json", "classHash": "0x80085" }, { diff --git a/crates/katana/primitives/src/genesis/test-genesis.json b/crates/katana/primitives/src/genesis/test-genesis.json index 822a664ca1..b122261e2f 100644 --- a/crates/katana/primitives/src/genesis/test-genesis.json +++ b/crates/katana/primitives/src/genesis/test-genesis.json @@ -69,15 +69,15 @@ }, "classes": [ { - "class": "../../contracts/compiled/erc20.json", + "class": "../../../contracts/compiled/erc20.json", "classHash": "0x8" }, { - "class": "../../contracts/compiled/universal_deployer.json", + "class": "../../../contracts/compiled/universal_deployer.json", "classHash": "0x80085" }, { - "class": "../../contracts/compiled/oz_account_080.json", + "class": "../../../contracts/compiled/oz_account_080.json", "classHash": "0xa55" } ] From ab1fa3fbbf936402e0a2c1d0cb36a1f664c33284 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Thu, 11 Apr 2024 13:58:30 +0800 Subject: [PATCH 08/23] Add account class with dummy validate for testing (#1814) this would be useful for when you're hardcoding txs in tests/benchmarks without having to deal with the signatures. 
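To make the intent concrete, the sketch below illustrates the kind of test-side usage this enables: because the account's __validate__ entry point accepts anything, a test or benchmark can hardcode an invoke call with an empty signature and skip signing entirely. This is an illustrative sketch only, not code from this patch; `HardcodedInvoke` and `hardcoded_invoke` are hypothetical names, and the only repository type it leans on is `katana_primitives::FieldElement`, which is assumed to be available to the test crate.

    // Illustrative sketch (not part of this patch). `HardcodedInvoke` and
    // `hardcoded_invoke` are hypothetical helpers for a test or benchmark.
    use katana_primitives::FieldElement;

    struct HardcodedInvoke {
        // Address of the deployed dummy-validate account.
        sender: FieldElement,
        // Calldata baked directly into the test/benchmark.
        calldata: Vec<FieldElement>,
        // Left empty on purpose: the dummy __validate__ never checks it.
        signature: Vec<FieldElement>,
    }

    fn hardcoded_invoke(sender: FieldElement, calldata: Vec<FieldElement>) -> HardcodedInvoke {
        // No signer, no hashing, no ECDSA: the account returns VALIDATED
        // for any signature, so the field can stay empty.
        HardcodedInvoke { sender, calldata, signature: Vec::new() }
    }

The usual caveat for such fixtures applies: a class that validates nothing accepts transactions from anyone, so it is only suitable for local testing and benchmarking, never for a real deployment.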
--- .../account_with_dummy_validate.cairo | 53 ++ .../account_with_dummy_validate.sierra.json | 709 ++++++++++++++++++ 2 files changed, 762 insertions(+) create mode 100644 crates/katana/contracts/account_with_dummy_validate.cairo create mode 100644 crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json diff --git a/crates/katana/contracts/account_with_dummy_validate.cairo b/crates/katana/contracts/account_with_dummy_validate.cairo new file mode 100644 index 0000000000..15a3ed3095 --- /dev/null +++ b/crates/katana/contracts/account_with_dummy_validate.cairo @@ -0,0 +1,53 @@ +#[starknet::contract] +mod Account { + use array::{ArrayTrait, SpanTrait}; + use starknet::{ContractAddress, call_contract_syscall}; + use starknet::info::SyscallResultTrait; + use zeroable::Zeroable; + + #[storage] + struct Storage { + } + + #[external(v0)] + fn __validate_deploy__( + self: @ContractState, + class_hash: felt252, + contract_address_salt: felt252 + ) -> felt252 { + starknet::VALIDATED + } + + #[external(v0)] + fn __validate_declare__(self: @ContractState, class_hash: felt252) -> felt252 { + starknet::VALIDATED + } + + #[external(v0)] + fn __validate__( + self: @ContractState, + contract_address: ContractAddress, + selector: felt252, + calldata: Array + ) -> felt252 { + starknet::VALIDATED + } + + #[external(v0)] + #[raw_output] + fn __execute__( + self: @ContractState, + contract_address: ContractAddress, + selector: felt252, + calldata: Array + ) -> Span { + // Validate caller. + assert(starknet::get_caller_address().is_zero(), 'INVALID_CALLER'); + + call_contract_syscall( + address: contract_address, + entry_point_selector: selector, + calldata: calldata.span() + ).unwrap_syscall() + } +} diff --git a/crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json b/crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json new file mode 100644 index 0000000000..62a0da050a --- /dev/null +++ b/crates/katana/contracts/compiled/account_with_dummy_validate.sierra.json @@ -0,0 +1,709 @@ +{ + "sierra_program": [ + "0x1", + "0x4", + "0x0", + "0x2", + "0x4", + "0x0", + "0x114", + "0xec", + "0x2c", + "0x52616e6765436865636b", + "0x800000000000000100000000000000000000000000000000", + "0x426f78", + "0x800000000000000700000000000000000000000000000001", + "0x1", + "0x13", + "0x4172726179", + "0x800000000000000300000000000000000000000000000001", + "0x6", + "0x456e756d", + "0x800000000000000300000000000000000000000000000003", + "0x0", + "0xfeece2ea7edbbbebeeb5f270b77f64c680a68a089b794478dd9eca75e0196a", + "0x2", + "0xf", + "0x11", + "0x66656c74323532", + "0x800000000000000700000000000000000000000000000000", + "0x436f6e747261637441646472657373", + "0x75313238", + "0x536e617073686f74", + "0x537472756374", + "0x800000000000000700000000000000000000000000000002", + "0x1baeba72e79e9db2587cf44fedb2f3700b2075a5e8e39a562584862c4b71f62", + "0x9", + "0x12", + "0xb", + "0x1597b831feeb60c71f259624b79cf66995ea4f7e383403583674ab9c33b9cec", + "0xc", + "0x753332", + "0x80000000000000070000000000000000000000000000000e", + "0x348a62b7a38c0673e61e888d83a3ac1bf334ee7361a8514593d3d9532ed8b39", + "0x7", + "0x8", + "0xa", + "0xd", + "0xe", + "0x753634", + "0x800000000000000700000000000000000000000000000004", + "0x3808c701a5d13e100ab11b6c02f91f752ecae7e420d21b56c90ec0a475cc7e5", + "0x10", + "0x3342418ef16b3e2799b906b1e4e89dbb9b111332dd44f72458ce44f9895b508", + "0x800000000000000700000000000000000000000000000006", + "0x7d4d99e9ed8d285b5c61b493cedb63976bc3d9da867933d829f49ce838b5e7", + "0x5", + "0x4", 
+ "0x2ee1e2b1b89f8c495f200e4956278a4d47395fe262f27b52e5865c9524c08c3", + "0x800000000000000f00000000000000000000000000000001", + "0x16a4c8d7c05909052238a862d8cc3e7975bf05a07b3a69c6b28951083a6d672", + "0x15", + "0x2ca39cde64b91db1514d78c135ee79d71b3b57fffee52f1a3ef96618a34d8c8", + "0x14", + "0x16", + "0x161ee0e6962e56453b5d68e09d1cabe5633858c1ba3a7e73fee8c70867eced0", + "0x4e6f6e5a65726f", + "0x3e1934b18d91949ab9afdbdd1866a30ccca06c2b1e6581582c6b27f8b4f6555", + "0x1a", + "0x800000000000000700000000000000000000000000000003", + "0x29d7d57c04a880978e7b3689f6218e507f3be17588744b58dc17762447ad0e7", + "0x1c", + "0x1d", + "0x2490fcc229f827552c25240be8547f63b1bc196e0e5c5bf72432ff0bf34f9e0", + "0x556e696e697469616c697a6564", + "0x800000000000000200000000000000000000000000000001", + "0x17b6ecc31946835b0d9d92c2dd7a9c14f29af0371571ae74a1b228828b2242", + "0x22", + "0x34f9bd7c6cb2dd4263175964ad75f1ff1461ddc332fbfb274e0fb2a5d7ab968", + "0x23", + "0x3d37ad6eafb32512d2dd95a2917f6bf14858de22c27a1114392429f2e5c15d7", + "0x4275696c74696e436f737473", + "0x53797374656d", + "0x9931c641b913035ae674b400b61a51476d506bbe8bba2ff8a6272790aba9e6", + "0x26", + "0x11c6d8087e00642489f92d2821ad6ebd6532ad1a3b6d12833da6d6810391511", + "0x4761734275696c74696e", + "0x82", + "0x7265766f6b655f61705f747261636b696e67", + "0x77697468647261775f676173", + "0x6272616e63685f616c69676e", + "0x73746f72655f74656d70", + "0x66756e6374696f6e5f63616c6c", + "0x3", + "0x656e756d5f6d61746368", + "0x2a", + "0x64726f70", + "0x7374727563745f6465636f6e737472756374", + "0x61727261795f6c656e", + "0x736e617073686f745f74616b65", + "0x7533325f636f6e7374", + "0x72656e616d65", + "0x7533325f6571", + "0x61727261795f6e6577", + "0x66656c743235325f636f6e7374", + "0x496e70757420746f6f206c6f6e6720666f7220617267756d656e7473", + "0x61727261795f617070656e64", + "0x7374727563745f636f6e737472756374", + "0x656e756d5f696e6974", + "0x29", + "0x2b", + "0x28", + "0x6765745f6275696c74696e5f636f737473", + "0x27", + "0x77697468647261775f6761735f616c6c", + "0x56414c4944", + "0x4f7574206f6620676173", + "0x4661696c656420746f20646573657269616c697a6520706172616d202332", + "0x4661696c656420746f20646573657269616c697a6520706172616d202331", + "0x25", + "0x24", + "0x4661696c656420746f20646573657269616c697a6520706172616d202333", + "0x616c6c6f635f6c6f63616c", + "0x66696e616c697a655f6c6f63616c73", + "0x73746f72655f6c6f63616c", + "0x1f", + "0x20", + "0x21", + "0x61727261795f736e617073686f745f706f705f66726f6e74", + "0x1e", + "0x6a756d70", + "0x756e626f78", + "0x21adb5788e32c84f69a1863d85ef9394b7bf761a0ce1190f826984e5075c371", + "0x64697361626c655f61705f747261636b696e67", + "0x1b", + "0x636f6e74726163745f616464726573735f746f5f66656c74323532", + "0x66656c743235325f737562", + "0x66656c743235325f69735f7a65726f", + "0x63616c6c5f636f6e74726163745f73797363616c6c", + "0x18", + "0x19", + "0x494e56414c49445f43414c4c4552", + "0x647570", + "0x17", + "0x6765745f657865637574696f6e5f696e666f5f76325f73797363616c6c", + "0x372", + "0xffffffffffffffff", + "0x68", + "0x59", + "0x4a", + "0x3d", + "0x2d", + "0x2e", + "0x2f", + "0x30", + "0x31", + "0x32", + "0x33", + "0x34", + "0x35", + "0x36", + "0x37", + "0x38", + "0x39", + "0x3a", + "0x3b", + "0x3c", + "0x3e", + "0x3f", + "0x40", + "0x41", + "0x42", + "0x43", + "0x44", + "0x45", + "0x46", + "0xca", + "0xbb", + "0x97", + "0xae", + "0x160", + "0x151", + "0x142", + "0x13b", + "0x12c", + "0x108", + "0x11f", + "0x47", + "0x48", + "0x49", + "0x4b", + "0x4c", + "0x4d", + "0x4e", + "0x4f", + "0x50", + "0x51", + "0x52", + "0x53", + "0x54", + "0x55", + "0x56", + "0x57", 
+ "0x20f", + "0x1fe", + "0x1ed", + "0x1e4", + "0x1d3", + "0x1a3", + "0x1c3", + "0x1bc", + "0x58", + "0x5a", + "0x226", + "0x22b", + "0x235", + "0x253", + "0x24c", + "0x261", + "0x266", + "0x282", + "0x27c", + "0x2d5", + "0x5b", + "0x5c", + "0x5d", + "0x5e", + "0x5f", + "0x60", + "0x61", + "0x2c5", + "0x62", + "0x2b0", + "0x63", + "0x64", + "0x2b5", + "0x65", + "0x66", + "0x2bf", + "0x67", + "0x317", + "0x69", + "0x2ef", + "0x6a", + "0x30d", + "0x6b", + "0x307", + "0x6c", + "0x6d", + "0x338", + "0x6e", + "0x6f", + "0x70", + "0x71", + "0x72", + "0x73", + "0x74", + "0x75", + "0x76", + "0x77", + "0x344", + "0x78", + "0x351", + "0x79", + "0x7a", + "0x356", + "0x7b", + "0x7c", + "0x360", + "0x7d", + "0x7e", + "0x7f", + "0x80", + "0x81", + "0x36c", + "0xd8", + "0x16e", + "0x21f", + "0x23c", + "0x241", + "0x259", + "0x28c", + "0x2de", + "0x326", + "0x33e", + "0x34a", + "0x366", + "0x1e7c", + "0x400a040183c0a04018380a0502834180b050240a08038180a04018080200", + "0x302e160d81408150d01410070c814300c0b8581c050205424140904c2411", + "0x880a24028240a09028240a23028880a21028240a200607c2c120f0740a1c", + "0x141205150304e1610814520514814500c138582426118144a05128144605", + "0xc418300b0140a2f0605c2c09028840a21028b80a2d028b0182b0b0880a29", + "0x1408370701446051b030160a1a814680519830160a070146405178301616", + "0xec183a050bc18300b0240a04018d40a39028e0180b050840a2f0605c2c09", + "0x3016161e8141c0520830160a10814803f04814803f1f03060161e8147805", + "0x8c0a2f0605c2c3d028840a45060e81435029100a430602c14420288c0a2f", + "0x309c0c26930044b1e814120525030740a1a814920524030160a0111c2446", + "0x15c460502958120502954a8050294c1805029405c5202944460502940184f", + "0x14b80c2d8940a05280940a052d0301c052c8940a052a8940a052c0240a05", + "0x38c405071846a050298064050298012050297c120502940bc0e029741205", + "0x14b06807014ba0c339980a0528030ca6202814a06402814a06302814a005", + "0x1804605029801c05029541c05029607a05029545a52029441c05029401205", + "0x14a26b07014ba6a07014ba2302814aa6907014ba0c071880a0e309240a05", + "0x17484050294c880502958da050294c425202944420502954d8050294c1252", + "0x14b07202814c00902814e22102814e20c380240a05378840a05379b81c05", + "0x154e60502954920502958c4050294c445202944420502940e40502954e405", + "0xf40a0530030ee7602814a01902814a00c071d80a0e308240a053a9d00a05", + "0x140a5006038a805071841205029681205029e0ec050294c0a0e3b0141c61", + "0x30f405071b00a0e309b00a05280301c6c02838c20c3c8141c5402838c254", + "0x141c6102838da0507184da0502940180e368141c61220140a600c9480a51", + "0x14ba0c3e0840a052d0840a052c0e40a052b1ec0a052988ca405288141c42", + "0x1480a5102839000507185000502940180e400141c61061fc187e061f4180e", + "0x74a405288141c052e8301c4202838c20902815068207014ba8102814aa1b", + "0x140a60170140a55168140a55030140a56030140a781a0140a56420140a53", + "0x1480a0e308310a8002814a605071ec0a0e309ec0a05280301c7b02838c239", + "0x140180e420141c611a0140a60121480a5102838a40507184a40502940180e", + "0x21c1c05060380a0c0621c0a0c060310c5202814a605072100a0e30a100a05", + "0x21c0a52028b8185202a1c0a5202948180c43814180e06084120e440b45c0e", + "0x21c0a0c0703036054488c0a87070640a09060b80a87028b80a2d06064440e", + "0x2418240e8390e05110145c0c110150e0511014a40c0621c0a2302884180c", + "0x143a05110301887028940a21060310e0506038180f02a284a05438384805", + "0x310e050d014360c030681c87028400a23060400a87028a40a19060a40a87", + "0x180a24060310e0519014360c1a8c81c87028d00a23060d00a87028303a0c", + "0x21c1c80420381e0c420150e05420144a0c400150e051a814480c420150e05", + "0xe40a1a060e40a8702830200c408150e05060a4180c43814180e06030920c", + 
"0x21c0a7b1e038640c1e0150e05060d0187b02a1c0a39408380c0c1c8150e05", + "0x305a05438145a05420305c05438145c0516830ec05438147a051a8307a05", + "0x310e05060381876070b45c2e029d80a87029d80a81060380a87028380a80", + "0x22ce8730721c1c72168b8a43c061c80a87029c80a7b061c80a8702830720c", + "0x1b00a76061b00a87028307a0c368150e05060a4180c43814180e06110840e", + "0x150e0536814e40c330150e0533014340c0621c0a49028841866248390e05", + "0x18ca80e43814c805210301887029880a7406188c80e43814da66071cc186d", + "0x2300a49062300a87028000a6c060000a870298c0a6d060310e052a014880c", + "0x150e0507015000c3a0150e053a015080c398150e05398145a0c468150e05", + "0x150e05060a4180c43814180e062341c74398b80a8d02a1c0a8d02a04180e", + "0xd0189002a1c0a8f470380c0c478150e0547814340c478150e0506198188e", + "0x1484051683126054381524051a8312405438152091070c8189102a1c0a0c", + "0x24c0a8702a4c0a81060380a87028380a80061100a87029100a84061080a87", + "0x301887028740a64060310e0507814e80c0621c0a0c07031260e221085c05", + "0x152a9407018189502a1c0a9502868189502a1c0a0c310312805438141829", + "0x2640a8702a600a35062600a8702a592e0e190312e05438141834062580a87", + "0x15020c070150e0507015000c168150e0516815080c170150e05170145a0c", + "0x14c80c0621c0a1b029d0180c43814180e062641c2d170b80a9902a1c0a99", + "0x3136054381536050d0313605438141854062680a8702830520c0621c0a22", + "0x146a0c4e8150e0545a701c32062700a8702830680c458150e054da681c06", + "0x21c0a0e02a00182d02a1c0a2d02a10182e02a1c0a2e028b4189e02a1c0a9d", + "0x14a405320301887028301c0c4f0385a2e170153c05438153c05408301c05", + "0x380c0c500150e0550014340c500150e0506198189f02a1c0a0c148301887", + "0x1546051a83146054381542a2070c818a202a1c0a0c1a031420543815409f", + "0x380a87028380a80060840a87028840a84060240a87028240a2d062900a87", + "0x21c1c05060380a0c0621c0a0c06031480e108245c05520150e0552015020c", + "0x21c0a52028b8185202a1c0a5202948180c43814180e06084120e528b45c0e", + "0x21c0a0c0703036055308c0a87070640a09060b80a87028b80a2d06064440e", + "0x14460c120150e050e814320c0e8150e0511014440c0621c0a2302884180c", + "0x390e0514814460c148150e0506074180c438144a050d8301e250721c0a24", + "0x94183402a1c0a1a02890180602a1c0a0f02890180c4381420050d8303410", + "0x21c0a0c148301887028301c0c0629c1887070d00c0e078300c05438140c05", + "0x310805438146a3207018183502a1c0a3502868183502a1c0a0c080306405", + "0xb80a2d060e40a8702a040a35062040a8702a11000e190310005438141834", + "0x150e051c815020c070150e0507015000c168150e0516815080c170150e05", + "0x150e053d814f60c3d8150e05060e4180c43814180e060e41c2d170b80a39", + "0x21c0a0c148301887028301c0c391d81ca81e8f01c87071ec5a2e290f0187b", + "0x68180c4381484051083088420721c0a74029d8187402a1c0a0c1e830e605", + "0x14e80c361b41c87029cc880e39830e60543814e605390308805438148805", + "0x150e0533014da0c0621c0a49029101866248390e0536814840c0621c0a6c", + "0x210183c02a1c0a3c028b4185402a1c0a6202924186202a1c0a64029b01864", + "0x387a3c17014a80543814a805408301c05438141c05400307a05438147a05", + "0x21c0a0002868180002a1c0a0c33030c605438141829060310e05060381854", + "0x2380a8702a311a0e190311a05438141834062300a8702800c60e030300005", + "0x15000c390150e0539015080c3b0150e053b0145a0c478150e05470146a0c", + "0x1d0180c43814180e0623c1c723b0b80a8f02a1c0a8f02a04180e02a1c0a0e", + "0x312205438141854062400a8702830520c0621c0a2202990180c438143605", + "0x24c1c320624c0a8702830680c490150e0548a401c06062440a8702a440a1a", + "0x21c0a2d02a10182e02a1c0a2e028b4189502a1c0a94028d4189402a1c0a92", + "0x301c0c4a8385a2e170152a05438152a05408301c05438141c05400305a05", + "0x14340c4b8150e0506198189602a1c0a0c148301887029480a64060310e05", + "0x153099070c8189902a1c0a0c1a0313005438152e9607018189702a1c0a97", + 
"0x840a87028840a84060240a87028240a2d0626c0a8702a680a35062680a87", + "0x21c0a0c06031360e108245c054d8150e054d815020c070150e0507015000c", + "0x21c0a2e028b4180c43814180e06084120e548b45c0e438380a0c07014180c", + "0x21c1c230280018230c888a487029485c0e31830a40543814a405290305c05", + "0x3032054381432052903018870286c0a8c060310e0506038181d02aa83605", + "0x84180c43814180e060a40aab078150e0e12814120c128901c87028640a2e", + "0x21c0a2402948182d02a1c0a2d02a10182202a1c0a22028b4180c438141e05", + "0x38183202ab06805438380c05470300c1a081490e05120b44452468304805", + "0x301c0c408155a8002a1c1c8402a4018841a8390e051a0151e0c0621c0a0c", + "0x30f6054381472050c8307205438146a0511030188702a000a44060310e05", + "0x14ec0511830ec0543814181d060310e051e014360c1e8f01c87029ec0a23", + "0x1080a87029cc0a24061d00a87028f40a24060310e0539014360c399c81c87", + "0x30520c0621c0a0c0703018ae0621c1c423a0381e0c3a0150e053a0144a0c", + "0x150e05369101c06061b40a87029b40a1a061b40a8702830200c220150e05", + "0xb4186402a1c0a66028d4186602a1c0a6c24838640c248150e05060d0186c", + "0x14c805408301c05438141c05400303405438143405420302005438142005", + "0x14c4053d830c405438141839060310e0506038186407068202e029900a87", + "0x30520c0621c0a0c070311800072bcc6540721c1c620d040a43c061880a87", + "0x310e0547814420c4823c1c8702a380a76062380a87028307a0c468150e05", + "0x3124910721c0a8d48038e60c468150e0546814e40c480150e0548014340c", + "0x15280536830188702a4c0a4406251260e43815220521030188702a480a74", + "0x1500a87029500a2d0625c0a8702a580a49062580a8702a540a6c062540a87", + "0x1505c054b8150e054b815020c070150e0507015000c318150e0531815080c", + "0x2640a1a062640a8702830cc0c4c0150e05060a4180c43814180e0625c1c63", + "0x21c0a9a4d838640c4d8150e05060d0189a02a1c0a994c0380c0c4c8150e05", + "0x3118054381518054203000054381400051683138054381516051a8311605", + "0x310e0506038189c07230002e02a700a8702a700a81060380a87028380a80", + "0x150e0506244189d02a1c0a0c148301887028d40a64060310e0540814e80c", + "0xc818a002a1c0a0c1a0313e05438153c9d07018189e02a1c0a9e02868189e", + "0x680a84060400a87028400a2d062880a8702a840a35062840a8702a7d400e", + "0x31440e0d0405c05510150e0551015020c070150e0507015000c0d0150e05", + "0x21c0a1a02a10181002a1c0a10028b418a302a1c0a32028d4180c43814180e", + "0x301c0c518383410170154605438154605408301c05438141c05400303405", + "0x30c40c520150e05060a4180c438144805320301887028a40a74060310e05", + "0x150e05060d018b102a1c0ab0520380c0c580150e0558014340c580150e05", + "0x3044054381444051683166054381514051a83114054381562b2070c818b2", + "0xb4442e02acc0a8702acc0a81060380a87028380a80060b40a87028b40a84", + "0x21c0a0c148301887028640a64060310e050e814e80c0621c0a0c07031660e", + "0x316c05438156ab40701818b502a1c0ab50286818b502a1c0a0c2a0316805", + "0x880a2d062e40a8702ae00a35062e00a8702ad96e0e190316e05438141834", + "0x150e055c815020c070150e0507015000c168150e0516815080c110150e05", + "0x2e80a8702830520c0621c0a5202990180c43814180e062e41c2d110b80ab9", + "0x30680c5e0150e055dae81c06062ec0a8702aec0a1a062ec0a8702830cc0c", + "0x21c0a09028b418bf02a1c0abe028d418be02a1c0abc5e838640c5e8150e05", + "0x157e05438157e05408301c05438141c05400304205438144205420301205", + "0x30180c438141894060840a8702831260c168150e050624818bf07084122e", + "0x145a0c0621c0a0c0703036230730032220721c1c05060380a0c0621c0a0c", + "0x14000c128903a5243814a4220718c185202a1c0a5202948182202a1c0a22", + "0x144805170304805438144805290301887028301c0c07815822e02a1c1c25", + "0x301c0c0d015840902a1c1c1002824182e02a1c0a2e168392a0c080a41c87", + "0xa40a87028a40a52060640a87028640a84060740a87028740a2d060310e05", + "0x21c1c3202a3818321a018a487028a4321d29234180902a1c0a09108392c0c", + 
"0x150e0e40815200c40a001c87028d40a8f060310e0506038188402b0c6a05", + "0x307a054381478050c8307805438150005110301887028301c0c3d8158839", + "0x14e60511830e60543814181d060310e053b014360c391d81c87028f40a23", + "0x1b40a87029080a24061100a87029c80a24060310e053a014360c211d01c87", + "0x14880c0621c0a0c0703018c50621c1c6d220381e0c220150e05220144a0c", + "0x40186c02a1c0a0c148301887028b80a8c060310e0504814420c0621c0a39", + "0x21c0a0c1a030cc0543814926c07018184902a1c0a4902868184902a1c0a0c", + "0x180a87028180a2d061500a87029880a35061880a8702998c80e19030c805", + "0x185c052a0150e052a015020c070150e0507015000c1a0150e051a015080c", + "0x148780c318150e0531814f60c318150e05060e4180c43814180e061501c34", + "0x260188f02a1c0a0c4b8301887028301c0c472341cc6460001c870718c6806", + "0x141c0540031180543815180542030188702a400a9906245200e438151e05", + "0xe40a87028e40a72060240a87028240a1a060b80a87028b80a9a060380a87", + "0x2500a8b060000a87028000a2d0625126922921c0a39048b9220e46025360c", + "0x21c0a97029b0189702a1c0a9502a70180c43814180e062580ac74a8150e0e", + "0x312405438152405420300005438140005168313205438153005248313005", + "0x310e0506038189949a48002e02a640a8702a640a810624c0a8702a4c0a80", + "0x15000c490150e0549015080c000150e05000145a0c4d0150e054b0146a0c", + "0x110180c43814180e062692692000b80a9a02a1c0a9a02a04189302a1c0a93", + "0x313605438141829060310e0517015180c0621c0a0902884180c438147205", + "0x141834062700a8702a2d360e0303116054381516050d0311605438141866", + "0x150e05468145a0c4f8150e054f0146a0c4f0150e054e2741c32062740a87", + "0xb80a9f02a1c0a9f02a04180e02a1c0a0e02a00188e02a1c0a8e02a10188d", + "0x14420c0621c0a8002990180c43814f6053a0301887028301c0c4f8391c8d", + "0x6818a102a1c0a0c488314005438141829060310e0517015180c0621c0a09", + "0x289460e190314605438141834062880a8702a85400e030314205438154205", + "0x150e051a015080c030150e05030145a0c580150e05520146a0c520150e05", + "0x14180e062c01c34030b80ab002a1c0ab002a04180e02a1c0a0e02a001834", + "0x145a0c588150e05420146a0c0621c0a0902884180c438145c05460301887", + "0x21c0ab102a04180e02a1c0a0e02a00183402a1c0a3402a10180602a1c0a06", + "0x21c0a2e02a30180c4381434053a0301887028301c0c588386806170156205", + "0x21c0a0c310316405438141829060310e05108153a0c0621c0a2902990180c", + "0x316805438141834062cc0a8702a29640e0303114054381514050d0311405", + "0x15080c0e8150e050e8145a0c5b0150e055a8146a0c5a8150e0559ad01c32", + "0x2d81c190e8b80ab602a1c0ab602a04180e02a1c0a0e02a00181902a1c0a19", + "0x310e05108153a0c0621c0a2402990180c438141e053a0301887028301c0c", + "0x21c0ab80286818b802a1c0a0c2a0316e05438141829060310e05168153c0c", + "0x2ec0a8702ae5740e190317405438141834062e40a8702ae16e0e030317005", + "0x15000c0c8150e050c815080c0e8150e050e8145a0c5e0150e055d8146a0c", + "0x278180c43814180e062f01c190e8b80abc02a1c0abc02a04180e02a1c0a0e", + "0x317a05438141829060310e05108153a0c0621c0a5202990180c438145a05", + "0x141834062fc0a8702af97a0e030317c05438157c050d0317c05438141866", + "0x150e05118145a0c648150e05448146a0c448150e055fb201c32063200a87", + "0xb80ac902a1c0ac902a04180e02a1c0a0e02a00181b02a1c0a1b02a101823", + "0x301c0c170159452070390e0e028153e0c028150e0506014440c648383623", + "0x840a87028b40aa2060240a87028380aa1060b40a87029480aa0060310e05", + "0x640a87028880ab0060880a8702831480c0621c0a0c0703018cb02831460c", + "0x14a40c118150e0504814da0c108150e050c815440c048150e0517015420c", + "0x143605590301887028301c0c0e815981b02a1c1c2102ac4182302a1c0a23", + "0x8c0a870288c0a520603c0a87028940ab3060940a87028900a8a060900a87", + "0x290180c438143a053a0301887028301c0c0788c1c05078150e0507815680c", + "0x21c0a1002ad0182302a1c0a2302948181002a1c0a2902ad4182902a1c0a0c", + 
"0x1418a4061480a87028380a0e030301c05438141805450302023070142005", + "0x140a2e060140a87028140a52060b8a40e029480a87029480a72060b80a87", + "0xb8180e5b0301887028301c0c168159a2e02a1c1c52028241852070390e05", + "0x1412051683032054381442055b8301887028301c0c110159c21048390e0e", + "0x301c0c0c8381252028640a87028640ab8060380a87028380a52060240a87", + "0x3044054381444051683036054381446055c83046054381418a4060310e05", + "0x301887028301c0c0d83844520286c0a870286c0ab8060380a87028380a52", + "0x740ab8060380a87028380a52060300a87028300a2d060740a87028b40ab9", + "0x1480a9f061480a87028380a22060310e05062e8181d07030a4050e8150e05", + "0x145c05508304205438145a05500301887028301c0c048159e2d170390e0e", + "0x1418a4060310e0506038180c6801418a3060640a87028840aa2060880a87", + "0x640a870286c0aa2060880a87028240aa10606c0a870288c0ab00608c0a87", + "0x940ad1120150e0e0c815620c0e8150e050e814a40c0e8150e0511014da0c", + "0x21c0a0f02a28182902a1c0a0c148301e05438144805590301887028301c0c", + "0x303a05438143a05290300a05438140a05420301805438141805168302005", + "0x68a48702840521d028305abb060400a87028400a1a060a40a87028a40a72", + "0x2101c87028c80a8f060310e0506038183502b486405438386805470306806", + "0x3034054381434051683072054381502055e8310205438150084072f01880", + "0x301887028301c0c1c8183452028e40a87028e40abe060180a87028180a84", + "0x1ec0abe060180a87028180a84060680a87028680a2d061ec0a87028d40abf", + "0x21c0a0c520301887028940a74060310e0506038187b03068a4053d8150e05", + "0x1c80a87029d80abd061d80a87028f43a0e5e0307a05438147805640307805", + "0x30a405390150e05390157c0c028150e0502815080c060150e05060145a0c", + "0x224180502a1c0a0502a00180c02a1c0a0c02a10180c438141c054c830e405", + "0x301887028301c0c11815a61902a1c1c2202b24182210824a48702814180e", + "0x900ad6060310e050e815180c120741c870286c0ad50606c0a87028640ad4", + "0x21c0a29028841810148390e0507814ec0c078150e0512815ae0c128150e05", + "0x360180c438140c051083068060721c0a1a029d8181a02a1c0a1002a28180c", + "0x21c0a3402a28180c438146a051083108350721c0a32029d8183202a1c0a0c", + "0xe40a87028e40a1a060e40a8702a05000e6c8310205438150805450310005", + "0x110183d1e0390e0516814840c0621c0a0c07030f6056d8310e0e1c815b40c", + "0x148420916b70187602a1c0a7602948187602a1c0a3d029b4180c438147805", + "0x1b00a87029d00ade060310e0506038186d22108a4dd3a1cce45243838ec2e", + "0x31460c320150e0536015be0c330150e0539815000c248150e0539015080c", + "0x200184902a1c0a4202a10186202a1c0a6d02b84180c43814180e06031c005", + "0x38a80545830a80543814c80571030c80543814c4056f830cc05438148805", + "0x150e0546014d80c460150e0531815380c0621c0a0c0703000057198c0a87", + "0x204186602a1c0a6602a00184902a1c0a4902a10188e02a1c0a8d02924188d", + "0x210188f02a1c0a00028d4180c43814180e06238cc49290151c05438151c05", + "0x23ccc49290151e05438151e0540830cc0543814cc05400309205438149205", + "0x310e0529015180c0621c0a2e02884180c43814f605720301887028301c0c", + "0x21c0a9102868189102a1c0a0c440312005438141829060310e0516814880c", + "0x2500a8702a49260e190312605438141834062480a8702a45200e030312205", + "0x15020c108150e0510815000c048150e0504815080c4a8150e054a0146a0c", + "0x1480a8c060310e0517014420c0621c0a0c070312a21049480a9502a1c0a95", + "0x301205438141205420312c054381446051a8301887028b40a44060310e05", + "0x30188702831740c4b084125202a580a8702a580a81060840a87028840a80", + "0x39ca21048390e0e1681418521e0305a05438145a053d8305a05438141839", + "0x368180902a1c0a09028b41823170390e0517015cc0c0621c0a0c070303222", + "0x21c0a5202ba0180c438145c05108301887028301c0c0d815ce0c438384605", + "0x240a87028240a2d060940a87028900abd060900a87028741c0e5e0303a05", + 
"0x310e0506038182510824a405128150e05128157c0c108150e0510815080c", + "0x14120c1483c1c87028380a2e060380a87028380a52060310e050d815c80c", + "0x1418ea060180a8702840a40e030301887028301c0c0d015d21002a1c1c29", + "0x150e0510815080c048150e05048145a0c190150e051a0b81cd9060d00a87", + "0x2ec183202a1c0a3202868180602a1c0a06029c8180f02a1c0a0f029481821", + "0x14180e060e40aeb408150e0e400151c0c402106a5243814640607884122d", + "0x150e051e8157a0c1e8150e051e1ec1cbc060f0f60e438150205478301887", + "0x1480a7602a1c0a7602af8188402a1c0a8402a10183502a1c0a35028b41876", + "0x210183502a1c0a35028b4187202a1c0a3902afc180c43814180e061d90835", + "0x84180c43814180e061c9083529014e40543814e4055f0310805438150805", + "0x21c0a7307839780c398150e050d015900c0621c0a5202910180c438145c05", + "0x30420543814420542030120543814120516830840543814e8055e830e805", + "0x110180c438145c05108301887028301c0c210841252029080a87029080abe", + "0x30da05438141866061100a8702830520c0621c0a0e02990180c43814a405", + "0x1241c32061240a8702830680c360150e05369101c06061b40a87029b40a1a", + "0x21c0a1902a10182202a1c0a22028b4186402a1c0a6602afc186602a1c0a6c", + "0x140a80060300a87028300a8406190322229014c80543814c8055f0303205", + "0x301205770b40a87070b80aed060b8a40e2921c0a0506039d80c028150e05", + "0x21c0a2202bc4182202a1c0a2102bc0182102a1c0a2d02bbc180c43814180e", + "0x310e050e815180c0621c0a2302bcc180c43814320579030481d0d88c322d", + "0x380a840603c0a87028940af5060940a870286c0af4060310e0512014420c", + "0x38180f29038a405078150e0507815ec0c290150e0529015000c070150e05", + "0x150e0529015000c070150e0507015080c148150e0504815ee0c0621c0a0c", + "0x38180e02be40a054383818057c0305252071480a2902a1c0a2902bd81852", + "0x150e0517015020c170150e0529014920c290150e0502814d80c0621c0a0c", + "0x301205438141c2d070c8182d02a1c0a0c1a0301887028301c0c170140a2e", + "0x38a48707014180e7d0304205028840a87028840a81060840a87028240a35", + "0x21c0a0e02a10182202a1c0a2e02bf0180c43814180e06084122d293ec5c52", + "0x301c0c063f80a0c5183036054381444057e830460543814a405400303205", + "0x8c0a87028240a80060640a87028b40a84060740a87028840aff060310e05", + "0x3c0b01128150e0e12015da0c120150e050d816000c0d8150e050e815fa0c", + "0x142005818302005438145205810305205438144a05778301887028301c0c", + "0x680a87028680b040608c0a870288c0a80060640a87028640a84060680a87", + "0x640a87028640a84060180a870283c0b05060310e0506038181a11864a405", + "0x21c1c0c02c18180611864a405030150e0503016080c118150e0511815000c", + "0xb80a87029480b03061480a87028140b02060310e0506038180e02c1c0a05", + "0x38640c168150e05060d0180c43814180e060b80a05170150e0517016080c", + "0xb84a21028144205438144205820304205438141205828301205438141c2d", + "0x380a0c31190c60c1708cc863060b818520701418623218c182e11990c60c", + "0x188c863060b84664318305d08290380a0c31190c60c1708cc863060b9ee52", + "0x8c1852118301d0b028307a0e07038120e85030a8230708c0b09290380a0c", + "0x188c86329038122139190c609868380a0c3698c18521198c185286014186c", + "0x148c8630743c5c5207014186d31830a4090708cc60c16c385a2e290380a0c", + "0x31080529016240506210c86329190c60e88830c405400162005061ecc863", + "0x113" + ], + "sierra_program_debug_info": { + "type_names": [], + "libfunc_names": [], + "user_func_names": [] + }, + "contract_class_version": "0.1.0", + "entry_points_by_type": { + "EXTERNAL": [ + { + "selector": "0x15d40a3d6ca2ac30f4031e42be28da9b056fef9bb7357ac5e85627ee876e5ad", + "function_idx": 3 + }, + { + "selector": "0x162da33a4585851fe8d3af3c2a9c60b557814e221e0d4f30ff0b2189d9c7775", + "function_idx": 2 + }, + { + "selector": "0x289da278a8dc833409cabfdad1581e8e7d40e42dcaed693fa4008dcdb4963b3", + "function_idx": 1 + }, + { + 
"selector": "0x36fcbf06cd96843058359e1a75928beacfac10727dab22a3972f0af8aa92895", + "function_idx": 0 + } + ], + "L1_HANDLER": [], + "CONSTRUCTOR": [] + }, + "abi": [ + { + "type": "function", + "name": "__validate_deploy__", + "inputs": [ + { + "name": "class_hash", + "type": "core::felt252" + }, + { + "name": "contract_address_salt", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "__validate_declare__", + "inputs": [ + { + "name": "class_hash", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "__validate__", + "inputs": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "selector", + "type": "core::felt252" + }, + { + "name": "calldata", + "type": "core::array::Array::" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "function", + "name": "__execute__", + "inputs": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "selector", + "type": "core::felt252" + }, + { + "name": "calldata", + "type": "core::array::Array::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "event", + "name": "account_with_dummy_validate::account_with_dummy_validate::Account::Event", + "kind": "enum", + "variants": [] + } + ] +} \ No newline at end of file From f1cf59759e1a2c1a67bca47a4cc489ede76f6ffe Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Thu, 11 Apr 2024 21:45:41 +0530 Subject: [PATCH 09/23] fix: incorrect assumption about migration output (#1790) * fix: incorrect assumption about migration output * remove unnecessary import --- crates/dojo-world/src/migration/mod.rs | 3 +++ crates/sozo/ops/src/migration/mod.rs | 37 +++++++++++++++----------- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/crates/dojo-world/src/migration/mod.rs b/crates/dojo-world/src/migration/mod.rs index 471e16ac50..4af66e2ac5 100644 --- a/crates/dojo-world/src/migration/mod.rs +++ b/crates/dojo-world/src/migration/mod.rs @@ -36,6 +36,7 @@ pub struct DeployOutput { // base class hash at time of deployment pub base_class_hash: FieldElement, pub was_upgraded: bool, + pub name: Option, } #[derive(Clone, Debug)] @@ -215,6 +216,7 @@ pub trait Deployable: Declarable + Sync { declare, base_class_hash, was_upgraded, + name: None, }) } @@ -287,6 +289,7 @@ pub trait Deployable: Declarable + Sync { declare, base_class_hash: FieldElement::default(), was_upgraded: false, + name: None, }) } diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index d237be073f..d2f687e750 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -182,25 +182,27 @@ async fn update_manifests_and_abis( local_manifest.world.inner.block_number = migration_output.world_block_number; } - let base_class_hash = *local_manifest.base.inner.class_hash(); - - debug_assert!(local_manifest.contracts.len() == migration_output.contracts.len()); - - local_manifest.contracts.iter_mut().zip(migration_output.contracts).for_each( - |(local_manifest, 
contract_output)| { - let salt = generate_salt(&local_manifest.name); - local_manifest.inner.address = Some(get_contract_address( + migration_output.contracts.iter().for_each(|contract_output| { + // ignore failed migration which are represented by None + if let Some(output) = contract_output { + // find the contract in local manifest and update its address and base class hash + let local = local_manifest + .contracts + .iter_mut() + .find(|c| c.name == output.name.as_ref().unwrap()) + .expect("contract got migrated, means it should be present here"); + + let salt = generate_salt(&local.name); + local.inner.address = Some(get_contract_address( salt, - base_class_hash, + output.base_class_hash, &[], migration_output.world_address, )); - if let Some(output) = contract_output { - local_manifest.inner.base_class_hash = output.base_class_hash; - } - }, - ); + local.inner.base_class_hash = output.base_class_hash; + } + }); // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in // local_manifest @@ -613,7 +615,7 @@ where ) .await { - Ok(val) => { + Ok(mut val) => { if let Some(declare) = val.clone().declare { ui.print_hidden_sub(format!( "Declare transaction: {:#x}", @@ -623,6 +625,7 @@ where ui.print_hidden_sub(format!("Deploy transaction: {:#x}", val.transaction_hash)); + val.name = Some(contract.diff.name.clone()); Ok(ContractDeploymentOutput::Output(val)) } Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { @@ -788,7 +791,7 @@ where ) .await { - Ok(output) => { + Ok(mut output) => { if let Some(ref declare) = output.declare { ui.print_hidden_sub(format!( "Declare transaction: {:#x}", @@ -814,7 +817,9 @@ where )); ui.print_sub(format!("Contract address: {:#x}", output.contract_address)); } + let name = contract.diff.name.clone(); + output.name = Some(name); deploy_output.push(Some(output)); } Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { From 39ee46b82f1deb08cefab201ff663297a2114f8e Mon Sep 17 00:00:00 2001 From: glihm Date: Thu, 11 Apr 2024 12:32:33 -0400 Subject: [PATCH 10/23] feat: add metadata section to the world contract manifest (#1781) * feat: add metadata section to the world contract manifest * fix: fmt * fix: remove ignore on a manifest test * fix formatting --------- Co-authored-by: lambda-0x <0xlambda@protonmail.com> --- bin/sozo/src/commands/dev.rs | 2 +- bin/sozo/src/commands/migrate.rs | 27 +++++++--- bin/sozo/src/commands/options/starknet.rs | 6 ++- crates/dojo-world/src/manifest/mod.rs | 14 ++--- crates/dojo-world/src/manifest/types.rs | 14 +++-- .../sozo/ops/src/migration/migration_test.rs | 9 ++-- crates/sozo/ops/src/migration/mod.rs | 52 ++++++++++--------- crates/torii/types-test/Scarb.lock | 2 +- .../manifests/dev/manifest.json | 6 ++- .../manifests/dev/manifest.toml | 6 ++- 10 files changed, 87 insertions(+), 51 deletions(-) diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index c702ac8599..4c03b9d749 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -216,7 +216,7 @@ impl DevArgs { let mut previous_manifest: Option = Option::None; let result = build(&mut context); - let Some((mut world_address, account, _)) = context + let Some((mut world_address, account, _, _)) = context .ws .config() .tokio_handle() diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index c142378802..849fe88462 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -76,12 +76,13 @@ pub async fn setup_env<'a>( 
Option, SingleOwnerAccount, LocalWallet>, String, + String, )> { let ui = ws.config().ui(); let world_address = world.address(env).ok(); - let (account, chain_id) = { + let (account, chain_id, rpc_url) = { let provider = starknet.provider(env)?; let spec_version = provider.spec_version().await?; @@ -94,6 +95,8 @@ pub async fn setup_env<'a>( )); } + let rpc_url = starknet.url(env)?; + let chain_id = provider.chain_id().await?; let chain_id = parse_cairo_short_string(&chain_id) .with_context(|| "Cannot parse chain_id as string")?; @@ -109,7 +112,7 @@ pub async fn setup_env<'a>( } match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { - Ok(_) => Ok((account, chain_id)), + Ok(_) => Ok((account, chain_id, rpc_url)), Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) } @@ -118,7 +121,7 @@ pub async fn setup_env<'a>( } .with_context(|| "Problem initializing account for migration.")?; - Ok((world_address, account, chain_id)) + Ok((world_address, account, chain_id, rpc_url.to_string())) } impl MigrateArgs { @@ -145,7 +148,7 @@ impl MigrateArgs { }; config.tokio_handle().block_on(async { - let (world_address, account, chain_id) = setup_env( + let (world_address, account, chain_id, rpc_url) = setup_env( &ws, account, starknet, @@ -155,8 +158,17 @@ impl MigrateArgs { ) .await?; - migration::migrate(&ws, world_address, chain_id, &account, name, true, None) - .await + migration::migrate( + &ws, + world_address, + chain_id, + rpc_url, + &account, + name, + true, + None, + ) + .await }) } MigrateCommand::Apply { mut name, world, starknet, account, transaction } => { @@ -169,7 +181,7 @@ impl MigrateArgs { }; config.tokio_handle().block_on(async { - let (world_address, account, chain_id) = setup_env( + let (world_address, account, chain_id, rpc_url) = setup_env( &ws, account, starknet, @@ -183,6 +195,7 @@ impl MigrateArgs { &ws, world_address, chain_id, + rpc_url, &account, name, false, diff --git a/bin/sozo/src/commands/options/starknet.rs b/bin/sozo/src/commands/options/starknet.rs index 011b04ae9e..759cacbf70 100644 --- a/bin/sozo/src/commands/options/starknet.rs +++ b/bin/sozo/src/commands/options/starknet.rs @@ -24,8 +24,10 @@ impl StarknetOptions { Ok(JsonRpcClient::new(HttpTransport::new(self.url(env_metadata)?))) } - // we dont check the env var because that would be handled by `clap` - fn url(&self, env_metadata: Option<&Environment>) -> Result { + // We dont check the env var because that would be handled by `clap`. + // This function is made public because [`JsonRpcClient`] does not expose + // the raw rpc url. 
+ pub fn url(&self, env_metadata: Option<&Environment>) -> Result { if let Some(url) = self.rpc_url.as_ref() { Ok(url.clone()) } else if let Some(url) = env_metadata.and_then(|env| env.rpc_url()) { diff --git a/crates/dojo-world/src/manifest/mod.rs b/crates/dojo-world/src/manifest/mod.rs index de19e77b15..0c3a9c73d6 100644 --- a/crates/dojo-world/src/manifest/mod.rs +++ b/crates/dojo-world/src/manifest/mod.rs @@ -29,9 +29,9 @@ mod test; mod types; pub use types::{ - AbiFormat, BaseManifest, Class, ComputedValueEntrypoint, Contract, DeploymentManifest, - DojoContract, DojoModel, Manifest, ManifestMethods, Member, OverlayClass, OverlayContract, - OverlayDojoContract, OverlayDojoModel, OverlayManifest, + AbiFormat, BaseManifest, Class, ComputedValueEntrypoint, DeploymentManifest, DojoContract, + DojoModel, Manifest, ManifestMethods, Member, OverlayClass, OverlayContract, + OverlayDojoContract, OverlayDojoModel, OverlayManifest, WorldContract, WorldMetadata, }; pub const WORLD_CONTRACT_NAME: &str = "dojo::world::world"; @@ -65,10 +65,10 @@ pub enum AbstractManifestError { Json(#[from] serde_json::Error), } -impl From> for Manifest { +impl From> for Manifest { fn from(value: Manifest) -> Self { Manifest::new( - Contract { + WorldContract { class_hash: value.inner.class_hash, abi: value.inner.abi, original_class_hash: value.inner.original_class_hash, @@ -254,7 +254,7 @@ impl DeploymentManifest { models, contracts, world: Manifest::new( - Contract { + WorldContract { address: Some(world_address), class_hash: world_class_hash, ..Default::default() @@ -607,7 +607,7 @@ impl ManifestMethods for DojoModel { } } -impl ManifestMethods for Contract { +impl ManifestMethods for WorldContract { type OverlayType = OverlayContract; fn abi(&self) -> Option<&AbiFormat> { diff --git a/crates/dojo-world/src/manifest/types.rs b/crates/dojo-world/src/manifest/types.rs index fe8c09dc5e..1ef454c202 100644 --- a/crates/dojo-world/src/manifest/types.rs +++ b/crates/dojo-world/src/manifest/types.rs @@ -30,7 +30,7 @@ pub struct BaseManifest { #[derive(Clone, Debug, Serialize, Deserialize)] #[cfg_attr(test, derive(PartialEq))] pub struct DeploymentManifest { - pub world: Manifest, + pub world: Manifest, pub base: Manifest, pub contracts: Vec>, pub models: Vec>, @@ -117,7 +117,7 @@ pub struct DojoModel { #[derive(Clone, Default, Debug, Serialize, Deserialize)] #[cfg_attr(test, derive(PartialEq))] #[serde(tag = "kind")] -pub struct Contract { +pub struct WorldContract { #[serde_as(as = "UfeHex")] pub class_hash: FieldElement, #[serde_as(as = "UfeHex")] @@ -128,8 +128,8 @@ pub struct Contract { #[serde_as(as = "Option")] pub transaction_hash: Option, pub block_number: Option, - // used by World contract pub seed: Option, + pub metadata: Option, } #[serde_as] @@ -286,3 +286,11 @@ impl PartialEq for AbiFormat { } } } + +#[serde_as] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq))] +pub struct WorldMetadata { + pub profile_name: String, + pub rpc_url: String, +} diff --git a/crates/sozo/ops/src/migration/migration_test.rs b/crates/sozo/ops/src/migration/migration_test.rs index 22ae92ff25..8758de2b44 100644 --- a/crates/sozo/ops/src/migration/migration_test.rs +++ b/crates/sozo/ops/src/migration/migration_test.rs @@ -117,7 +117,6 @@ fn migrate_world_without_seed_will_fail() { assert!(res.is_err_and(|e| e.to_string().contains("Missing seed for World deployment."))) } -#[ignore] #[tokio::test] async fn migration_from_remote() { let config = 
build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); @@ -139,10 +138,13 @@ async fn migration_from_remote() { ExecutionEncoding::New, ); + let profile_name = ws.current_profile().unwrap().to_string(); + let manifest = BaseManifest::load_from_path( - &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), ) .unwrap(); + let world = WorldDiff::compute(manifest, None); let mut migration = prepare_for_migration( @@ -156,9 +158,10 @@ async fn migration_from_remote() { execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); let local_manifest = BaseManifest::load_from_path( - &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(BASE_DIR), + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), ) .unwrap(); + let remote_manifest = DeploymentManifest::load_from_remote( JsonRpcClient::new(HttpTransport::new(sequencer.url())), migration.world_address().unwrap(), diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index d2f687e750..4c096fe037 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -7,8 +7,9 @@ use dojo_world::contracts::abi::world::ResourceMetadata; use dojo_world::contracts::cairo_utils; use dojo_world::contracts::world::WorldContract; use dojo_world::manifest::{ - AbiFormat, AbstractManifestError, BaseManifest, Contract, DeploymentManifest, DojoContract, - DojoModel, Manifest, ManifestMethods, OverlayManifest, + AbiFormat, AbstractManifestError, BaseManifest, DeploymentManifest, DojoContract, DojoModel, + Manifest, ManifestMethods, OverlayManifest, WorldContract as ManifestWorldContract, + WorldMetadata, }; use dojo_world::metadata::dojo_metadata_from_workspace; use dojo_world::migration::contract::ContractMigration; @@ -57,6 +58,7 @@ pub async fn migrate( ws: &Workspace<'_>, world_address: Option, chain_id: String, + rpc_url: String, account: &SingleOwnerAccount, name: Option, dry_run: bool, @@ -105,38 +107,20 @@ where let mut strategy = prepare_migration(&target_dir, diff, name.clone(), world_address, &ui)?; let world_address = strategy.world_address().expect("world address must exist"); - if dry_run { + let migration_output = if dry_run { print_strategy(&ui, account.provider(), &strategy).await; - - update_manifests_and_abis( - ws, - local_manifest, - &profile_dir, - &profile_name, - MigrationOutput { world_address, ..Default::default() }, - name.as_ref(), - ) - .await?; + MigrationOutput { world_address, ..Default::default() } } else { // Migrate according to the diff. 
match apply_diff(ws, account, txn_config, &mut strategy).await { - Ok(migration_output) => { - update_manifests_and_abis( - ws, - local_manifest, - &profile_dir, - &profile_name, - migration_output, - name.as_ref(), - ) - .await?; - } + Ok(migration_output) => migration_output, Err(e) => { update_manifests_and_abis( ws, local_manifest, &profile_dir, &profile_name, + &rpc_url, MigrationOutput { world_address, ..Default::default() }, name.as_ref(), ) @@ -146,6 +130,17 @@ where } }; + update_manifests_and_abis( + ws, + local_manifest, + &profile_dir, + &profile_name, + &rpc_url, + migration_output, + name.as_ref(), + ) + .await?; + Ok(()) } @@ -154,6 +149,7 @@ async fn update_manifests_and_abis( local_manifest: BaseManifest, profile_dir: &Utf8PathBuf, profile_name: &str, + rpc_url: &str, migration_output: MigrationOutput, salt: Option<&String>, ) -> Result<()> { @@ -165,6 +161,11 @@ async fn update_manifests_and_abis( let mut local_manifest: DeploymentManifest = local_manifest.into(); + local_manifest.world.inner.metadata = Some(WorldMetadata { + profile_name: profile_name.to_string(), + rpc_url: rpc_url.to_string(), + }); + if deployed_path.exists() { let previous_manifest = DeploymentManifest::load_from_path(&deployed_path)?; local_manifest.merge_from_previous(previous_manifest); @@ -264,7 +265,8 @@ async fn update_manifest_abis( manifest.inner.set_abi(Some(AbiFormat::Path(deployed_relative_path))); } - inner_helper::(profile_dir, profile_name, &mut local_manifest.world).await; + inner_helper::(profile_dir, profile_name, &mut local_manifest.world) + .await; for contract in local_manifest.contracts.iter_mut() { inner_helper::(profile_dir, profile_name, contract).await; diff --git a/crates/torii/types-test/Scarb.lock b/crates/torii/types-test/Scarb.lock index ccfaf02af8..112ae6dbe9 100644 --- a/crates/torii/types-test/Scarb.lock +++ b/crates/torii/types-test/Scarb.lock @@ -15,7 +15,7 @@ source = "git+https://github.com/dojoengine/dojo?tag=v0.3.11#1e651b5d4d3b79b14a7 [[package]] name = "types_test" -version = "0.5.1" +version = "0.6.0" dependencies = [ "dojo", ] diff --git a/examples/spawn-and-move/manifests/dev/manifest.json b/examples/spawn-and-move/manifests/dev/manifest.json index 8870aef2b3..bb9ce0f641 100644 --- a/examples/spawn-and-move/manifests/dev/manifest.json +++ b/examples/spawn-and-move/manifests/dev/manifest.json @@ -1,6 +1,6 @@ { "world": { - "kind": "Contract", + "kind": "WorldContract", "class_hash": "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd", "original_class_hash": "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd", "abi": [ @@ -664,6 +664,10 @@ "transaction_hash": "0x6afefdcc49b3563a4f3657900ba71e9f9356861b15b942a73f2018f046a1048", "block_number": 3, "seed": "dojo_examples", + "metadata": { + "profile_name": "dev", + "rpc_url": "http://localhost:5050/" + }, "name": "dojo::world::world" }, "base": { diff --git a/examples/spawn-and-move/manifests/dev/manifest.toml b/examples/spawn-and-move/manifests/dev/manifest.toml index 4da19e7528..156abd3709 100644 --- a/examples/spawn-and-move/manifests/dev/manifest.toml +++ b/examples/spawn-and-move/manifests/dev/manifest.toml @@ -1,5 +1,5 @@ [world] -kind = "Contract" +kind = "WorldContract" class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" original_class_hash = "0x799bc4e9da10bfb3dd88e6f223c9cfbf7745435cd14f5d69675ea448e578cd" abi = "abis/deployments/dojo_world_world.json" @@ -9,6 +9,10 @@ block_number = 3 seed = "dojo_examples" name = "dojo::world::world" 
+[world.metadata] +profile_name = "dev" +rpc_url = "http://localhost:5050/" + [base] kind = "Class" class_hash = "0x679177a2cb757694ac4f326d01052ff0963eac0bc2a17116a2b87badcdf6f76" From 4ec33212177648aa2ccafccfe38bfd9331db393d Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Fri, 12 Apr 2024 17:04:47 +0800 Subject: [PATCH 11/23] feat(katana): add executor metrics (#1791) ref #1369 Add metrics on katana executor; tracking the total L1 **gas** and Cairo **steps** used. There were two approaches that i thought of; 1. record the metrics on every tx execution, or 2. on every block ~Decided to go with (1) as it would allow to measure it in realtime (as the tx is being executed), instead of having to wait until the block is finished being processed.~ Thought im not exactly sure which one is the ideal one. Doing (1) might be less performant bcs we have to acquire the lock to the metrics recorder more frequently (ie every tx), as opposed to only updating the metrics once every block. another thing to note, currently doing (1) would require all executor implementations to define the metrics in their own implmentations, meaning have to duplicate code. If do (2) can just define it under `block_producer` scope and be executor agnostic. EDIT: doing (2). metrics are collected upon completion of block production --- some changes are made to gather the value after block production: - simplify params on `backend::do_mine_block`, now only accept two args; `BlockEnv` and `ExecutionOutput` - add a new type `ExecutionStats` under `katana-executor`, this is where executor would store the gas and steps value --- Cargo.lock | 2 ++ crates/katana/core/Cargo.toml | 2 ++ crates/katana/core/src/backend/mod.rs | 36 ++++++++++--------- crates/katana/core/src/sequencer.rs | 8 ++--- .../katana/core/src/service/block_producer.rs | 27 ++++++-------- crates/katana/core/src/service/metrics.rs | 15 ++++++++ crates/katana/core/src/service/mod.rs | 30 ++++++++++++++++ crates/katana/executor/src/abstraction/mod.rs | 11 ++++++ .../src/implementation/blockifier/mod.rs | 14 +++++--- .../executor/src/implementation/sir/mod.rs | 11 ++++-- crates/katana/executor/tests/executor.rs | 23 +++++++++--- crates/katana/executor/tests/simulate.rs | 4 ++- crates/katana/primitives/src/receipt.rs | 12 +++++++ 13 files changed, 145 insertions(+), 50 deletions(-) create mode 100644 crates/katana/core/src/service/metrics.rs diff --git a/Cargo.lock b/Cargo.lock index 86b07d5fe3..2b16039879 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6682,6 +6682,7 @@ dependencies = [ "cairo-vm 0.9.2", "convert_case 0.6.0", "derive_more", + "dojo-metrics", "flate2", "futures", "hex", @@ -6692,6 +6693,7 @@ dependencies = [ "katana-provider", "katana-tasks", "lazy_static", + "metrics", "parking_lot 0.12.1", "rand", "reqwest", diff --git a/crates/katana/core/Cargo.toml b/crates/katana/core/Cargo.toml index aeb7c1af4e..68c395f28b 100644 --- a/crates/katana/core/Cargo.toml +++ b/crates/katana/core/Cargo.toml @@ -15,6 +15,8 @@ katana-tasks.workspace = true anyhow.workspace = true async-trait.workspace = true +dojo-metrics.workspace = true +metrics.workspace = true cairo-lang-casm = "2.3.1" cairo-lang-starknet = "2.3.1" cairo-vm.workspace = true diff --git a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index badd55ab36..209b58a2dc 100644 --- a/crates/katana/core/src/backend/mod.rs +++ b/crates/katana/core/src/backend/mod.rs @@ -1,12 +1,11 @@ use std::sync::Arc; -use katana_executor::ExecutorFactory; +use katana_executor::{ExecutionOutput, 
ExecutionResult, ExecutorFactory}; use katana_primitives::block::{ Block, FinalityStatus, GasPrices, Header, PartialHeader, SealedBlockWithStatus, }; use katana_primitives::chain::ChainId; use katana_primitives::env::BlockEnv; -use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::version::CURRENT_STARKNET_VERSION; use katana_primitives::FieldElement; use katana_provider::providers::fork::ForkedProvider; @@ -26,7 +25,7 @@ pub mod storage; use self::config::StarknetConfig; use self::storage::Blockchain; use crate::env::BlockContextGenerator; -use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome, TxWithOutcome}; +use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome}; use crate::utils::get_current_timestamp; pub(crate) const LOG_TARGET: &str = "katana::core::backend"; @@ -120,17 +119,20 @@ impl Backend { pub fn do_mine_block( &self, block_env: &BlockEnv, - txs_outcomes: Vec, - state_updates: StateUpdatesWithDeclaredClasses, + execution_output: ExecutionOutput, ) -> Result { - let mut txs = vec![]; - let mut receipts = vec![]; - let mut execs = vec![]; - - for t in txs_outcomes { - txs.push(t.tx); - receipts.push(t.receipt); - execs.push(t.exec_info); + // we optimistically allocate the maximum amount possible + let mut txs = Vec::with_capacity(execution_output.transactions.len()); + let mut traces = Vec::with_capacity(execution_output.transactions.len()); + let mut receipts = Vec::with_capacity(execution_output.transactions.len()); + + // only include successful transactions in the block + for (tx, res) in execution_output.transactions { + if let ExecutionResult::Success { receipt, trace, .. } = res { + txs.push(tx); + traces.push(trace); + receipts.push(receipt); + } } let prev_hash = BlockHashProvider::latest_hash(self.blockchain.provider())?; @@ -156,9 +158,9 @@ impl Backend { BlockWriter::insert_block_with_states_and_receipts( self.blockchain.provider(), block, - state_updates, + execution_output.states, receipts, - execs, + traces, )?; info!( @@ -168,7 +170,7 @@ impl Backend { "Block mined.", ); - Ok(MinedBlockOutcome { block_number }) + Ok(MinedBlockOutcome { block_number, stats: execution_output.stats }) } pub fn update_block_env(&self, block_env: &mut BlockEnv) { @@ -192,7 +194,7 @@ impl Backend { &self, block_env: &BlockEnv, ) -> Result { - self.do_mine_block(block_env, Default::default(), Default::default()) + self.do_mine_block(block_env, Default::default()) } } diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 790be10c69..0a0db8678d 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -85,13 +85,13 @@ impl KatanaSequencer { let block_producer = Arc::new(block_producer); - tokio::spawn(NodeService { + tokio::spawn(NodeService::new( + Arc::clone(&pool), miner, - pool: Arc::clone(&pool), - block_producer: block_producer.clone(), + block_producer.clone(), #[cfg(feature = "messaging")] messaging, - }); + )); Ok(Self { pool, config, backend, block_producer }) } diff --git a/crates/katana/core/src/service/block_producer.rs b/crates/katana/core/src/service/block_producer.rs index 09e5be0a1b..4205290389 100644 --- a/crates/katana/core/src/service/block_producer.rs +++ b/crates/katana/core/src/service/block_producer.rs @@ -8,7 +8,7 @@ use std::time::Duration; use futures::channel::mpsc::{channel, Receiver, Sender}; use futures::stream::{Stream, StreamExt}; use futures::FutureExt; -use katana_executor::{BlockExecutor, 
ExecutionOutput, ExecutionResult, ExecutorFactory}; +use katana_executor::{BlockExecutor, ExecutionResult, ExecutionStats, ExecutorFactory}; use katana_primitives::block::{BlockHashOrNumber, ExecutableBlock, PartialHeader}; use katana_primitives::receipt::Receipt; use katana_primitives::trace::TxExecInfo; @@ -42,8 +42,10 @@ pub enum BlockProductionError { TransactionExecutionError(#[from] katana_executor::ExecutorError), } +#[derive(Debug, Clone)] pub struct MinedBlockOutcome { pub block_number: u64, + pub stats: ExecutionStats, } #[derive(Debug, Clone)] @@ -264,19 +266,8 @@ impl IntervalBlockProducer { trace!(target: LOG_TARGET, "Creating new block."); let block_env = executor.block_env(); - let ExecutionOutput { states, transactions } = executor.take_execution_output()?; - - let transactions = transactions - .into_iter() - .filter_map(|(tx, res)| match res { - ExecutionResult::Failed { .. } => None, - ExecutionResult::Success { receipt, trace, .. } => { - Some(TxWithOutcome { tx, receipt, exec_info: trace }) - } - }) - .collect::>(); - - let outcome = backend.do_mine_block(&block_env, transactions, states)?; + let execution_output = executor.take_execution_output()?; + let outcome = backend.do_mine_block(&block_env, execution_output)?; trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); @@ -515,8 +506,10 @@ impl InstantBlockProducer { executor.execute_block(block)?; - let ExecutionOutput { states, transactions } = executor.take_execution_output()?; - let txs_outcomes = transactions + let execution_output = executor.take_execution_output()?; + let txs_outcomes = execution_output + .transactions + .clone() .into_iter() .filter_map(|(tx, res)| match res { ExecutionResult::Success { receipt, trace, .. } => { @@ -526,7 +519,7 @@ impl InstantBlockProducer { }) .collect::>(); - let outcome = backend.do_mine_block(&block_env, txs_outcomes.clone(), states)?; + let outcome = backend.do_mine_block(&block_env, execution_output)?; trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); diff --git a/crates/katana/core/src/service/metrics.rs b/crates/katana/core/src/service/metrics.rs new file mode 100644 index 0000000000..3749ab8c03 --- /dev/null +++ b/crates/katana/core/src/service/metrics.rs @@ -0,0 +1,15 @@ +use dojo_metrics::Metrics; +use metrics::Counter; + +pub(crate) struct ServiceMetrics { + pub(crate) block_producer: BlockProducerMetrics, +} + +#[derive(Metrics)] +#[metrics(scope = "block_producer")] +pub(crate) struct BlockProducerMetrics { + /// The amount of L1 gas processed in a block. + pub(crate) l1_gas_processed_total: Counter, + /// The amount of Cairo steps processed in a block. 
+ pub(crate) cairo_steps_processed_total: Counter, +} diff --git a/crates/katana/core/src/service/mod.rs b/crates/katana/core/src/service/mod.rs index a636764ec1..74b9fb2a18 100644 --- a/crates/katana/core/src/service/mod.rs +++ b/crates/katana/core/src/service/mod.rs @@ -13,11 +13,13 @@ use starknet::core::types::FieldElement; use tracing::{error, info}; use self::block_producer::BlockProducer; +use self::metrics::{BlockProducerMetrics, ServiceMetrics}; use crate::pool::TransactionPool; pub mod block_producer; #[cfg(feature = "messaging")] pub mod messaging; +mod metrics; #[cfg(feature = "messaging")] use self::messaging::{MessagingOutcome, MessagingService}; @@ -39,6 +41,28 @@ pub struct NodeService { /// The messaging service #[cfg(feature = "messaging")] pub(crate) messaging: Option>, + /// Metrics for recording the service operations + metrics: ServiceMetrics, +} + +impl NodeService { + pub fn new( + pool: Arc, + miner: TransactionMiner, + block_producer: Arc>, + #[cfg(feature = "messaging")] messaging: Option>, + ) -> Self { + let metrics = ServiceMetrics { block_producer: BlockProducerMetrics::default() }; + + Self { + pool, + miner, + block_producer, + metrics, + #[cfg(feature = "messaging")] + messaging, + } + } } impl Future for NodeService { @@ -68,6 +92,12 @@ impl Future for NodeService { match res { Ok(outcome) => { info!(target: LOG_TARGET, block_number = %outcome.block_number, "Mined block."); + + let metrics = &pin.metrics.block_producer; + let gas_used = outcome.stats.l1_gas_used; + let steps_used = outcome.stats.cairo_steps_used; + metrics.l1_gas_processed_total.increment(gas_used as u64); + metrics.cairo_steps_processed_total.increment(steps_used as u64); } Err(err) => { diff --git a/crates/katana/executor/src/abstraction/mod.rs b/crates/katana/executor/src/abstraction/mod.rs index 313ce8e045..5e231cc823 100644 --- a/crates/katana/executor/src/abstraction/mod.rs +++ b/crates/katana/executor/src/abstraction/mod.rs @@ -72,9 +72,20 @@ impl SimulationFlag { } } +/// Stats about the transactions execution. +#[derive(Debug, Clone, Default)] +pub struct ExecutionStats { + /// The total gas used. + pub l1_gas_used: u128, + /// The total cairo steps used. + pub cairo_steps_used: u128, +} + /// The output of a executor after a series of executions. #[derive(Debug, Default)] pub struct ExecutionOutput { + /// Statistics throughout the executions process. + pub stats: ExecutionStats, /// The state updates produced by the executions. pub states: StateUpdatesWithDeclaredClasses, /// The transactions that have been executed. 
diff --git a/crates/katana/executor/src/implementation/blockifier/mod.rs b/crates/katana/executor/src/implementation/blockifier/mod.rs index a18c4cb0e6..1ad7bd2094 100644 --- a/crates/katana/executor/src/implementation/blockifier/mod.rs +++ b/crates/katana/executor/src/implementation/blockifier/mod.rs @@ -19,8 +19,9 @@ use tracing::info; use self::output::receipt_from_exec_info; use self::state::CachedState; use crate::{ - BlockExecutor, EntryPointCall, ExecutionError, ExecutionOutput, ExecutionResult, ExecutorExt, - ExecutorFactory, ExecutorResult, ResultAndStates, SimulationFlag, StateProviderDb, + BlockExecutor, EntryPointCall, ExecutionError, ExecutionOutput, ExecutionResult, + ExecutionStats, ExecutorExt, ExecutorFactory, ExecutorResult, ResultAndStates, SimulationFlag, + StateProviderDb, }; pub(crate) const LOG_TARGET: &str = "katana::executor::blockifier"; @@ -69,6 +70,7 @@ pub struct StarknetVMProcessor<'a> { state: CachedState>, transactions: Vec<(TxWithHash, ExecutionResult)>, simulation_flags: SimulationFlag, + stats: ExecutionStats, } impl<'a> StarknetVMProcessor<'a> { @@ -81,7 +83,7 @@ impl<'a> StarknetVMProcessor<'a> { let transactions = Vec::new(); let block_context = utils::block_context_from_envs(&block_env, &cfg_env); let state = state::CachedState::new(StateProviderDb(state)); - Self { block_context, state, transactions, simulation_flags } + Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } fn fill_block_env_from_header(&mut self, header: &PartialHeader) { @@ -159,6 +161,9 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { crate::utils::log_resources(&trace.actual_resources); crate::utils::log_events(receipt.events()); + self.stats.l1_gas_used += fee.gas_consumed; + self.stats.cairo_steps_used += receipt.resources_used().steps as u128; + if let Some(reason) = receipt.revert_reason() { info!(target: LOG_TARGET, reason = %reason, "Transaction reverted."); } @@ -187,7 +192,8 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { fn take_execution_output(&mut self) -> ExecutorResult { let states = utils::state_update_from_cached_state(&self.state); let transactions = std::mem::take(&mut self.transactions); - Ok(ExecutionOutput { states, transactions }) + let stats = std::mem::take(&mut self.stats); + Ok(ExecutionOutput { stats, states, transactions }) } fn state(&self) -> Box { diff --git a/crates/katana/executor/src/implementation/sir/mod.rs b/crates/katana/executor/src/implementation/sir/mod.rs index f73930b73d..bb6c44d704 100644 --- a/crates/katana/executor/src/implementation/sir/mod.rs +++ b/crates/katana/executor/src/implementation/sir/mod.rs @@ -23,7 +23,7 @@ use crate::abstraction::{ BlockExecutor, ExecutionOutput, ExecutorExt, ExecutorFactory, ExecutorResult, SimulationFlag, StateProviderDb, }; -use crate::{EntryPointCall, ExecutionError, ExecutionResult, ResultAndStates}; +use crate::{EntryPointCall, ExecutionError, ExecutionResult, ExecutionStats, ResultAndStates}; pub(crate) const LOG_TARGET: &str = "katana::executor::sir"; @@ -72,6 +72,7 @@ pub struct StarknetVMProcessor<'a> { state: CachedState, PermanentContractClassCache>, transactions: Vec<(TxWithHash, ExecutionResult)>, simulation_flags: SimulationFlag, + stats: ExecutionStats, } impl<'a> StarknetVMProcessor<'a> { @@ -85,7 +86,7 @@ impl<'a> StarknetVMProcessor<'a> { let block_context = utils::block_context_from_envs(&block_env, &cfg_env); let state = CachedState::new(StateProviderDb(state), PermanentContractClassCache::default()); - Self { block_context, 
state, transactions, simulation_flags } + Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } fn fill_block_env_from_header(&mut self, header: &PartialHeader) { @@ -160,6 +161,9 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { crate::utils::log_resources(&trace.actual_resources); crate::utils::log_events(receipt.events()); + self.stats.l1_gas_used += fee.gas_consumed; + self.stats.cairo_steps_used += receipt.resources_used().steps as u128; + if let Some(reason) = receipt.revert_reason() { info!(target: LOG_TARGET, reason = %reason, "Transaction reverted."); } @@ -194,7 +198,8 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { fn take_execution_output(&mut self) -> ExecutorResult { let states = utils::state_update_from_cached_state(&self.state); let transactions = std::mem::take(&mut self.transactions); - Ok(ExecutionOutput { states, transactions }) + let stats = std::mem::take(&mut self.stats); + Ok(ExecutionOutput { stats, states, transactions }) } fn state(&self) -> Box { diff --git a/crates/katana/executor/tests/executor.rs b/crates/katana/executor/tests/executor.rs index 70a85ee6fb..3d64cf2393 100644 --- a/crates/katana/executor/tests/executor.rs +++ b/crates/katana/executor/tests/executor.rs @@ -249,13 +249,28 @@ fn test_executor_with_valid_blocks_impl( ); // assert the state updates after all the blocks are executed - // + let mut actual_total_gas: u128 = 0; + let mut actual_total_steps: u128 = 0; // assert the state updates - let ExecutionOutput { states, transactions } = executor.take_execution_output().unwrap(); - // asserts that the executed transactions are stored - let actual_txs: Vec = transactions.iter().map(|(tx, _)| tx.clone()).collect(); + let ExecutionOutput { states, transactions, stats } = executor.take_execution_output().unwrap(); + // asserts that the executed transactions are stored + let actual_txs: Vec = transactions + .iter() + .map(|(tx, res)| { + if let Some(fee) = res.fee() { + actual_total_gas += fee.gas_consumed; + } + if let Some(rec) = res.receipt() { + actual_total_steps += rec.resources_used().steps as u128; + } + tx.clone() + }) + .collect(); + + assert_eq!(actual_total_gas, stats.l1_gas_used); + assert_eq!(actual_total_steps, stats.cairo_steps_used); assert_eq!(actual_txs, expected_txs); let actual_nonce_updates = states.state_updates.nonce_updates; diff --git a/crates/katana/executor/tests/simulate.rs b/crates/katana/executor/tests/simulate.rs index 7a8905fa43..076f2ba013 100644 --- a/crates/katana/executor/tests/simulate.rs +++ b/crates/katana/executor/tests/simulate.rs @@ -62,9 +62,11 @@ fn test_simulate_tx_impl( }),); // check that the underlying state is not modified - let ExecutionOutput { states, transactions } = + let ExecutionOutput { states, transactions, stats } = executor.take_execution_output().expect("must take output"); + assert_eq!(stats.l1_gas_used, 0, "no gas usage should be recorded"); + assert_eq!(stats.cairo_steps_used, 0, "no steps usage should be recorded"); assert!(transactions.is_empty(), "simulated tx should not be stored"); assert!(states.state_updates.nonce_updates.is_empty(), "no state updates"); diff --git a/crates/katana/primitives/src/receipt.rs b/crates/katana/primitives/src/receipt.rs index b9f5bb5e9f..f17e6d9cc9 100644 --- a/crates/katana/primitives/src/receipt.rs +++ b/crates/katana/primitives/src/receipt.rs @@ -122,6 +122,7 @@ impl Receipt { } } + /// Returns the L1 messages sent. 
pub fn messages_sent(&self) -> &[MessageToL1] { match self { Receipt::Invoke(rct) => &rct.messages_sent, @@ -131,6 +132,7 @@ impl Receipt { } } + /// Returns the events emitted. pub fn events(&self) -> &[Event] { match self { Receipt::Invoke(rct) => &rct.events, @@ -139,6 +141,16 @@ impl Receipt { Receipt::DeployAccount(rct) => &rct.events, } } + + /// Returns the execution resources used. + pub fn resources_used(&self) -> &TxExecutionResources { + match self { + Receipt::Invoke(rct) => &rct.execution_resources, + Receipt::Declare(rct) => &rct.execution_resources, + Receipt::L1Handler(rct) => &rct.execution_resources, + Receipt::DeployAccount(rct) => &rct.execution_resources, + } + } } /// Transaction execution resources. From 99bd4cba7536a38a3d821f0e057845b70c542608 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Fri, 12 Apr 2024 17:07:49 +0800 Subject: [PATCH 12/23] katana: update grafana dashboard with execution metrics (#1818) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ref #1791 #1369 Screenshot 2024-04-12 at 2 36 56 AM showing total gas and steps using a simple line charts, tracking its growth over time --- monitoring/grafana/katana.json | 233 +++++++++++++++++++++++++++++++-- 1 file changed, 223 insertions(+), 10 deletions(-) diff --git a/monitoring/grafana/katana.json b/monitoring/grafana/katana.json index 9d0626ffa0..1e85a36de3 100644 --- a/monitoring/grafana/katana.json +++ b/monitoring/grafana/katana.json @@ -36,6 +36,219 @@ "x": 0, "y": 0 }, + "id": 122, + "panels": [], + "title": "Execution", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "The total amount of L1 gas that has been processed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Total gas", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 121, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "katana_block_producer_l1_gas_processed_total{instance=\"localhost:9100\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "L1 Gas Processed", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "The total amount of Cairo steps that has been processed", + "fieldConfig": { + "defaults": { + "color": { + "mode": 
"palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Total steps", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 1 + }, + "id": 123, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "katana_block_producer_cairo_steps_processed_total{instance=\"localhost:9100\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Cairo Steps Processed", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 9 + }, "id": 108, "panels": [], "title": "RPC Server", @@ -131,7 +344,7 @@ "h": 8, "w": 12, "x": 0, - "y": 1 + "y": 10 }, "id": 109, "options": { @@ -193,7 +406,7 @@ "h": 8, "w": 12, "x": 12, - "y": 1 + "y": 10 }, "id": 111, "maxDataPoints": 25, @@ -322,7 +535,7 @@ "h": 8, "w": 12, "x": 0, - "y": 9 + "y": 18 }, "id": 120, "options": { @@ -380,7 +593,7 @@ "h": 8, "w": 12, "x": 12, - "y": 9 + "y": 18 }, "id": 112, "maxDataPoints": 25, @@ -451,7 +664,7 @@ "h": 1, "w": 24, "x": 0, - "y": 17 + "y": 26 }, "id": 97, "panels": [], @@ -523,7 +736,7 @@ "h": 8, "w": 12, "x": 0, - "y": 18 + "y": 27 }, "id": 99, "options": { @@ -620,7 +833,7 @@ "h": 8, "w": 12, "x": 12, - "y": 18 + "y": 27 }, "id": 101, "options": { @@ -716,7 +929,7 @@ "h": 8, "w": 12, "x": 0, - "y": 26 + "y": 35 }, "id": 98, "options": { @@ -878,7 +1091,7 @@ "h": 8, "w": 12, "x": 12, - "y": 26 + "y": 35 }, "id": 100, "options": { @@ -953,6 +1166,6 @@ "timezone": "", "title": "katana", "uid": "2k8BXz24x", - "version": 3, + "version": 6, "weekStart": "" } From 59ff3a6b99af85342d259e8397c46fc8bcc6b02a Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Fri, 12 Apr 2024 10:01:17 -0400 Subject: [PATCH 13/23] Prepare release: v0.6.1-alpha.1 (#1824) --- Cargo.lock | 82 ++++++++++---------- Cargo.toml | 2 +- crates/katana/runner/runner-macro/Cargo.toml | 2 +- 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2b16039879..408bd71b93 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1275,7 +1275,7 @@ dependencies = [ [[package]] name = "benches" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "clap", @@ -2815,7 +2815,7 @@ dependencies = [ [[package]] name = "common" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "reqwest", @@ -3544,7 +3544,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = 
"dojo-bindgen" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "async-trait", "cainome 0.1.5", @@ -3560,15 +3560,15 @@ dependencies = [ [[package]] name = "dojo-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [[package]] name = "dojo-lang" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3618,7 +3618,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3640,7 +3640,7 @@ dependencies = [ [[package]] name = "dojo-metrics" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "hyper", @@ -3658,7 +3658,7 @@ dependencies = [ [[package]] name = "dojo-test-utils" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -3692,7 +3692,7 @@ dependencies = [ [[package]] name = "dojo-types" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "crypto-bigint", "hex", @@ -3707,7 +3707,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -3743,7 +3743,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "cairo-lang-starknet", "camino", @@ -6618,7 +6618,7 @@ dependencies = [ [[package]] name = "katana" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6645,7 +6645,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "bytes", "katana-primitives", @@ -6653,7 +6653,7 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "proc-macro2", "quote", @@ -6663,7 +6663,7 @@ dependencies = [ [[package]] name = "katana-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-contract", "alloy-network", @@ -6711,7 +6711,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "cairo-lang-starknet", @@ -6733,7 +6733,7 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6761,7 +6761,7 @@ dependencies = [ [[package]] name = "katana-primitives" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6787,7 +6787,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "auto_impl", @@ -6812,7 +6812,7 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_matches", @@ -6848,7 +6848,7 @@ dependencies = [ [[package]] name = "katana-rpc-api" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "jsonrpsee 0.16.3", "katana-core", @@ -6859,7 +6859,7 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6880,7 +6880,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.6.1-alpha.0" 
+version = "0.6.1-alpha.1" dependencies = [ "anyhow", "katana-executor", @@ -6892,7 +6892,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "chrono", @@ -6911,7 +6911,7 @@ dependencies = [ [[package]] name = "katana-tasks" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "futures", "rayon", @@ -10013,7 +10013,7 @@ checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09" [[package]] name = "runner-macro" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "quote", "syn 2.0.55", @@ -10240,7 +10240,7 @@ dependencies = [ [[package]] name = "saya" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "clap", @@ -10259,7 +10259,7 @@ dependencies = [ [[package]] name = "saya-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -10295,7 +10295,7 @@ dependencies = [ [[package]] name = "saya-provider" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -11058,7 +11058,7 @@ dependencies = [ [[package]] name = "sozo" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -11109,7 +11109,7 @@ dependencies = [ [[package]] name = "sozo-ops" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -11158,7 +11158,7 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "starknet 0.9.0", @@ -12598,7 +12598,7 @@ dependencies = [ [[package]] name = "torii" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -12643,7 +12643,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "async-trait", "camino", @@ -12671,7 +12671,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -12708,7 +12708,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-graphql", @@ -12749,7 +12749,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "bytes", "crypto-bigint", @@ -12792,7 +12792,7 @@ dependencies = [ [[package]] name = "torii-relay" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -12827,7 +12827,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -13132,7 +13132,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [[package]] name = "ucd-trie" diff --git a/Cargo.toml b/Cargo.toml index 2906f5cfba..75e4ee6918 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [profile.performance] codegen-units = 1 diff --git a/crates/katana/runner/runner-macro/Cargo.toml b/crates/katana/runner/runner-macro/Cargo.toml index 837fe7afb2..bd3ba86c28 100644 --- 
a/crates/katana/runner/runner-macro/Cargo.toml
+++ b/crates/katana/runner/runner-macro/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "runner-macro"
-version = "0.6.1-alpha.0"
+version = "0.6.1-alpha.1"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

From 9a814ad59b3855bf8f3bbdeefe13c407cf4edf87 Mon Sep 17 00:00:00 2001
From: Tarrence van As
Date: Fri, 12 Apr 2024 10:07:17 -0400
Subject: [PATCH 14/23] Strip version v prefix with release-dispatch (#1825)

Makes it a bit easier to use the action.
---
 .github/workflows/release-dispatch.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/release-dispatch.yml b/.github/workflows/release-dispatch.yml
index 0e3faa3d13..0a8a08c3f6 100644
--- a/.github/workflows/release-dispatch.yml
+++ b/.github/workflows/release-dispatch.yml
@@ -19,7 +19,10 @@ jobs:
       # Workaround described here: https://github.com/actions/checkout/issues/760
       - uses: actions/checkout@v3
       - run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
-      - run: cargo release version ${{ inputs.version }} --execute --no-confirm && cargo release replace --execute --no-confirm
+      - run: |
+          version=${{ inputs.version }}
+          version=${version#v}
+          cargo release version ${{ inputs.version }} --execute --no-confirm && cargo release replace --execute --no-confirm
       - id: version_info
         run: |
           cargo install cargo-get

From e9fab2caf970621ed8311330f3752dc9635f60d7 Mon Sep 17 00:00:00 2001
From: Rémy Baranx
Date: Fri, 12 Apr 2024 23:49:34 +0700
Subject: [PATCH 15/23] feat: store contract abi and source in contract metadata (#1682)

sozo: store ABI and source in metadata registry

This PR updates the `sozo build` command to save ABI and expanded source
files into the `target` directory, for the world contract and every user
contract. These ABI and source files are then uploaded as IPFS artifacts in
the `ResourceMetadata` registry, for the world, models and contracts.
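For readers skimming this patch, here is an illustration-only sketch (not part
of the diff) of the IPFS upload flow the new metadata code relies on. The
`upload_artifact` helper, its parameters, and any gateway/credential values are
placeholders; the client calls (`IpfsClient::from_str`, `with_credentials`,
`add`) mirror the ones introduced in `crates/dojo-world/src/metadata.rs` below.

```rust
use std::io::Cursor;

use anyhow::Result;
use ipfs_api_backend_hyper::{IpfsApi, IpfsClient, TryFromUri};

/// Illustrative helper: read a local artifact (e.g. an ABI JSON file or an
/// expanded Cairo source file), pin it on an IPFS node and return the
/// resulting `ipfs://` URI.
async fn upload_artifact(
    path: &str,
    client_url: &str, // e.g. "https://ipfs.infura.io:5001"
    username: &str,
    password: &str,
) -> Result<String> {
    // Authenticated hyper-backed IPFS client, built the same way as in the
    // metadata module of this patch.
    let client = IpfsClient::from_str(client_url)?.with_credentials(username, password);

    // `add` streams the bytes to the node and returns the content hash.
    let data = std::fs::read(path)?;
    let response = client.add(Cursor::new(data)).await?;

    Ok(format!("ipfs://{}", response.hash))
}
```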
--- Cargo.lock | 1 + bin/sozo/src/commands/dev.rs | 2 +- bin/sozo/src/commands/migrate.rs | 2 +- bin/sozo/src/utils.rs | 2 +- crates/dojo-lang/Cargo.toml | 2 +- crates/dojo-lang/src/compiler.rs | 36 +- crates/dojo-test-utils/src/compiler.rs | 41 +- crates/dojo-world/src/metadata.rs | 209 +++++++- crates/dojo-world/src/metadata_test.rs | 117 ++++- .../src/metadata_test_data/abi.json | 17 + .../src/metadata_test_data/source.cairo | 79 +++ crates/dojo-world/src/migration/mod.rs | 1 + crates/sozo/ops/Cargo.toml | 1 + crates/sozo/ops/src/migration/mod.rs | 380 +++++++++----- crates/sozo/ops/src/tests/migration.rs | 496 ++++++++++++++++++ crates/sozo/ops/src/tests/mod.rs | 1 + crates/sozo/ops/src/tests/setup.rs | 50 +- .../torii/graphql/src/tests/metadata_test.rs | 6 +- crates/torii/libp2p/src/server/mod.rs | 2 +- 19 files changed, 1271 insertions(+), 174 deletions(-) create mode 100644 crates/dojo-world/src/metadata_test_data/abi.json create mode 100644 crates/dojo-world/src/metadata_test_data/source.cairo create mode 100644 crates/sozo/ops/src/tests/migration.rs diff --git a/Cargo.lock b/Cargo.lock index 408bd71b93..0c0faa343c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11137,6 +11137,7 @@ dependencies = [ "dojo-types", "dojo-world", "futures", + "ipfs-api-backend-hyper", "katana-runner", "notify", "notify-debouncer-mini", diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index 4c03b9d749..f92eda68d4 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -199,7 +199,7 @@ impl DevArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index 849fe88462..fc56cdee0e 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -129,7 +129,7 @@ impl MigrateArgs { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; let env_metadata = if config.manifest_path().exists() { - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs index 8bd219e5b7..7dbbfe28fd 100644 --- a/bin/sozo/src/utils.rs +++ b/bin/sozo/src/utils.rs @@ -25,7 +25,7 @@ pub fn load_metadata_from_config(config: &Config) -> Result, let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; diff --git a/crates/dojo-lang/Cargo.toml b/crates/dojo-lang/Cargo.toml index 277d7ea15c..2541730c92 100644 --- a/crates/dojo-lang/Cargo.toml +++ b/crates/dojo-lang/Cargo.toml @@ -16,6 +16,7 @@ cairo-lang-debug.workspace = true cairo-lang-defs.workspace = true cairo-lang-diagnostics.workspace = true cairo-lang-filesystem.workspace = true +cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true cairo-lang-parser.workspace = true cairo-lang-plugins.workspace = true @@ -50,7 +51,6 @@ tracing.workspace = true url = "2.2.2" [dev-dependencies] -cairo-lang-formatter.workspace = true cairo-lang-semantic.workspace = true cairo-lang-test-utils.workspace = true dojo-test-utils = { path = "../dojo-test-utils" } 
diff --git a/crates/dojo-lang/src/compiler.rs b/crates/dojo-lang/src/compiler.rs index 0f7d7d4163..010dab774a 100644 --- a/crates/dojo-lang/src/compiler.rs +++ b/crates/dojo-lang/src/compiler.rs @@ -1,4 +1,5 @@ use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::io::Write; use std::iter::zip; use std::ops::DerefMut; @@ -8,13 +9,14 @@ use cairo_lang_defs::db::DefsGroup; use cairo_lang_defs::ids::{ModuleId, ModuleItemId}; use cairo_lang_filesystem::db::FilesGroup; use cairo_lang_filesystem::ids::{CrateId, CrateLongId}; +use cairo_lang_formatter::format_string; use cairo_lang_semantic::db::SemanticGroup; use cairo_lang_starknet::abi; use cairo_lang_starknet::contract::{find_contracts, ContractDeclaration}; use cairo_lang_starknet::contract_class::{compile_prepared_db, ContractClass}; use cairo_lang_starknet::plugin::aux_data::StarkNetContractAuxData; use cairo_lang_utils::UpcastMut; -use camino::Utf8PathBuf; +use camino::{Utf8Path, Utf8PathBuf}; use convert_case::{Case, Casing}; use dojo_world::manifest::{ AbiFormat, Class, ComputedValueEntrypoint, DojoContract, DojoModel, Manifest, ManifestMethods, @@ -46,6 +48,8 @@ pub const ABIS_DIR: &str = "abis"; pub const CONTRACTS_DIR: &str = "contracts"; pub const MODELS_DIR: &str = "models"; +pub const SOURCES_DIR: &str = "src"; + pub(crate) const LOG_TARGET: &str = "dojo_lang::compiler"; #[cfg(test)] @@ -87,6 +91,8 @@ impl Compiler for DojoCompiler { ) -> Result<()> { let props: Props = unit.target().props()?; let target_dir = unit.target_dir(ws); + let sources_dir = target_dir.child(Utf8Path::new(SOURCES_DIR)); + let compiler_config = build_compiler_config(&unit, ws); let mut main_crate_ids = collect_main_crate_ids(&unit, db); @@ -118,11 +124,31 @@ impl Compiler for DojoCompiler { for (decl, class) in zip(contracts, classes) { let contract_full_path = decl.module_id().full_path(db.upcast_mut()); - let file_name = format!("{contract_full_path}.json"); - let mut file = target_dir.open_rw(file_name.clone(), "output file", ws.config())?; - serde_json::to_writer_pretty(file.deref_mut(), &class) - .with_context(|| format!("failed to serialize contract: {contract_full_path}"))?; + // save expanded contract source file + if let Ok(file_id) = db.module_main_file(decl.module_id()) { + if let Some(file_content) = db.file_content(file_id) { + let src_file_name = format!("{contract_full_path}.cairo").replace("::", "_"); + + let mut file = + sources_dir.open_rw(src_file_name.clone(), "source file", ws.config())?; + file.write(format_string(db, file_content.to_string()).as_bytes()) + .with_context(|| { + format!("failed to serialize contract source: {contract_full_path}") + })?; + } else { + return Err(anyhow!("failed to get source file content: {contract_full_path}")); + } + } else { + return Err(anyhow!("failed to get source file: {contract_full_path}")); + } + + // save JSON artifact file + let file_name = format!("{contract_full_path}.json"); + let mut file = target_dir.open_rw(file_name.clone(), "class file", ws.config())?; + serde_json::to_writer_pretty(file.deref_mut(), &class).with_context(|| { + format!("failed to serialize contract artifact: {contract_full_path}") + })?; let class_hash = compute_class_hash_of_contract_class(&class).with_context(|| { format!("problem computing class hash for contract `{contract_full_path}`") diff --git a/crates/dojo-test-utils/src/compiler.rs b/crates/dojo-test-utils/src/compiler.rs index 2486ec1eac..335c86f63c 100644 --- a/crates/dojo-test-utils/src/compiler.rs +++ b/crates/dojo-test-utils/src/compiler.rs @@ 
-11,25 +11,40 @@ use scarb::ops; use scarb_ui::Verbosity; pub fn build_test_config(path: &str) -> anyhow::Result { + build_full_test_config(path, true) +} + +pub fn build_full_test_config(path: &str, override_dirs: bool) -> anyhow::Result { let mut compilers = CompilerRepository::empty(); compilers.add(Box::new(DojoCompiler)).unwrap(); let cairo_plugins = CairoPluginRepository::default(); + let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); - let cache_dir = TempDir::new().unwrap(); - let config_dir = TempDir::new().unwrap(); - let target_dir = TempDir::new().unwrap(); + if override_dirs { + let cache_dir = TempDir::new().unwrap(); + let config_dir = TempDir::new().unwrap(); + let target_dir = TempDir::new().unwrap(); - let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); - Config::builder(path.canonicalize_utf8().unwrap()) - .global_cache_dir_override(Some(Utf8Path::from_path(cache_dir.path()).unwrap())) - .global_config_dir_override(Some(Utf8Path::from_path(config_dir.path()).unwrap())) - .target_dir_override(Some(Utf8Path::from_path(target_dir.path()).unwrap().to_path_buf())) - .ui_verbosity(Verbosity::Verbose) - .log_filter_directive(env::var_os("SCARB_LOG")) - .compilers(compilers) - .cairo_plugins(cairo_plugins.into()) - .build() + Config::builder(path.canonicalize_utf8().unwrap()) + .global_cache_dir_override(Some(Utf8Path::from_path(cache_dir.path()).unwrap())) + .global_config_dir_override(Some(Utf8Path::from_path(config_dir.path()).unwrap())) + .target_dir_override(Some( + Utf8Path::from_path(target_dir.path()).unwrap().to_path_buf(), + )) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() + } else { + Config::builder(path.canonicalize_utf8().unwrap()) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() + } } pub fn corelib() -> PathBuf { diff --git a/crates/dojo-world/src/metadata.rs b/crates/dojo-world/src/metadata.rs index a3c2c7d1ca..2d50f5fa70 100644 --- a/crates/dojo-world/src/metadata.rs +++ b/crates/dojo-world/src/metadata.rs @@ -3,24 +3,144 @@ use std::io::Cursor; use std::path::PathBuf; use anyhow::Result; +use camino::Utf8PathBuf; use ipfs_api_backend_hyper::{IpfsApi, IpfsClient, TryFromUri}; use scarb::core::{ManifestMetadata, Workspace}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::json; use url::Url; +use crate::manifest::{BaseManifest, WORLD_CONTRACT_NAME}; + #[cfg(test)] #[path = "metadata_test.rs"] mod test; -pub fn dojo_metadata_from_workspace(ws: &Workspace<'_>) -> Option { - Some(ws.current_package().ok()?.manifest.metadata.dojo()) +pub const IPFS_CLIENT_URL: &str = "https://ipfs.infura.io:5001"; +pub const IPFS_USERNAME: &str = "2EBrzr7ZASQZKH32sl2xWauXPSA"; +pub const IPFS_PASSWORD: &str = "12290b883db9138a8ae3363b6739d220"; + +// copy constants from dojo-lang to avoid circular dependency +pub const MANIFESTS_DIR: &str = "manifests"; +pub const ABIS_DIR: &str = "abis"; +pub const SOURCES_DIR: &str = "src"; +pub const BASE_DIR: &str = "base"; + +fn build_artifact_from_name( + source_dir: &Utf8PathBuf, + abi_dir: &Utf8PathBuf, + element_name: &str, +) -> ArtifactMetadata { + let sanitized_name = element_name.replace("::", "_"); + let abi_file = abi_dir.join(format!("{sanitized_name}.json")); + let src_file = source_dir.join(format!("{sanitized_name}.cairo")); + + ArtifactMetadata { + abi: if 
abi_file.exists() { Some(Uri::File(abi_file.into_std_path_buf())) } else { None }, + source: if src_file.exists() { + Some(Uri::File(src_file.into_std_path_buf())) + } else { + None + }, + } } +/// Build world metadata with data read from the project configuration. +/// +/// # Arguments +/// +/// * `project_metadata` - The project metadata. +/// +/// # Returns +/// +/// A [`WorldMetadata`] object initialized with project metadata. +pub fn project_to_world_metadata(project_metadata: Option) -> WorldMetadata { + if let Some(m) = project_metadata { + WorldMetadata { + name: m.name, + description: m.description, + cover_uri: m.cover_uri, + icon_uri: m.icon_uri, + website: m.website, + socials: m.socials, + ..Default::default() + } + } else { + WorldMetadata { + name: None, + description: None, + cover_uri: None, + icon_uri: None, + website: None, + socials: None, + ..Default::default() + } + } +} + +/// Collect metadata from the project configuration and from the workspace. +/// +/// # Arguments +/// `ws`: the workspace. +/// +/// # Returns +/// A [`DojoMetadata`] object containing all Dojo metadata. +pub fn dojo_metadata_from_workspace(ws: &Workspace<'_>) -> DojoMetadata { + let profile = ws.config().profile(); + + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let manifest_dir = manifest_dir.join(MANIFESTS_DIR).join(profile.as_str()); + let target_dir = ws.target_dir().path_existent().unwrap(); + let sources_dir = target_dir.join(profile.as_str()).join(SOURCES_DIR); + let abis_dir = manifest_dir.join(ABIS_DIR).join(BASE_DIR); + + let project_metadata = ws.current_package().unwrap().manifest.metadata.dojo(); + let mut dojo_metadata = + DojoMetadata { env: project_metadata.env.clone(), ..Default::default() }; + + let world_artifact = build_artifact_from_name(&sources_dir, &abis_dir, WORLD_CONTRACT_NAME); + + // inialize Dojo world metadata with world metadata coming from project configuration + dojo_metadata.world = project_to_world_metadata(project_metadata.world); + dojo_metadata.world.artifacts = world_artifact; + + // load models and contracts metadata + if manifest_dir.join(BASE_DIR).exists() { + if let Ok(manifest) = BaseManifest::load_from_path(&manifest_dir.join(BASE_DIR)) { + for model in manifest.models { + let name = model.name.to_string(); + dojo_metadata.artifacts.insert( + name.clone(), + build_artifact_from_name(&sources_dir, &abis_dir.join("models"), &name), + ); + } + + for contract in manifest.contracts { + let name = contract.name.to_string(); + dojo_metadata.artifacts.insert( + name.clone(), + build_artifact_from_name(&sources_dir, &abis_dir.join("contracts"), &name), + ); + } + } + } + + dojo_metadata +} + +/// Metadata coming from project configuration (Scarb.toml) +#[derive(Default, Deserialize, Debug, Clone)] +pub struct ProjectMetadata { + pub world: Option, + pub env: Option, +} + +/// Metadata collected from the project configuration and the Dojo workspace #[derive(Default, Deserialize, Debug, Clone)] -pub struct Metadata { - pub world: Option, +pub struct DojoMetadata { + pub world: WorldMetadata, pub env: Option, + pub artifacts: HashMap, } #[derive(Debug)] @@ -76,6 +196,18 @@ impl Uri { } } +/// World metadata coming from the project configuration (Scarb.toml) +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct ProjectWorldMetadata { + pub name: Option, + pub description: Option, + pub cover_uri: Option, + pub icon_uri: Option, + pub website: Option, + pub socials: Option>, +} + +/// World metadata collected from the 
project configuration and the Dojo workspace #[derive(Default, Serialize, Deserialize, Debug, Clone)] pub struct WorldMetadata { pub name: Option, @@ -84,6 +216,14 @@ pub struct WorldMetadata { pub icon_uri: Option, pub website: Option, pub socials: Option>, + pub artifacts: ArtifactMetadata, +} + +/// Metadata Artifacts collected for one Dojo element (world, model, contract...) +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct ArtifactMetadata { + pub abi: Option, + pub source: Option, } #[derive(Default, Deserialize, Clone, Debug)] @@ -122,7 +262,7 @@ impl Environment { } } -impl WorldMetadata { +impl ProjectWorldMetadata { pub fn name(&self) -> Option<&str> { self.name.as_deref() } @@ -135,8 +275,8 @@ impl WorldMetadata { impl WorldMetadata { pub async fn upload(&self) -> Result { let mut meta = self.clone(); - let client = IpfsClient::from_str("https://ipfs.infura.io:5001")? - .with_credentials("2EBrzr7ZASQZKH32sl2xWauXPSA", "12290b883db9138a8ae3363b6739d220"); + let client = + IpfsClient::from_str(IPFS_CLIENT_URL)?.with_credentials(IPFS_USERNAME, IPFS_PASSWORD); if let Some(Uri::File(icon)) = &self.icon_uri { let icon_data = std::fs::read(icon)?; @@ -152,6 +292,20 @@ impl WorldMetadata { meta.cover_uri = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) }; + if let Some(Uri::File(abi)) = &self.artifacts.abi { + let abi_data = std::fs::read(abi)?; + let reader = Cursor::new(abi_data); + let response = client.add(reader).await?; + meta.artifacts.abi = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + if let Some(Uri::File(source)) = &self.artifacts.source { + let source_data = std::fs::read(source)?; + let reader = Cursor::new(source_data); + let response = client.add(reader).await?; + meta.artifacts.source = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + let serialized = json!(meta).to_string(); let reader = Cursor::new(serialized); let response = client.add(reader).await?; @@ -160,26 +314,51 @@ impl WorldMetadata { } } -impl Metadata { - pub fn env(&self) -> Option<&Environment> { - self.env.as_ref() +impl ArtifactMetadata { + pub async fn upload(&self) -> Result { + let mut meta = self.clone(); + let client = + IpfsClient::from_str(IPFS_CLIENT_URL)?.with_credentials(IPFS_USERNAME, IPFS_PASSWORD); + + if let Some(Uri::File(abi)) = &self.abi { + let abi_data = std::fs::read(abi)?; + let reader = Cursor::new(abi_data); + let response = client.add(reader).await?; + meta.abi = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + if let Some(Uri::File(source)) = &self.source { + let source_data = std::fs::read(source)?; + let reader = Cursor::new(source_data); + let response = client.add(reader).await?; + meta.source = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + let serialized = json!(meta).to_string(); + let reader = Cursor::new(serialized); + let response = client.add(reader).await?; + + Ok(response.hash) } +} - pub fn world(&self) -> Option<&WorldMetadata> { - self.world.as_ref() +impl DojoMetadata { + pub fn env(&self) -> Option<&Environment> { + self.env.as_ref() } } + trait MetadataExt { - fn dojo(&self) -> Metadata; + fn dojo(&self) -> ProjectMetadata; } impl MetadataExt for ManifestMetadata { - fn dojo(&self) -> Metadata { + fn dojo(&self) -> ProjectMetadata { self.tool_metadata .as_ref() .and_then(|e| e.get("dojo")) .cloned() - .map(|v| v.try_into::().unwrap_or_default()) + .map(|v| v.try_into::().unwrap_or_default()) .unwrap_or_default() } } diff --git a/crates/dojo-world/src/metadata_test.rs 
b/crates/dojo-world/src/metadata_test.rs index a6c950fa6c..b30624320f 100644 --- a/crates/dojo-world/src/metadata_test.rs +++ b/crates/dojo-world/src/metadata_test.rs @@ -1,13 +1,18 @@ use std::collections::HashMap; +use camino::Utf8PathBuf; +use dojo_test_utils::compiler::build_full_test_config; +use scarb::ops; use url::Url; -use super::WorldMetadata; -use crate::metadata::{Metadata, Uri}; +use crate::metadata::{ + dojo_metadata_from_workspace, ArtifactMetadata, ProjectMetadata, Uri, WorldMetadata, ABIS_DIR, + BASE_DIR, MANIFESTS_DIR, SOURCES_DIR, +}; #[test] fn check_metadata_deserialization() { - let metadata: Metadata = toml::from_str( + let metadata: ProjectMetadata = toml::from_str( r#" [env] rpc_url = "http://localhost:5050/" @@ -64,9 +69,13 @@ async fn world_metadata_hash_and_upload() { name: Some("Test World".to_string()), description: Some("A world used for testing".to_string()), cover_uri: Some(Uri::File("src/metadata_test_data/cover.png".into())), - icon_uri: None, + icon_uri: Some(Uri::File("src/metadata_test_data/cover.png".into())), website: Some(Url::parse("https://dojoengine.org").unwrap()), socials: Some(HashMap::from([("x".to_string(), "https://x.com/dojostarknet".to_string())])), + artifacts: ArtifactMetadata { + abi: Some(Uri::File("src/metadata_test_data/abi.json".into())), + source: Some(Uri::File("src/metadata_test_data/source.cairo".into())), + }, }; let _ = meta.upload().await.unwrap(); @@ -74,7 +83,7 @@ async fn world_metadata_hash_and_upload() { #[tokio::test] async fn parse_world_metadata_without_socials() { - let metadata: Metadata = toml::from_str( + let metadata: ProjectMetadata = toml::from_str( r#" [env] rpc_url = "http://localhost:5050/" @@ -97,3 +106,101 @@ website = "https://dojoengine.org" assert!(metadata.world.is_some()); } + +#[tokio::test] +async fn get_full_dojo_metadata_from_workspace() { + let config = build_full_test_config("../../examples/spawn-and-move/Scarb.toml", false).unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + + let profile = ws.config().profile(); + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let manifest_dir = manifest_dir.join(MANIFESTS_DIR).join(profile.as_str()); + let target_dir = ws.target_dir().path_existent().unwrap(); + let sources_dir = target_dir.join(profile.as_str()).join(SOURCES_DIR); + let abis_dir = manifest_dir.join(ABIS_DIR).join(BASE_DIR); + + let dojo_metadata = dojo_metadata_from_workspace(&ws); + + // env + assert!(dojo_metadata.env.is_some()); + let env = dojo_metadata.env.unwrap(); + + assert!(env.rpc_url.is_some()); + assert!(env.rpc_url.unwrap().eq("http://localhost:5050/")); + + assert!(env.account_address.is_some()); + assert!( + env.account_address + .unwrap() + .eq("0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03") + ); + + assert!(env.private_key.is_some()); + assert!( + env.private_key.unwrap().eq("0x1800000000300000180000000000030000000000003006001800006600") + ); + + assert!(env.world_address.is_some()); + assert!( + env.world_address + .unwrap() + .eq("0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295") + ); + + assert!(env.keystore_path.is_none()); + assert!(env.keystore_password.is_none()); + + // world + assert!(dojo_metadata.world.name.is_some()); + assert!(dojo_metadata.world.name.unwrap().eq("example")); + + assert!(dojo_metadata.world.description.is_some()); + assert!(dojo_metadata.world.description.unwrap().eq("example world")); + + 
assert!(dojo_metadata.world.cover_uri.is_none()); + assert!(dojo_metadata.world.icon_uri.is_none()); + assert!(dojo_metadata.world.website.is_none()); + assert!(dojo_metadata.world.socials.is_none()); + + check_artifact( + dojo_metadata.world.artifacts, + "dojo_world_world".to_string(), + &abis_dir, + &sources_dir, + ); + + // artifacts + let artifacts = vec![ + ("models", "dojo_examples::actions::actions::moved"), + ("models", "dojo_examples::models::emote_message"), + ("models", "dojo_examples::models::moves"), + ("models", "dojo_examples::models::position"), + ("contracts", "dojo_examples::actions::actions"), + ]; + + for (abi_subdir, name) in artifacts { + let artifact = dojo_metadata.artifacts.get(name); + assert!(artifact.is_some()); + let artifact = artifact.unwrap(); + + let sanitized_name = name.replace("::", "_"); + + check_artifact(artifact.clone(), sanitized_name, &abis_dir.join(abi_subdir), &sources_dir); + } +} + +fn check_artifact( + artifact: ArtifactMetadata, + name: String, + abis_dir: &Utf8PathBuf, + sources_dir: &Utf8PathBuf, +) { + assert!(artifact.abi.is_some()); + let abi = artifact.abi.unwrap(); + assert_eq!(abi, Uri::File(abis_dir.join(format!("{name}.json")).into())); + + assert!(artifact.source.is_some()); + let source = artifact.source.unwrap(); + assert_eq!(source, Uri::File(sources_dir.join(format!("{name}.cairo")).into())); +} diff --git a/crates/dojo-world/src/metadata_test_data/abi.json b/crates/dojo-world/src/metadata_test_data/abi.json new file mode 100644 index 0000000000..78efed0140 --- /dev/null +++ b/crates/dojo-world/src/metadata_test_data/abi.json @@ -0,0 +1,17 @@ +[ + { + "type": "impl", + "name": "WorldProviderImpl", + "interface_name": "dojo::world::IWorldProvider" + }, + { + "type": "struct", + "name": "dojo::world::IWorldDispatcher", + "members": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + } + ] + } +] diff --git a/crates/dojo-world/src/metadata_test_data/source.cairo b/crates/dojo-world/src/metadata_test_data/source.cairo new file mode 100644 index 0000000000..c917342ece --- /dev/null +++ b/crates/dojo-world/src/metadata_test_data/source.cairo @@ -0,0 +1,79 @@ +use starknet::ContractAddress; + +#[derive(Serde, Copy, Drop, Introspect)] +enum Direction { + None, + Left, + Right, + Up, + Down, +} + +impl DirectionIntoFelt252 of Into { + fn into(self: Direction) -> felt252 { + match self { + Direction::None => 0, + Direction::Left => 1, + Direction::Right => 2, + Direction::Up => 3, + Direction::Down => 4, + } + } +} + +#[derive(Model, Copy, Drop, Serde)] +struct Moves { + #[key] + player: ContractAddress, + remaining: u8, + last_direction: Direction +} + +#[derive(Copy, Drop, Serde, Introspect)] +struct Vec2 { + x: u32, + y: u32 +} + +#[derive(Model, Copy, Drop, Serde)] +struct Position { + #[key] + player: ContractAddress, + vec: Vec2, +} + +trait Vec2Trait { + fn is_zero(self: Vec2) -> bool; + fn is_equal(self: Vec2, b: Vec2) -> bool; +} + +impl Vec2Impl of Vec2Trait { + fn is_zero(self: Vec2) -> bool { + if self.x - self.y == 0 { + return true; + } + false + } + + fn is_equal(self: Vec2, b: Vec2) -> bool { + self.x == b.x && self.y == b.y + } +} + +#[cfg(test)] +mod tests { + use super::{Position, Vec2, Vec2Trait}; + + #[test] + #[available_gas(100000)] + fn test_vec_is_zero() { + assert(Vec2Trait::is_zero(Vec2 { x: 0, y: 0 }), 'not zero'); + } + + #[test] + #[available_gas(100000)] + fn test_vec_is_equal() { + let position = Vec2 { x: 420, y: 0 }; + assert(position.is_equal(Vec2 { x: 
420, y: 0 }), 'not equal'); + } +} diff --git a/crates/dojo-world/src/migration/mod.rs b/crates/dojo-world/src/migration/mod.rs index 4af66e2ac5..943c47725e 100644 --- a/crates/dojo-world/src/migration/mod.rs +++ b/crates/dojo-world/src/migration/mod.rs @@ -51,6 +51,7 @@ pub struct UpgradeOutput { pub struct RegisterOutput { pub transaction_hash: FieldElement, pub declare_output: Vec, + pub registered_model_names: Vec, } #[derive(Debug, Error)] diff --git a/crates/sozo/ops/Cargo.toml b/crates/sozo/ops/Cargo.toml index 2472bb87fe..077f54ef54 100644 --- a/crates/sozo/ops/Cargo.toml +++ b/crates/sozo/ops/Cargo.toml @@ -49,5 +49,6 @@ cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } [dev-dependencies] assert_fs = "1.0.10" dojo-test-utils = { workspace = true, features = [ "build-examples" ] } +ipfs-api-backend-hyper = { git = "https://github.com/ferristseng/rust-ipfs-api", rev = "af2c17f7b19ef5b9898f458d97a90055c3605633", features = [ "with-hyper-rustls" ] } katana-runner.workspace = true snapbox = "0.4.6" diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 4c096fe037..74f7c6499e 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -11,7 +11,7 @@ use dojo_world::manifest::{ Manifest, ManifestMethods, OverlayManifest, WorldContract as ManifestWorldContract, WorldMetadata, }; -use dojo_world::metadata::dojo_metadata_from_workspace; +use dojo_world::metadata::{dojo_metadata_from_workspace, ArtifactMetadata}; use dojo_world::migration::contract::ContractMigration; use dojo_world::migration::strategy::{generate_salt, prepare_for_migration, MigrationStrategy}; use dojo_world::migration::world::WorldDiff; @@ -20,6 +20,7 @@ use dojo_world::migration::{ Upgradable, UpgradeOutput, }; use dojo_world::utils::TransactionWaiter; +use futures::future; use scarb::core::Workspace; use scarb_ui::Ui; use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; @@ -32,9 +33,6 @@ use starknet::core::utils::{ use starknet::providers::{Provider, ProviderError}; use tokio::fs; -#[cfg(test)] -#[path = "migration_test.rs"] -mod migration_test; mod ui; use starknet::signers::Signer; @@ -51,7 +49,15 @@ pub struct MigrationOutput { // If false that means migration got partially completed. pub full: bool, - pub contracts: Vec>, + pub models: Vec, + pub contracts: Vec>, +} + +#[derive(Debug, Default, Clone)] +pub struct ContractMigrationOutput { + name: String, + contract_address: FieldElement, + base_class_hash: FieldElement, } pub async fn migrate( @@ -107,13 +113,40 @@ where let mut strategy = prepare_migration(&target_dir, diff, name.clone(), world_address, &ui)?; let world_address = strategy.world_address().expect("world address must exist"); - let migration_output = if dry_run { + if dry_run { print_strategy(&ui, account.provider(), &strategy).await; - MigrationOutput { world_address, ..Default::default() } + + update_manifests_and_abis( + ws, + local_manifest, + &profile_dir, + &profile_name, + &rpc_url, + world_address, + None, + name.as_ref(), + ) + .await?; } else { // Migrate according to the diff. 
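// Editor's illustrative sketch (not part of this patch): the `txn_config` handed to
// `apply_diff` just below is the `Option<TxConfig>` built from the CLI's transaction
// options. Assuming the `TxConfig` fields exercised later in this series
// (`fee_estimate_multiplier`, `wait`, `receipt`), it could be constructed by hand as
// follows; the helper name and the values are hypothetical.
#[allow(dead_code)]
fn example_txn_config() -> Option<dojo_world::migration::TxConfig> {
    use dojo_world::migration::TxConfig;

    Some(TxConfig {
        // Assumed semantics: pad fee estimates by 20%, wait for confirmation, fetch receipts.
        fee_estimate_multiplier: Some(1.2),
        wait: true,
        receipt: true,
    })
}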
match apply_diff(ws, account, txn_config, &mut strategy).await { - Ok(migration_output) => migration_output, + Ok(migration_output) => { + update_manifests_and_abis( + ws, + local_manifest.clone(), + &profile_dir, + &profile_name, + &rpc_url, + world_address, + Some(migration_output.clone()), + name.as_ref(), + ) + .await?; + + if !ws.config().offline() { + upload_metadata(ws, account, migration_output).await?; + } + } Err(e) => { update_manifests_and_abis( ws, @@ -121,7 +154,8 @@ where &profile_dir, &profile_name, &rpc_url, - MigrationOutput { world_address, ..Default::default() }, + world_address, + None, name.as_ref(), ) .await?; @@ -130,17 +164,6 @@ where } }; - update_manifests_and_abis( - ws, - local_manifest, - &profile_dir, - &profile_name, - &rpc_url, - migration_output, - name.as_ref(), - ) - .await?; - Ok(()) } @@ -150,11 +173,12 @@ async fn update_manifests_and_abis( profile_dir: &Utf8PathBuf, profile_name: &str, rpc_url: &str, - migration_output: MigrationOutput, + world_address: FieldElement, + migration_output: Option, salt: Option<&String>, ) -> Result<()> { let ui = ws.config().ui(); - ui.print("\n✨ Updating manifests..."); + ui.print_step(5, "✨", "Updating manifests..."); let deployed_path = profile_dir.join("manifest").with_extension("toml"); let deployed_path_json = profile_dir.join("manifest").with_extension("json"); @@ -171,39 +195,43 @@ async fn update_manifests_and_abis( local_manifest.merge_from_previous(previous_manifest); }; - local_manifest.world.inner.address = Some(migration_output.world_address); + local_manifest.world.inner.address = Some(world_address); if let Some(salt) = salt { local_manifest.world.inner.seed = Some(salt.to_owned()); } - if migration_output.world_tx_hash.is_some() { - local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; - } - if migration_output.world_block_number.is_some() { - local_manifest.world.inner.block_number = migration_output.world_block_number; - } + // when the migration has not been applied because in `plan` mode or because of an error, + // the `migration_output` is empty. 
+ if let Some(migration_output) = migration_output { + if migration_output.world_tx_hash.is_some() { + local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; + } + if migration_output.world_block_number.is_some() { + local_manifest.world.inner.block_number = migration_output.world_block_number; + } - migration_output.contracts.iter().for_each(|contract_output| { - // ignore failed migration which are represented by None - if let Some(output) = contract_output { - // find the contract in local manifest and update its address and base class hash - let local = local_manifest - .contracts - .iter_mut() - .find(|c| c.name == output.name.as_ref().unwrap()) - .expect("contract got migrated, means it should be present here"); - - let salt = generate_salt(&local.name); - local.inner.address = Some(get_contract_address( - salt, - output.base_class_hash, - &[], - migration_output.world_address, - )); + migration_output.contracts.iter().for_each(|contract_output| { + // ignore failed migration which are represented by None + if let Some(output) = contract_output { + // find the contract in local manifest and update its address and base class hash + let local = local_manifest + .contracts + .iter_mut() + .find(|c| c.name == output.name) + .expect("contract got migrated, means it should be present here"); + + let salt = generate_salt(&local.name); + local.inner.address = Some(get_contract_address( + salt, + output.base_class_hash, + &[], + migration_output.world_address, + )); - local.inner.base_class_hash = output.base_class_hash; - } - }); + local.inner.base_class_hash = output.base_class_hash; + } + }); + } // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in // local_manifest @@ -289,7 +317,8 @@ where { let ui = ws.config().ui(); - println!(" "); + ui.print_step(4, "🛠", "Migrating..."); + ui.print(" "); let migration_output = execute_strategy(ws, strategy, account, txn_config) .await @@ -299,7 +328,7 @@ where if migration_output.full { if let Some(block_number) = migration_output.world_block_number { ui.print(format!( - "\n🎉 Successfully migrated World on block #{} at address {}", + "\n🎉 Successfully migrated World on block #{} at address {}\n", block_number, bold_message(format!( "{:#x}", @@ -308,7 +337,7 @@ where )); } else { ui.print(format!( - "\n🎉 Successfully migrated World at address {}", + "\n🎉 Successfully migrated World at address {}\n", bold_message(format!( "{:#x}", strategy.world_address().expect("world address must exist") @@ -493,14 +522,6 @@ where }; ui.print_sub(format!("Contract address: {:#x}", world.contract_address)); - - let offline = ws.config().offline(); - - if offline { - ui.print_sub("Skipping metadata upload because of offline mode"); - } else { - upload_metadata(ws, world, migrator, &ui).await?; - } } } None => {} @@ -511,23 +532,25 @@ where world_tx_hash, world_block_number, full: false, + models: vec![], contracts: vec![], }; // Once Torii supports indexing arrays, we should declare and register the // ResourceMetadata model. 
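// Editor's illustrative sketch (not part of this patch): the model names collected
// below in `RegisterOutput.registered_model_names` are what `upload_metadata` later
// uses to look up each model's local `ArtifactMetadata` and to derive the resource id
// under which its metadata URI is registered. The helper name is hypothetical; the
// selector call mirrors the one used by the upload code in this series.
#[allow(dead_code)]
fn model_resource_id(model_name: &str) -> starknet::core::types::FieldElement {
    // A model's resource id is the selector of its (ASCII) name.
    starknet::core::utils::get_selector_from_name(model_name).expect("ASCII model name")
}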
- match register_models(strategy, migrator, &ui, txn_config).await { - Ok(_) => (), + Ok(output) => { + migration_output.models = output.registered_model_names; + } Err(e) => { ui.anyhow(&e); return Ok(migration_output); } - } + }; match deploy_dojo_contracts(strategy, migrator, &ui, txn_config).await { - Ok(res) => { - migration_output.contracts = res; + Ok(output) => { + migration_output.contracts = output; } Err(e) => { ui.anyhow(&e); @@ -540,53 +563,6 @@ where Ok(migration_output) } -async fn upload_metadata( - ws: &Workspace<'_>, - world: &ContractMigration, - migrator: &SingleOwnerAccount, - ui: &Ui, -) -> Result<(), anyhow::Error> -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - let metadata = dojo_metadata_from_workspace(ws); - if let Some(meta) = metadata.as_ref().and_then(|inner| inner.world()) { - match meta.upload().await { - Ok(hash) => { - let mut encoded_uri = cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; - - // Metadata is expecting an array of capacity 3. - if encoded_uri.len() < 3 { - encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); - } - - let world_metadata = - ResourceMetadata { resource_id: FieldElement::ZERO, metadata_uri: encoded_uri }; - - let InvokeTransactionResult { transaction_hash } = - WorldContract::new(world.contract_address, migrator) - .set_metadata(&world_metadata) - .send() - .await - .map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to set World metadata: {e}") - })?; - - TransactionWaiter::new(transaction_hash, migrator.provider()).await?; - - ui.print_sub(format!("Set Metadata transaction: {:#x}", transaction_hash)); - ui.print_sub(format!("Metadata uri: ipfs://{hash}")); - } - Err(err) => { - ui.print_sub(format!("Failed to set World metadata:\n{err}")); - } - } - } - Ok(()) -} - enum ContractDeploymentOutput { AlreadyDeployed(FieldElement), Output(DeployOutput), @@ -693,7 +669,7 @@ async fn register_models( migrator: &SingleOwnerAccount, ui: &Ui, txn_config: Option, -) -> Result> +) -> Result where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, @@ -701,12 +677,17 @@ where let models = &strategy.models; if models.is_empty() { - return Ok(None); + return Ok(RegisterOutput { + transaction_hash: FieldElement::ZERO, + declare_output: vec![], + registered_model_names: vec![], + }); } ui.print_header(format!("# Models ({})", models.len())); let mut declare_output = vec![]; + let mut registered_model_names = vec![]; for c in models.iter() { ui.print(italic_message(&c.diff.name).to_string()); @@ -741,7 +722,10 @@ where let calls = models .iter() - .map(|c| world.register_model_getcall(&c.diff.local.into())) + .map(|c| { + registered_model_names.push(c.diff.name.clone()); + world.register_model_getcall(&c.diff.local.into()) + }) .collect::>(); let InvokeTransactionResult { transaction_hash } = @@ -754,7 +738,7 @@ where ui.print(format!("All models are registered at: {transaction_hash:#x}")); - Ok(Some(RegisterOutput { transaction_hash, declare_output })) + Ok(RegisterOutput { transaction_hash, declare_output, registered_model_names }) } async fn deploy_dojo_contracts( @@ -762,7 +746,7 @@ async fn deploy_dojo_contracts( migrator: &SingleOwnerAccount, ui: &Ui, txn_config: Option, -) -> Result>> +) -> Result>> where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, @@ -793,7 +777,7 @@ where ) .await { - Ok(mut output) => { + Ok(output) => { if let Some(ref declare) = output.declare { ui.print_hidden_sub(format!( "Declare transaction: 
{:#x}", @@ -819,10 +803,11 @@ where )); ui.print_sub(format!("Contract address: {:#x}", output.contract_address)); } - let name = contract.diff.name.clone(); - - output.name = Some(name); - deploy_output.push(Some(output)); + deploy_output.push(Some(ContractMigrationOutput { + name: name.to_string(), + contract_address: output.contract_address, + base_class_hash: output.base_class_hash, + })); } Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { ui.print_sub(format!("Already deployed: {:#x}", contract_address)); @@ -926,3 +911,154 @@ where ui.print(" "); } } + +/// Upload a metadata as a IPFS artifact and then create a resource to register +/// into the Dojo resource registry. +/// +/// # Arguments +/// * `element_name` - fully qualified name of the element linked to the metadata +/// * `resource_id` - the id of the resource to create. +/// * `artifact` - the artifact to upload on IPFS. +/// +/// # Returns +/// A [`ResourceData`] object to register in the Dojo resource register +/// on success. +/// +async fn upload_on_ipfs_and_create_resource( + ui: &Ui, + element_name: String, + resource_id: FieldElement, + artifact: ArtifactMetadata, +) -> Result { + match artifact.upload().await { + Ok(hash) => { + ui.print_sub(format!("{}: ipfs://{}", element_name, hash)); + create_resource_metadata(resource_id, hash) + } + Err(_) => Err(anyhow!("Failed to upload IPFS resource.")), + } +} + +/// Create a resource to register in the Dojo resource registry. +/// +/// # Arguments +/// * `resource_id` - the ID of the resource +/// * `hash` - the IPFS hash +/// +/// # Returns +/// A [`ResourceData`] object to register in the Dojo resource register +/// on success. +fn create_resource_metadata(resource_id: FieldElement, hash: String) -> Result { + let mut encoded_uri = cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; + + // Metadata is expecting an array of capacity 3. + if encoded_uri.len() < 3 { + encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); + } + + Ok(ResourceMetadata { resource_id, metadata_uri: encoded_uri }) +} + +/// Upload metadata of the world/models/contracts as IPFS artifacts and then +/// register them in the Dojo resource registry. +/// +/// # Arguments +/// +/// * `ws` - the workspace +/// * `migrator` - the account used to migrate +/// * `migration_output` - the output after having applied the migration plan. 
+pub async fn upload_metadata( + ws: &Workspace<'_>, + migrator: &SingleOwnerAccount, + migration_output: MigrationOutput, +) -> Result<()> +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + let ui = ws.config().ui(); + + ui.print(" "); + ui.print_step(6, "🌐", "Uploading metadata..."); + ui.print(" "); + + let dojo_metadata = dojo_metadata_from_workspace(ws); + let mut ipfs = vec![]; + let mut resources = vec![]; + + // world + if migration_output.world_tx_hash.is_some() { + match dojo_metadata.world.upload().await { + Ok(hash) => { + let resource = create_resource_metadata(FieldElement::ZERO, hash.clone())?; + ui.print_sub(format!("world: ipfs://{}", hash)); + resources.push(resource); + } + Err(err) => { + ui.print_sub(format!("Failed to upload World metadata:\n{err}")); + } + } + } + + // models + if !migration_output.models.is_empty() { + for model_name in migration_output.models { + if let Some(m) = dojo_metadata.artifacts.get(&model_name) { + ipfs.push(upload_on_ipfs_and_create_resource( + &ui, + model_name.clone(), + get_selector_from_name(&model_name).expect("ASCII model name"), + m.clone(), + )); + } + } + } + + // contracts + let migrated_contracts = migration_output.contracts.into_iter().flatten().collect::>(); + + if !migrated_contracts.is_empty() { + for contract in migrated_contracts { + if let Some(m) = dojo_metadata.artifacts.get(&contract.name) { + ipfs.push(upload_on_ipfs_and_create_resource( + &ui, + contract.name.clone(), + contract.contract_address, + m.clone(), + )); + } + } + } + + // upload IPFS + resources.extend( + future::try_join_all(ipfs) + .await + .map_err(|_| anyhow!("Unable to upload IPFS artifacts."))?, + ); + + ui.print("> All IPFS artifacts have been successfully uploaded.".to_string()); + + // update the resource registry + let world = WorldContract::new(migration_output.world_address, migrator); + + let calls = resources.iter().map(|r| world.set_metadata_getcall(r)).collect::>(); + + let InvokeTransactionResult { transaction_hash } = + migrator.execute(calls).send().await.map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to register metadata into the resource registry: {e}") + })?; + + TransactionWaiter::new(transaction_hash, migrator.provider()).await?; + + ui.print(format!( + "> All metadata have been registered in the resource registry (tx hash: \ + {transaction_hash:#x})" + )); + + ui.print(""); + ui.print("\n✨ Done."); + + Ok(()) +} diff --git a/crates/sozo/ops/src/tests/migration.rs b/crates/sozo/ops/src/tests/migration.rs new file mode 100644 index 0000000000..d499b8cb5d --- /dev/null +++ b/crates/sozo/ops/src/tests/migration.rs @@ -0,0 +1,496 @@ +use std::str; + +use camino::Utf8Path; +use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; +use dojo_test_utils::compiler::build_full_test_config; +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, StarknetConfig, TestSequencer, +}; +use dojo_world::contracts::WorldContractReader; +use dojo_world::manifest::{BaseManifest, DeploymentManifest, WORLD_CONTRACT_NAME}; +use dojo_world::metadata::{ + dojo_metadata_from_workspace, ArtifactMetadata, DojoMetadata, Uri, WorldMetadata, + IPFS_CLIENT_URL, IPFS_PASSWORD, IPFS_USERNAME, +}; +use dojo_world::migration::strategy::prepare_for_migration; +use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::TxConfig; +use futures::TryStreamExt; +use ipfs_api_backend_hyper::{HyperBackend, IpfsApi, IpfsClient, TryFromUri}; +use 
starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; +use starknet::core::chain_id; +use starknet::core::types::{BlockId, BlockTag}; +use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; +use starknet::macros::felt; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::{LocalWallet, SigningKey}; +use starknet_crypto::FieldElement; + +use super::setup::{load_config, setup_migration, setup_ws}; +use crate::migration::{execute_strategy, upload_metadata}; +use crate::utils::get_contract_address_from_reader; + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_auto_mine() { + let config = load_config(); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + sequencer.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_block_time() { + let config = load_config(); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = TestSequencer::start( + SequencerConfig { block_time: Some(1000), ..Default::default() }, + get_default_test_starknet_config(), + ) + .await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + sequencer.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_small_fee_multiplier_will_fail() { + let config = load_config(); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = TestSequencer::start( + Default::default(), + StarknetConfig { disable_fee: false, ..Default::default() }, + ) + .await; + + let account = SingleOwnerAccount::new( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + sequencer.raw_account().private_key, + )), + sequencer.raw_account().account_address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + assert!( + execute_strategy( + &ws, + &mut migration, + &account, + Some(TxConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }), + ) + .await + .is_err() + ); + sequencer.stop().unwrap(); +} + +#[test] +fn migrate_world_without_seed_will_fail() { + let profile_name = "dev"; + let base = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base); + let manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap(); + let world = WorldDiff::compute(manifest, None); + let res = prepare_for_migration(None, None, &Utf8Path::new(&target_dir).to_path_buf(), world); + assert!(res.is_err_and(|e| e.to_string().contains("Missing seed for World deployment."))) +} + +#[tokio::test] +async fn migration_from_remote() { + let config = load_config(); + let ws = setup_ws(&config); + + let base = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let account = SingleOwnerAccount::new( + 
JsonRpcClient::new(HttpTransport::new(sequencer.url())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + sequencer.raw_account().private_key, + )), + sequencer.raw_account().account_address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + let profile_name = ws.current_profile().unwrap().to_string(); + + let manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), + ) + .unwrap(); + + let world = WorldDiff::compute(manifest, None); + + let mut migration = prepare_for_migration( + None, + Some(felt!("0x12345")), + &Utf8Path::new(&target_dir).to_path_buf(), + world, + ) + .unwrap(); + + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + let local_manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), + ) + .unwrap(); + + let remote_manifest = DeploymentManifest::load_from_remote( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + migration.world_address().unwrap(), + ) + .await + .unwrap(); + + sequencer.stop().unwrap(); + + assert_eq!(local_manifest.world.inner.class_hash, remote_manifest.world.inner.class_hash); + assert_eq!(local_manifest.models.len(), remote_manifest.models.len()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_metadata() { + let config = build_full_test_config("../../../examples/spawn-and-move/Scarb.toml", false) + .unwrap_or_else(|c| panic!("Error loading config: {c:?}")); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let output = execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + let res = upload_metadata(&ws, &account, output.clone()).await; + assert!(res.is_ok()); + + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(output.world_address, &provider); + + let client = IpfsClient::from_str(IPFS_CLIENT_URL) + .unwrap_or_else(|_| panic!("Unable to initialize the IPFS Client")) + .with_credentials(IPFS_USERNAME, IPFS_PASSWORD); + + let dojo_metadata = dojo_metadata_from_workspace(&ws); + + // check world metadata + let resource = world_reader.metadata(&FieldElement::ZERO).call().await.unwrap(); + let element_name = WORLD_CONTRACT_NAME.to_string(); + + let full_uri = get_and_check_metadata_uri(&element_name, &resource.metadata_uri); + let resource_bytes = get_ipfs_resource_data(&client, &element_name, &full_uri).await; + + let metadata = resource_bytes_to_world_metadata(&resource_bytes, &element_name); + + assert_eq!(metadata.name, dojo_metadata.world.name, ""); + assert_eq!(metadata.description, dojo_metadata.world.description, ""); + assert_eq!(metadata.cover_uri, dojo_metadata.world.cover_uri, ""); + assert_eq!(metadata.icon_uri, dojo_metadata.world.icon_uri, ""); + assert_eq!(metadata.website, dojo_metadata.world.website, ""); + assert_eq!(metadata.socials, dojo_metadata.world.socials, ""); + + check_artifact_fields( + &client, + &metadata.artifacts, + &dojo_metadata.world.artifacts, + &element_name, + ) + .await; + + // check model metadata + for m in migration.models { + let selector = get_selector_from_name(&m.diff.name).unwrap(); + check_artifact_metadata(&client, &world_reader, selector, &m.diff.name, &dojo_metadata) + .await; 
+ } + + // check contract metadata + for c in migration.contracts { + let contract_address = + get_contract_address_from_reader(&world_reader, c.diff.name.clone()).await.unwrap(); + check_artifact_metadata( + &client, + &world_reader, + contract_address, + &c.diff.name, + &dojo_metadata, + ) + .await; + } +} + +/// Get the hash from a IPFS URI +/// +/// # Arguments +/// +/// * `uri` - a full IPFS URI +/// +/// # Returns +/// +/// A [`String`] containing the hash from the URI. +fn get_hash_from_uri(uri: &str) -> String { + let hash = match uri.strip_prefix("ipfs://") { + Some(s) => s.to_string(), + None => uri.to_owned(), + }; + match hash.strip_suffix('/') { + Some(s) => s.to_string(), + None => hash, + } +} + +/// Check a metadata field which refers to a file. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `uri` - the IPFS URI of the abi field. +/// * `expected_uri` - the URI of the expected file. +/// * `field_name` - the field name. +/// * `element_name` - the fully qualified name of the element linked to this field. +async fn check_file_field( + client: &HyperBackend, + uri: &Uri, + expected_uri: &Uri, + field_name: String, + element_name: &String, +) { + if let Uri::Ipfs(uri) = uri { + let resource_data = get_ipfs_resource_data(client, element_name, uri).await; + assert!( + !resource_data.is_empty(), + "{field_name} IPFS artifact for {} is empty", + element_name + ); + + if let Uri::File(f) = expected_uri { + let file_content = std::fs::read_to_string(f).unwrap(); + let resource_content = std::str::from_utf8(&resource_data).unwrap_or_else(|_| { + panic!( + "Unable to stringify resource data for field '{}' of {}", + field_name, element_name + ) + }); + + assert!( + file_content.eq(&resource_content), + "local '{field_name}' content differs from the one uploaded on IPFS for {}", + element_name + ); + } else { + panic!( + "The field '{field_name}' of {} is not a file (Should never happen !)", + element_name + ); + } + } else { + panic!("The '{field_name}' field is not an IPFS artifact for {}", element_name); + } +} + +/// Convert resource bytes to a ArtifactMetadata object. +/// +/// # Arguments +/// +/// * `raw_data` - resource data as bytes. +/// * `element_name` - name of the element linked to this resource. +/// +/// # Returns +/// +/// A [`ArtifactMetadata`] object. +fn resource_bytes_to_metadata(raw_data: &[u8], element_name: &String) -> ArtifactMetadata { + let data = std::str::from_utf8(raw_data) + .unwrap_or_else(|_| panic!("Unable to stringify raw metadata for {}", element_name)); + serde_json::from_str(data) + .unwrap_or_else(|_| panic!("Unable to deserialize metadata for {}", element_name)) +} + +/// Convert resource bytes to a WorldMetadata object. +/// +/// # Arguments +/// +/// * `raw_data` - resource data as bytes. +/// * `element_name` - name of the element linked to this resource. +/// +/// # Returns +/// +/// A [`WorldMetadata`] object. +fn resource_bytes_to_world_metadata(raw_data: &[u8], element_name: &String) -> WorldMetadata { + let data = std::str::from_utf8(raw_data) + .unwrap_or_else(|_| panic!("Unable to stringify raw metadata for {}", element_name)); + serde_json::from_str(data) + .unwrap_or_else(|_| panic!("Unable to deserialize metadata for {}", element_name)) +} + +/// Read the content of a resource identified by its IPFS URI. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `element_name` - the name of the element (model or contract) linked to this artifact. +/// * `uri` - the IPFS resource URI. 
+/// +/// # Returns +/// +/// A [`Vec`] containing the resource content as bytes. +async fn get_ipfs_resource_data( + client: &HyperBackend, + element_name: &String, + uri: &String, +) -> Vec { + let hash = get_hash_from_uri(uri); + + let res = client.cat(&hash).map_ok(|chunk| chunk.to_vec()).try_concat().await; + assert!(res.is_ok(), "Unable to read the IPFS artifact {} for {}", uri, element_name); + + res.unwrap() +} + +/// Check the validity of artifact metadata fields. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `metadata` - the metadata to check. +/// * `expected_metadata` - the metadata values coming from local Dojo metadata. +/// * `element_name` - the name of the element linked to this metadata. +async fn check_artifact_fields( + client: &HyperBackend, + metadata: &ArtifactMetadata, + expected_metadata: &ArtifactMetadata, + element_name: &String, +) { + assert!(metadata.abi.is_some(), "'abi' field not set for {}", element_name); + let abi = metadata.abi.as_ref().unwrap(); + let expected_abi = expected_metadata.abi.as_ref().unwrap(); + check_file_field(client, abi, expected_abi, "abi".to_string(), element_name).await; + + assert!(metadata.source.is_some(), "'source' field not set for {}", element_name); + let source = metadata.source.as_ref().unwrap(); + let expected_source = expected_metadata.source.as_ref().unwrap(); + check_file_field(client, source, expected_source, "source".to_string(), element_name).await; +} + +/// Check the validity of a IPFS artifact metadata. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `element_name` - the fully qualified name of the element linked to the artifact. +/// * `uri` - the full metadata URI. +/// * `expected_metadata` - the expected metadata values coming from local Dojo metadata. +async fn check_ipfs_metadata( + client: &HyperBackend, + element_name: &String, + uri: &String, + expected_metadata: &ArtifactMetadata, +) { + let resource_bytes = get_ipfs_resource_data(client, element_name, uri).await; + let metadata = resource_bytes_to_metadata(&resource_bytes, element_name); + + check_artifact_fields(client, &metadata, expected_metadata, element_name).await; +} + +/// Rebuild the full metadata URI from an array of 3 FieldElement. +/// +/// # Arguments +/// +/// * `element_name` - name of the element (model or contract) linked to the metadata URI. +/// * `uri` - uri as an array of 3 FieldElement. +/// +/// # Returns +/// +/// A [`String`] containing the full metadata URI. +fn get_and_check_metadata_uri(element_name: &String, uri: &Vec) -> String { + assert!(uri.len() == 3, "bad metadata URI length for {} ({:#?})", element_name, uri); + + let mut i = 0; + let mut full_uri = "".to_string(); + + while i < uri.len() && uri[i] != FieldElement::ZERO { + let uri_str = parse_cairo_short_string(&uri[i]); + assert!( + uri_str.is_ok(), + "unable to parse the part {} of the metadata URI for {}", + i + 1, + element_name + ); + + full_uri = format!("{}{}", full_uri, uri_str.unwrap()); + + i += 1; + } + + assert!(!full_uri.is_empty(), "metadata URI is empty for {}", element_name); + + assert!( + full_uri.starts_with("ipfs://"), + "metadata URI for {} is not an IPFS artifact", + element_name + ); + + full_uri +} + +/// Check an artifact metadata read from the resource registry against its value +/// in the local Dojo metadata. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `world_reader` - a world reader object. +/// * `resource_id` - the resource ID in the resource registry. 
+/// * `element_name` - the fully qualified name of the element linked to this metadata. +/// * `dojo_metadata` - local Dojo metadata. +async fn check_artifact_metadata( + client: &HyperBackend, + world_reader: &WorldContractReader<P>
, + resource_id: FieldElement, + element_name: &String, + dojo_metadata: &DojoMetadata, +) { + let resource = world_reader.metadata(&resource_id).call().await.unwrap(); + + let expected_artifact = dojo_metadata.artifacts.get(element_name); + assert!( + expected_artifact.is_some(), + "Unable to find local artifact metadata for {}", + element_name + ); + let expected_artifact = expected_artifact.unwrap(); + + let full_uri = get_and_check_metadata_uri(element_name, &resource.metadata_uri); + check_ipfs_metadata(client, element_name, &full_uri, expected_artifact).await; +} diff --git a/crates/sozo/ops/src/tests/mod.rs b/crates/sozo/ops/src/tests/mod.rs index 25bdba5697..f754ddc5a6 100644 --- a/crates/sozo/ops/src/tests/mod.rs +++ b/crates/sozo/ops/src/tests/mod.rs @@ -1,4 +1,5 @@ pub mod auth; pub mod call; +pub mod migration; pub mod setup; pub mod utils; diff --git a/crates/sozo/ops/src/tests/setup.rs b/crates/sozo/ops/src/tests/setup.rs index 47eb424524..c55be7c1f4 100644 --- a/crates/sozo/ops/src/tests/setup.rs +++ b/crates/sozo/ops/src/tests/setup.rs @@ -3,7 +3,9 @@ use dojo_test_utils::compiler::build_test_config; use dojo_test_utils::migration::prepare_migration; use dojo_test_utils::sequencer::TestSequencer; use dojo_world::contracts::world::WorldContract; +use dojo_world::migration::strategy::MigrationStrategy; use dojo_world::migration::TxConfig; +use scarb::core::{Config, Workspace}; use scarb::ops; use starknet::accounts::SingleOwnerAccount; use starknet::core::types::{BlockId, BlockTag}; @@ -13,8 +15,47 @@ use starknet::signers::LocalWallet; use crate::migration; +/// Load the spawn-and-moves project configuration. +/// +/// # Returns +/// +/// A [`Config`] object loaded from the spawn-and-moves Scarb.toml file. +pub fn load_config() -> Config { + build_test_config("../../../examples/spawn-and-move/Scarb.toml") + .unwrap_or_else(|c| panic!("Error loading config: {c:?}")) +} + +/// Setups the workspace for the spawn-and-moves project. +/// +/// # Arguments +/// * `config` - the project configuration. +/// +/// # Returns +/// +/// A [`Workspace`] loaded from the spawn-and-moves project. +pub fn setup_ws(config: &Config) -> Workspace<'_> { + ops::read_workspace(config.manifest_path(), config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")) +} + +/// Prepare the migration for the spawn-and-moves project. +/// +/// # Returns +/// +/// A [`MigrationStrategy`] to execute to migrate the full spawn-and-moves project. +pub fn setup_migration() -> Result { + let base_dir = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base_dir); + + prepare_migration(base_dir.into(), target_dir.into()) +} + /// Setups the project by migrating the full spawn-and-moves project. /// +/// # Arguments +/// +/// * `sequencer` - The sequencer used for tests. 
+/// /// # Returns /// /// A [`WorldContract`] initialized with the migrator account, @@ -22,13 +63,10 @@ use crate::migration; pub async fn setup( sequencer: &TestSequencer, ) -> Result, LocalWallet>>> { - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml")?; - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base_dir = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base_dir); + let config = load_config(); + let ws = setup_ws(&config); - let mut migration = prepare_migration(base_dir.into(), target_dir.into())?; + let mut migration = setup_migration()?; let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); diff --git a/crates/torii/graphql/src/tests/metadata_test.rs b/crates/torii/graphql/src/tests/metadata_test.rs index 01914abfb7..c834ea1d3c 100644 --- a/crates/torii/graphql/src/tests/metadata_test.rs +++ b/crates/torii/graphql/src/tests/metadata_test.rs @@ -1,6 +1,6 @@ #[cfg(test)] mod tests { - use dojo_world::metadata::Metadata as DojoMetadata; + use dojo_world::metadata::{project_to_world_metadata, ProjectMetadata}; use sqlx::SqlitePool; use starknet_crypto::FieldElement; use torii_core::sql::Sql; @@ -51,7 +51,7 @@ mod tests { let schema = build_schema(&pool).await.unwrap(); let cover_img = "QWxsIHlvdXIgYmFzZSBiZWxvbmcgdG8gdXM="; - let dojo_metadata: DojoMetadata = toml::from_str( + let project_metadata: ProjectMetadata = toml::from_str( r#" [world] name = "example" @@ -62,7 +62,7 @@ mod tests { "#, ) .unwrap(); - let world_metadata = dojo_metadata.world.unwrap(); + let world_metadata = project_to_world_metadata(project_metadata.world); db.set_metadata(&RESOURCE, URI, BLOCK_TIMESTAMP); db.update_metadata(&RESOURCE, URI, &world_metadata, &None, &Some(cover_img.to_string())) .await diff --git a/crates/torii/libp2p/src/server/mod.rs b/crates/torii/libp2p/src/server/mod.rs index 36df964fed..8806c46040 100644 --- a/crates/torii/libp2p/src/server/mod.rs +++ b/crates/torii/libp2p/src/server/mod.rs @@ -630,7 +630,7 @@ async fn validate_message( } else { return Err(Error::InvalidMessageError("Model name is missing".to_string())); }; - let model_selector = get_selector_from_name(&model_name).map_err(|e| { + let model_selector = get_selector_from_name(model_name).map_err(|e| { Error::InvalidMessageError(format!("Failed to get selector from model name: {}", e)) })?; From 995dd8dbd33627ef847d1a374a951e71db72063c Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Sat, 13 Apr 2024 23:58:55 +0800 Subject: [PATCH 16/23] refactor(katana-primitives): remove total supply from fee token genesis config (#1821) * remove total supply from genesis config * add sanity check --- crates/katana/primitives/src/genesis/json.rs | 21 +------ crates/katana/primitives/src/genesis/mod.rs | 59 ++++++++++++-------- 2 files changed, 37 insertions(+), 43 deletions(-) diff --git a/crates/katana/primitives/src/genesis/json.rs b/crates/katana/primitives/src/genesis/json.rs index a50bc4d54a..5b62c72035 100644 --- a/crates/katana/primitives/src/genesis/json.rs +++ b/crates/katana/primitives/src/genesis/json.rs @@ -315,10 +315,9 @@ impl TryFrom for Genesis { }) .collect::>()?; - let mut fee_token = FeeTokenConfig { + let fee_token = FeeTokenConfig { name: value.fee_token.name, symbol: value.fee_token.symbol, - total_supply: U256::ZERO, decimals: value.fee_token.decimals, address: value.fee_token.address.unwrap_or(DEFAULT_FEE_TOKEN_ADDRESS), 
class_hash: value.fee_token.class.unwrap_or(DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH), @@ -414,11 +413,6 @@ impl TryFrom for Genesis { } }; - // increase the total supply of the fee token if balance is given - if let Some(balance) = account.balance { - fee_token.total_supply += balance; - } - match account.private_key { Some(private_key) => allocations.insert( address, @@ -456,11 +450,6 @@ impl TryFrom for Genesis { } } - // increase the total supply of the fee token if balance is given - if let Some(balance) = contract.balance { - fee_token.total_supply += balance; - } - allocations.insert( address, GenesisAllocation::Contract(GenesisContractAlloc { @@ -803,9 +792,6 @@ mod tests { address: ContractAddress::from(felt!("0x55")), name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0xD3C21BCECCEDA1000000") - .unwrap() - .wrapping_mul(U256::from(5)), decimals: 18, class_hash: felt!("0x8"), storage: Some(HashMap::from([ @@ -939,16 +925,12 @@ mod tests { assert_eq!(actual_genesis.timestamp, expected_genesis.timestamp); assert_eq!(actual_genesis.state_root, expected_genesis.state_root); assert_eq!(actual_genesis.gas_prices, expected_genesis.gas_prices); - assert_eq!(actual_genesis.fee_token.address, expected_genesis.fee_token.address); assert_eq!(actual_genesis.fee_token.name, expected_genesis.fee_token.name); assert_eq!(actual_genesis.fee_token.symbol, expected_genesis.fee_token.symbol); assert_eq!(actual_genesis.fee_token.decimals, expected_genesis.fee_token.decimals); - assert_eq!(actual_genesis.fee_token.total_supply, expected_genesis.fee_token.total_supply); assert_eq!(actual_genesis.fee_token.class_hash, expected_genesis.fee_token.class_hash); - assert_eq!(actual_genesis.universal_deployer, expected_genesis.universal_deployer); - assert_eq!(actual_genesis.allocations.len(), expected_genesis.allocations.len()); for alloc in actual_genesis.allocations { @@ -1030,7 +1012,6 @@ mod tests { address: DEFAULT_FEE_TOKEN_ADDRESS, name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0xD3C21BCECCEDA1000000").unwrap(), decimals: 18, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: None, diff --git a/crates/katana/primitives/src/genesis/mod.rs b/crates/katana/primitives/src/genesis/mod.rs index c1574f660d..5774e3407c 100644 --- a/crates/katana/primitives/src/genesis/mod.rs +++ b/crates/katana/primitives/src/genesis/mod.rs @@ -42,8 +42,6 @@ pub struct FeeTokenConfig { pub address: ContractAddress, /// The decimals of the fee token. pub decimals: u8, - /// The total supply of the fee token. - pub total_supply: U256, /// The class hash of the fee token contract. 
#[serde_as(as = "UfeHex")] pub class_hash: ClassHash, @@ -186,24 +184,13 @@ impl Genesis { states.state_updates.storage_updates.insert(address, storage); } - // TODO: put this in a separate function - // insert fee token related data let mut fee_token_storage = self.fee_token.storage.clone().unwrap_or_default(); - - let name: FieldElement = cairo_short_string_to_felt(&self.fee_token.name).unwrap(); - let symbol: FieldElement = cairo_short_string_to_felt(&self.fee_token.symbol).unwrap(); - let decimals: FieldElement = self.fee_token.decimals.into(); - let (total_supply_low, total_supply_high) = split_u256(self.fee_token.total_supply); - - fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); - fee_token_storage.insert(ERC20_SYMBOL_STORAGE_SLOT, symbol); - fee_token_storage.insert(ERC20_DECIMAL_STORAGE_SLOT, decimals); - fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT, total_supply_low); - fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into(), total_supply_high); + let mut fee_token_total_supply = U256::ZERO; for (address, alloc) in &self.allocations { if let Some(balance) = alloc.balance() { + fee_token_total_supply += balance; let (low, high) = split_u256(balance); // the base storage address for a standard ERC20 contract balance @@ -219,6 +206,19 @@ impl Genesis { } } + // TODO: put this in a separate function + + let name: FieldElement = cairo_short_string_to_felt(&self.fee_token.name).unwrap(); + let symbol: FieldElement = cairo_short_string_to_felt(&self.fee_token.symbol).unwrap(); + let decimals: FieldElement = self.fee_token.decimals.into(); + let (total_supply_low, total_supply_high) = split_u256(fee_token_total_supply); + + fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); + fee_token_storage.insert(ERC20_SYMBOL_STORAGE_SLOT, symbol); + fee_token_storage.insert(ERC20_DECIMAL_STORAGE_SLOT, decimals); + fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT, total_supply_low); + fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into(), total_supply_high); + states .state_updates .contract_updates @@ -246,7 +246,6 @@ impl Default for Genesis { decimals: 18, name: "Ether".into(), symbol: "ETH".into(), - total_supply: U256::ZERO, address: DEFAULT_FEE_TOKEN_ADDRESS, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: None, @@ -352,7 +351,6 @@ mod tests { address: DEFAULT_FEE_TOKEN_ADDRESS, name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0x1a784379d99db42000000").unwrap(), decimals: 18, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: Some(HashMap::from([ @@ -420,12 +418,16 @@ mod tests { universal_deployer: Some(ud.clone()), }; - // setup expected values + // setup expected storage values let name: FieldElement = cairo_short_string_to_felt(&fee_token.name).unwrap(); let symbol: FieldElement = cairo_short_string_to_felt(&fee_token.symbol).unwrap(); let decimals: FieldElement = fee_token.decimals.into(); - let (total_supply_low, total_supply_high) = split_u256(fee_token.total_supply); + + // there are only two allocations so the total token supply is + // 0xD3C21BCECCEDA1000000 * 2 = 0x1a784379d99db42000000 + let (total_supply_low, total_supply_high) = + split_u256(U256::from_str("0x1a784379d99db42000000").unwrap()); let mut fee_token_storage = HashMap::new(); fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); @@ -649,7 +651,7 @@ mod tests { assert_eq!(fee_token_storage.get(&felt!("0x111")), Some(&felt!("0x1"))); assert_eq!(fee_token_storage.get(&felt!("0x222")), 
Some(&felt!("0x2"))); - let mut actual_total_supply = U256::ZERO; + let mut allocs_total_supply = U256::ZERO; // check for balance for (address, alloc) in &allocations { @@ -667,13 +669,24 @@ mod tests { assert_eq!(fee_token_storage.get(&low_bal_storage_var), Some(&low)); assert_eq!(fee_token_storage.get(&high_bal_storage_var), Some(&high)); - actual_total_supply += balance; + allocs_total_supply += balance; } } + // Check that the total supply is the sum of all balances in the allocations. + // Technically this is not necessary bcs we already checked the total supply in + // the fee token storage but it's a good sanity check. + + let (actual_total_supply_low, actual_total_supply_high) = split_u256(allocs_total_supply); assert_eq!( - actual_total_supply, fee_token.total_supply, - "total supply should match the total balances of all allocations" + fee_token_storage.get(&ERC20_TOTAL_SUPPLY_STORAGE_SLOT), + Some(&actual_total_supply_low), + "total supply must be calculated from allocations balances correctly" + ); + assert_eq!( + fee_token_storage.get(&(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into())), + Some(&actual_total_supply_high), + "total supply must be calculated from allocations balances correctly" ); let udc_storage = From 371c121fea32430ded6429eed42b1c67a76b8336 Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Sun, 14 Apr 2024 19:22:49 +0530 Subject: [PATCH 17/23] refactor: clean up bin/sozo (#1828) * refactor: move verify_cairo_version_compatibility to utils module * refactor: move around more things to appropriate places * refactor: make bin/sozo more consistent * fix: paths in test --- bin/sozo/src/args.rs | 61 +----- bin/sozo/src/commands/auth.rs | 106 +++++----- bin/sozo/src/commands/build.rs | 2 +- bin/sozo/src/commands/clean.rs | 30 +-- bin/sozo/src/commands/completions.rs | 2 +- bin/sozo/src/commands/dev.rs | 202 +++++++++---------- bin/sozo/src/commands/events.rs | 4 +- bin/sozo/src/commands/init.rs | 2 +- bin/sozo/src/commands/migrate.rs | 118 +++++------ bin/sozo/src/commands/mod.rs | 48 ++++- bin/sozo/src/commands/model.rs | 12 +- bin/sozo/src/commands/options/account.rs | 4 +- bin/sozo/src/commands/options/transaction.rs | 2 +- bin/sozo/src/commands/test.rs | 2 +- bin/sozo/src/main.rs | 34 +--- bin/sozo/src/utils.rs | 44 +++- crates/benches/src/deployer.rs | 3 +- 17 files changed, 341 insertions(+), 335 deletions(-) diff --git a/bin/sozo/src/args.rs b/bin/sozo/src/args.rs index 4dbc54003d..142d1ee90f 100644 --- a/bin/sozo/src/args.rs +++ b/bin/sozo/src/args.rs @@ -1,38 +1,14 @@ use anyhow::Result; use camino::Utf8PathBuf; -use clap::{Parser, Subcommand}; +use clap::Parser; use scarb::compiler::Profile; use scarb_ui::Verbosity; use smol_str::SmolStr; use tracing::level_filters::LevelFilter; use tracing_log::AsTrace; -use crate::commands::auth::AuthArgs; -use crate::commands::build::BuildArgs; -use crate::commands::call::CallArgs; -use crate::commands::clean::CleanArgs; -use crate::commands::completions::CompletionsArgs; -use crate::commands::dev::DevArgs; -use crate::commands::events::EventsArgs; -use crate::commands::execute::ExecuteArgs; -use crate::commands::init::InitArgs; -use crate::commands::migrate::MigrateArgs; -use crate::commands::model::ModelArgs; -use crate::commands::register::RegisterArgs; -use crate::commands::test::TestArgs; - -fn generate_version() -> String { - const DOJO_VERSION: &str = env!("CARGO_PKG_VERSION"); - let scarb_version = scarb::version::get().version; - let scarb_sierra_version = 
scarb::version::get().sierra.version; - let scarb_cairo_version = scarb::version::get().cairo.version; - - let version_string = format!( - "{}\nscarb: {}\ncairo: {}\nsierra: {}", - DOJO_VERSION, scarb_version, scarb_cairo_version, scarb_sierra_version, - ); - version_string -} +use crate::commands::Commands; +use crate::utils::generate_version; #[derive(Parser)] #[command(author, version=generate_version(), about, long_about = None)] @@ -63,37 +39,6 @@ pub struct SozoArgs { pub command: Commands, } -#[derive(Subcommand)] -pub enum Commands { - #[command(about = "Build the world, generating the necessary artifacts for deployment")] - Build(BuildArgs), - #[command(about = "Initialize a new project")] - Init(InitArgs), - #[command(about = "Remove generated artifacts, manifests and abis")] - Clean(CleanArgs), - #[command(about = "Run a migration, declaring and deploying contracts as necessary to \ - update the world")] - Migrate(Box), - #[command(about = "Developer mode: watcher for building and migration")] - Dev(DevArgs), - #[command(about = "Test the project's smart contracts")] - Test(TestArgs), - #[command(about = "Execute a world's system")] - Execute(ExecuteArgs), - #[command(about = "Call a world's system")] - Call(CallArgs), - #[command(about = "Interact with a worlds models")] - Model(ModelArgs), - #[command(about = "Register new models")] - Register(RegisterArgs), - #[command(about = "Queries world events")] - Events(EventsArgs), - #[command(about = "Manage world authorization")] - Auth(AuthArgs), - #[command(about = "Generate shell completion file for specified shell")] - Completions(CompletionsArgs), -} - impl SozoArgs { pub fn ui_verbosity(&self) -> Verbosity { let filter = self.verbose.log_level_filter().as_trace(); diff --git a/bin/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs index cca991be77..434e8c0313 100644 --- a/bin/sozo/src/commands/auth.rs +++ b/bin/sozo/src/commands/auth.rs @@ -16,6 +16,59 @@ pub struct AuthArgs { pub command: AuthCommand, } +#[derive(Debug, Subcommand)] +pub enum AuthCommand { + #[command(about = "Grant an auth role.")] + Grant { + #[command(subcommand)] + kind: AuthKind, + + #[command(flatten)] + world: WorldOptions, + + #[command(flatten)] + starknet: StarknetOptions, + + #[command(flatten)] + account: AccountOptions, + + #[command(flatten)] + transaction: TransactionOptions, + }, + #[command(about = "Revoke an auth role.")] + Revoke { + #[command(subcommand)] + kind: AuthKind, + + #[command(flatten)] + world: WorldOptions, + + #[command(flatten)] + starknet: StarknetOptions, + + #[command(flatten)] + account: AccountOptions, + + #[command(flatten)] + transaction: TransactionOptions, + }, +} + +impl AuthArgs { + pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = utils::load_metadata_from_config(config)?; + + match self.command { + AuthCommand::Grant { kind, world, starknet, account, transaction } => config + .tokio_handle() + .block_on(grant(world, account, starknet, env_metadata, kind, transaction)), + AuthCommand::Revoke { kind, world, starknet, account, transaction } => config + .tokio_handle() + .block_on(revoke(world, account, starknet, env_metadata, kind, transaction)), + } + } +} + #[derive(Debug, Subcommand)] pub enum AuthKind { #[command(about = "Grant a contract permission to write to a model.")] @@ -82,59 +135,6 @@ pub async fn revoke( } } -#[derive(Debug, Subcommand)] -pub enum AuthCommand { - #[command(about = "Grant an auth role.")] - Grant { - #[command(subcommand)] - kind: AuthKind, - - 
#[command(flatten)] - world: WorldOptions, - - #[command(flatten)] - starknet: StarknetOptions, - - #[command(flatten)] - account: AccountOptions, - - #[command(flatten)] - transaction: TransactionOptions, - }, - #[command(about = "Revoke an auth role.")] - Revoke { - #[command(subcommand)] - kind: AuthKind, - - #[command(flatten)] - world: WorldOptions, - - #[command(flatten)] - starknet: StarknetOptions, - - #[command(flatten)] - account: AccountOptions, - - #[command(flatten)] - transaction: TransactionOptions, - }, -} - -impl AuthArgs { - pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = utils::load_metadata_from_config(config)?; - - match self.command { - AuthCommand::Grant { kind, world, starknet, account, transaction } => config - .tokio_handle() - .block_on(grant(world, account, starknet, env_metadata, kind, transaction)), - AuthCommand::Revoke { kind, world, starknet, account, transaction } => config - .tokio_handle() - .block_on(revoke(world, account, starknet, env_metadata, kind, transaction)), - } - } -} - #[cfg(test)] mod tests { use std::str::FromStr; diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs index bb45c1f6c4..f3347fd449 100644 --- a/bin/sozo/src/commands/build.rs +++ b/bin/sozo/src/commands/build.rs @@ -5,7 +5,7 @@ use dojo_lang::scarb_internal::compile_workspace; use scarb::core::{Config, TargetKind}; use scarb::ops::CompileOpts; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct BuildArgs { #[arg(long)] #[arg(help = "Generate Typescript bindings.")] diff --git a/bin/sozo/src/commands/clean.rs b/bin/sozo/src/commands/clean.rs index 42129aed84..6ed05e1837 100644 --- a/bin/sozo/src/commands/clean.rs +++ b/bin/sozo/src/commands/clean.rs @@ -20,21 +20,6 @@ pub struct CleanArgs { } impl CleanArgs { - pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf, profile_name: &str) -> Result<()> { - let dirs = vec![ - root_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), - root_dir.join(MANIFESTS_DIR).join(profile_name).join(ABIS_DIR).join(BASE_DIR), - ]; - - for d in dirs { - if d.exists() { - fs::remove_dir_all(d)?; - } - } - - Ok(()) - } - pub fn run(self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; @@ -55,4 +40,19 @@ impl CleanArgs { Ok(()) } + + pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf, profile_name: &str) -> Result<()> { + let dirs = vec![ + root_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + root_dir.join(MANIFESTS_DIR).join(profile_name).join(ABIS_DIR).join(BASE_DIR), + ]; + + for d in dirs { + if d.exists() { + fs::remove_dir_all(d)?; + } + } + + Ok(()) + } } diff --git a/bin/sozo/src/commands/completions.rs b/bin/sozo/src/commands/completions.rs index 65c99ac2bb..1f71098822 100644 --- a/bin/sozo/src/commands/completions.rs +++ b/bin/sozo/src/commands/completions.rs @@ -6,7 +6,7 @@ use clap_complete::{generate, Shell}; use crate::args::SozoArgs; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct CompletionsArgs { shell: Shell, } diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index f92eda68d4..47fd5f5ba5 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -31,7 +31,7 @@ use super::options::world::WorldOptions; pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::dev"; -#[derive(Args)] +#[derive(Debug, Args)] pub struct DevArgs { #[arg(long)] #[arg(help = "Name of the World.")] @@ -49,6 +49,106 @@ pub struct DevArgs { pub account: AccountOptions, } 
+impl DevArgs { + pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = if config.manifest_path().exists() { + let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + + dojo_metadata_from_workspace(&ws).env().cloned() + } else { + None + }; + + let mut context = load_context(config)?; + let (tx, rx) = channel(); + let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?; + + debouncer.watcher().watch( + config.manifest_path().parent().unwrap().as_std_path(), + RecursiveMode::Recursive, + )?; + let name = self.name.clone(); + let mut previous_manifest: Option = Option::None; + let result = build(&mut context); + + let Some((mut world_address, account, _, _)) = context + .ws + .config() + .tokio_handle() + .block_on(setup_env( + &context.ws, + self.account, + self.starknet, + self.world, + name.as_ref(), + env_metadata.as_ref(), + )) + .ok() + else { + return Err(anyhow!("Failed to setup environment")); + }; + + match context.ws.config().tokio_handle().block_on(migrate( + world_address, + &account, + name.clone(), + &context.ws, + previous_manifest.clone(), + )) { + Ok((manifest, address)) => { + previous_manifest = Some(manifest); + world_address = address; + } + Err(error) => { + error!( + target: LOG_TARGET, + error = ?error, + address = ?world_address, + "Migrating world." + ); + } + } + loop { + let action = match rx.recv() { + Ok(Ok(events)) => events + .iter() + .map(|event| process_event(event, &mut context)) + .last() + .unwrap_or(DevAction::None), + Ok(Err(_)) => DevAction::None, + Err(error) => { + error!(target: LOG_TARGET, error = ?error, "Receiving dev action."); + break; + } + }; + + if action != DevAction::None && build(&mut context).is_ok() { + match context.ws.config().tokio_handle().block_on(migrate( + world_address, + &account, + name.clone(), + &context.ws, + previous_manifest.clone(), + )) { + Ok((manifest, address)) => { + previous_manifest = Some(manifest); + world_address = address; + } + Err(error) => { + error!( + target: LOG_TARGET, + error = ?error, + address = ?world_address, + "Migrating world.", + ); + } + } + } + } + result + } +} + #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] enum DevAction { None, @@ -193,103 +293,3 @@ fn handle_reload_action(context: &mut DevContext<'_>) { let new_context = load_context(config).expect("Failed to load context"); let _ = mem::replace(context, new_context); } - -impl DevArgs { - pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = if config.manifest_path().exists() { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - dojo_metadata_from_workspace(&ws).env().cloned() - } else { - None - }; - - let mut context = load_context(config)?; - let (tx, rx) = channel(); - let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?; - - debouncer.watcher().watch( - config.manifest_path().parent().unwrap().as_std_path(), - RecursiveMode::Recursive, - )?; - let name = self.name.clone(); - let mut previous_manifest: Option = Option::None; - let result = build(&mut context); - - let Some((mut world_address, account, _, _)) = context - .ws - .config() - .tokio_handle() - .block_on(setup_env( - &context.ws, - self.account, - self.starknet, - self.world, - name.as_ref(), - env_metadata.as_ref(), - )) - .ok() - else { - return Err(anyhow!("Failed to setup environment")); - }; - - match context.ws.config().tokio_handle().block_on(migrate( - world_address, - &account, - name.clone(), - &context.ws, - previous_manifest.clone(), - )) { - 
Ok((manifest, address)) => { - previous_manifest = Some(manifest); - world_address = address; - } - Err(error) => { - error!( - target: LOG_TARGET, - error = ?error, - address = ?world_address, - "Migrating world." - ); - } - } - loop { - let action = match rx.recv() { - Ok(Ok(events)) => events - .iter() - .map(|event| process_event(event, &mut context)) - .last() - .unwrap_or(DevAction::None), - Ok(Err(_)) => DevAction::None, - Err(error) => { - error!(target: LOG_TARGET, error = ?error, "Receiving dev action."); - break; - } - }; - - if action != DevAction::None && build(&mut context).is_ok() { - match context.ws.config().tokio_handle().block_on(migrate( - world_address, - &account, - name.clone(), - &context.ws, - previous_manifest.clone(), - )) { - Ok((manifest, address)) => { - previous_manifest = Some(manifest); - world_address = address; - } - Err(error) => { - error!( - target: LOG_TARGET, - error = ?error, - address = ?world_address, - "Migrating world.", - ); - } - } - } - } - result - } -} diff --git a/bin/sozo/src/commands/events.rs b/bin/sozo/src/commands/events.rs index cd1aac2e13..d08a3a74d3 100644 --- a/bin/sozo/src/commands/events.rs +++ b/bin/sozo/src/commands/events.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use clap::Parser; +use clap::Args; use scarb::core::Config; use sozo_ops::events; @@ -7,7 +7,7 @@ use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; use crate::utils; -#[derive(Parser, Debug)] +#[derive(Debug, Args)] pub struct EventsArgs { #[arg(help = "List of specific events to be filtered")] #[arg(value_delimiter = ',')] diff --git a/bin/sozo/src/commands/init.rs b/bin/sozo/src/commands/init.rs index 7fffbbb2e9..f37bc70036 100644 --- a/bin/sozo/src/commands/init.rs +++ b/bin/sozo/src/commands/init.rs @@ -7,7 +7,7 @@ use anyhow::{ensure, Result}; use clap::Args; use scarb::core::Config; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct InitArgs { #[arg(help = "Target directory")] path: Option, diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index fc56cdee0e..3046c26b60 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -65,65 +65,6 @@ pub enum MigrateCommand { }, } -pub async fn setup_env<'a>( - ws: &'a Workspace<'a>, - account: AccountOptions, - starknet: StarknetOptions, - world: WorldOptions, - name: Option<&'a String>, - env: Option<&'a Environment>, -) -> Result<( - Option, - SingleOwnerAccount, LocalWallet>, - String, - String, -)> { - let ui = ws.config().ui(); - - let world_address = world.address(env).ok(); - - let (account, chain_id, rpc_url) = { - let provider = starknet.provider(env)?; - - let spec_version = provider.spec_version().await?; - - if spec_version != RPC_SPEC_VERSION { - return Err(anyhow!( - "Unsupported Starknet RPC version: {}, expected {}.", - spec_version, - RPC_SPEC_VERSION - )); - } - - let rpc_url = starknet.url(env)?; - - let chain_id = provider.chain_id().await?; - let chain_id = parse_cairo_short_string(&chain_id) - .with_context(|| "Cannot parse chain_id as string")?; - - let mut account = account.account(provider, env).await?; - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - let address = account.address(); - - ui.print(format!("\nMigration account: {address:#x}")); - if let Some(name) = name { - ui.print(format!("\nWorld name: {name}\n")); - } - - match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { - Ok(_) => Ok((account, chain_id, rpc_url)), - 
Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { - Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) - } - Err(e) => Err(e.into()), - } - } - .with_context(|| "Problem initializing account for migration.")?; - - Ok((world_address, account, chain_id, rpc_url.to_string())) -} - impl MigrateArgs { pub fn run(self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; @@ -207,3 +148,62 @@ impl MigrateArgs { } } } + +pub async fn setup_env<'a>( + ws: &'a Workspace<'a>, + account: AccountOptions, + starknet: StarknetOptions, + world: WorldOptions, + name: Option<&'a String>, + env: Option<&'a Environment>, +) -> Result<( + Option, + SingleOwnerAccount, LocalWallet>, + String, + String, +)> { + let ui = ws.config().ui(); + + let world_address = world.address(env).ok(); + + let (account, chain_id, rpc_url) = { + let provider = starknet.provider(env)?; + + let spec_version = provider.spec_version().await?; + + if spec_version != RPC_SPEC_VERSION { + return Err(anyhow!( + "Unsupported Starknet RPC version: {}, expected {}.", + spec_version, + RPC_SPEC_VERSION + )); + } + + let rpc_url = starknet.url(env)?; + + let chain_id = provider.chain_id().await?; + let chain_id = parse_cairo_short_string(&chain_id) + .with_context(|| "Cannot parse chain_id as string")?; + + let mut account = account.account(provider, env).await?; + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let address = account.address(); + + ui.print(format!("\nMigration account: {address:#x}")); + if let Some(name) = name { + ui.print(format!("\nWorld name: {name}\n")); + } + + match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { + Ok(_) => Ok((account, chain_id, rpc_url)), + Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { + Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) + } + Err(e) => Err(e.into()), + } + } + .with_context(|| "Problem initializing account for migration.")?; + + Ok((world_address, account, chain_id, rpc_url.to_string())) +} diff --git a/bin/sozo/src/commands/mod.rs b/bin/sozo/src/commands/mod.rs index d0f563bce4..b7b2d53b64 100644 --- a/bin/sozo/src/commands/mod.rs +++ b/bin/sozo/src/commands/mod.rs @@ -1,8 +1,7 @@ use anyhow::Result; +use clap::{command, Subcommand}; use scarb::core::Config; -use crate::args::Commands; - pub(crate) mod auth; pub(crate) mod build; pub(crate) mod call; @@ -18,6 +17,51 @@ pub(crate) mod options; pub(crate) mod register; pub(crate) mod test; +use auth::AuthArgs; +use build::BuildArgs; +use call::CallArgs; +use clean::CleanArgs; +use completions::CompletionsArgs; +use dev::DevArgs; +use events::EventsArgs; +use execute::ExecuteArgs; +use init::InitArgs; +use migrate::MigrateArgs; +use model::ModelArgs; +use register::RegisterArgs; +use test::TestArgs; + +#[derive(Subcommand)] +pub enum Commands { + #[command(about = "Build the world, generating the necessary artifacts for deployment")] + Build(BuildArgs), + #[command(about = "Initialize a new project")] + Init(InitArgs), + #[command(about = "Remove generated artifacts, manifests and abis")] + Clean(CleanArgs), + #[command(about = "Run a migration, declaring and deploying contracts as necessary to \ + update the world")] + Migrate(Box), + #[command(about = "Developer mode: watcher for building and migration")] + Dev(DevArgs), + #[command(about = "Test the project's smart contracts")] + Test(TestArgs), + #[command(about = "Execute a world's 
system")] + Execute(ExecuteArgs), + #[command(about = "Call a world's system")] + Call(CallArgs), + #[command(about = "Interact with a worlds models")] + Model(ModelArgs), + #[command(about = "Register new models")] + Register(RegisterArgs), + #[command(about = "Queries world events")] + Events(EventsArgs), + #[command(about = "Manage world authorization")] + Auth(AuthArgs), + #[command(about = "Generate shell completion file for specified shell")] + Completions(CompletionsArgs), +} + pub fn run(command: Commands, config: &Config) -> Result<()> { match command { Commands::Init(args) => args.run(config), diff --git a/bin/sozo/src/commands/model.rs b/bin/sozo/src/commands/model.rs index 1c69bbe3fe..e5e0aae244 100644 --- a/bin/sozo/src/commands/model.rs +++ b/bin/sozo/src/commands/model.rs @@ -11,11 +11,11 @@ use crate::utils; #[derive(Debug, Args)] pub struct ModelArgs { #[command(subcommand)] - command: ModelCommands, + command: ModelCommand, } #[derive(Debug, Subcommand)] -pub enum ModelCommands { +pub enum ModelCommand { #[command(about = "Retrieve the class hash of a model")] ClassHash { #[arg(help = "The name of the model")] @@ -80,22 +80,22 @@ impl ModelArgs { config.tokio_handle().block_on(async { match self.command { - ModelCommands::ClassHash { name, starknet, world } => { + ModelCommand::ClassHash { name, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_class_hash(name, world_address, provider).await } - ModelCommands::ContractAddress { name, starknet, world } => { + ModelCommand::ContractAddress { name, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_contract_address(name, world_address, provider).await } - ModelCommands::Schema { name, to_json, starknet, world } => { + ModelCommand::Schema { name, to_json, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_schema(name, world_address, provider, to_json).await } - ModelCommands::Get { name, keys, starknet, world } => { + ModelCommand::Get { name, keys, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_get(name, keys, world_address, provider).await diff --git a/bin/sozo/src/commands/options/account.rs b/bin/sozo/src/commands/options/account.rs index 1538e8f106..bf05c5ec11 100644 --- a/bin/sozo/src/commands/options/account.rs +++ b/bin/sozo/src/commands/options/account.rs @@ -13,13 +13,13 @@ use super::{ DOJO_PRIVATE_KEY_ENV_VAR, }; -#[derive(Debug, Args)] -#[command(next_help_heading = "Account options")] // INVARIANT: // - For commandline: we can either specify `private_key` or `keystore_path` along with // `keystore_password`. This is enforced by Clap. 
// - For `Scarb.toml`: if both private_key and keystore are specified in `Scarb.toml` private_key // will take priority +#[derive(Debug, Args)] +#[command(next_help_heading = "Account options")] pub struct AccountOptions { #[arg(long, env = DOJO_ACCOUNT_ADDRESS_ENV_VAR)] pub account_address: Option, diff --git a/bin/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs index 7901c6ca6a..a54076cd44 100644 --- a/bin/sozo/src/commands/options/transaction.rs +++ b/bin/sozo/src/commands/options/transaction.rs @@ -1,7 +1,7 @@ use clap::Args; use dojo_world::migration::TxConfig; -#[derive(Debug, Args, Clone)] +#[derive(Debug, Args)] #[command(next_help_heading = "Transaction options")] pub struct TransactionOptions { #[arg(long)] diff --git a/bin/sozo/src/commands/test.rs b/bin/sozo/src/commands/test.rs index d64dde856d..9ca645c72c 100644 --- a/bin/sozo/src/commands/test.rs +++ b/bin/sozo/src/commands/test.rs @@ -21,7 +21,7 @@ use tracing::trace; pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::test"; /// Execute all unit tests of a local package. -#[derive(Args, Clone)] +#[derive(Debug, Args)] pub struct TestArgs { /// The filter for the tests, running only tests containing the filter string. #[arg(short, long, default_value_t = String::default())] diff --git a/bin/sozo/src/main.rs b/bin/sozo/src/main.rs index 060a9d0cf9..18da01da73 100644 --- a/bin/sozo/src/main.rs +++ b/bin/sozo/src/main.rs @@ -1,17 +1,16 @@ use std::env; use std::process::exit; -use std::str::FromStr; use anyhow::Result; -use args::{Commands, SozoArgs}; -use camino::Utf8PathBuf; +use args::SozoArgs; use clap::Parser; use dojo_lang::compiler::DojoCompiler; use dojo_lang::plugin::CairoPluginRepository; use scarb::compiler::CompilerRepository; -use scarb::core::{Config, TomlManifest}; +use scarb::core::Config; use scarb_ui::{OutputFormat, Ui}; -use semver::Version; + +use crate::commands::Commands; mod args; mod commands; @@ -41,7 +40,7 @@ fn cli_main(args: SozoArgs) -> Result<()> { let manifest_path = scarb::ops::find_manifest_path(args.manifest_path.as_deref())?; - verify_cairo_version_compatibility(&manifest_path)?; + utils::verify_cairo_version_compatibility(&manifest_path)?; let config = Config::builder(manifest_path) .log_filter_directive(env::var_os("SCARB_LOG")) @@ -54,26 +53,3 @@ fn cli_main(args: SozoArgs) -> Result<()> { commands::run(args.command, &config) } - -fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { - let scarb_cairo_version = scarb::version::get().cairo; - // When manifest file doesn't exists ignore it. Would be the case during `sozo init` - let Ok(manifest) = TomlManifest::read_from_path(manifest_path) else { return Ok(()) }; - - // For any kind of error, like package not specified, cairo version not specified return - // without an error - let Some(package) = manifest.package else { return Ok(()) }; - - let Some(cairo_version) = package.cairo_version else { return Ok(()) }; - - // only when cairo version is found in manifest file confirm that it matches - let version_req = cairo_version.as_defined().unwrap(); - let version = Version::from_str(scarb_cairo_version.version).unwrap(); - if !version_req.matches(&version) { - anyhow::bail!( - "Specified cairo version not supported by dojo. Please verify and update dojo." 
- ); - }; - - Ok(()) -} diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs index 7dbbfe28fd..d277c8d910 100644 --- a/bin/sozo/src/utils.rs +++ b/bin/sozo/src/utils.rs @@ -1,8 +1,12 @@ -use anyhow::Error; +use std::str::FromStr; + +use anyhow::{Error, Result}; +use camino::Utf8PathBuf; use dojo_world::contracts::world::WorldContract; use dojo_world::contracts::WorldContractReader; use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; -use scarb::core::Config; +use scarb::core::{Config, TomlManifest}; +use semver::Version; use starknet::accounts::SingleOwnerAccount; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; @@ -79,3 +83,39 @@ pub async fn world_reader_from_env_metadata( Ok(WorldContractReader::new(world_address, provider)) } + +pub fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { + let scarb_cairo_version = scarb::version::get().cairo; + // When manifest file doesn't exists ignore it. Would be the case during `sozo init` + let Ok(manifest) = TomlManifest::read_from_path(manifest_path) else { return Ok(()) }; + + // For any kind of error, like package not specified, cairo version not specified return + // without an error + let Some(package) = manifest.package else { return Ok(()) }; + + let Some(cairo_version) = package.cairo_version else { return Ok(()) }; + + // only when cairo version is found in manifest file confirm that it matches + let version_req = cairo_version.as_defined().unwrap(); + let version = Version::from_str(scarb_cairo_version.version).unwrap(); + if !version_req.matches(&version) { + anyhow::bail!( + "Specified cairo version not supported by dojo. Please verify and update dojo." + ); + }; + + Ok(()) +} + +pub fn generate_version() -> String { + const DOJO_VERSION: &str = env!("CARGO_PKG_VERSION"); + let scarb_version = scarb::version::get().version; + let scarb_sierra_version = scarb::version::get().sierra.version; + let scarb_cairo_version = scarb::version::get().cairo.version; + + let version_string = format!( + "{}\nscarb: {}\ncairo: {}\nsierra: {}", + DOJO_VERSION, scarb_version, scarb_cairo_version, scarb_sierra_version, + ); + version_string +} diff --git a/crates/benches/src/deployer.rs b/crates/benches/src/deployer.rs index 663d54e73a..abb0251c07 100644 --- a/crates/benches/src/deployer.rs +++ b/crates/benches/src/deployer.rs @@ -10,7 +10,8 @@ use futures::executor::block_on; use katana_runner::KatanaRunner; use scarb::compiler::CompilerRepository; use scarb::core::Config; -use sozo::args::{Commands, SozoArgs}; +use sozo::args::SozoArgs; +use sozo::commands::Commands; use starknet::core::types::FieldElement; use tokio::process::Command; From 0227d3759961acf0c71ed4bd077c53ad9e5d454d Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 15 Apr 2024 05:41:39 +0200 Subject: [PATCH 18/23] Derive `Default` for `GenesisJson` (#1830) derive Default for GenesisJson --- crates/katana/primitives/src/genesis/json.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/katana/primitives/src/genesis/json.rs b/crates/katana/primitives/src/genesis/json.rs index 5b62c72035..84a97e101e 100644 --- a/crates/katana/primitives/src/genesis/json.rs +++ b/crates/katana/primitives/src/genesis/json.rs @@ -99,7 +99,7 @@ pub struct GenesisClassJson { pub class_hash: Option, } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] 
#[serde(rename_all = "camelCase")] pub struct FeeTokenConfigJson { pub name: String, @@ -199,7 +199,7 @@ pub enum GenesisJsonError { /// (eg, using `serde_json`). /// /// The path of the class artifact are computed **relative** to the JSON file. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] #[serde(rename_all = "camelCase")] pub struct GenesisJson { pub parent_hash: BlockHash, From 017c0d14286512ec195e6b1f0cfff2cd18d4385b Mon Sep 17 00:00:00 2001 From: RareSecond Date: Mon, 15 Apr 2024 07:19:08 +0200 Subject: [PATCH 19/23] Add new TypeScript bindgen (#1783) * feat: added new bindgen with all types and utility functions * feat: removed bunch of extracted functions and extracted models to function * refactor: moved generating to main function to intertwine models and contracts * feat: generated system calls * cleanup: removed old contract generation code * feat: finished bindgen * refactor: renamed TypescriptNew to TypeScriptV2 * Updated flag naming to typescript_v2 * Fixed some build and layout issues * Fixed empty line in models * Renamed modules as well * Update crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs Co-authored-by: glihm * Fixed formatting * Updated return values * Added query function to class * Added test for the generated file * Removed full file test for now until DojoData is sorted * Added comment * fix: move mocks to test_data --------- Co-authored-by: glihm --- bin/sozo/src/commands/build.rs | 16 +- crates/dojo-bindgen/src/lib.rs | 2 + crates/dojo-bindgen/src/plugins/mod.rs | 3 + .../src/plugins/typescript_v2/mod.rs | 622 ++++++++++++++++++ .../src/test_data/mocks/dojo_examples.ts | 297 +++++++++ 5 files changed, 938 insertions(+), 2 deletions(-) create mode 100644 crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs create mode 100644 crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs index f3347fd449..f56ba04e51 100644 --- a/bin/sozo/src/commands/build.rs +++ b/bin/sozo/src/commands/build.rs @@ -11,6 +11,10 @@ pub struct BuildArgs { #[arg(help = "Generate Typescript bindings.")] pub typescript: bool, + #[arg(long)] + #[arg(help = "Generate Typescript bindings.")] + pub typescript_v2: bool, + #[arg(long)] #[arg(help = "Generate Unity bindings.")] pub unity: bool, @@ -32,6 +36,10 @@ impl BuildArgs { builtin_plugins.push(BuiltinPlugins::Typescript); } + if self.typescript_v2 { + builtin_plugins.push(BuiltinPlugins::TypeScriptV2); + } + if self.unity { builtin_plugins.push(BuiltinPlugins::Unity); } @@ -67,8 +75,12 @@ mod tests { fn build_example_with_typescript_and_unity_bindings() { let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); - let build_args = - BuildArgs { bindings_output: "generated".to_string(), typescript: true, unity: true }; + let build_args = BuildArgs { + bindings_output: "generated".to_string(), + typescript: true, + unity: true, + typescript_v2: true, + }; let result = build_args.run(&config); assert!(result.is_ok()); } diff --git a/crates/dojo-bindgen/src/lib.rs b/crates/dojo-bindgen/src/lib.rs index 627dc1961c..ca8e24cd79 100644 --- a/crates/dojo-bindgen/src/lib.rs +++ b/crates/dojo-bindgen/src/lib.rs @@ -12,6 +12,7 @@ use error::{BindgenResult, Error}; mod plugins; use plugins::typescript::TypescriptPlugin; +use plugins::typescript_v2::TypeScriptV2Plugin; use plugins::unity::UnityPlugin; use plugins::BuiltinPlugin; pub use 
plugins::BuiltinPlugins; @@ -85,6 +86,7 @@ impl PluginManager { let builder: Box = match plugin { BuiltinPlugins::Typescript => Box::new(TypescriptPlugin::new()), BuiltinPlugins::Unity => Box::new(UnityPlugin::new()), + BuiltinPlugins::TypeScriptV2 => Box::new(TypeScriptV2Plugin::new()), }; let files = builder.generate_code(&data).await?; diff --git a/crates/dojo-bindgen/src/plugins/mod.rs b/crates/dojo-bindgen/src/plugins/mod.rs index ab6abbcb8b..b603262e44 100644 --- a/crates/dojo-bindgen/src/plugins/mod.rs +++ b/crates/dojo-bindgen/src/plugins/mod.rs @@ -8,12 +8,14 @@ use crate::error::BindgenResult; use crate::DojoData; pub mod typescript; +pub mod typescript_v2; pub mod unity; #[derive(Debug)] pub enum BuiltinPlugins { Typescript, Unity, + TypeScriptV2, } impl fmt::Display for BuiltinPlugins { @@ -21,6 +23,7 @@ impl fmt::Display for BuiltinPlugins { match self { BuiltinPlugins::Typescript => write!(f, "typescript"), BuiltinPlugins::Unity => write!(f, "unity"), + BuiltinPlugins::TypeScriptV2 => write!(f, "typescript_v2"), } } } diff --git a/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs b/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs new file mode 100644 index 0000000000..b23549aa89 --- /dev/null +++ b/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs @@ -0,0 +1,622 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use async_trait::async_trait; +use cainome::parser::tokens::{Composite, CompositeType, Function}; +use convert_case::Casing; + +use crate::error::BindgenResult; +use crate::plugins::BuiltinPlugin; +use crate::{DojoContract, DojoData, DojoModel}; + +pub struct TypeScriptV2Plugin {} + +impl TypeScriptV2Plugin { + pub fn new() -> Self { + Self {} + } + + // Maps cairo types to TypeScript defined types + fn map_type(type_name: &str) -> String { + match type_name { + "bool" => "boolean".to_string(), + "u8" => "number".to_string(), + "u16" => "number".to_string(), + "u32" => "number".to_string(), + "u64" => "bigint".to_string(), + "u128" => "bigint".to_string(), + "u256" => "bigint".to_string(), + "usize" => "number".to_string(), + "felt252" => "string".to_string(), + "ClassHash" => "string".to_string(), + "ContractAddress" => "string".to_string(), + + _ => type_name.to_string(), + } + } + + fn generate_header() -> String { + format!( + "// Generated by dojo-bindgen on {}. Do not modify this file manually.\n", + chrono::Utc::now().to_rfc2822() + ) + } + + fn generate_imports() -> String { + "import { Account } from \"starknet\"; +import { + Clause, + Client, + ModelClause, + createClient, + valueToToriiValueAndOperator, +} from \"@dojoengine/torii-client\"; +import { LOCAL_KATANA, createManifestFromJson } from \"@dojoengine/core\";" + .to_string() + } + + fn generate_query_types(models: &[&DojoModel]) -> String { + let mut query_fields = Vec::new(); + let mut result_mapping = Vec::new(); + + for model in models { + query_fields + .push(format!("{model_name}: ModelClause<{model_name}>;", model_name = model.name)); + + result_mapping.push(format!("{model_name}: {model_name};", model_name = model.name)); + } + + format!( + "type Query = Partial<{{ + {query_fields} +}}>; + +type ResultMapping = {{ + {result_mapping} +}}; + +type QueryResult = {{ + [K in keyof T]: K extends keyof ResultMapping ? ResultMapping[K] : never; +}}; + +// Only supports a single model for now, since torii doesn't support multiple models +// And inside that single model, there's only support for a single query. 
+function convertQueryToToriiClause(query: Query): Clause | undefined {{ + const [model, clause] = Object.entries(query)[0]; + + if (Object.keys(clause).length === 0) {{ + return undefined; + }} + + const clauses: Clause[] = Object.entries(clause).map(([key, value]) => {{ + return {{ + Member: {{ + model, + member: key, + ...valueToToriiValueAndOperator(value), + }}, + }} satisfies Clause; + }}); + + return clauses[0]; +}}", + query_fields = query_fields.join("\n "), + result_mapping = result_mapping.join("\n "), + ) + } + + fn generate_model_types(models: &[&DojoModel], handled_tokens: &mut Vec) -> String { + let mut out = String::new(); + + for model in models { + let tokens = &model.tokens; + + for token in &tokens.enums { + handled_tokens.push(token.to_composite().unwrap().to_owned()); + } + for token in &tokens.structs { + handled_tokens.push(token.to_composite().unwrap().to_owned()); + } + + let mut structs = tokens.structs.to_owned(); + structs.sort_by(|a, b| { + if a.to_composite() + .unwrap() + .inners + .iter() + .any(|field| field.token.type_name() == b.type_name()) + { + std::cmp::Ordering::Greater + } else { + std::cmp::Ordering::Less + } + }); + + for token in &structs { + out += TypeScriptV2Plugin::format_struct( + token.to_composite().unwrap(), + handled_tokens, + ) + .as_str(); + } + + for token in &tokens.enums { + out += TypeScriptV2Plugin::format_enum(token.to_composite().unwrap()).as_str(); + } + + out += "\n"; + } + + out + } + + fn generate_base_calls_class() -> String { + "class BaseCalls { + contractAddress: string; + account?: Account; + + constructor(contractAddress: string, account?: Account) { + this.account = account; + this.contractAddress = contractAddress; + } + + async execute(entrypoint: string, calldata: any[] = []): Promise { + if (!this.account) { + throw new Error(\"No account set to interact with dojo_starter\"); + } + + await this.account.execute( + { + contractAddress: this.contractAddress, + entrypoint, + calldata, + }, + undefined, + { + maxFee: 0, + } + ); + } +} +" + .to_string() + } + + fn generate_contracts(contracts: &[&DojoContract], handled_tokens: &[Composite]) -> String { + let mut out = String::new(); + + for contract in contracts { + let systems = contract + .systems + .iter() + .map(|system| { + TypeScriptV2Plugin::format_system(system.to_function().unwrap(), handled_tokens) + }) + .collect::>() + .join("\n\n "); + + out += &format!( + "class {}Calls extends BaseCalls {{ + constructor(contractAddress: string, account?: Account) {{ + super(contractAddress, account); + }} + + {} +}} +", + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal), + systems, + ); + } + + out + } + + fn generate_initial_params(contracts: &[&DojoContract]) -> String { + let system_addresses = contracts + .iter() + .map(|contract| { + format!( + "{}Address: string;", + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel) + ) + }) + .collect::>() + .join("\n "); + + format!( + "type InitialParams = GeneralParams & + ( + | {{ + rpcUrl: string; + worldAddress: string; + {system_addresses} + }} + | {{ + manifest: any; + }} + );" + ) + } + + fn generate_world_class(world_name: &String, contracts: &[&DojoContract]) -> String { + let mut out = String::new(); + + out += "type GeneralParams = { + toriiUrl: string; + relayUrl: string; + account?: Account; +};"; + + out += "\n\n"; + + out += TypeScriptV2Plugin::generate_initial_params(contracts).as_str(); + + out 
+= "\n\n"; + + let system_properties = contracts + .iter() + .map(|contract| { + format!( + "{camel_case_name}: {pascal_case_name}Calls; + {camel_case_name}Address: string;", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + pascal_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal) + ) + }) + .collect::>() + .join("\n "); + + let system_address_initializations = contracts + .iter() + .map(|contract| { + format!( + "const {contract_name}Address = config.contracts.find( + (contract) => + contract.name === \"dojo_starter::systems::{contract_name}::{contract_name}\" + )?.address; + + if (!{contract_name}Address) {{ + throw new Error(\"No {contract_name} contract found in the manifest\"); + }} + + this.{contract_name}Address = {contract_name}Address;", + contract_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel) + ) + }) + .collect::>() + .join("\n "); + + let system_address_initializations_from_params = contracts + .iter() + .map(|contract| { + format!( + "this.{camel_case_name}Address = params.{camel_case_name}Address;", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + ) + }) + .collect::>() + .join("\n "); + + let system_initializations = contracts + .iter() + .map(|contract| { + format!( + "this.{camel_case_name} = new \ + {pascal_case_name}Calls(this.{camel_case_name}Address, this._account);", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + pascal_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal) + ) + }) + .collect::>() + .join("\n "); + + let formatted_world_name = world_name.to_case(convert_case::Case::Pascal); + + out += &format!( + "export class {formatted_world_name} {{ + rpcUrl: string; + toriiUrl: string; + toriiPromise: Promise; + relayUrl: string; + worldAddress: string; + private _account?: Account; + {system_properties} + + constructor(params: InitialParams) {{ + this.rpcUrl = LOCAL_KATANA; + if (\"manifest\" in params) {{ + const config = createManifestFromJson(params.manifest); + this.worldAddress = config.world.address; + + {system_address_initializations} + }} else {{ + this.rpcUrl = params.rpcUrl; + this.worldAddress = params.worldAddress; + {system_address_initializations_from_params} + }} + this.toriiUrl = params.toriiUrl; + this.relayUrl = params.relayUrl; + this._account = params.account; + {system_initializations} + + this.toriiPromise = createClient([], {{ + rpcUrl: this.rpcUrl, + toriiUrl: this.toriiUrl, + worldAddress: this.worldAddress, + relayUrl: this.relayUrl, + }}); + }} + + get account(): Account | undefined {{ + return this._account; + }} + + set account(account: Account) {{ + this._account = account; + {system_initializations} + }} + + async query(query: T, limit = 10, offset = 0) {{ + const torii = await this.toriiPromise; + + return {{ + torii, + findEntities: async () => this.findEntities(query, limit, offset), + }}; + }} + + async findEntities(query: T, limit = 10, offset = 0) {{ + const torii = await this.toriiPromise; + + const clause = convertQueryToToriiClause(query); + + const toriiResult = await torii.getEntities({{ + limit, + offset, + clause, + }}); + + return toriiResult as Record>; + }} + 
+ async findEntity(query: T) {{ + const result = await this.findEntities(query, 1); + + if (Object.values(result).length === 0) {{ + return undefined; + }} + + return Object.values(result)[0] as QueryResult; + }} +}}" + ); + + out + } + + // Token should be a struct + // This will be formatted into a TypeScript interface + // using TypeScript defined types + fn format_struct(token: &Composite, handled_tokens: &[Composite]) -> String { + let mut native_fields: Vec = Vec::new(); + + for field in &token.inners { + let mapped = TypeScriptV2Plugin::map_type(field.token.type_name().as_str()); + if mapped == field.token.type_name() { + let token = handled_tokens + .iter() + .find(|t| t.type_name() == field.token.type_name()) + .unwrap_or_else(|| panic!("Token not found: {}", field.token.type_name())); + if token.r#type == CompositeType::Enum { + native_fields.push(format!("{}: {};", field.name, mapped)); + } else { + native_fields.push(format!("{}: {};", field.name, field.token.type_name())); + } + } else { + native_fields.push(format!("{}: {};", field.name, mapped)); + } + } + + format!( + " +// Type definition for `{path}` struct +export interface {name} {{ + {native_fields} +}} +", + path = token.type_path, + name = token.type_name(), + native_fields = native_fields.join("\n ") + ) + } + + // Token should be an enum + // This will be formatted into a C# enum + // Enum is mapped using index of cairo enum + fn format_enum(token: &Composite) -> String { + let fields = token + .inners + .iter() + .map(|field| format!("{},", field.name,)) + .collect::>() + .join("\n "); + + format!( + " +// Type definition for `{}` enum +export enum {} {{ + {} +}} +", + token.type_path, + token.type_name(), + fields + ) + } + + // Formats a system into a JS method used by the contract class + // Handled tokens should be a list of all structs and enums used by the contract + // Such as a set of referenced tokens from a model + fn format_system(system: &Function, handled_tokens: &[Composite]) -> String { + let args = system + .inputs + .iter() + .map(|arg| { + format!( + "{}: {}", + arg.0, + if TypeScriptV2Plugin::map_type(&arg.1.type_name()) == arg.1.type_name() { + arg.1.type_name() + } else { + TypeScriptV2Plugin::map_type(&arg.1.type_name()) + } + ) + }) + .collect::>() + .join(", "); + + let calldata = system + .inputs + .iter() + .map(|arg| { + let token = &arg.1; + let type_name = &arg.0; + + match handled_tokens.iter().find(|t| t.type_name() == token.type_name()) { + Some(t) => { + // Need to flatten the struct members. + match t.r#type { + CompositeType::Struct => t + .inners + .iter() + .map(|field| format!("props.{}.{}", type_name, field.name)) + .collect::>() + .join(",\n "), + _ => type_name.to_string(), + } + } + None => type_name.to_string(), + } + }) + .collect::>() + .join(",\n "); + + format!( + "async {pretty_system_name}({args}): Promise {{ + try {{ + await this.execute(\"{system_name}\", [{calldata}]) + }} catch (error) {{ + console.error(\"Error executing {pretty_system_name}:\", error); + throw error; + }} + }}", + pretty_system_name = system.name.to_case(convert_case::Case::Camel), + // formatted args to use our mapped types + args = args, + system_name = system.name, + // calldata for execute + calldata = calldata + ) + } + + // Formats a contract file path into a pretty contract name + // eg. 
dojo_examples::actions::actions.json -> Actions + fn formatted_contract_name(contract_file_name: &str) -> String { + let contract_name = + contract_file_name.split("::").last().unwrap().trim_end_matches(".json"); + contract_name.to_string() + } + + fn generate_code_content(data: &DojoData) -> String { + let mut handled_tokens = Vec::::new(); + let models = data.models.values().collect::>(); + let contracts = data.contracts.values().collect::>(); + + let mut code = String::new(); + code += TypeScriptV2Plugin::generate_header().as_str(); + code += TypeScriptV2Plugin::generate_imports().as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_model_types(models.as_slice(), &mut handled_tokens) + .as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_base_calls_class().as_str(); + code += "\n"; + code += + TypeScriptV2Plugin::generate_contracts(contracts.as_slice(), &handled_tokens).as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_query_types(models.as_slice()).as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_world_class(&data.world.name, contracts.as_slice()) + .as_str(); + + code + } +} + +#[async_trait] +impl BuiltinPlugin for TypeScriptV2Plugin { + async fn generate_code(&self, data: &DojoData) -> BindgenResult>> { + let code: String = TypeScriptV2Plugin::generate_code_content(data); + + let mut out: HashMap> = HashMap::new(); + let output_path = Path::new(&format!("{}.ts", data.world.name)).to_owned(); + + out.insert(output_path, code.as_bytes().to_vec()); + + Ok(out) + } +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::io::Read; + + use camino::Utf8PathBuf; + + use super::*; + use crate::gather_dojo_data; + + #[test] + fn test_output() { + let mut expected_output = String::new(); + let mut file = + fs::File::open("src/test_data/mocks/dojo_examples.ts").expect("file not found"); + file.read_to_string(&mut expected_output).expect("error reading file"); + + let expected_output_without_header = + expected_output.lines().skip(1).collect::>().join("\n"); + + let data = gather_dojo_data( + &Utf8PathBuf::from("src/test_data/spawn-and-move/Scarb.toml"), + "dojo_examples", + "dev", + ) + .unwrap(); + + let actual_output = TypeScriptV2Plugin::generate_code_content(&data); + let actual_output_without_header = + actual_output.lines().skip(1).collect::>().join("\n"); + + // This test currently is very naive, but DojoData is unsorted, so the output + // can change between tests. This is a temporary solution until we have a better + // way to test this. + assert_eq!(actual_output_without_header.len(), 7479); + assert_eq!(expected_output_without_header.len(), 7479); + } +} diff --git a/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts b/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts new file mode 100644 index 0000000000..49e3805bf7 --- /dev/null +++ b/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts @@ -0,0 +1,297 @@ +// Generated by dojo-bindgen on Fri, 12 Apr 2024 13:23:24 +0000. Do not modify this file manually. 
+import { Account } from "starknet"; +import { + Clause, + Client, + ModelClause, + createClient, + valueToToriiValueAndOperator, +} from "@dojoengine/torii-client"; +import { LOCAL_KATANA, createManifestFromJson } from "@dojoengine/core"; + +// Type definition for `dojo_examples::actions::actions::Moved` struct +export interface Moved { + player: string; + direction: Direction; +} + +// Type definition for `dojo_examples::models::Direction` enum +export enum Direction { + None, + Left, + Right, + Up, + Down, +} + + +// Type definition for `dojo_examples::models::Vec2` struct +export interface Vec2 { + x: number; + y: number; +} + +// Type definition for `dojo_examples::models::Position` struct +export interface Position { + player: string; + vec: Vec2; +} + + +// Type definition for `dojo_examples::models::Moves` struct +export interface Moves { + player: string; + remaining: number; + last_direction: Direction; +} + +// Type definition for `dojo_examples::models::Direction` enum +export enum Direction { + None, + Left, + Right, + Up, + Down, +} + + +// Type definition for `dojo_examples::models::EmoteMessage` struct +export interface EmoteMessage { + identity: string; + emote: Emote; +} + +// Type definition for `dojo_examples::models::Emote` enum +export enum Emote { + None, + Happy, + Sad, + Angry, + Love, +} + + +class BaseCalls { + contractAddress: string; + account?: Account; + + constructor(contractAddress: string, account?: Account) { + this.account = account; + this.contractAddress = contractAddress; + } + + async execute(entrypoint: string, calldata: any[] = []): Promise { + if (!this.account) { + throw new Error("No account set to interact with dojo_starter"); + } + + await this.account.execute( + { + contractAddress: this.contractAddress, + entrypoint, + calldata, + }, + undefined, + { + maxFee: 0, + } + ); + } +} + +class ActionsCalls extends BaseCalls { + constructor(contractAddress: string, account?: Account) { + super(contractAddress, account); + } + + async tileTerrain(vec: Vec2): Promise { + try { + await this.execute("tile_terrain", [props.vec.x, + props.vec.y]) + } catch (error) { + console.error("Error executing tileTerrain:", error); + throw error; + } + } + + async quadrant(pos: Position): Promise { + try { + await this.execute("quadrant", [props.pos.player, + props.pos.vec]) + } catch (error) { + console.error("Error executing quadrant:", error); + throw error; + } + } + + async dojoResource(): Promise { + try { + await this.execute("dojo_resource", []) + } catch (error) { + console.error("Error executing dojoResource:", error); + throw error; + } + } + + async spawn(): Promise { + try { + await this.execute("spawn", []) + } catch (error) { + console.error("Error executing spawn:", error); + throw error; + } + } + + async move(direction: Direction): Promise { + try { + await this.execute("move", [direction]) + } catch (error) { + console.error("Error executing move:", error); + throw error; + } + } +} + +type Query = Partial<{ + Moved: ModelClause; + Position: ModelClause; + Moves: ModelClause; + EmoteMessage: ModelClause; +}>; + +type ResultMapping = { + Moved: Moved; + Position: Position; + Moves: Moves; + EmoteMessage: EmoteMessage; +}; + +type QueryResult = { + [K in keyof T]: K extends keyof ResultMapping ? ResultMapping[K] : never; +}; + +// Only supports a single model for now, since torii doesn't support multiple models +// And inside that single model, there's only support for a single query. 
+function convertQueryToToriiClause(query: Query): Clause | undefined { + const [model, clause] = Object.entries(query)[0]; + + if (Object.keys(clause).length === 0) { + return undefined; + } + + const clauses: Clause[] = Object.entries(clause).map(([key, value]) => { + return { + Member: { + model, + member: key, + ...valueToToriiValueAndOperator(value), + }, + } satisfies Clause; + }); + + return clauses[0]; +} +type GeneralParams = { + toriiUrl: string; + relayUrl: string; + account?: Account; +}; + +type InitialParams = GeneralParams & + ( + | { + rpcUrl: string; + worldAddress: string; + actionsAddress: string; + } + | { + manifest: any; + } + ); + +export class DojoExamples { + rpcUrl: string; + toriiUrl: string; + toriiPromise: Promise; + relayUrl: string; + worldAddress: string; + private _account?: Account; + actions: ActionsCalls; + actionsAddress: string; + + constructor(params: InitialParams) { + this.rpcUrl = LOCAL_KATANA; + if ("manifest" in params) { + const config = createManifestFromJson(params.manifest); + this.worldAddress = config.world.address; + + const actionsAddress = config.contracts.find( + (contract) => + contract.name === "dojo_starter::systems::actions::actions" + )?.address; + + if (!actionsAddress) { + throw new Error("No actions contract found in the manifest"); + } + + this.actionsAddress = actionsAddress; + } else { + this.rpcUrl = params.rpcUrl; + this.worldAddress = params.worldAddress; + this.actionsAddress = params.actionsAddress; + } + this.toriiUrl = params.toriiUrl; + this.relayUrl = params.relayUrl; + this._account = params.account; + this.actions = new ActionsCalls(this.actionsAddress, this._account); + + this.toriiPromise = createClient([], { + rpcUrl: this.rpcUrl, + toriiUrl: this.toriiUrl, + worldAddress: this.worldAddress, + relayUrl: this.relayUrl, + }); + } + + get account(): Account | undefined { + return this._account; + } + + set account(account: Account) { + this._account = account; + this.actions = new ActionsCalls(this.actionsAddress, this._account); + } + + async query(query: T, limit = 10, offset = 0) { + const torii = await this.toriiPromise; + + return { + torii, + findEntities: async () => this.findEntities(query, limit, offset), + }; + } + + async findEntities(query: T, limit = 10, offset = 0) { + const torii = await this.toriiPromise; + + const clause = convertQueryToToriiClause(query); + + const toriiResult = await torii.getEntities({ + limit, + offset, + clause, + }); + + return toriiResult as Record>; + } + + async findEntity(query: T) { + const result = await this.findEntities(query, 1); + + if (Object.values(result).length === 0) { + return undefined; + } + + return Object.values(result)[0] as QueryResult; + } +} \ No newline at end of file From de25042cb003866a0a4dcb519ba2d32994f868b6 Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Mon, 15 Apr 2024 16:14:15 +0530 Subject: [PATCH 20/23] fix: calculate address of all contracts during migration (#1822) --- crates/sozo/ops/src/migration/mod.rs | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 74f7c6499e..0373b5c1a1 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -220,19 +220,17 @@ async fn update_manifests_and_abis( .find(|c| c.name == output.name) .expect("contract got migrated, means it should be present here"); - let salt = generate_salt(&local.name); - local.inner.address = 
Some(get_contract_address( - salt, - output.base_class_hash, - &[], - migration_output.world_address, - )); - local.inner.base_class_hash = output.base_class_hash; } }); } + local_manifest.contracts.iter_mut().for_each(|contract| { + let salt = generate_salt(&contract.name); + contract.inner.address = + Some(get_contract_address(salt, contract.inner.base_class_hash, &[], world_address)); + }); + // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in // local_manifest update_manifest_abis(&mut local_manifest, profile_dir, profile_name).await; @@ -923,7 +921,6 @@ where /// # Returns /// A [`ResourceData`] object to register in the Dojo resource register /// on success. -/// async fn upload_on_ipfs_and_create_resource( ui: &Ui, element_name: String, From cedc5136e98d0d146e97ff01ce3848f5225adf26 Mon Sep 17 00:00:00 2001 From: glihm Date: Mon, 15 Apr 2024 09:46:21 -0600 Subject: [PATCH 21/23] fix: rework clean command to be more intuitive and propose a clean all (#1784) * fix: rework clean command to be more intuitive and propose a clean all * fix: ensure profile directory is the one being cleaned up * fix: add comment to explain safe unwrap for manifest dir * fix: add missing migrate param * fix: reword comment for clarity * fix: remove duplicate help for clean command * fix: use katana runner for testing --- bin/sozo/src/commands/clean.rs | 129 ++++++++++++---- crates/dojo-test-utils/src/compiler.rs | 196 ++++++++++++++++++++++++- crates/sozo/ops/src/migration/mod.rs | 1 + 3 files changed, 297 insertions(+), 29 deletions(-) diff --git a/bin/sozo/src/commands/clean.rs b/bin/sozo/src/commands/clean.rs index 6ed05e1837..e8f4993d97 100644 --- a/bin/sozo/src/commands/clean.rs +++ b/bin/sozo/src/commands/clean.rs @@ -9,50 +9,125 @@ use scarb::core::Config; #[derive(Debug, Args)] pub struct CleanArgs { #[arg(short, long)] - #[arg(help = "Remove manifests and abis only.")] - #[arg(long_help = "Remove manifests and abis only.")] - pub manifests_abis: bool, - - #[arg(short, long)] - #[arg(help = "Remove artifacts only.")] - #[arg(long_help = "Remove artifacts only.")] - pub artifacts: bool, + #[arg(help = "Removes all the generated files, including scarb artifacts and ALL the \ + manifests files.")] + pub all: bool, } impl CleanArgs { + /// Cleans the manifests and abis files that are generated at build time. + /// + /// # Arguments + /// + /// * `profile_dir` - The directory where the profile files are located. + pub fn clean_manifests(&self, profile_dir: &Utf8PathBuf) -> Result<()> { + let dirs = vec![profile_dir.join(BASE_DIR), profile_dir.join(ABIS_DIR).join(BASE_DIR)]; + + for d in dirs { + if d.exists() { + fs::remove_dir_all(d)?; + } + } + + Ok(()) + } + pub fn run(self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; let profile_name = ws.current_profile().expect("Scarb profile is expected at this point.").to_string(); - let clean_manifests_abis = self.manifests_abis || !self.artifacts; - let clean_artifacts = self.artifacts || !self.manifests_abis; + // Manifest path is always a file, we can unwrap safely to get the + // parent folder. 
+ let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); - if clean_manifests_abis { - let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); - self.clean_manifests_abis(&manifest_dir, &profile_name)?; - } + let profile_dir = manifest_dir.join(MANIFESTS_DIR).join(profile_name); + + // By default, this command cleans the build manifests and scarb artifacts. + scarb::ops::clean(config)?; + self.clean_manifests(&profile_dir)?; - if clean_artifacts { - scarb::ops::clean(config)?; + if self.all && profile_dir.exists() { + fs::remove_dir_all(profile_dir)?; } Ok(()) } +} - pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf, profile_name: &str) -> Result<()> { - let dirs = vec![ - root_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), - root_dir.join(MANIFESTS_DIR).join(profile_name).join(ABIS_DIR).join(BASE_DIR), - ]; +#[cfg(test)] +mod tests { + use dojo_test_utils::compiler; + use katana_runner::KatanaRunner; + use sozo_ops::migration; - for d in dirs { - if d.exists() { - fs::remove_dir_all(d)?; - } - } + use super::*; - Ok(()) + #[test] + fn test_clean() { + let source_project = "../../examples/spawn-and-move/Scarb.toml"; + + // Build a completely new project in it's own directory. + let (temp_project_dir, config, _) = compiler::copy_build_project_temp(source_project, true); + + let runner = KatanaRunner::new().expect("Fail to set runner"); + + let ws = scarb::ops::read_workspace(config.manifest_path(), &config).unwrap(); + + // Plan the migration to generate some manifests other than base. + config.tokio_handle().block_on(async { + migration::migrate( + &ws, + None, + "chain_id".to_string(), + runner.endpoint(), + &runner.account(0), + Some("dojo_examples".to_string()), + true, + None, + ) + .await + .unwrap() + }); + + let clean_cmd = CleanArgs { all: false }; + clean_cmd.run(&config).unwrap(); + + let profile_name = config.profile().to_string(); + + let target_dev_dir = temp_project_dir.join("target").join(&profile_name); + let profile_manifests_dir = temp_project_dir.join("manifests").join(&profile_name); + let manifests_dev_base_dir = profile_manifests_dir.join("base"); + let manifests_dev_abis_base_dir = profile_manifests_dir.join("abis").join("base"); + let manifests_dev_abis_depl_dir = profile_manifests_dir.join("abis").join("deployments"); + let manifest_toml = profile_manifests_dir.join("manifest").with_extension("toml"); + let manifest_json = profile_manifests_dir.join("manifest").with_extension("json"); + + assert!(fs::read_dir(&target_dev_dir).is_err(), "Expected 'target/dev' to be empty"); + assert!( + fs::read_dir(&manifests_dev_base_dir).is_err(), + "Expected 'manifests/dev/base' to be empty" + ); + assert!( + fs::read_dir(&manifests_dev_abis_base_dir).is_err(), + "Expected 'manifests/dev/abis/base' to be empty" + ); + assert!( + fs::read_dir(&manifests_dev_abis_depl_dir).is_ok(), + "Expected 'manifests/dev/abis/deployments' to not be empty" + ); + assert!(manifest_toml.exists(), "Expected 'manifest.toml' to exist"); + assert!(manifest_json.exists(), "Expected 'manifest.json' to exist"); + + let clean_cmd = CleanArgs { all: true }; + clean_cmd.run(&config).unwrap(); + + assert!( + fs::read_dir(&manifests_dev_abis_depl_dir).is_err(), + "Expected 'manifests/dev/abis/deployments' to be empty" + ); + assert!(!manifest_toml.exists(), "Expected 'manifest.toml' to not exist"); + assert!(!manifest_json.exists(), "Expected 'manifest.json' to not exist"); } } diff --git a/crates/dojo-test-utils/src/compiler.rs 
b/crates/dojo-test-utils/src/compiler.rs index 335c86f63c..5496241e76 100644 --- a/crates/dojo-test-utils/src/compiler.rs +++ b/crates/dojo-test-utils/src/compiler.rs @@ -1,14 +1,145 @@ -use std::env; +use std::fs::File; +use std::io::{Read, Write}; use std::path::PathBuf; +use std::{env, fs, io}; use assert_fs::TempDir; use camino::{Utf8Path, Utf8PathBuf}; use dojo_lang::compiler::DojoCompiler; use dojo_lang::plugin::CairoPluginRepository; +use dojo_lang::scarb_internal::{compile_workspace, CompileInfo}; use scarb::compiler::CompilerRepository; -use scarb::core::Config; +use scarb::core::{Config, TargetKind}; use scarb::ops; +use scarb::ops::CompileOpts; use scarb_ui::Verbosity; +use toml::{Table, Value}; + +/// Copies a project to a new location, excluding the manifests +/// and target directories, build the temporary project and +/// return the temporary project directory. +/// +/// # Arguments +/// +/// * `source_project_path` - The path to the source project to copy and build at the temporary +/// location. +/// * `do_build` - Whether to build the temporary project. Only use this if you want to build the +/// project again to re-generate all the artifacts. This is a slow operation on the CI (~70s), use +/// it wisely. +pub fn copy_build_project_temp( + source_project_path: &str, + do_build: bool, +) -> (Utf8PathBuf, Config, Option) { + let source_project_dir = Utf8PathBuf::from(source_project_path).parent().unwrap().to_path_buf(); + + let temp_project_dir = Utf8PathBuf::from( + assert_fs::TempDir::new().unwrap().to_path_buf().to_string_lossy().to_string(), + ); + + let temp_project_path = temp_project_dir.join(&"Scarb").with_extension("toml").to_string(); + + copy_project_temp(&source_project_dir, &temp_project_dir).unwrap(); + + let config = build_test_config_default(&temp_project_path).unwrap(); + + let compile_info = if do_build { + Some( + compile_workspace( + &config, + CompileOpts { include_targets: vec![], exclude_targets: vec![TargetKind::TEST] }, + ) + .unwrap(), + ) + } else { + None + }; + + (temp_project_dir, config, compile_info) +} + +/// Copies a project to a new location, excluding the manifests and target directories. +/// +/// # Arguments +/// +/// * `source_dir` - The source directory to copy from. +pub fn copy_project_temp( + source_dir: &Utf8PathBuf, + destination_dir: &Utf8PathBuf, +) -> io::Result<()> { + let ignore_dirs = vec!["manifests", "target"]; + + if !destination_dir.exists() { + fs::create_dir_all(&destination_dir)?; + } + + for entry in fs::read_dir(&source_dir)? { + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + let dir_name = match entry.file_name().into_string() { + Ok(name) => name, + Err(_) => continue, // Skip directories/files with non-UTF8 names + }; + + if ignore_dirs.contains(&dir_name.as_str()) { + continue; // Skip ignored directories + } + + copy_project_temp( + &Utf8PathBuf::from_path_buf(path).unwrap(), + &destination_dir.join(dir_name), + )?; + } else { + let file_name = entry.file_name().to_string_lossy().to_string(); + let dest_path = destination_dir.join(&file_name); + + // Replace in the Scarb.toml the path of dojo crate with the + // absolute path. 
+ if file_name == "Scarb.toml" { + let mut contents = String::new(); + File::open(&path) + .and_then(|mut file| file.read_to_string(&mut contents)) + .expect(&format!("Failed to read {file_name}")); + + let mut table = contents.parse::().expect("Failed to parse Scab.toml"); + + let dojo = table["dependencies"]["dojo"].as_table_mut().unwrap(); + + let absolute_path = Value::String( + fs::canonicalize(Utf8PathBuf::from(dojo["path"].as_str().unwrap())) + .unwrap() + .to_string_lossy() + .to_string(), + ); + + dojo["path"] = absolute_path; + + File::create(&dest_path) + .and_then(|mut file| file.write_all(table.to_string().as_bytes())) + .expect("Failed to write to Scab.toml"); + } else { + fs::copy(path, dest_path)?; + } + } + } + + Ok(()) +} + +pub fn build_test_config_default(path: &str) -> anyhow::Result { + let mut compilers = CompilerRepository::empty(); + compilers.add(Box::new(DojoCompiler)).unwrap(); + + let cairo_plugins = CairoPluginRepository::default(); + + let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); + Config::builder(path.canonicalize_utf8().unwrap()) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() +} pub fn build_test_config(path: &str) -> anyhow::Result { build_full_test_config(path, true) @@ -59,3 +190,64 @@ pub fn corelib() -> PathBuf { .source_root() .into() } + +#[cfg(test)] +mod tests { + use std::fs::{self, File}; + use std::io::Write; + + use assert_fs::TempDir; + + use super::*; + + #[test] + fn test_copy_project() { + let temp_dir = TempDir::new().unwrap(); + let project_dir = temp_dir.path().join("project"); + let dest_dir = temp_dir.path().join("dest"); + + fs::create_dir(&project_dir).unwrap(); + fs::create_dir(&dest_dir).unwrap(); + + // Create a file in the project directory + let file_path = project_dir.join("file.txt"); + let mut file = File::create(&file_path).unwrap(); + writeln!(file, "Hello, world!").unwrap(); + + // Create a subdirectory with a file in the project directory + let sub_dir = project_dir.join("subdir"); + fs::create_dir(&sub_dir).unwrap(); + let sub_file_path = sub_dir.join("subfile.txt"); + let mut sub_file = File::create(&sub_file_path).unwrap(); + writeln!(sub_file, "Hello, from subdir!").unwrap(); + + // Create a subdir that should be ignored + let ignored_sub_dir = project_dir.join("manifests"); + fs::create_dir(&ignored_sub_dir).unwrap(); + let ignored_sub_file_path = ignored_sub_dir.join("ignored_file.txt"); + let mut ignored_sub_file = File::create(&ignored_sub_file_path).unwrap(); + writeln!(ignored_sub_file, "This should be ignored!").unwrap(); + + // Perform the copy + copy_project_temp( + &Utf8PathBuf::from(&project_dir.to_string_lossy()), + &Utf8PathBuf::from(&dest_dir.to_string_lossy()), + ) + .unwrap(); + + // Check that the file exists in the destination directory + let dest_file_path = dest_dir.join("file.txt"); + assert!(dest_file_path.exists()); + + // Check that the subdirectory and its file exist in the destination directory + let dest_sub_dir = dest_dir.join("subdir"); + let dest_sub_file_path = dest_sub_dir.join("subfile.txt"); + let dest_ignored_sub_dir = dest_sub_dir.join("manifests"); + assert!(dest_sub_dir.exists()); + assert!(dest_sub_file_path.exists()); + assert!(!dest_ignored_sub_dir.exists()); + + // Clean up + temp_dir.close().unwrap(); + } +} diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 0373b5c1a1..d12e2f71ed 100644 --- 
a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -60,6 +60,7 @@ pub struct ContractMigrationOutput { base_class_hash: FieldElement, } +#[allow(clippy::too_many_arguments)] pub async fn migrate( ws: &Workspace<'_>, world_address: Option, From 634bf285d42d1e01065422a583cea474dd6473df Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Mon, 15 Apr 2024 21:16:34 +0530 Subject: [PATCH 22/23] fix: use `fee_estimate_multiplier` on more places (#1823) * fix: use fee estimate multipler in more places * add similar logic at more places * rename TxConfig to TxnConfig * abstract the logic with a trait * fix tests * fix formatting * add some documentation on type * update doc comment --- bin/sozo/src/commands/dev.rs | 3 +- bin/sozo/src/commands/execute.rs | 3 +- bin/sozo/src/commands/migrate.rs | 6 +- bin/sozo/src/commands/options/transaction.rs | 4 +- bin/sozo/tests/register_test.rs | 3 +- crates/dojo-world/src/contracts/world_test.rs | 12 ++-- crates/dojo-world/src/migration/mod.rs | 62 +++++++------------ crates/dojo-world/src/utils.rs | 61 +++++++++++++++++- crates/sozo/ops/src/auth.rs | 47 ++++++++------ crates/sozo/ops/src/execute.rs | 11 ++-- .../sozo/ops/src/migration/migration_test.rs | 20 +++--- crates/sozo/ops/src/migration/mod.rs | 44 ++++++------- crates/sozo/ops/src/register.rs | 22 +++---- crates/sozo/ops/src/tests/auth.rs | 24 ++++--- crates/sozo/ops/src/tests/migration.rs | 17 ++--- crates/sozo/ops/src/tests/setup.rs | 4 +- crates/torii/core/src/sql_test.rs | 3 +- crates/torii/graphql/src/tests/mod.rs | 3 +- .../grpc/src/server/tests/entities_test.rs | 3 +- 19 files changed, 204 insertions(+), 148 deletions(-) diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index 47fd5f5ba5..3aee618053 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -13,6 +13,7 @@ use dojo_lang::scarb_internal::build_scarb_root_database; use dojo_world::manifest::{BaseManifest, DeploymentManifest}; use dojo_world::metadata::dojo_metadata_from_workspace; use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::TxnConfig; use notify_debouncer_mini::notify::RecursiveMode; use notify_debouncer_mini::{new_debouncer, DebouncedEvent, DebouncedEventKind}; use scarb::compiler::CompilationUnit; @@ -246,7 +247,7 @@ where let ui = ws.config().ui(); let mut strategy = prepare_migration(&target_dir, diff, name, world_address, &ui)?; - match migration::apply_diff(ws, account, None, &mut strategy).await { + match migration::apply_diff(ws, account, TxnConfig::default(), &mut strategy).await { Ok(migration_output) => { config.ui().print(format!( "🎉 World at address {} updated!", diff --git a/bin/sozo/src/commands/execute.rs b/bin/sozo/src/commands/execute.rs index 663290ad85..b53ffa9691 100644 --- a/bin/sozo/src/commands/execute.rs +++ b/bin/sozo/src/commands/execute.rs @@ -54,7 +54,8 @@ impl ExecuteArgs { .unwrap(); let tx_config = self.transaction.into(); - execute::execute(self.contract, self.entrypoint, self.calldata, &world, tx_config).await + execute::execute(self.contract, self.entrypoint, self.calldata, &world, &tx_config) + .await }) } } diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index 3046c26b60..b392ef1014 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -2,7 +2,7 @@ use anyhow::{anyhow, Context, Result}; use clap::{Args, Subcommand}; use dojo_lang::compiler::MANIFESTS_DIR; use 
dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; use katana_rpc_api::starknet::RPC_SPEC_VERSION; use scarb::core::{Config, Workspace}; use sozo_ops::migration; @@ -107,13 +107,13 @@ impl MigrateArgs { &account, name, true, - None, + TxnConfig::default(), ) .await }) } MigrateCommand::Apply { mut name, world, starknet, account, transaction } => { - let txn_config: Option = Some(transaction.into()); + let txn_config: TxnConfig = transaction.into(); if name.is_none() { if let Some(root_package) = ws.root_package() { diff --git a/bin/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs index a54076cd44..9783378e04 100644 --- a/bin/sozo/src/commands/options/transaction.rs +++ b/bin/sozo/src/commands/options/transaction.rs @@ -1,5 +1,5 @@ use clap::Args; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; #[derive(Debug, Args)] #[command(next_help_heading = "Transaction options")] @@ -29,7 +29,7 @@ pub struct TransactionOptions { pub receipt: bool, } -impl From for TxConfig { +impl From for TxnConfig { fn from(value: TransactionOptions) -> Self { Self { fee_estimate_multiplier: value.fee_estimate_multiplier, diff --git a/bin/sozo/tests/register_test.rs b/bin/sozo/tests/register_test.rs index 1f09489db9..e44ac2e96b 100644 --- a/bin/sozo/tests/register_test.rs +++ b/bin/sozo/tests/register_test.rs @@ -5,6 +5,7 @@ use dojo_test_utils::migration::prepare_migration; use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, TestSequencer, }; +use dojo_world::migration::TxnConfig; use scarb::ops; use sozo_ops::migration::execute_strategy; use starknet::accounts::Account; @@ -27,7 +28,7 @@ async fn reregister_models() { let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); let world_address = &format!("0x{:x}", &migration.world_address().unwrap()); let account_address = &format!("0x{:x}", account.address()); let private_key = &format!("0x{:x}", sequencer.raw_account().private_key); diff --git a/crates/dojo-world/src/contracts/world_test.rs b/crates/dojo-world/src/contracts/world_test.rs index 0cd62f69f6..3f14e50e73 100644 --- a/crates/dojo-world/src/contracts/world_test.rs +++ b/crates/dojo-world/src/contracts/world_test.rs @@ -12,7 +12,7 @@ use super::{WorldContract, WorldContractReader}; use crate::manifest::BaseManifest; use crate::migration::strategy::prepare_for_migration; use crate::migration::world::WorldDiff; -use crate::migration::{Declarable, Deployable}; +use crate::migration::{Declarable, Deployable, TxnConfig}; #[tokio::test(flavor = "multi_thread")] async fn test_world_contract_reader() { @@ -54,7 +54,7 @@ pub async fn deploy_world( .unwrap(); let base_class_hash = - strategy.base.unwrap().declare(&account, Default::default()).await.unwrap().class_hash; + strategy.base.unwrap().declare(&account, &TxnConfig::default()).await.unwrap().class_hash; // wait for the tx to be mined tokio::time::sleep(Duration::from_millis(250)).await; @@ -66,7 +66,7 @@ pub async fn deploy_world( manifest.clone().world.inner.class_hash, vec![base_class_hash], &account, - Default::default(), + &TxnConfig::default(), ) .await .unwrap() @@ -74,7 +74,7 @@ pub async fn deploy_world( let mut declare_output = vec![]; for model in strategy.models 
{ - let res = model.declare(&account, Default::default()).await.unwrap(); + let res = model.declare(&account, &TxnConfig::default()).await.unwrap(); declare_output.push(res); } @@ -94,14 +94,14 @@ pub async fn deploy_world( tokio::time::sleep(Duration::from_millis(250)).await; for contract in strategy.contracts { - let declare_res = contract.declare(&account, Default::default()).await.unwrap(); + let declare_res = contract.declare(&account, &TxnConfig::default()).await.unwrap(); contract .deploy_dojo_contract( world_address, declare_res.class_hash, base_class_hash, &account, - Default::default(), + &TxnConfig::default(), ) .await .unwrap(); diff --git a/crates/dojo-world/src/migration/mod.rs b/crates/dojo-world/src/migration/mod.rs index 943c47725e..8d900c6878 100644 --- a/crates/dojo-world/src/migration/mod.rs +++ b/crates/dojo-world/src/migration/mod.rs @@ -18,7 +18,7 @@ use starknet::providers::{Provider, ProviderError}; use starknet::signers::Signer; use thiserror::Error; -use crate::utils::{TransactionWaiter, TransactionWaitingError}; +use crate::utils::{TransactionExt, TransactionWaiter, TransactionWaitingError}; pub mod class; pub mod contract; @@ -92,7 +92,7 @@ pub trait StateDiff { /// The transaction configuration to use when sending a transaction. #[derive(Debug, Copy, Clone, Default)] -pub struct TxConfig { +pub struct TxnConfig { /// The multiplier for how much the actual transaction max fee should be relative to the /// estimated fee. If `None` is provided, the multiplier is set to `1.1`. pub fee_estimate_multiplier: Option, @@ -106,7 +106,7 @@ pub trait Declarable { async fn declare( &self, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -125,14 +125,11 @@ pub trait Declarable { Err(e) => return Err(MigrationError::Provider(e)), } - let mut txn = account.declare(Arc::new(flattened_class), casm_class_hash); - - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. } = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - - let DeclareTransactionResult { transaction_hash, class_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + let DeclareTransactionResult { transaction_hash, class_hash } = account + .declare(Arc::new(flattened_class), casm_class_hash) + .send_with_cfg(&txn_config) + .await + .map_err(MigrationError::Migrator)?; TransactionWaiter::new(transaction_hash, account.provider()) .await @@ -153,7 +150,7 @@ pub trait Deployable: Declarable + Sync { class_hash: FieldElement, base_class_hash: FieldElement, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -198,14 +195,11 @@ pub trait Deployable: Declarable + Sync { Err(e) => return Err(MigrationError::Provider(e)), }; - let mut txn = account.execute(vec![call]); - - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. 
} = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - - let InvokeTransactionResult { transaction_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + let InvokeTransactionResult { transaction_hash } = account + .execute(vec![call]) + .send_with_cfg(&txn_config) + .await + .map_err(MigrationError::Migrator)?; let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; let block_number = get_block_number_from_receipt(receipt); @@ -226,7 +220,7 @@ pub trait Deployable: Declarable + Sync { class_hash: FieldElement, constructor_calldata: Vec, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -266,19 +260,15 @@ pub trait Deployable: Declarable + Sync { Err(e) => return Err(MigrationError::Provider(e)), } - let mut txn = account.execute(vec![Call { + let txn = account.execute(vec![Call { calldata, // devnet UDC address selector: selector!("deployContract"), to: felt!("0x41a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf"), }]); - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. } = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - let InvokeTransactionResult { transaction_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + txn.send_with_cfg(&txn_config).await.map_err(MigrationError::Migrator)?; let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; let block_number = get_block_number_from_receipt(receipt); @@ -306,7 +296,7 @@ pub trait Upgradable: Deployable + Declarable + Sync { original_class_hash: FieldElement, original_base_class_hash: FieldElement, account: &SingleOwnerAccount, - txn_config: TxConfig, + txn_config: &TxnConfig, ) -> Result as Account>::SignError>> where P: Provider + Sync + Send, @@ -336,18 +326,12 @@ pub trait Upgradable: Deployable + Declarable + Sync { } let calldata = vec![class_hash]; - let mut txn = account.execute(vec![Call { - calldata, - selector: selector!("upgrade"), - to: contract_address, - }]); - if let TxConfig { fee_estimate_multiplier: Some(multiplier), .. 
} = txn_config { - txn = txn.fee_estimate_multiplier(multiplier); - } - - let InvokeTransactionResult { transaction_hash } = - txn.send().await.map_err(MigrationError::Migrator)?; + let InvokeTransactionResult { transaction_hash } = account + .execute(vec![Call { calldata, selector: selector!("upgrade"), to: contract_address }]) + .send_with_cfg(&txn_config) + .await + .map_err(MigrationError::Migrator)?; let receipt = TransactionWaiter::new(transaction_hash, account.provider()).await?; let block_number = get_block_number_from_receipt(receipt); diff --git a/crates/dojo-world/src/utils.rs b/crates/dojo-world/src/utils.rs index 83d3101daf..5efdeaef3d 100644 --- a/crates/dojo-world/src/utils.rs +++ b/crates/dojo-world/src/utils.rs @@ -4,13 +4,17 @@ use std::task::{Context, Poll}; use std::time::Duration; use futures::FutureExt; +use starknet::accounts::{AccountError, ConnectedAccount, Declaration, Execution}; use starknet::core::types::{ - ExecutionResult, FieldElement, MaybePendingTransactionReceipt, PendingTransactionReceipt, - StarknetError, TransactionFinalityStatus, TransactionReceipt, TransactionStatus, + DeclareTransactionResult, ExecutionResult, FieldElement, InvokeTransactionResult, + MaybePendingTransactionReceipt, PendingTransactionReceipt, StarknetError, + TransactionFinalityStatus, TransactionReceipt, TransactionStatus, }; use starknet::providers::{Provider, ProviderError}; use tokio::time::{Instant, Interval}; +use crate::migration::TxnConfig; + type GetTxStatusResult = Result; type GetTxReceiptResult = Result; @@ -328,6 +332,59 @@ pub fn block_number_from_receipt(receipt: &TransactionReceipt) -> u64 { } } +/// Helper trait to abstract away setting `TxnConfig` configurations before sending a transaction +/// Implemented by types from `starknet-accounts` like `Execution`, `Declaration`, etc... +#[allow(async_fn_in_trait)] +pub trait TransactionExt +where + T: ConnectedAccount + Sync, +{ + type R; + + /// Sets `fee_estimate_multiplier` from `TxnConfig` if its present before calling `send` method + /// on the respective type. + async fn send_with_cfg( + self, + txn_config: &TxnConfig, + ) -> Result>; +} + +impl TransactionExt for Execution<'_, T> +where + T: ConnectedAccount + Sync, +{ + type R = InvokeTransactionResult; + + async fn send_with_cfg( + mut self, + txn_config: &TxnConfig, + ) -> Result> { + if let TxnConfig { fee_estimate_multiplier: Some(fee_est_mul), .. } = txn_config { + self = self.fee_estimate_multiplier(*fee_est_mul); + } + + self.send().await + } +} + +impl TransactionExt for Declaration<'_, T> +where + T: ConnectedAccount + Sync, +{ + type R = DeclareTransactionResult; + + async fn send_with_cfg( + mut self, + txn_config: &TxnConfig, + ) -> Result> { + if let TxnConfig { fee_estimate_multiplier: Some(fee_est_mul), .. 
} = txn_config { + self = self.fee_estimate_multiplier(*fee_est_mul); + } + + self.send().await + } +} + #[cfg(test)] mod tests { use assert_matches::assert_matches; diff --git a/crates/sozo/ops/src/auth.rs b/crates/sozo/ops/src/auth.rs index a852e84174..0b26cc2144 100644 --- a/crates/sozo/ops/src/auth.rs +++ b/crates/sozo/ops/src/auth.rs @@ -4,7 +4,8 @@ use anyhow::{Context, Result}; use dojo_world::contracts::model::ModelError; use dojo_world::contracts::world::WorldContract; use dojo_world::contracts::{cairo_utils, WorldContractReader}; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionExt; use starknet::accounts::ConnectedAccount; use starknet::core::types::{BlockId, BlockTag}; use starknet::core::utils::parse_cairo_short_string; @@ -89,7 +90,7 @@ impl FromStr for OwnerResource { pub async fn grant_writer( world: &WorldContract, models_contracts: Vec, - transaction: TxConfig, + txn_config: TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -121,15 +122,15 @@ where let res = world .account .execute(calls) - .send() + .send_with_cfg(&txn_config) .await .with_context(|| "Failed to send transaction")?; utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await?; } @@ -140,7 +141,7 @@ where pub async fn grant_owner( world: &WorldContract, owners_resources: Vec, - transaction: TxConfig, + txn_config: TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -158,14 +159,18 @@ where calls.push(world.grant_owner_getcall(&or.owner.into(), &resource)); } - let res = - world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await?; @@ -175,7 +180,7 @@ where pub async fn revoke_writer( world: &WorldContract, models_contracts: Vec, - transaction: TxConfig, + txn_config: TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -207,15 +212,15 @@ where let res = world .account .execute(calls) - .send() + .send_with_cfg(&txn_config) .await .with_context(|| "Failed to send transaction")?; utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await?; } @@ -226,7 +231,7 @@ where pub async fn revoke_owner( world: &WorldContract, owners_resources: Vec, - transaction: TxConfig, + txn_config: TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -244,14 +249,18 @@ where calls.push(world.revoke_owner_getcall(&or.owner.into(), &resource)); } - let res = - world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await?; diff --git a/crates/sozo/ops/src/execute.rs b/crates/sozo/ops/src/execute.rs index 8530dc50c9..8c7f403100 100644 --- a/crates/sozo/ops/src/execute.rs +++ b/crates/sozo/ops/src/execute.rs @@ -1,6 +1,7 @@ 
use anyhow::{Context, Result}; use dojo_world::contracts::world::WorldContract; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionExt; use starknet::accounts::{Call, ConnectedAccount}; use starknet::core::types::FieldElement; use starknet::core::utils::get_selector_from_name; @@ -12,7 +13,7 @@ pub async fn execute( entrypoint: String, calldata: Vec, world: &WorldContract, - transaction: TxConfig, + txn_config: &TxnConfig, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -25,15 +26,15 @@ where to: contract_address, selector: get_selector_from_name(&entrypoint)?, }]) - .send() + .send_with_cfg(txn_config) .await .with_context(|| "Failed to send transaction")?; utils::handle_transaction_result( &world.account.provider(), res, - transaction.wait, - transaction.receipt, + txn_config.wait, + txn_config.receipt, ) .await } diff --git a/crates/sozo/ops/src/migration/migration_test.rs b/crates/sozo/ops/src/migration/migration_test.rs index 8758de2b44..1ba389d6e1 100644 --- a/crates/sozo/ops/src/migration/migration_test.rs +++ b/crates/sozo/ops/src/migration/migration_test.rs @@ -8,7 +8,7 @@ use dojo_test_utils::sequencer::{ use dojo_world::manifest::{BaseManifest, DeploymentManifest}; use dojo_world::migration::strategy::prepare_for_migration; use dojo_world::migration::world::WorldDiff; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; use scarb::ops; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; use starknet::core::chain_id; @@ -90,16 +90,14 @@ async fn migrate_with_small_fee_multiplier_will_fail() { ExecutionEncoding::New, ); - assert!( - execute_strategy( - &ws, - &mut migration, - &account, - Some(TxConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }), - ) - .await - .is_err() - ); + assert!(execute_strategy( + &ws, + &mut migration, + &account, + Some(TxnConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }), + ) + .await + .is_err()); sequencer.stop().unwrap(); } diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index d12e2f71ed..0ef5acd714 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -16,10 +16,10 @@ use dojo_world::migration::contract::ContractMigration; use dojo_world::migration::strategy::{generate_salt, prepare_for_migration, MigrationStrategy}; use dojo_world::migration::world::WorldDiff; use dojo_world::migration::{ - Declarable, DeployOutput, Deployable, MigrationError, RegisterOutput, StateDiff, TxConfig, + Declarable, DeployOutput, Deployable, MigrationError, RegisterOutput, StateDiff, TxnConfig, Upgradable, UpgradeOutput, }; -use dojo_world::utils::TransactionWaiter; +use dojo_world::utils::{TransactionExt, TransactionWaiter}; use futures::future; use scarb::core::Workspace; use scarb_ui::Ui; @@ -69,7 +69,7 @@ pub async fn migrate( account: &SingleOwnerAccount, name: Option, dry_run: bool, - txn_config: Option, + txn_config: TxnConfig, ) -> Result<()> where P: Provider + Sync + Send + 'static, @@ -145,7 +145,7 @@ where .await?; if !ws.config().offline() { - upload_metadata(ws, account, migration_output).await?; + upload_metadata(ws, account, migration_output, txn_config).await?; } } Err(e) => { @@ -307,7 +307,7 @@ async fn update_manifest_abis( pub async fn apply_diff( ws: &Workspace<'_>, account: &SingleOwnerAccount, - txn_config: Option, + txn_config: TxnConfig, strategy: &mut MigrationStrategy, ) -> Result where 
@@ -444,7 +444,7 @@ pub async fn execute_strategy( ws: &Workspace<'_>, strategy: &mut MigrationStrategy, migrator: &SingleOwnerAccount, - txn_config: Option, + txn_config: TxnConfig, ) -> Result where P: Provider + Sync + Send + 'static, @@ -458,7 +458,7 @@ where Some(base) => { ui.print_header("# Base Contract"); - match base.declare(migrator, txn_config.unwrap_or_default()).await { + match base.declare(migrator, &txn_config).await { Ok(res) => { ui.print_sub(format!("Class Hash: {:#x}", res.class_hash)); } @@ -537,7 +537,7 @@ where // Once Torii supports indexing arrays, we should declare and register the // ResourceMetadata model. - match register_models(strategy, migrator, &ui, txn_config).await { + match register_models(strategy, migrator, &ui, &txn_config).await { Ok(output) => { migration_output.models = output.registered_model_names; } @@ -547,7 +547,7 @@ where } }; - match deploy_dojo_contracts(strategy, migrator, &ui, txn_config).await { + match deploy_dojo_contracts(strategy, migrator, &ui, &txn_config).await { Ok(output) => { migration_output.contracts = output; } @@ -577,19 +577,14 @@ async fn deploy_contract( constructor_calldata: Vec, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: &Option, + txn_config: &TxnConfig, ) -> Result where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, { match contract - .deploy( - contract.diff.local_class_hash, - constructor_calldata, - migrator, - txn_config.unwrap_or_default(), - ) + .deploy(contract.diff.local_class_hash, constructor_calldata, migrator, txn_config) .await { Ok(mut val) => { @@ -625,7 +620,7 @@ async fn upgrade_contract( original_base_class_hash: FieldElement, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: &Option, + txn_config: &TxnConfig, ) -> Result where P: Provider + Sync + Send + 'static, @@ -637,7 +632,7 @@ where original_class_hash, original_base_class_hash, migrator, - (*txn_config).unwrap_or_default(), + txn_config, ) .await { @@ -667,7 +662,7 @@ async fn register_models( strategy: &MigrationStrategy, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: Option, + txn_config: &TxnConfig, ) -> Result where P: Provider + Sync + Send + 'static, @@ -691,7 +686,7 @@ where for c in models.iter() { ui.print(italic_message(&c.diff.name).to_string()); - let res = c.declare(migrator, txn_config.unwrap_or_default()).await; + let res = c.declare(migrator, txn_config).await; match res { Ok(output) => { ui.print_hidden_sub(format!("Declare transaction: {:#x}", output.transaction_hash)); @@ -728,7 +723,7 @@ where .collect::>(); let InvokeTransactionResult { transaction_hash } = - migrator.execute(calls).send().await.map_err(|e| { + world.account.execute(calls).send_with_cfg(&txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); anyhow!("Failed to register models to World: {e}") })?; @@ -744,7 +739,7 @@ async fn deploy_dojo_contracts( strategy: &mut MigrationStrategy, migrator: &SingleOwnerAccount, ui: &Ui, - txn_config: Option, + txn_config: &TxnConfig, ) -> Result>> where P: Provider + Sync + Send + 'static, @@ -772,7 +767,7 @@ where contract.diff.local_class_hash, contract.diff.base_class_hash, migrator, - txn_config.unwrap_or_default(), + txn_config, ) .await { @@ -969,6 +964,7 @@ pub async fn upload_metadata( ws: &Workspace<'_>, migrator: &SingleOwnerAccount, migration_output: MigrationOutput, + txn_config: TxnConfig, ) -> Result<()> where P: Provider + Sync + Send + 'static, @@ -1043,7 +1039,7 @@ where let calls = resources.iter().map(|r| 
world.set_metadata_getcall(r)).collect::>(); let InvokeTransactionResult { transaction_hash } = - migrator.execute(calls).send().await.map_err(|e| { + migrator.execute(calls).send_with_cfg(&txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); anyhow!("Failed to register metadata into the resource registry: {e}") })?; diff --git a/crates/sozo/ops/src/register.rs b/crates/sozo/ops/src/register.rs index 882ca55c8a..df86b27b0e 100644 --- a/crates/sozo/ops/src/register.rs +++ b/crates/sozo/ops/src/register.rs @@ -4,7 +4,8 @@ use anyhow::{Context, Result}; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::{WorldContract, WorldContractReader}; use dojo_world::manifest::DeploymentManifest; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; +use dojo_world::utils::TransactionExt; use scarb::core::Config; use starknet::accounts::ConnectedAccount; use starknet::providers::Provider; @@ -15,7 +16,7 @@ use crate::utils::handle_transaction_result; pub async fn model_register( models: Vec, world: &WorldContract, - transaction: TxConfig, + txn_config: TxnConfig, world_reader: WorldContractReader
<P>
, world_address: FieldElement, config: &Config, @@ -63,16 +64,15 @@ where .map(|c| world.register_model_getcall(&(*c).into())) .collect::>(); - let res = - world.account.execute(calls).send().await.with_context(|| "Failed to send transaction")?; + let res = world + .account + .execute(calls) + .send_with_cfg(&txn_config) + .await + .with_context(|| "Failed to send transaction")?; - handle_transaction_result( - &world.account.provider(), - res, - transaction.wait, - transaction.receipt, - ) - .await?; + handle_transaction_result(&world.account.provider(), res, txn_config.wait, txn_config.receipt) + .await?; Ok(()) } diff --git a/crates/sozo/ops/src/tests/auth.rs b/crates/sozo/ops/src/tests/auth.rs index ae5a97355a..0535b56384 100644 --- a/crates/sozo/ops/src/tests/auth.rs +++ b/crates/sozo/ops/src/tests/auth.rs @@ -2,7 +2,7 @@ use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, TestSequencer, }; use dojo_world::contracts::world::WorldContract; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; use starknet::accounts::{Account, ConnectedAccount}; use starknet::core::utils::cairo_short_string_to_felt; @@ -43,7 +43,7 @@ async fn auth_grant_writer_ok() { auth::grant_writer( &world, vec![moves_mc, position_mc], - TxConfig { wait: true, ..Default::default() }, + TxnConfig { wait: true, ..Default::default() }, ) .await .unwrap(); @@ -83,7 +83,7 @@ async fn auth_revoke_writer_ok() { auth::grant_writer( &world, vec![moves_mc.clone(), position_mc.clone()], - TxConfig { wait: true, ..Default::default() }, + TxnConfig { wait: true, ..Default::default() }, ) .await .unwrap(); @@ -95,7 +95,7 @@ async fn auth_revoke_writer_ok() { auth::revoke_writer( &world, vec![moves_mc, position_mc], - TxConfig { wait: true, ..Default::default() }, + TxnConfig { wait: true, ..Default::default() }, ) .await .unwrap(); @@ -132,9 +132,13 @@ async fn auth_grant_owner_ok() { owner: account_2_addr, }; - auth::grant_owner(&world, vec![moves, position], TxConfig { wait: true, ..Default::default() }) - .await - .unwrap(); + auth::grant_owner( + &world, + vec![moves, position], + TxnConfig { wait: true, ..Default::default() }, + ) + .await + .unwrap(); assert!(execute_spawn(&world_2).await); } @@ -170,7 +174,7 @@ async fn auth_revoke_owner_ok() { auth::grant_owner( &world, vec![moves.clone(), position.clone()], - TxConfig { wait: true, ..Default::default() }, + TxnConfig { wait: true, ..Default::default() }, ) .await .unwrap(); @@ -179,7 +183,7 @@ async fn auth_revoke_owner_ok() { auth::revoke_owner( &world, vec![moves, position], - TxConfig { wait: true, ..Default::default() }, + TxnConfig { wait: true, ..Default::default() }, ) .await .unwrap(); @@ -202,7 +206,7 @@ async fn execute_spawn( system_spawn, vec![], world, - TxConfig { wait: true, ..Default::default() }, + &TxnConfig { wait: true, ..Default::default() }, ) .await .is_ok() diff --git a/crates/sozo/ops/src/tests/migration.rs b/crates/sozo/ops/src/tests/migration.rs index d499b8cb5d..53b418d10b 100644 --- a/crates/sozo/ops/src/tests/migration.rs +++ b/crates/sozo/ops/src/tests/migration.rs @@ -14,7 +14,7 @@ use dojo_world::metadata::{ }; use dojo_world::migration::strategy::prepare_for_migration; use dojo_world::migration::world::WorldDiff; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; use futures::TryStreamExt; use ipfs_api_backend_hyper::{HyperBackend, IpfsApi, IpfsClient, TryFromUri}; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; @@ -44,7 +44,7 @@ async fn 
migrate_with_auto_mine() { let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); sequencer.stop().unwrap(); } @@ -65,7 +65,7 @@ async fn migrate_with_block_time() { let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); sequencer.stop().unwrap(); } @@ -97,7 +97,7 @@ async fn migrate_with_small_fee_multiplier_will_fail() { &ws, &mut migration, &account, - Some(TxConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }), + TxnConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, receipt: false }, ) .await .is_err() @@ -157,7 +157,7 @@ async fn migration_from_remote() { ) .unwrap(); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); let local_manifest = BaseManifest::load_from_path( &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), @@ -191,9 +191,10 @@ async fn migrate_with_metadata() { let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); - let output = execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + let output = + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); - let res = upload_metadata(&ws, &account, output.clone()).await; + let res = upload_metadata(&ws, &account, output.clone(), TxnConfig::default()).await; assert!(res.is_ok()); let provider = sequencer.provider(); @@ -465,7 +466,7 @@ fn get_and_check_metadata_uri(element_name: &String, uri: &Vec) -> } /// Check an artifact metadata read from the resource registry against its value -/// in the local Dojo metadata. +/// in the local Dojo metadata. 
/// /// # Arguments /// diff --git a/crates/sozo/ops/src/tests/setup.rs b/crates/sozo/ops/src/tests/setup.rs index c55be7c1f4..14bc1624fa 100644 --- a/crates/sozo/ops/src/tests/setup.rs +++ b/crates/sozo/ops/src/tests/setup.rs @@ -4,7 +4,7 @@ use dojo_test_utils::migration::prepare_migration; use dojo_test_utils::sequencer::TestSequencer; use dojo_world::contracts::world::WorldContract; use dojo_world::migration::strategy::MigrationStrategy; -use dojo_world::migration::TxConfig; +use dojo_world::migration::TxnConfig; use scarb::core::{Config, Workspace}; use scarb::ops; use starknet::accounts::SingleOwnerAccount; @@ -75,7 +75,7 @@ pub async fn setup( &ws, &mut migration, &account, - Some(TxConfig { wait: true, ..Default::default() }), + TxnConfig { wait: true, ..Default::default() }, ) .await?; let world = WorldContract::new(output.world_address, account); diff --git a/crates/torii/core/src/sql_test.rs b/crates/torii/core/src/sql_test.rs index e926b021e9..8dafa934fd 100644 --- a/crates/torii/core/src/sql_test.rs +++ b/crates/torii/core/src/sql_test.rs @@ -6,6 +6,7 @@ use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, TestSequencer, }; use dojo_world::contracts::world::WorldContractReader; +use dojo_world::migration::TxnConfig; use dojo_world::utils::TransactionWaiter; use scarb::ops; use sozo_ops::migration::execute_strategy; @@ -70,7 +71,7 @@ async fn test_load_from_remote() { let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); // spawn let tx = account diff --git a/crates/torii/graphql/src/tests/mod.rs b/crates/torii/graphql/src/tests/mod.rs index 48eafec3cd..15b6a89d8d 100644 --- a/crates/torii/graphql/src/tests/mod.rs +++ b/crates/torii/graphql/src/tests/mod.rs @@ -11,6 +11,7 @@ use dojo_types::primitive::Primitive; use dojo_types::schema::{Enum, EnumOption, Member, Struct, Ty}; use dojo_world::contracts::WorldContractReader; use dojo_world::manifest::DeploymentManifest; +use dojo_world::migration::TxnConfig; use dojo_world::utils::TransactionWaiter; use scarb::ops; use serde::Deserialize; @@ -292,7 +293,7 @@ pub async fn spinup_types_test() -> Result { let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); let manifest = DeploymentManifest::load_from_remote(&provider, migration.world_address().unwrap()) diff --git a/crates/torii/grpc/src/server/tests/entities_test.rs b/crates/torii/grpc/src/server/tests/entities_test.rs index 5f16412b4d..337a048808 100644 --- a/crates/torii/grpc/src/server/tests/entities_test.rs +++ b/crates/torii/grpc/src/server/tests/entities_test.rs @@ -7,6 +7,7 @@ use dojo_test_utils::sequencer::{ get_default_test_starknet_config, SequencerConfig, TestSequencer, }; use dojo_world::contracts::WorldContractReader; +use dojo_world::migration::TxnConfig; use dojo_world::utils::TransactionWaiter; use scarb::ops; use sozo_ops::migration::execute_strategy; @@ -47,7 +48,7 @@ async fn test_entities_queries() { let config = 
build_test_config("../../../examples/spawn-and-move/Scarb.toml").unwrap(); let ws = ops::read_workspace(config.manifest_path(), &config) .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + execute_strategy(&ws, &mut migration, &account, TxnConfig::default()).await.unwrap(); // spawn let tx = account From 89a5b9eb31a32e58a0429b7d17151361cb09f746 Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Mon, 15 Apr 2024 22:59:32 +0530 Subject: [PATCH 23/23] fix ci (#1836) --- bin/sozo/src/commands/clean.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/sozo/src/commands/clean.rs b/bin/sozo/src/commands/clean.rs index e8f4993d97..f1ff03f95a 100644 --- a/bin/sozo/src/commands/clean.rs +++ b/bin/sozo/src/commands/clean.rs @@ -59,6 +59,7 @@ impl CleanArgs { #[cfg(test)] mod tests { use dojo_test_utils::compiler; + use dojo_world::migration::TxnConfig; use katana_runner::KatanaRunner; use sozo_ops::migration; @@ -85,7 +86,7 @@ mod tests { &runner.account(0), Some("dojo_examples".to_string()), true, - None, + TxnConfig::default(), ) .await .unwrap()
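
For reference, here is a minimal sketch of how the pieces introduced in these patches fit together. It assumes the `TxnConfig` struct and the `TransactionExt::send_with_cfg` helper shown in the diffs above; the `send_calls_example` function and its `calls` argument are hypothetical, while the call chain mirrors the one used in `auth.rs` and `execute.rs`.

use anyhow::{Context, Result};
use dojo_world::contracts::world::WorldContract;
use dojo_world::migration::TxnConfig;
use dojo_world::utils::TransactionExt;
use starknet::accounts::{Call, ConnectedAccount};

// Hypothetical helper: batch some calls through the world account, bumping the
// estimated fee by 50%. `wait` and `receipt` are not consumed here; in the real
// flow they are read later by the caller's receipt handling.
async fn send_calls_example<A>(world: &WorldContract<A>, calls: Vec<Call>) -> Result<()>
where
    A: ConnectedAccount + Sync + Send + 'static,
{
    let txn_config =
        TxnConfig { fee_estimate_multiplier: Some(1.5), wait: true, receipt: false };

    // `send_with_cfg` applies `fee_estimate_multiplier` (when set) before calling `send`.
    world
        .account
        .execute(calls)
        .send_with_cfg(&txn_config)
        .await
        .with_context(|| "Failed to send transaction")?;

    Ok(())
}

In `sozo migrate apply`, the same `TxnConfig` is produced from the CLI `TransactionOptions` via `transaction.into()` and threaded through `execute_strategy`, so the multiplier is honored for declares, deploys and upgrades alike.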