From 4ec33212177648aa2ccafccfe38bfd9331db393d Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Fri, 12 Apr 2024 17:04:47 +0800 Subject: [PATCH 1/9] feat(katana): add executor metrics (#1791) ref #1369 Add metrics on katana executor; tracking the total L1 **gas** and Cairo **steps** used. There were two approaches that I thought of: 1. record the metrics on every tx execution, or 2. on every block ~Decided to go with (1) as it would allow measuring it in realtime (as the tx is being executed), instead of having to wait until the block is finished being processed.~ Though I'm not exactly sure which one is the ideal one. Doing (1) might be less performant because we have to acquire the lock to the metrics recorder more frequently (i.e. every tx), as opposed to only updating the metrics once every block. Another thing to note: currently doing (1) would require all executor implementations to define the metrics in their own implementations, meaning we would have to duplicate code. If we do (2), we can just define it under the `block_producer` scope and be executor agnostic. EDIT: doing (2). 
metrics are collected upon completion of block production --- some changes are made to gather the value after block production: - simplify params on `backend::do_mine_block`, now only accept two args; `BlockEnv` and `ExecutionOutput` - add a new type `ExecutionStats` under `katana-executor`, this is where executor would store the gas and steps value --- Cargo.lock | 2 ++ crates/katana/core/Cargo.toml | 2 ++ crates/katana/core/src/backend/mod.rs | 36 ++++++++++--------- crates/katana/core/src/sequencer.rs | 8 ++--- .../katana/core/src/service/block_producer.rs | 27 ++++++-------- crates/katana/core/src/service/metrics.rs | 15 ++++++++ crates/katana/core/src/service/mod.rs | 30 ++++++++++++++++ crates/katana/executor/src/abstraction/mod.rs | 11 ++++++ .../src/implementation/blockifier/mod.rs | 14 +++++--- .../executor/src/implementation/sir/mod.rs | 11 ++++-- crates/katana/executor/tests/executor.rs | 23 +++++++++--- crates/katana/executor/tests/simulate.rs | 4 ++- crates/katana/primitives/src/receipt.rs | 12 +++++++ 13 files changed, 145 insertions(+), 50 deletions(-) create mode 100644 crates/katana/core/src/service/metrics.rs diff --git a/Cargo.lock b/Cargo.lock index 86b07d5fe3..2b16039879 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6682,6 +6682,7 @@ dependencies = [ "cairo-vm 0.9.2", "convert_case 0.6.0", "derive_more", + "dojo-metrics", "flate2", "futures", "hex", @@ -6692,6 +6693,7 @@ dependencies = [ "katana-provider", "katana-tasks", "lazy_static", + "metrics", "parking_lot 0.12.1", "rand", "reqwest", diff --git a/crates/katana/core/Cargo.toml b/crates/katana/core/Cargo.toml index aeb7c1af4e..68c395f28b 100644 --- a/crates/katana/core/Cargo.toml +++ b/crates/katana/core/Cargo.toml @@ -15,6 +15,8 @@ katana-tasks.workspace = true anyhow.workspace = true async-trait.workspace = true +dojo-metrics.workspace = true +metrics.workspace = true cairo-lang-casm = "2.3.1" cairo-lang-starknet = "2.3.1" cairo-vm.workspace = true diff --git 
a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index badd55ab36..209b58a2dc 100644 --- a/crates/katana/core/src/backend/mod.rs +++ b/crates/katana/core/src/backend/mod.rs @@ -1,12 +1,11 @@ use std::sync::Arc; -use katana_executor::ExecutorFactory; +use katana_executor::{ExecutionOutput, ExecutionResult, ExecutorFactory}; use katana_primitives::block::{ Block, FinalityStatus, GasPrices, Header, PartialHeader, SealedBlockWithStatus, }; use katana_primitives::chain::ChainId; use katana_primitives::env::BlockEnv; -use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::version::CURRENT_STARKNET_VERSION; use katana_primitives::FieldElement; use katana_provider::providers::fork::ForkedProvider; @@ -26,7 +25,7 @@ pub mod storage; use self::config::StarknetConfig; use self::storage::Blockchain; use crate::env::BlockContextGenerator; -use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome, TxWithOutcome}; +use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome}; use crate::utils::get_current_timestamp; pub(crate) const LOG_TARGET: &str = "katana::core::backend"; @@ -120,17 +119,20 @@ impl Backend { pub fn do_mine_block( &self, block_env: &BlockEnv, - txs_outcomes: Vec, - state_updates: StateUpdatesWithDeclaredClasses, + execution_output: ExecutionOutput, ) -> Result { - let mut txs = vec![]; - let mut receipts = vec![]; - let mut execs = vec![]; - - for t in txs_outcomes { - txs.push(t.tx); - receipts.push(t.receipt); - execs.push(t.exec_info); + // we optimistically allocate the maximum amount possible + let mut txs = Vec::with_capacity(execution_output.transactions.len()); + let mut traces = Vec::with_capacity(execution_output.transactions.len()); + let mut receipts = Vec::with_capacity(execution_output.transactions.len()); + + // only include successful transactions in the block + for (tx, res) in execution_output.transactions { + if let ExecutionResult::Success { 
receipt, trace, .. } = res { + txs.push(tx); + traces.push(trace); + receipts.push(receipt); + } } let prev_hash = BlockHashProvider::latest_hash(self.blockchain.provider())?; @@ -156,9 +158,9 @@ impl Backend { BlockWriter::insert_block_with_states_and_receipts( self.blockchain.provider(), block, - state_updates, + execution_output.states, receipts, - execs, + traces, )?; info!( @@ -168,7 +170,7 @@ impl Backend { "Block mined.", ); - Ok(MinedBlockOutcome { block_number }) + Ok(MinedBlockOutcome { block_number, stats: execution_output.stats }) } pub fn update_block_env(&self, block_env: &mut BlockEnv) { @@ -192,7 +194,7 @@ impl Backend { &self, block_env: &BlockEnv, ) -> Result { - self.do_mine_block(block_env, Default::default(), Default::default()) + self.do_mine_block(block_env, Default::default()) } } diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 790be10c69..0a0db8678d 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -85,13 +85,13 @@ impl KatanaSequencer { let block_producer = Arc::new(block_producer); - tokio::spawn(NodeService { + tokio::spawn(NodeService::new( + Arc::clone(&pool), miner, - pool: Arc::clone(&pool), - block_producer: block_producer.clone(), + block_producer.clone(), #[cfg(feature = "messaging")] messaging, - }); + )); Ok(Self { pool, config, backend, block_producer }) } diff --git a/crates/katana/core/src/service/block_producer.rs b/crates/katana/core/src/service/block_producer.rs index 09e5be0a1b..4205290389 100644 --- a/crates/katana/core/src/service/block_producer.rs +++ b/crates/katana/core/src/service/block_producer.rs @@ -8,7 +8,7 @@ use std::time::Duration; use futures::channel::mpsc::{channel, Receiver, Sender}; use futures::stream::{Stream, StreamExt}; use futures::FutureExt; -use katana_executor::{BlockExecutor, ExecutionOutput, ExecutionResult, ExecutorFactory}; +use katana_executor::{BlockExecutor, ExecutionResult, ExecutionStats, 
ExecutorFactory}; use katana_primitives::block::{BlockHashOrNumber, ExecutableBlock, PartialHeader}; use katana_primitives::receipt::Receipt; use katana_primitives::trace::TxExecInfo; @@ -42,8 +42,10 @@ pub enum BlockProductionError { TransactionExecutionError(#[from] katana_executor::ExecutorError), } +#[derive(Debug, Clone)] pub struct MinedBlockOutcome { pub block_number: u64, + pub stats: ExecutionStats, } #[derive(Debug, Clone)] @@ -264,19 +266,8 @@ impl IntervalBlockProducer { trace!(target: LOG_TARGET, "Creating new block."); let block_env = executor.block_env(); - let ExecutionOutput { states, transactions } = executor.take_execution_output()?; - - let transactions = transactions - .into_iter() - .filter_map(|(tx, res)| match res { - ExecutionResult::Failed { .. } => None, - ExecutionResult::Success { receipt, trace, .. } => { - Some(TxWithOutcome { tx, receipt, exec_info: trace }) - } - }) - .collect::>(); - - let outcome = backend.do_mine_block(&block_env, transactions, states)?; + let execution_output = executor.take_execution_output()?; + let outcome = backend.do_mine_block(&block_env, execution_output)?; trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); @@ -515,8 +506,10 @@ impl InstantBlockProducer { executor.execute_block(block)?; - let ExecutionOutput { states, transactions } = executor.take_execution_output()?; - let txs_outcomes = transactions + let execution_output = executor.take_execution_output()?; + let txs_outcomes = execution_output + .transactions + .clone() .into_iter() .filter_map(|(tx, res)| match res { ExecutionResult::Success { receipt, trace, .. 
} => { @@ -526,7 +519,7 @@ impl InstantBlockProducer { }) .collect::>(); - let outcome = backend.do_mine_block(&block_env, txs_outcomes.clone(), states)?; + let outcome = backend.do_mine_block(&block_env, execution_output)?; trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); diff --git a/crates/katana/core/src/service/metrics.rs b/crates/katana/core/src/service/metrics.rs new file mode 100644 index 0000000000..3749ab8c03 --- /dev/null +++ b/crates/katana/core/src/service/metrics.rs @@ -0,0 +1,15 @@ +use dojo_metrics::Metrics; +use metrics::Counter; + +pub(crate) struct ServiceMetrics { + pub(crate) block_producer: BlockProducerMetrics, +} + +#[derive(Metrics)] +#[metrics(scope = "block_producer")] +pub(crate) struct BlockProducerMetrics { + /// The amount of L1 gas processed in a block. + pub(crate) l1_gas_processed_total: Counter, + /// The amount of Cairo steps processed in a block. + pub(crate) cairo_steps_processed_total: Counter, +} diff --git a/crates/katana/core/src/service/mod.rs b/crates/katana/core/src/service/mod.rs index a636764ec1..74b9fb2a18 100644 --- a/crates/katana/core/src/service/mod.rs +++ b/crates/katana/core/src/service/mod.rs @@ -13,11 +13,13 @@ use starknet::core::types::FieldElement; use tracing::{error, info}; use self::block_producer::BlockProducer; +use self::metrics::{BlockProducerMetrics, ServiceMetrics}; use crate::pool::TransactionPool; pub mod block_producer; #[cfg(feature = "messaging")] pub mod messaging; +mod metrics; #[cfg(feature = "messaging")] use self::messaging::{MessagingOutcome, MessagingService}; @@ -39,6 +41,28 @@ pub struct NodeService { /// The messaging service #[cfg(feature = "messaging")] pub(crate) messaging: Option>, + /// Metrics for recording the service operations + metrics: ServiceMetrics, +} + +impl NodeService { + pub fn new( + pool: Arc, + miner: TransactionMiner, + block_producer: Arc>, + #[cfg(feature = "messaging")] messaging: Option>, + ) -> Self { + let metrics = 
ServiceMetrics { block_producer: BlockProducerMetrics::default() }; + + Self { + pool, + miner, + block_producer, + metrics, + #[cfg(feature = "messaging")] + messaging, + } + } } impl Future for NodeService { @@ -68,6 +92,12 @@ impl Future for NodeService { match res { Ok(outcome) => { info!(target: LOG_TARGET, block_number = %outcome.block_number, "Mined block."); + + let metrics = &pin.metrics.block_producer; + let gas_used = outcome.stats.l1_gas_used; + let steps_used = outcome.stats.cairo_steps_used; + metrics.l1_gas_processed_total.increment(gas_used as u64); + metrics.cairo_steps_processed_total.increment(steps_used as u64); } Err(err) => { diff --git a/crates/katana/executor/src/abstraction/mod.rs b/crates/katana/executor/src/abstraction/mod.rs index 313ce8e045..5e231cc823 100644 --- a/crates/katana/executor/src/abstraction/mod.rs +++ b/crates/katana/executor/src/abstraction/mod.rs @@ -72,9 +72,20 @@ impl SimulationFlag { } } +/// Stats about the transactions execution. +#[derive(Debug, Clone, Default)] +pub struct ExecutionStats { + /// The total gas used. + pub l1_gas_used: u128, + /// The total cairo steps used. + pub cairo_steps_used: u128, +} + /// The output of a executor after a series of executions. #[derive(Debug, Default)] pub struct ExecutionOutput { + /// Statistics throughout the executions process. + pub stats: ExecutionStats, /// The state updates produced by the executions. pub states: StateUpdatesWithDeclaredClasses, /// The transactions that have been executed. 
diff --git a/crates/katana/executor/src/implementation/blockifier/mod.rs b/crates/katana/executor/src/implementation/blockifier/mod.rs index a18c4cb0e6..1ad7bd2094 100644 --- a/crates/katana/executor/src/implementation/blockifier/mod.rs +++ b/crates/katana/executor/src/implementation/blockifier/mod.rs @@ -19,8 +19,9 @@ use tracing::info; use self::output::receipt_from_exec_info; use self::state::CachedState; use crate::{ - BlockExecutor, EntryPointCall, ExecutionError, ExecutionOutput, ExecutionResult, ExecutorExt, - ExecutorFactory, ExecutorResult, ResultAndStates, SimulationFlag, StateProviderDb, + BlockExecutor, EntryPointCall, ExecutionError, ExecutionOutput, ExecutionResult, + ExecutionStats, ExecutorExt, ExecutorFactory, ExecutorResult, ResultAndStates, SimulationFlag, + StateProviderDb, }; pub(crate) const LOG_TARGET: &str = "katana::executor::blockifier"; @@ -69,6 +70,7 @@ pub struct StarknetVMProcessor<'a> { state: CachedState>, transactions: Vec<(TxWithHash, ExecutionResult)>, simulation_flags: SimulationFlag, + stats: ExecutionStats, } impl<'a> StarknetVMProcessor<'a> { @@ -81,7 +83,7 @@ impl<'a> StarknetVMProcessor<'a> { let transactions = Vec::new(); let block_context = utils::block_context_from_envs(&block_env, &cfg_env); let state = state::CachedState::new(StateProviderDb(state)); - Self { block_context, state, transactions, simulation_flags } + Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } fn fill_block_env_from_header(&mut self, header: &PartialHeader) { @@ -159,6 +161,9 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { crate::utils::log_resources(&trace.actual_resources); crate::utils::log_events(receipt.events()); + self.stats.l1_gas_used += fee.gas_consumed; + self.stats.cairo_steps_used += receipt.resources_used().steps as u128; + if let Some(reason) = receipt.revert_reason() { info!(target: LOG_TARGET, reason = %reason, "Transaction reverted."); } @@ -187,7 +192,8 @@ impl<'a> 
BlockExecutor<'a> for StarknetVMProcessor<'a> { fn take_execution_output(&mut self) -> ExecutorResult { let states = utils::state_update_from_cached_state(&self.state); let transactions = std::mem::take(&mut self.transactions); - Ok(ExecutionOutput { states, transactions }) + let stats = std::mem::take(&mut self.stats); + Ok(ExecutionOutput { stats, states, transactions }) } fn state(&self) -> Box { diff --git a/crates/katana/executor/src/implementation/sir/mod.rs b/crates/katana/executor/src/implementation/sir/mod.rs index f73930b73d..bb6c44d704 100644 --- a/crates/katana/executor/src/implementation/sir/mod.rs +++ b/crates/katana/executor/src/implementation/sir/mod.rs @@ -23,7 +23,7 @@ use crate::abstraction::{ BlockExecutor, ExecutionOutput, ExecutorExt, ExecutorFactory, ExecutorResult, SimulationFlag, StateProviderDb, }; -use crate::{EntryPointCall, ExecutionError, ExecutionResult, ResultAndStates}; +use crate::{EntryPointCall, ExecutionError, ExecutionResult, ExecutionStats, ResultAndStates}; pub(crate) const LOG_TARGET: &str = "katana::executor::sir"; @@ -72,6 +72,7 @@ pub struct StarknetVMProcessor<'a> { state: CachedState, PermanentContractClassCache>, transactions: Vec<(TxWithHash, ExecutionResult)>, simulation_flags: SimulationFlag, + stats: ExecutionStats, } impl<'a> StarknetVMProcessor<'a> { @@ -85,7 +86,7 @@ impl<'a> StarknetVMProcessor<'a> { let block_context = utils::block_context_from_envs(&block_env, &cfg_env); let state = CachedState::new(StateProviderDb(state), PermanentContractClassCache::default()); - Self { block_context, state, transactions, simulation_flags } + Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } fn fill_block_env_from_header(&mut self, header: &PartialHeader) { @@ -160,6 +161,9 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { crate::utils::log_resources(&trace.actual_resources); crate::utils::log_events(receipt.events()); + self.stats.l1_gas_used += fee.gas_consumed; + 
self.stats.cairo_steps_used += receipt.resources_used().steps as u128; + if let Some(reason) = receipt.revert_reason() { info!(target: LOG_TARGET, reason = %reason, "Transaction reverted."); } @@ -194,7 +198,8 @@ impl<'a> BlockExecutor<'a> for StarknetVMProcessor<'a> { fn take_execution_output(&mut self) -> ExecutorResult { let states = utils::state_update_from_cached_state(&self.state); let transactions = std::mem::take(&mut self.transactions); - Ok(ExecutionOutput { states, transactions }) + let stats = std::mem::take(&mut self.stats); + Ok(ExecutionOutput { stats, states, transactions }) } fn state(&self) -> Box { diff --git a/crates/katana/executor/tests/executor.rs b/crates/katana/executor/tests/executor.rs index 70a85ee6fb..3d64cf2393 100644 --- a/crates/katana/executor/tests/executor.rs +++ b/crates/katana/executor/tests/executor.rs @@ -249,13 +249,28 @@ fn test_executor_with_valid_blocks_impl( ); // assert the state updates after all the blocks are executed - // + let mut actual_total_gas: u128 = 0; + let mut actual_total_steps: u128 = 0; // assert the state updates - let ExecutionOutput { states, transactions } = executor.take_execution_output().unwrap(); - // asserts that the executed transactions are stored - let actual_txs: Vec = transactions.iter().map(|(tx, _)| tx.clone()).collect(); + let ExecutionOutput { states, transactions, stats } = executor.take_execution_output().unwrap(); + // asserts that the executed transactions are stored + let actual_txs: Vec = transactions + .iter() + .map(|(tx, res)| { + if let Some(fee) = res.fee() { + actual_total_gas += fee.gas_consumed; + } + if let Some(rec) = res.receipt() { + actual_total_steps += rec.resources_used().steps as u128; + } + tx.clone() + }) + .collect(); + + assert_eq!(actual_total_gas, stats.l1_gas_used); + assert_eq!(actual_total_steps, stats.cairo_steps_used); assert_eq!(actual_txs, expected_txs); let actual_nonce_updates = states.state_updates.nonce_updates; diff --git 
a/crates/katana/executor/tests/simulate.rs b/crates/katana/executor/tests/simulate.rs index 7a8905fa43..076f2ba013 100644 --- a/crates/katana/executor/tests/simulate.rs +++ b/crates/katana/executor/tests/simulate.rs @@ -62,9 +62,11 @@ fn test_simulate_tx_impl( }),); // check that the underlying state is not modified - let ExecutionOutput { states, transactions } = + let ExecutionOutput { states, transactions, stats } = executor.take_execution_output().expect("must take output"); + assert_eq!(stats.l1_gas_used, 0, "no gas usage should be recorded"); + assert_eq!(stats.cairo_steps_used, 0, "no steps usage should be recorded"); assert!(transactions.is_empty(), "simulated tx should not be stored"); assert!(states.state_updates.nonce_updates.is_empty(), "no state updates"); diff --git a/crates/katana/primitives/src/receipt.rs b/crates/katana/primitives/src/receipt.rs index b9f5bb5e9f..f17e6d9cc9 100644 --- a/crates/katana/primitives/src/receipt.rs +++ b/crates/katana/primitives/src/receipt.rs @@ -122,6 +122,7 @@ impl Receipt { } } + /// Returns the L1 messages sent. pub fn messages_sent(&self) -> &[MessageToL1] { match self { Receipt::Invoke(rct) => &rct.messages_sent, @@ -131,6 +132,7 @@ impl Receipt { } } + /// Returns the events emitted. pub fn events(&self) -> &[Event] { match self { Receipt::Invoke(rct) => &rct.events, @@ -139,6 +141,16 @@ impl Receipt { Receipt::DeployAccount(rct) => &rct.events, } } + + /// Returns the execution resources used. + pub fn resources_used(&self) -> &TxExecutionResources { + match self { + Receipt::Invoke(rct) => &rct.execution_resources, + Receipt::Declare(rct) => &rct.execution_resources, + Receipt::L1Handler(rct) => &rct.execution_resources, + Receipt::DeployAccount(rct) => &rct.execution_resources, + } + } } /// Transaction execution resources. 
From 99bd4cba7536a38a3d821f0e057845b70c542608 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Fri, 12 Apr 2024 17:07:49 +0800 Subject: [PATCH 2/9] katana: update grafana dashboard with execution metrics (#1818) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ref #1791 #1369 Screenshot 2024-04-12 at 2 36 56 AM showing total gas and steps using a simple line charts, tracking its growth over time --- monitoring/grafana/katana.json | 233 +++++++++++++++++++++++++++++++-- 1 file changed, 223 insertions(+), 10 deletions(-) diff --git a/monitoring/grafana/katana.json b/monitoring/grafana/katana.json index 9d0626ffa0..1e85a36de3 100644 --- a/monitoring/grafana/katana.json +++ b/monitoring/grafana/katana.json @@ -36,6 +36,219 @@ "x": 0, "y": 0 }, + "id": 122, + "panels": [], + "title": "Execution", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "The total amount of L1 gas that has been processed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Total gas", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 121, + "options": { + "legend": { + 
"calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "katana_block_producer_l1_gas_processed_total{instance=\"localhost:9100\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "L1 Gas Processed", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + "description": "The total amount of Cairo steps that has been processed", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Total steps", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 1 + }, + "id": 123, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "cdh4g2kxwgx6od" + }, + 
"disableTextWrap": false, + "editorMode": "builder", + "expr": "katana_block_producer_cairo_steps_processed_total{instance=\"localhost:9100\"}", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Cairo Steps Processed", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 9 + }, "id": 108, "panels": [], "title": "RPC Server", @@ -131,7 +344,7 @@ "h": 8, "w": 12, "x": 0, - "y": 1 + "y": 10 }, "id": 109, "options": { @@ -193,7 +406,7 @@ "h": 8, "w": 12, "x": 12, - "y": 1 + "y": 10 }, "id": 111, "maxDataPoints": 25, @@ -322,7 +535,7 @@ "h": 8, "w": 12, "x": 0, - "y": 9 + "y": 18 }, "id": 120, "options": { @@ -380,7 +593,7 @@ "h": 8, "w": 12, "x": 12, - "y": 9 + "y": 18 }, "id": 112, "maxDataPoints": 25, @@ -451,7 +664,7 @@ "h": 1, "w": 24, "x": 0, - "y": 17 + "y": 26 }, "id": 97, "panels": [], @@ -523,7 +736,7 @@ "h": 8, "w": 12, "x": 0, - "y": 18 + "y": 27 }, "id": 99, "options": { @@ -620,7 +833,7 @@ "h": 8, "w": 12, "x": 12, - "y": 18 + "y": 27 }, "id": 101, "options": { @@ -716,7 +929,7 @@ "h": 8, "w": 12, "x": 0, - "y": 26 + "y": 35 }, "id": 98, "options": { @@ -878,7 +1091,7 @@ "h": 8, "w": 12, "x": 12, - "y": 26 + "y": 35 }, "id": 100, "options": { @@ -953,6 +1166,6 @@ "timezone": "", "title": "katana", "uid": "2k8BXz24x", - "version": 3, + "version": 6, "weekStart": "" } From 59ff3a6b99af85342d259e8397c46fc8bcc6b02a Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Fri, 12 Apr 2024 10:01:17 -0400 Subject: [PATCH 3/9] Prepare release: v0.6.1-alpha.1 (#1824) --- Cargo.lock | 82 ++++++++++---------- Cargo.toml | 2 +- crates/katana/runner/runner-macro/Cargo.toml | 2 +- 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2b16039879..408bd71b93 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1275,7 +1275,7 @@ dependencies = [ [[package]] name 
= "benches" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "clap", @@ -2815,7 +2815,7 @@ dependencies = [ [[package]] name = "common" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "reqwest", @@ -3544,7 +3544,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dojo-bindgen" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "async-trait", "cainome 0.1.5", @@ -3560,15 +3560,15 @@ dependencies = [ [[package]] name = "dojo-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [[package]] name = "dojo-lang" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3618,7 +3618,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -3640,7 +3640,7 @@ dependencies = [ [[package]] name = "dojo-metrics" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "hyper", @@ -3658,7 +3658,7 @@ dependencies = [ [[package]] name = "dojo-test-utils" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -3692,7 +3692,7 @@ dependencies = [ [[package]] name = "dojo-types" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "crypto-bigint", "hex", @@ -3707,7 +3707,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -3743,7 +3743,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "cairo-lang-starknet", "camino", @@ -6618,7 +6618,7 @@ dependencies = [ [[package]] name = "katana" -version = "0.6.1-alpha.0" +version = 
"0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6645,7 +6645,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "bytes", "katana-primitives", @@ -6653,7 +6653,7 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "proc-macro2", "quote", @@ -6663,7 +6663,7 @@ dependencies = [ [[package]] name = "katana-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-contract", "alloy-network", @@ -6711,7 +6711,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "cairo-lang-starknet", @@ -6733,7 +6733,7 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6761,7 +6761,7 @@ dependencies = [ [[package]] name = "katana-primitives" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6787,7 +6787,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "auto_impl", @@ -6812,7 +6812,7 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_matches", @@ -6848,7 +6848,7 @@ dependencies = [ [[package]] name = "katana-rpc-api" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "jsonrpsee 0.16.3", "katana-core", @@ -6859,7 +6859,7 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -6880,7 +6880,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", 
"katana-executor", @@ -6892,7 +6892,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "chrono", @@ -6911,7 +6911,7 @@ dependencies = [ [[package]] name = "katana-tasks" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "futures", "rayon", @@ -10013,7 +10013,7 @@ checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09" [[package]] name = "runner-macro" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "quote", "syn 2.0.55", @@ -10240,7 +10240,7 @@ dependencies = [ [[package]] name = "saya" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "clap", @@ -10259,7 +10259,7 @@ dependencies = [ [[package]] name = "saya-core" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -10295,7 +10295,7 @@ dependencies = [ [[package]] name = "saya-provider" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "alloy-primitives", "anyhow", @@ -11058,7 +11058,7 @@ dependencies = [ [[package]] name = "sozo" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -11109,7 +11109,7 @@ dependencies = [ [[package]] name = "sozo-ops" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "assert_fs", @@ -11158,7 +11158,7 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "starknet 0.9.0", @@ -12598,7 +12598,7 @@ dependencies = [ [[package]] name = "torii" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -12643,7 +12643,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "async-trait", "camino", @@ -12671,7 +12671,7 @@ dependencies = [ [[package]] name = "torii-core" -version = 
"0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -12708,7 +12708,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-graphql", @@ -12749,7 +12749,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "bytes", "crypto-bigint", @@ -12792,7 +12792,7 @@ dependencies = [ [[package]] name = "torii-relay" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -12827,7 +12827,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" dependencies = [ "anyhow", "async-trait", @@ -13132,7 +13132,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [[package]] name = "ucd-trie" diff --git a/Cargo.toml b/Cargo.toml index 2906f5cfba..75e4ee6918 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" [profile.performance] codegen-units = 1 diff --git a/crates/katana/runner/runner-macro/Cargo.toml b/crates/katana/runner/runner-macro/Cargo.toml index 837fe7afb2..bd3ba86c28 100644 --- a/crates/katana/runner/runner-macro/Cargo.toml +++ b/crates/katana/runner/runner-macro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "runner-macro" -version = "0.6.1-alpha.0" +version = "0.6.1-alpha.1" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 9a814ad59b3855bf8f3bbdeefe13c407cf4edf87 Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Fri, 12 Apr 2024 10:07:17 -0400 Subject: [PATCH 4/9] Strip version v prefix with release-dispatch (#1825) Makes it a 
bit easier to use the action --- .github/workflows/release-dispatch.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-dispatch.yml b/.github/workflows/release-dispatch.yml index 0e3faa3d13..0a8a08c3f6 100644 --- a/.github/workflows/release-dispatch.yml +++ b/.github/workflows/release-dispatch.yml @@ -19,7 +19,10 @@ jobs: # Workaround described here: https://github.com/actions/checkout/issues/760 - uses: actions/checkout@v3 - run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - - run: cargo release version ${{ inputs.version }} --execute --no-confirm && cargo release replace --execute --no-confirm + - run: | + version=${{ inputs.version }} + version=${version#v} + cargo release version $version --execute --no-confirm && cargo release replace --execute --no-confirm - id: version_info run: | cargo install cargo-get From e9fab2caf970621ed8311330f3752dc9635f60d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Baranx?= Date: Fri, 12 Apr 2024 23:49:34 +0700 Subject: [PATCH 5/9] feat: store contract abi and source in contract metadata (#1682) sozo: store ABI and source in metadata registry This PR updates the `sozo build` command to save ABI and expanded source files, into the `target` directory, for the world contract and every user contract. These ABI and source files are then uploaded as IPFS artifacts in the `ResourceMetadata` registry, for the world, models and contracts.
--- Cargo.lock | 1 + bin/sozo/src/commands/dev.rs | 2 +- bin/sozo/src/commands/migrate.rs | 2 +- bin/sozo/src/utils.rs | 2 +- crates/dojo-lang/Cargo.toml | 2 +- crates/dojo-lang/src/compiler.rs | 36 +- crates/dojo-test-utils/src/compiler.rs | 41 +- crates/dojo-world/src/metadata.rs | 209 +++++++- crates/dojo-world/src/metadata_test.rs | 117 ++++- .../src/metadata_test_data/abi.json | 17 + .../src/metadata_test_data/source.cairo | 79 +++ crates/dojo-world/src/migration/mod.rs | 1 + crates/sozo/ops/Cargo.toml | 1 + crates/sozo/ops/src/migration/mod.rs | 380 +++++++++----- crates/sozo/ops/src/tests/migration.rs | 496 ++++++++++++++++++ crates/sozo/ops/src/tests/mod.rs | 1 + crates/sozo/ops/src/tests/setup.rs | 50 +- .../torii/graphql/src/tests/metadata_test.rs | 6 +- crates/torii/libp2p/src/server/mod.rs | 2 +- 19 files changed, 1271 insertions(+), 174 deletions(-) create mode 100644 crates/dojo-world/src/metadata_test_data/abi.json create mode 100644 crates/dojo-world/src/metadata_test_data/source.cairo create mode 100644 crates/sozo/ops/src/tests/migration.rs diff --git a/Cargo.lock b/Cargo.lock index 408bd71b93..0c0faa343c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11137,6 +11137,7 @@ dependencies = [ "dojo-types", "dojo-world", "futures", + "ipfs-api-backend-hyper", "katana-runner", "notify", "notify-debouncer-mini", diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index 4c03b9d749..f92eda68d4 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -199,7 +199,7 @@ impl DevArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index 849fe88462..fc56cdee0e 100644 --- a/bin/sozo/src/commands/migrate.rs +++ 
b/bin/sozo/src/commands/migrate.rs @@ -129,7 +129,7 @@ impl MigrateArgs { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; let env_metadata = if config.manifest_path().exists() { - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs index 8bd219e5b7..7dbbfe28fd 100644 --- a/bin/sozo/src/utils.rs +++ b/bin/sozo/src/utils.rs @@ -25,7 +25,7 @@ pub fn load_metadata_from_config(config: &Config) -> Result, let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) + dojo_metadata_from_workspace(&ws).env().cloned() } else { None }; diff --git a/crates/dojo-lang/Cargo.toml b/crates/dojo-lang/Cargo.toml index 277d7ea15c..2541730c92 100644 --- a/crates/dojo-lang/Cargo.toml +++ b/crates/dojo-lang/Cargo.toml @@ -16,6 +16,7 @@ cairo-lang-debug.workspace = true cairo-lang-defs.workspace = true cairo-lang-diagnostics.workspace = true cairo-lang-filesystem.workspace = true +cairo-lang-formatter.workspace = true cairo-lang-lowering.workspace = true cairo-lang-parser.workspace = true cairo-lang-plugins.workspace = true @@ -50,7 +51,6 @@ tracing.workspace = true url = "2.2.2" [dev-dependencies] -cairo-lang-formatter.workspace = true cairo-lang-semantic.workspace = true cairo-lang-test-utils.workspace = true dojo-test-utils = { path = "../dojo-test-utils" } diff --git a/crates/dojo-lang/src/compiler.rs b/crates/dojo-lang/src/compiler.rs index 0f7d7d4163..010dab774a 100644 --- a/crates/dojo-lang/src/compiler.rs +++ b/crates/dojo-lang/src/compiler.rs @@ -1,4 +1,5 @@ use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::io::Write; use std::iter::zip; use std::ops::DerefMut; @@ -8,13 +9,14 @@ use cairo_lang_defs::db::DefsGroup; use cairo_lang_defs::ids::{ModuleId, 
ModuleItemId}; use cairo_lang_filesystem::db::FilesGroup; use cairo_lang_filesystem::ids::{CrateId, CrateLongId}; +use cairo_lang_formatter::format_string; use cairo_lang_semantic::db::SemanticGroup; use cairo_lang_starknet::abi; use cairo_lang_starknet::contract::{find_contracts, ContractDeclaration}; use cairo_lang_starknet::contract_class::{compile_prepared_db, ContractClass}; use cairo_lang_starknet::plugin::aux_data::StarkNetContractAuxData; use cairo_lang_utils::UpcastMut; -use camino::Utf8PathBuf; +use camino::{Utf8Path, Utf8PathBuf}; use convert_case::{Case, Casing}; use dojo_world::manifest::{ AbiFormat, Class, ComputedValueEntrypoint, DojoContract, DojoModel, Manifest, ManifestMethods, @@ -46,6 +48,8 @@ pub const ABIS_DIR: &str = "abis"; pub const CONTRACTS_DIR: &str = "contracts"; pub const MODELS_DIR: &str = "models"; +pub const SOURCES_DIR: &str = "src"; + pub(crate) const LOG_TARGET: &str = "dojo_lang::compiler"; #[cfg(test)] @@ -87,6 +91,8 @@ impl Compiler for DojoCompiler { ) -> Result<()> { let props: Props = unit.target().props()?; let target_dir = unit.target_dir(ws); + let sources_dir = target_dir.child(Utf8Path::new(SOURCES_DIR)); + let compiler_config = build_compiler_config(&unit, ws); let mut main_crate_ids = collect_main_crate_ids(&unit, db); @@ -118,11 +124,31 @@ impl Compiler for DojoCompiler { for (decl, class) in zip(contracts, classes) { let contract_full_path = decl.module_id().full_path(db.upcast_mut()); - let file_name = format!("{contract_full_path}.json"); - let mut file = target_dir.open_rw(file_name.clone(), "output file", ws.config())?; - serde_json::to_writer_pretty(file.deref_mut(), &class) - .with_context(|| format!("failed to serialize contract: {contract_full_path}"))?; + // save expanded contract source file + if let Ok(file_id) = db.module_main_file(decl.module_id()) { + if let Some(file_content) = db.file_content(file_id) { + let src_file_name = format!("{contract_full_path}.cairo").replace("::", "_"); + + let mut file 
= + sources_dir.open_rw(src_file_name.clone(), "source file", ws.config())?; + file.write_all(format_string(db, file_content.to_string()).as_bytes()) + .with_context(|| { + format!("failed to serialize contract source: {contract_full_path}") + })?; + } else { + return Err(anyhow!("failed to get source file content: {contract_full_path}")); + } + } else { + return Err(anyhow!("failed to get source file: {contract_full_path}")); + } + + // save JSON artifact file + let file_name = format!("{contract_full_path}.json"); + let mut file = target_dir.open_rw(file_name.clone(), "class file", ws.config())?; + serde_json::to_writer_pretty(file.deref_mut(), &class).with_context(|| { + format!("failed to serialize contract artifact: {contract_full_path}") + })?; let class_hash = compute_class_hash_of_contract_class(&class).with_context(|| { format!("problem computing class hash for contract `{contract_full_path}`") diff --git a/crates/dojo-test-utils/src/compiler.rs b/crates/dojo-test-utils/src/compiler.rs index 2486ec1eac..335c86f63c 100644 --- a/crates/dojo-test-utils/src/compiler.rs +++ b/crates/dojo-test-utils/src/compiler.rs @@ -11,25 +11,40 @@ use scarb::ops; use scarb_ui::Verbosity; pub fn build_test_config(path: &str) -> anyhow::Result { + build_full_test_config(path, true) +} + +pub fn build_full_test_config(path: &str, override_dirs: bool) -> anyhow::Result { let mut compilers = CompilerRepository::empty(); compilers.add(Box::new(DojoCompiler)).unwrap(); let cairo_plugins = CairoPluginRepository::default(); + let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); - let cache_dir = TempDir::new().unwrap(); - let config_dir = TempDir::new().unwrap(); - let target_dir = TempDir::new().unwrap(); + if override_dirs { + let cache_dir = TempDir::new().unwrap(); + let config_dir = TempDir::new().unwrap(); + let target_dir = TempDir::new().unwrap(); - let path = Utf8PathBuf::from_path_buf(path.into()).unwrap(); - Config::builder(path.canonicalize_utf8().unwrap()) -
.global_cache_dir_override(Some(Utf8Path::from_path(cache_dir.path()).unwrap())) - .global_config_dir_override(Some(Utf8Path::from_path(config_dir.path()).unwrap())) - .target_dir_override(Some(Utf8Path::from_path(target_dir.path()).unwrap().to_path_buf())) - .ui_verbosity(Verbosity::Verbose) - .log_filter_directive(env::var_os("SCARB_LOG")) - .compilers(compilers) - .cairo_plugins(cairo_plugins.into()) - .build() + Config::builder(path.canonicalize_utf8().unwrap()) + .global_cache_dir_override(Some(Utf8Path::from_path(cache_dir.path()).unwrap())) + .global_config_dir_override(Some(Utf8Path::from_path(config_dir.path()).unwrap())) + .target_dir_override(Some( + Utf8Path::from_path(target_dir.path()).unwrap().to_path_buf(), + )) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() + } else { + Config::builder(path.canonicalize_utf8().unwrap()) + .ui_verbosity(Verbosity::Verbose) + .log_filter_directive(env::var_os("SCARB_LOG")) + .compilers(compilers) + .cairo_plugins(cairo_plugins.into()) + .build() + } } pub fn corelib() -> PathBuf { diff --git a/crates/dojo-world/src/metadata.rs b/crates/dojo-world/src/metadata.rs index a3c2c7d1ca..2d50f5fa70 100644 --- a/crates/dojo-world/src/metadata.rs +++ b/crates/dojo-world/src/metadata.rs @@ -3,24 +3,144 @@ use std::io::Cursor; use std::path::PathBuf; use anyhow::Result; +use camino::Utf8PathBuf; use ipfs_api_backend_hyper::{IpfsApi, IpfsClient, TryFromUri}; use scarb::core::{ManifestMetadata, Workspace}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::json; use url::Url; +use crate::manifest::{BaseManifest, WORLD_CONTRACT_NAME}; + #[cfg(test)] #[path = "metadata_test.rs"] mod test; -pub fn dojo_metadata_from_workspace(ws: &Workspace<'_>) -> Option { - Some(ws.current_package().ok()?.manifest.metadata.dojo()) +pub const IPFS_CLIENT_URL: &str = "https://ipfs.infura.io:5001"; +pub const 
IPFS_USERNAME: &str = "2EBrzr7ZASQZKH32sl2xWauXPSA"; +pub const IPFS_PASSWORD: &str = "12290b883db9138a8ae3363b6739d220"; + +// copy constants from dojo-lang to avoid circular dependency +pub const MANIFESTS_DIR: &str = "manifests"; +pub const ABIS_DIR: &str = "abis"; +pub const SOURCES_DIR: &str = "src"; +pub const BASE_DIR: &str = "base"; + +fn build_artifact_from_name( + source_dir: &Utf8PathBuf, + abi_dir: &Utf8PathBuf, + element_name: &str, +) -> ArtifactMetadata { + let sanitized_name = element_name.replace("::", "_"); + let abi_file = abi_dir.join(format!("{sanitized_name}.json")); + let src_file = source_dir.join(format!("{sanitized_name}.cairo")); + + ArtifactMetadata { + abi: if abi_file.exists() { Some(Uri::File(abi_file.into_std_path_buf())) } else { None }, + source: if src_file.exists() { + Some(Uri::File(src_file.into_std_path_buf())) + } else { + None + }, + } } +/// Build world metadata with data read from the project configuration. +/// +/// # Arguments +/// +/// * `project_metadata` - The project metadata. +/// +/// # Returns +/// +/// A [`WorldMetadata`] object initialized with project metadata. +pub fn project_to_world_metadata(project_metadata: Option) -> WorldMetadata { + if let Some(m) = project_metadata { + WorldMetadata { + name: m.name, + description: m.description, + cover_uri: m.cover_uri, + icon_uri: m.icon_uri, + website: m.website, + socials: m.socials, + ..Default::default() + } + } else { + WorldMetadata { + name: None, + description: None, + cover_uri: None, + icon_uri: None, + website: None, + socials: None, + ..Default::default() + } + } +} + +/// Collect metadata from the project configuration and from the workspace. +/// +/// # Arguments +/// `ws`: the workspace. +/// +/// # Returns +/// A [`DojoMetadata`] object containing all Dojo metadata. 
+pub fn dojo_metadata_from_workspace(ws: &Workspace<'_>) -> DojoMetadata { + let profile = ws.config().profile(); + + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let manifest_dir = manifest_dir.join(MANIFESTS_DIR).join(profile.as_str()); + let target_dir = ws.target_dir().path_existent().unwrap(); + let sources_dir = target_dir.join(profile.as_str()).join(SOURCES_DIR); + let abis_dir = manifest_dir.join(ABIS_DIR).join(BASE_DIR); + + let project_metadata = ws.current_package().unwrap().manifest.metadata.dojo(); + let mut dojo_metadata = + DojoMetadata { env: project_metadata.env.clone(), ..Default::default() }; + + let world_artifact = build_artifact_from_name(&sources_dir, &abis_dir, WORLD_CONTRACT_NAME); + + // initialize Dojo world metadata with world metadata coming from project configuration + dojo_metadata.world = project_to_world_metadata(project_metadata.world); + dojo_metadata.world.artifacts = world_artifact; + + // load models and contracts metadata + if manifest_dir.join(BASE_DIR).exists() { + if let Ok(manifest) = BaseManifest::load_from_path(&manifest_dir.join(BASE_DIR)) { + for model in manifest.models { + let name = model.name.to_string(); + dojo_metadata.artifacts.insert( + name.clone(), + build_artifact_from_name(&sources_dir, &abis_dir.join("models"), &name), + ); + } + + for contract in manifest.contracts { + let name = contract.name.to_string(); + dojo_metadata.artifacts.insert( + name.clone(), + build_artifact_from_name(&sources_dir, &abis_dir.join("contracts"), &name), + ); + } + } + } + + dojo_metadata +} + +/// Metadata coming from project configuration (Scarb.toml) +#[derive(Default, Deserialize, Debug, Clone)] +pub struct ProjectMetadata { + pub world: Option, + pub env: Option, +} + +/// Metadata collected from the project configuration and the Dojo workspace #[derive(Default, Deserialize, Debug, Clone)] -pub struct Metadata { - pub world: Option, +pub struct DojoMetadata { + pub world: WorldMetadata, pub env:
Option, + pub artifacts: HashMap, } #[derive(Debug)] @@ -76,6 +196,18 @@ impl Uri { } } +/// World metadata coming from the project configuration (Scarb.toml) +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct ProjectWorldMetadata { + pub name: Option, + pub description: Option, + pub cover_uri: Option, + pub icon_uri: Option, + pub website: Option, + pub socials: Option>, +} + +/// World metadata collected from the project configuration and the Dojo workspace #[derive(Default, Serialize, Deserialize, Debug, Clone)] pub struct WorldMetadata { pub name: Option, @@ -84,6 +216,14 @@ pub struct WorldMetadata { pub icon_uri: Option, pub website: Option, pub socials: Option>, + pub artifacts: ArtifactMetadata, +} + +/// Metadata Artifacts collected for one Dojo element (world, model, contract...) +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct ArtifactMetadata { + pub abi: Option, + pub source: Option, } #[derive(Default, Deserialize, Clone, Debug)] @@ -122,7 +262,7 @@ impl Environment { } } -impl WorldMetadata { +impl ProjectWorldMetadata { pub fn name(&self) -> Option<&str> { self.name.as_deref() } @@ -135,8 +275,8 @@ impl WorldMetadata { impl WorldMetadata { pub async fn upload(&self) -> Result { let mut meta = self.clone(); - let client = IpfsClient::from_str("https://ipfs.infura.io:5001")? 
- .with_credentials("2EBrzr7ZASQZKH32sl2xWauXPSA", "12290b883db9138a8ae3363b6739d220"); + let client = + IpfsClient::from_str(IPFS_CLIENT_URL)?.with_credentials(IPFS_USERNAME, IPFS_PASSWORD); if let Some(Uri::File(icon)) = &self.icon_uri { let icon_data = std::fs::read(icon)?; @@ -152,6 +292,20 @@ impl WorldMetadata { meta.cover_uri = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) }; + if let Some(Uri::File(abi)) = &self.artifacts.abi { + let abi_data = std::fs::read(abi)?; + let reader = Cursor::new(abi_data); + let response = client.add(reader).await?; + meta.artifacts.abi = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + if let Some(Uri::File(source)) = &self.artifacts.source { + let source_data = std::fs::read(source)?; + let reader = Cursor::new(source_data); + let response = client.add(reader).await?; + meta.artifacts.source = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + let serialized = json!(meta).to_string(); let reader = Cursor::new(serialized); let response = client.add(reader).await?; @@ -160,26 +314,51 @@ impl WorldMetadata { } } -impl Metadata { - pub fn env(&self) -> Option<&Environment> { - self.env.as_ref() +impl ArtifactMetadata { + pub async fn upload(&self) -> Result { + let mut meta = self.clone(); + let client = + IpfsClient::from_str(IPFS_CLIENT_URL)?.with_credentials(IPFS_USERNAME, IPFS_PASSWORD); + + if let Some(Uri::File(abi)) = &self.abi { + let abi_data = std::fs::read(abi)?; + let reader = Cursor::new(abi_data); + let response = client.add(reader).await?; + meta.abi = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + if let Some(Uri::File(source)) = &self.source { + let source_data = std::fs::read(source)?; + let reader = Cursor::new(source_data); + let response = client.add(reader).await?; + meta.source = Some(Uri::Ipfs(format!("ipfs://{}", response.hash))) + }; + + let serialized = json!(meta).to_string(); + let reader = Cursor::new(serialized); + let response = client.add(reader).await?; 
+ + Ok(response.hash) } +} - pub fn world(&self) -> Option<&WorldMetadata> { - self.world.as_ref() +impl DojoMetadata { + pub fn env(&self) -> Option<&Environment> { + self.env.as_ref() } } + trait MetadataExt { - fn dojo(&self) -> Metadata; + fn dojo(&self) -> ProjectMetadata; } impl MetadataExt for ManifestMetadata { - fn dojo(&self) -> Metadata { + fn dojo(&self) -> ProjectMetadata { self.tool_metadata .as_ref() .and_then(|e| e.get("dojo")) .cloned() - .map(|v| v.try_into::().unwrap_or_default()) + .map(|v| v.try_into::().unwrap_or_default()) .unwrap_or_default() } } diff --git a/crates/dojo-world/src/metadata_test.rs b/crates/dojo-world/src/metadata_test.rs index a6c950fa6c..b30624320f 100644 --- a/crates/dojo-world/src/metadata_test.rs +++ b/crates/dojo-world/src/metadata_test.rs @@ -1,13 +1,18 @@ use std::collections::HashMap; +use camino::Utf8PathBuf; +use dojo_test_utils::compiler::build_full_test_config; +use scarb::ops; use url::Url; -use super::WorldMetadata; -use crate::metadata::{Metadata, Uri}; +use crate::metadata::{ + dojo_metadata_from_workspace, ArtifactMetadata, ProjectMetadata, Uri, WorldMetadata, ABIS_DIR, + BASE_DIR, MANIFESTS_DIR, SOURCES_DIR, +}; #[test] fn check_metadata_deserialization() { - let metadata: Metadata = toml::from_str( + let metadata: ProjectMetadata = toml::from_str( r#" [env] rpc_url = "http://localhost:5050/" @@ -64,9 +69,13 @@ async fn world_metadata_hash_and_upload() { name: Some("Test World".to_string()), description: Some("A world used for testing".to_string()), cover_uri: Some(Uri::File("src/metadata_test_data/cover.png".into())), - icon_uri: None, + icon_uri: Some(Uri::File("src/metadata_test_data/cover.png".into())), website: Some(Url::parse("https://dojoengine.org").unwrap()), socials: Some(HashMap::from([("x".to_string(), "https://x.com/dojostarknet".to_string())])), + artifacts: ArtifactMetadata { + abi: Some(Uri::File("src/metadata_test_data/abi.json".into())), + source: 
Some(Uri::File("src/metadata_test_data/source.cairo".into())), + }, }; let _ = meta.upload().await.unwrap(); @@ -74,7 +83,7 @@ async fn world_metadata_hash_and_upload() { #[tokio::test] async fn parse_world_metadata_without_socials() { - let metadata: Metadata = toml::from_str( + let metadata: ProjectMetadata = toml::from_str( r#" [env] rpc_url = "http://localhost:5050/" @@ -97,3 +106,101 @@ website = "https://dojoengine.org" assert!(metadata.world.is_some()); } + +#[tokio::test] +async fn get_full_dojo_metadata_from_workspace() { + let config = build_full_test_config("../../examples/spawn-and-move/Scarb.toml", false).unwrap(); + let ws = ops::read_workspace(config.manifest_path(), &config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); + + let profile = ws.config().profile(); + let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf(); + let manifest_dir = manifest_dir.join(MANIFESTS_DIR).join(profile.as_str()); + let target_dir = ws.target_dir().path_existent().unwrap(); + let sources_dir = target_dir.join(profile.as_str()).join(SOURCES_DIR); + let abis_dir = manifest_dir.join(ABIS_DIR).join(BASE_DIR); + + let dojo_metadata = dojo_metadata_from_workspace(&ws); + + // env + assert!(dojo_metadata.env.is_some()); + let env = dojo_metadata.env.unwrap(); + + assert!(env.rpc_url.is_some()); + assert!(env.rpc_url.unwrap().eq("http://localhost:5050/")); + + assert!(env.account_address.is_some()); + assert!( + env.account_address + .unwrap() + .eq("0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03") + ); + + assert!(env.private_key.is_some()); + assert!( + env.private_key.unwrap().eq("0x1800000000300000180000000000030000000000003006001800006600") + ); + + assert!(env.world_address.is_some()); + assert!( + env.world_address + .unwrap() + .eq("0x1385f25d20a724edc9c7b3bd9636c59af64cbaf9fcd12f33b3af96b2452f295") + ); + + assert!(env.keystore_path.is_none()); + assert!(env.keystore_password.is_none()); + + // world + 
assert!(dojo_metadata.world.name.is_some()); + assert!(dojo_metadata.world.name.unwrap().eq("example")); + + assert!(dojo_metadata.world.description.is_some()); + assert!(dojo_metadata.world.description.unwrap().eq("example world")); + + assert!(dojo_metadata.world.cover_uri.is_none()); + assert!(dojo_metadata.world.icon_uri.is_none()); + assert!(dojo_metadata.world.website.is_none()); + assert!(dojo_metadata.world.socials.is_none()); + + check_artifact( + dojo_metadata.world.artifacts, + "dojo_world_world".to_string(), + &abis_dir, + &sources_dir, + ); + + // artifacts + let artifacts = vec![ + ("models", "dojo_examples::actions::actions::moved"), + ("models", "dojo_examples::models::emote_message"), + ("models", "dojo_examples::models::moves"), + ("models", "dojo_examples::models::position"), + ("contracts", "dojo_examples::actions::actions"), + ]; + + for (abi_subdir, name) in artifacts { + let artifact = dojo_metadata.artifacts.get(name); + assert!(artifact.is_some()); + let artifact = artifact.unwrap(); + + let sanitized_name = name.replace("::", "_"); + + check_artifact(artifact.clone(), sanitized_name, &abis_dir.join(abi_subdir), &sources_dir); + } +} + +fn check_artifact( + artifact: ArtifactMetadata, + name: String, + abis_dir: &Utf8PathBuf, + sources_dir: &Utf8PathBuf, +) { + assert!(artifact.abi.is_some()); + let abi = artifact.abi.unwrap(); + assert_eq!(abi, Uri::File(abis_dir.join(format!("{name}.json")).into())); + + assert!(artifact.source.is_some()); + let source = artifact.source.unwrap(); + assert_eq!(source, Uri::File(sources_dir.join(format!("{name}.cairo")).into())); +} diff --git a/crates/dojo-world/src/metadata_test_data/abi.json b/crates/dojo-world/src/metadata_test_data/abi.json new file mode 100644 index 0000000000..78efed0140 --- /dev/null +++ b/crates/dojo-world/src/metadata_test_data/abi.json @@ -0,0 +1,17 @@ +[ + { + "type": "impl", + "name": "WorldProviderImpl", + "interface_name": "dojo::world::IWorldProvider" + }, + { + "type": 
"struct", + "name": "dojo::world::IWorldDispatcher", + "members": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + } + ] + } +] diff --git a/crates/dojo-world/src/metadata_test_data/source.cairo b/crates/dojo-world/src/metadata_test_data/source.cairo new file mode 100644 index 0000000000..c917342ece --- /dev/null +++ b/crates/dojo-world/src/metadata_test_data/source.cairo @@ -0,0 +1,79 @@ +use starknet::ContractAddress; + +#[derive(Serde, Copy, Drop, Introspect)] +enum Direction { + None, + Left, + Right, + Up, + Down, +} + +impl DirectionIntoFelt252 of Into { + fn into(self: Direction) -> felt252 { + match self { + Direction::None => 0, + Direction::Left => 1, + Direction::Right => 2, + Direction::Up => 3, + Direction::Down => 4, + } + } +} + +#[derive(Model, Copy, Drop, Serde)] +struct Moves { + #[key] + player: ContractAddress, + remaining: u8, + last_direction: Direction +} + +#[derive(Copy, Drop, Serde, Introspect)] +struct Vec2 { + x: u32, + y: u32 +} + +#[derive(Model, Copy, Drop, Serde)] +struct Position { + #[key] + player: ContractAddress, + vec: Vec2, +} + +trait Vec2Trait { + fn is_zero(self: Vec2) -> bool; + fn is_equal(self: Vec2, b: Vec2) -> bool; +} + +impl Vec2Impl of Vec2Trait { + fn is_zero(self: Vec2) -> bool { + if self.x - self.y == 0 { + return true; + } + false + } + + fn is_equal(self: Vec2, b: Vec2) -> bool { + self.x == b.x && self.y == b.y + } +} + +#[cfg(test)] +mod tests { + use super::{Position, Vec2, Vec2Trait}; + + #[test] + #[available_gas(100000)] + fn test_vec_is_zero() { + assert(Vec2Trait::is_zero(Vec2 { x: 0, y: 0 }), 'not zero'); + } + + #[test] + #[available_gas(100000)] + fn test_vec_is_equal() { + let position = Vec2 { x: 420, y: 0 }; + assert(position.is_equal(Vec2 { x: 420, y: 0 }), 'not equal'); + } +} diff --git a/crates/dojo-world/src/migration/mod.rs b/crates/dojo-world/src/migration/mod.rs index 4af66e2ac5..943c47725e 100644 --- 
a/crates/dojo-world/src/migration/mod.rs +++ b/crates/dojo-world/src/migration/mod.rs @@ -51,6 +51,7 @@ pub struct UpgradeOutput { pub struct RegisterOutput { pub transaction_hash: FieldElement, pub declare_output: Vec, + pub registered_model_names: Vec, } #[derive(Debug, Error)] diff --git a/crates/sozo/ops/Cargo.toml b/crates/sozo/ops/Cargo.toml index 2472bb87fe..077f54ef54 100644 --- a/crates/sozo/ops/Cargo.toml +++ b/crates/sozo/ops/Cargo.toml @@ -49,5 +49,6 @@ cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } [dev-dependencies] assert_fs = "1.0.10" dojo-test-utils = { workspace = true, features = [ "build-examples" ] } +ipfs-api-backend-hyper = { git = "https://github.com/ferristseng/rust-ipfs-api", rev = "af2c17f7b19ef5b9898f458d97a90055c3605633", features = [ "with-hyper-rustls" ] } katana-runner.workspace = true snapbox = "0.4.6" diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 4c096fe037..74f7c6499e 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -11,7 +11,7 @@ use dojo_world::manifest::{ Manifest, ManifestMethods, OverlayManifest, WorldContract as ManifestWorldContract, WorldMetadata, }; -use dojo_world::metadata::dojo_metadata_from_workspace; +use dojo_world::metadata::{dojo_metadata_from_workspace, ArtifactMetadata}; use dojo_world::migration::contract::ContractMigration; use dojo_world::migration::strategy::{generate_salt, prepare_for_migration, MigrationStrategy}; use dojo_world::migration::world::WorldDiff; @@ -20,6 +20,7 @@ use dojo_world::migration::{ Upgradable, UpgradeOutput, }; use dojo_world::utils::TransactionWaiter; +use futures::future; use scarb::core::Workspace; use scarb_ui::Ui; use starknet::accounts::{Account, ConnectedAccount, SingleOwnerAccount}; @@ -32,9 +33,6 @@ use starknet::core::utils::{ use starknet::providers::{Provider, ProviderError}; use tokio::fs; -#[cfg(test)] -#[path = "migration_test.rs"] -mod 
migration_test; mod ui; use starknet::signers::Signer; @@ -51,7 +49,15 @@ pub struct MigrationOutput { // If false that means migration got partially completed. pub full: bool, - pub contracts: Vec>, + pub models: Vec, + pub contracts: Vec>, +} + +#[derive(Debug, Default, Clone)] +pub struct ContractMigrationOutput { + name: String, + contract_address: FieldElement, + base_class_hash: FieldElement, } pub async fn migrate( @@ -107,13 +113,40 @@ where let mut strategy = prepare_migration(&target_dir, diff, name.clone(), world_address, &ui)?; let world_address = strategy.world_address().expect("world address must exist"); - let migration_output = if dry_run { + if dry_run { print_strategy(&ui, account.provider(), &strategy).await; - MigrationOutput { world_address, ..Default::default() } + + update_manifests_and_abis( + ws, + local_manifest, + &profile_dir, + &profile_name, + &rpc_url, + world_address, + None, + name.as_ref(), + ) + .await?; } else { // Migrate according to the diff. match apply_diff(ws, account, txn_config, &mut strategy).await { - Ok(migration_output) => migration_output, + Ok(migration_output) => { + update_manifests_and_abis( + ws, + local_manifest.clone(), + &profile_dir, + &profile_name, + &rpc_url, + world_address, + Some(migration_output.clone()), + name.as_ref(), + ) + .await?; + + if !ws.config().offline() { + upload_metadata(ws, account, migration_output).await?; + } + } Err(e) => { update_manifests_and_abis( ws, @@ -121,7 +154,8 @@ where &profile_dir, &profile_name, &rpc_url, - MigrationOutput { world_address, ..Default::default() }, + world_address, + None, name.as_ref(), ) .await?; @@ -130,17 +164,6 @@ where } }; - update_manifests_and_abis( - ws, - local_manifest, - &profile_dir, - &profile_name, - &rpc_url, - migration_output, - name.as_ref(), - ) - .await?; - Ok(()) } @@ -150,11 +173,12 @@ async fn update_manifests_and_abis( profile_dir: &Utf8PathBuf, profile_name: &str, rpc_url: &str, - migration_output: MigrationOutput, + 
world_address: FieldElement, + migration_output: Option, salt: Option<&String>, ) -> Result<()> { let ui = ws.config().ui(); - ui.print("\n✨ Updating manifests..."); + ui.print_step(5, "✨", "Updating manifests..."); let deployed_path = profile_dir.join("manifest").with_extension("toml"); let deployed_path_json = profile_dir.join("manifest").with_extension("json"); @@ -171,39 +195,43 @@ async fn update_manifests_and_abis( local_manifest.merge_from_previous(previous_manifest); }; - local_manifest.world.inner.address = Some(migration_output.world_address); + local_manifest.world.inner.address = Some(world_address); if let Some(salt) = salt { local_manifest.world.inner.seed = Some(salt.to_owned()); } - if migration_output.world_tx_hash.is_some() { - local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; - } - if migration_output.world_block_number.is_some() { - local_manifest.world.inner.block_number = migration_output.world_block_number; - } + // when the migration has not been applied because in `plan` mode or because of an error, + // the `migration_output` is empty. 
+ if let Some(migration_output) = migration_output { + if migration_output.world_tx_hash.is_some() { + local_manifest.world.inner.transaction_hash = migration_output.world_tx_hash; + } + if migration_output.world_block_number.is_some() { + local_manifest.world.inner.block_number = migration_output.world_block_number; + } - migration_output.contracts.iter().for_each(|contract_output| { - // ignore failed migration which are represented by None - if let Some(output) = contract_output { - // find the contract in local manifest and update its address and base class hash - let local = local_manifest - .contracts - .iter_mut() - .find(|c| c.name == output.name.as_ref().unwrap()) - .expect("contract got migrated, means it should be present here"); - - let salt = generate_salt(&local.name); - local.inner.address = Some(get_contract_address( - salt, - output.base_class_hash, - &[], - migration_output.world_address, - )); + migration_output.contracts.iter().for_each(|contract_output| { + // ignore failed migration which are represented by None + if let Some(output) = contract_output { + // find the contract in local manifest and update its address and base class hash + let local = local_manifest + .contracts + .iter_mut() + .find(|c| c.name == output.name) + .expect("contract got migrated, means it should be present here"); + + let salt = generate_salt(&local.name); + local.inner.address = Some(get_contract_address( + salt, + output.base_class_hash, + &[], + migration_output.world_address, + )); - local.inner.base_class_hash = output.base_class_hash; - } - }); + local.inner.base_class_hash = output.base_class_hash; + } + }); + } // copy abi files from `abi/base` to `abi/deployments/{chain_id}` and update abi path in // local_manifest @@ -289,7 +317,8 @@ where { let ui = ws.config().ui(); - println!(" "); + ui.print_step(4, "🛠", "Migrating..."); + ui.print(" "); let migration_output = execute_strategy(ws, strategy, account, txn_config) .await @@ -299,7 +328,7 @@ where if 
migration_output.full { if let Some(block_number) = migration_output.world_block_number { ui.print(format!( - "\n🎉 Successfully migrated World on block #{} at address {}", + "\n🎉 Successfully migrated World on block #{} at address {}\n", block_number, bold_message(format!( "{:#x}", @@ -308,7 +337,7 @@ where )); } else { ui.print(format!( - "\n🎉 Successfully migrated World at address {}", + "\n🎉 Successfully migrated World at address {}\n", bold_message(format!( "{:#x}", strategy.world_address().expect("world address must exist") @@ -493,14 +522,6 @@ where }; ui.print_sub(format!("Contract address: {:#x}", world.contract_address)); - - let offline = ws.config().offline(); - - if offline { - ui.print_sub("Skipping metadata upload because of offline mode"); - } else { - upload_metadata(ws, world, migrator, &ui).await?; - } } } None => {} @@ -511,23 +532,25 @@ where world_tx_hash, world_block_number, full: false, + models: vec![], contracts: vec![], }; // Once Torii supports indexing arrays, we should declare and register the // ResourceMetadata model. 
- match register_models(strategy, migrator, &ui, txn_config).await { - Ok(_) => (), + Ok(output) => { + migration_output.models = output.registered_model_names; + } Err(e) => { ui.anyhow(&e); return Ok(migration_output); } - } + }; match deploy_dojo_contracts(strategy, migrator, &ui, txn_config).await { - Ok(res) => { - migration_output.contracts = res; + Ok(output) => { + migration_output.contracts = output; } Err(e) => { ui.anyhow(&e); @@ -540,53 +563,6 @@ where Ok(migration_output) } -async fn upload_metadata( - ws: &Workspace<'_>, - world: &ContractMigration, - migrator: &SingleOwnerAccount, - ui: &Ui, -) -> Result<(), anyhow::Error> -where - P: Provider + Sync + Send + 'static, - S: Signer + Sync + Send + 'static, -{ - let metadata = dojo_metadata_from_workspace(ws); - if let Some(meta) = metadata.as_ref().and_then(|inner| inner.world()) { - match meta.upload().await { - Ok(hash) => { - let mut encoded_uri = cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; - - // Metadata is expecting an array of capacity 3. 
- if encoded_uri.len() < 3 { - encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); - } - - let world_metadata = - ResourceMetadata { resource_id: FieldElement::ZERO, metadata_uri: encoded_uri }; - - let InvokeTransactionResult { transaction_hash } = - WorldContract::new(world.contract_address, migrator) - .set_metadata(&world_metadata) - .send() - .await - .map_err(|e| { - ui.verbose(format!("{e:?}")); - anyhow!("Failed to set World metadata: {e}") - })?; - - TransactionWaiter::new(transaction_hash, migrator.provider()).await?; - - ui.print_sub(format!("Set Metadata transaction: {:#x}", transaction_hash)); - ui.print_sub(format!("Metadata uri: ipfs://{hash}")); - } - Err(err) => { - ui.print_sub(format!("Failed to set World metadata:\n{err}")); - } - } - } - Ok(()) -} - enum ContractDeploymentOutput { AlreadyDeployed(FieldElement), Output(DeployOutput), @@ -693,7 +669,7 @@ async fn register_models( migrator: &SingleOwnerAccount, ui: &Ui, txn_config: Option, -) -> Result> +) -> Result where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, @@ -701,12 +677,17 @@ where let models = &strategy.models; if models.is_empty() { - return Ok(None); + return Ok(RegisterOutput { + transaction_hash: FieldElement::ZERO, + declare_output: vec![], + registered_model_names: vec![], + }); } ui.print_header(format!("# Models ({})", models.len())); let mut declare_output = vec![]; + let mut registered_model_names = vec![]; for c in models.iter() { ui.print(italic_message(&c.diff.name).to_string()); @@ -741,7 +722,10 @@ where let calls = models .iter() - .map(|c| world.register_model_getcall(&c.diff.local.into())) + .map(|c| { + registered_model_names.push(c.diff.name.clone()); + world.register_model_getcall(&c.diff.local.into()) + }) .collect::>(); let InvokeTransactionResult { transaction_hash } = @@ -754,7 +738,7 @@ where ui.print(format!("All models are registered at: {transaction_hash:#x}")); - Ok(Some(RegisterOutput { transaction_hash, 
declare_output })) + Ok(RegisterOutput { transaction_hash, declare_output, registered_model_names }) } async fn deploy_dojo_contracts( @@ -762,7 +746,7 @@ async fn deploy_dojo_contracts( migrator: &SingleOwnerAccount, ui: &Ui, txn_config: Option, -) -> Result>> +) -> Result>> where P: Provider + Sync + Send + 'static, S: Signer + Sync + Send + 'static, @@ -793,7 +777,7 @@ where ) .await { - Ok(mut output) => { + Ok(output) => { if let Some(ref declare) = output.declare { ui.print_hidden_sub(format!( "Declare transaction: {:#x}", @@ -819,10 +803,11 @@ where )); ui.print_sub(format!("Contract address: {:#x}", output.contract_address)); } - let name = contract.diff.name.clone(); - - output.name = Some(name); - deploy_output.push(Some(output)); + deploy_output.push(Some(ContractMigrationOutput { + name: name.to_string(), + contract_address: output.contract_address, + base_class_hash: output.base_class_hash, + })); } Err(MigrationError::ContractAlreadyDeployed(contract_address)) => { ui.print_sub(format!("Already deployed: {:#x}", contract_address)); @@ -926,3 +911,154 @@ where ui.print(" "); } } + +/// Upload a metadata as a IPFS artifact and then create a resource to register +/// into the Dojo resource registry. +/// +/// # Arguments +/// * `element_name` - fully qualified name of the element linked to the metadata +/// * `resource_id` - the id of the resource to create. +/// * `artifact` - the artifact to upload on IPFS. +/// +/// # Returns +/// A [`ResourceData`] object to register in the Dojo resource register +/// on success. 
+/// +async fn upload_on_ipfs_and_create_resource( + ui: &Ui, + element_name: String, + resource_id: FieldElement, + artifact: ArtifactMetadata, +) -> Result { + match artifact.upload().await { + Ok(hash) => { + ui.print_sub(format!("{}: ipfs://{}", element_name, hash)); + create_resource_metadata(resource_id, hash) + } + Err(_) => Err(anyhow!("Failed to upload IPFS resource.")), + } +} + +/// Create a resource to register in the Dojo resource registry. +/// +/// # Arguments +/// * `resource_id` - the ID of the resource +/// * `hash` - the IPFS hash +/// +/// # Returns +/// A [`ResourceData`] object to register in the Dojo resource register +/// on success. +fn create_resource_metadata(resource_id: FieldElement, hash: String) -> Result { + let mut encoded_uri = cairo_utils::encode_uri(&format!("ipfs://{hash}"))?; + + // Metadata is expecting an array of capacity 3. + if encoded_uri.len() < 3 { + encoded_uri.extend(vec![FieldElement::ZERO; 3 - encoded_uri.len()]); + } + + Ok(ResourceMetadata { resource_id, metadata_uri: encoded_uri }) +} + +/// Upload metadata of the world/models/contracts as IPFS artifacts and then +/// register them in the Dojo resource registry. +/// +/// # Arguments +/// +/// * `ws` - the workspace +/// * `migrator` - the account used to migrate +/// * `migration_output` - the output after having applied the migration plan. 
+pub async fn upload_metadata( + ws: &Workspace<'_>, + migrator: &SingleOwnerAccount, + migration_output: MigrationOutput, +) -> Result<()> +where + P: Provider + Sync + Send + 'static, + S: Signer + Sync + Send + 'static, +{ + let ui = ws.config().ui(); + + ui.print(" "); + ui.print_step(6, "🌐", "Uploading metadata..."); + ui.print(" "); + + let dojo_metadata = dojo_metadata_from_workspace(ws); + let mut ipfs = vec![]; + let mut resources = vec![]; + + // world + if migration_output.world_tx_hash.is_some() { + match dojo_metadata.world.upload().await { + Ok(hash) => { + let resource = create_resource_metadata(FieldElement::ZERO, hash.clone())?; + ui.print_sub(format!("world: ipfs://{}", hash)); + resources.push(resource); + } + Err(err) => { + ui.print_sub(format!("Failed to upload World metadata:\n{err}")); + } + } + } + + // models + if !migration_output.models.is_empty() { + for model_name in migration_output.models { + if let Some(m) = dojo_metadata.artifacts.get(&model_name) { + ipfs.push(upload_on_ipfs_and_create_resource( + &ui, + model_name.clone(), + get_selector_from_name(&model_name).expect("ASCII model name"), + m.clone(), + )); + } + } + } + + // contracts + let migrated_contracts = migration_output.contracts.into_iter().flatten().collect::>(); + + if !migrated_contracts.is_empty() { + for contract in migrated_contracts { + if let Some(m) = dojo_metadata.artifacts.get(&contract.name) { + ipfs.push(upload_on_ipfs_and_create_resource( + &ui, + contract.name.clone(), + contract.contract_address, + m.clone(), + )); + } + } + } + + // upload IPFS + resources.extend( + future::try_join_all(ipfs) + .await + .map_err(|_| anyhow!("Unable to upload IPFS artifacts."))?, + ); + + ui.print("> All IPFS artifacts have been successfully uploaded.".to_string()); + + // update the resource registry + let world = WorldContract::new(migration_output.world_address, migrator); + + let calls = resources.iter().map(|r| world.set_metadata_getcall(r)).collect::>(); + + let 
InvokeTransactionResult { transaction_hash } = + migrator.execute(calls).send().await.map_err(|e| { + ui.verbose(format!("{e:?}")); + anyhow!("Failed to register metadata into the resource registry: {e}") + })?; + + TransactionWaiter::new(transaction_hash, migrator.provider()).await?; + + ui.print(format!( + "> All metadata have been registered in the resource registry (tx hash: \ + {transaction_hash:#x})" + )); + + ui.print(""); + ui.print("\n✨ Done."); + + Ok(()) +} diff --git a/crates/sozo/ops/src/tests/migration.rs b/crates/sozo/ops/src/tests/migration.rs new file mode 100644 index 0000000000..d499b8cb5d --- /dev/null +++ b/crates/sozo/ops/src/tests/migration.rs @@ -0,0 +1,496 @@ +use std::str; + +use camino::Utf8Path; +use dojo_lang::compiler::{BASE_DIR, MANIFESTS_DIR}; +use dojo_test_utils::compiler::build_full_test_config; +use dojo_test_utils::sequencer::{ + get_default_test_starknet_config, SequencerConfig, StarknetConfig, TestSequencer, +}; +use dojo_world::contracts::WorldContractReader; +use dojo_world::manifest::{BaseManifest, DeploymentManifest, WORLD_CONTRACT_NAME}; +use dojo_world::metadata::{ + dojo_metadata_from_workspace, ArtifactMetadata, DojoMetadata, Uri, WorldMetadata, + IPFS_CLIENT_URL, IPFS_PASSWORD, IPFS_USERNAME, +}; +use dojo_world::migration::strategy::prepare_for_migration; +use dojo_world::migration::world::WorldDiff; +use dojo_world::migration::TxConfig; +use futures::TryStreamExt; +use ipfs_api_backend_hyper::{HyperBackend, IpfsApi, IpfsClient, TryFromUri}; +use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; +use starknet::core::chain_id; +use starknet::core::types::{BlockId, BlockTag}; +use starknet::core::utils::{get_selector_from_name, parse_cairo_short_string}; +use starknet::macros::felt; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use starknet::signers::{LocalWallet, SigningKey}; +use starknet_crypto::FieldElement; + +use super::setup::{load_config, setup_migration, 
setup_ws}; +use crate::migration::{execute_strategy, upload_metadata}; +use crate::utils::get_contract_address_from_reader; + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_auto_mine() { + let config = load_config(); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + sequencer.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_block_time() { + let config = load_config(); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = TestSequencer::start( + SequencerConfig { block_time: Some(1000), ..Default::default() }, + get_default_test_starknet_config(), + ) + .await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + sequencer.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_small_fee_multiplier_will_fail() { + let config = load_config(); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = TestSequencer::start( + Default::default(), + StarknetConfig { disable_fee: false, ..Default::default() }, + ) + .await; + + let account = SingleOwnerAccount::new( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + sequencer.raw_account().private_key, + )), + sequencer.raw_account().account_address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + assert!( + execute_strategy( + &ws, + &mut migration, + &account, + Some(TxConfig { fee_estimate_multiplier: Some(0.2f64), wait: false, 
receipt: false }), + ) + .await + .is_err() + ); + sequencer.stop().unwrap(); +} + +#[test] +fn migrate_world_without_seed_will_fail() { + let profile_name = "dev"; + let base = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base); + let manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + ) + .unwrap(); + let world = WorldDiff::compute(manifest, None); + let res = prepare_for_migration(None, None, &Utf8Path::new(&target_dir).to_path_buf(), world); + assert!(res.is_err_and(|e| e.to_string().contains("Missing seed for World deployment."))) +} + +#[tokio::test] +async fn migration_from_remote() { + let config = load_config(); + let ws = setup_ws(&config); + + let base = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let account = SingleOwnerAccount::new( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + sequencer.raw_account().private_key, + )), + sequencer.raw_account().account_address, + chain_id::TESTNET, + ExecutionEncoding::New, + ); + + let profile_name = ws.current_profile().unwrap().to_string(); + + let manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), + ) + .unwrap(); + + let world = WorldDiff::compute(manifest, None); + + let mut migration = prepare_for_migration( + None, + Some(felt!("0x12345")), + &Utf8Path::new(&target_dir).to_path_buf(), + world, + ) + .unwrap(); + + execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + let local_manifest = BaseManifest::load_from_path( + &Utf8Path::new(base).to_path_buf().join(MANIFESTS_DIR).join(&profile_name).join(BASE_DIR), + ) + .unwrap(); + + let remote_manifest = 
DeploymentManifest::load_from_remote( + JsonRpcClient::new(HttpTransport::new(sequencer.url())), + migration.world_address().unwrap(), + ) + .await + .unwrap(); + + sequencer.stop().unwrap(); + + assert_eq!(local_manifest.world.inner.class_hash, remote_manifest.world.inner.class_hash); + assert_eq!(local_manifest.models.len(), remote_manifest.models.len()); +} + +#[tokio::test(flavor = "multi_thread")] +async fn migrate_with_metadata() { + let config = build_full_test_config("../../../examples/spawn-and-move/Scarb.toml", false) + .unwrap_or_else(|c| panic!("Error loading config: {c:?}")); + let ws = setup_ws(&config); + + let mut migration = setup_migration().unwrap(); + + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let mut account = sequencer.account(); + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let output = execute_strategy(&ws, &mut migration, &account, None).await.unwrap(); + + let res = upload_metadata(&ws, &account, output.clone()).await; + assert!(res.is_ok()); + + let provider = sequencer.provider(); + let world_reader = WorldContractReader::new(output.world_address, &provider); + + let client = IpfsClient::from_str(IPFS_CLIENT_URL) + .unwrap_or_else(|_| panic!("Unable to initialize the IPFS Client")) + .with_credentials(IPFS_USERNAME, IPFS_PASSWORD); + + let dojo_metadata = dojo_metadata_from_workspace(&ws); + + // check world metadata + let resource = world_reader.metadata(&FieldElement::ZERO).call().await.unwrap(); + let element_name = WORLD_CONTRACT_NAME.to_string(); + + let full_uri = get_and_check_metadata_uri(&element_name, &resource.metadata_uri); + let resource_bytes = get_ipfs_resource_data(&client, &element_name, &full_uri).await; + + let metadata = resource_bytes_to_world_metadata(&resource_bytes, &element_name); + + assert_eq!(metadata.name, dojo_metadata.world.name, ""); + assert_eq!(metadata.description, dojo_metadata.world.description, ""); + 
assert_eq!(metadata.cover_uri, dojo_metadata.world.cover_uri, ""); + assert_eq!(metadata.icon_uri, dojo_metadata.world.icon_uri, ""); + assert_eq!(metadata.website, dojo_metadata.world.website, ""); + assert_eq!(metadata.socials, dojo_metadata.world.socials, ""); + + check_artifact_fields( + &client, + &metadata.artifacts, + &dojo_metadata.world.artifacts, + &element_name, + ) + .await; + + // check model metadata + for m in migration.models { + let selector = get_selector_from_name(&m.diff.name).unwrap(); + check_artifact_metadata(&client, &world_reader, selector, &m.diff.name, &dojo_metadata) + .await; + } + + // check contract metadata + for c in migration.contracts { + let contract_address = + get_contract_address_from_reader(&world_reader, c.diff.name.clone()).await.unwrap(); + check_artifact_metadata( + &client, + &world_reader, + contract_address, + &c.diff.name, + &dojo_metadata, + ) + .await; + } +} + +/// Get the hash from a IPFS URI +/// +/// # Arguments +/// +/// * `uri` - a full IPFS URI +/// +/// # Returns +/// +/// A [`String`] containing the hash from the URI. +fn get_hash_from_uri(uri: &str) -> String { + let hash = match uri.strip_prefix("ipfs://") { + Some(s) => s.to_string(), + None => uri.to_owned(), + }; + match hash.strip_suffix('/') { + Some(s) => s.to_string(), + None => hash, + } +} + +/// Check a metadata field which refers to a file. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `uri` - the IPFS URI of the abi field. +/// * `expected_uri` - the URI of the expected file. +/// * `field_name` - the field name. +/// * `element_name` - the fully qualified name of the element linked to this field. 
+async fn check_file_field( + client: &HyperBackend, + uri: &Uri, + expected_uri: &Uri, + field_name: String, + element_name: &String, +) { + if let Uri::Ipfs(uri) = uri { + let resource_data = get_ipfs_resource_data(client, element_name, uri).await; + assert!( + !resource_data.is_empty(), + "{field_name} IPFS artifact for {} is empty", + element_name + ); + + if let Uri::File(f) = expected_uri { + let file_content = std::fs::read_to_string(f).unwrap(); + let resource_content = std::str::from_utf8(&resource_data).unwrap_or_else(|_| { + panic!( + "Unable to stringify resource data for field '{}' of {}", + field_name, element_name + ) + }); + + assert!( + file_content.eq(&resource_content), + "local '{field_name}' content differs from the one uploaded on IPFS for {}", + element_name + ); + } else { + panic!( + "The field '{field_name}' of {} is not a file (Should never happen !)", + element_name + ); + } + } else { + panic!("The '{field_name}' field is not an IPFS artifact for {}", element_name); + } +} + +/// Convert resource bytes to a ArtifactMetadata object. +/// +/// # Arguments +/// +/// * `raw_data` - resource data as bytes. +/// * `element_name` - name of the element linked to this resource. +/// +/// # Returns +/// +/// A [`ArtifactMetadata`] object. +fn resource_bytes_to_metadata(raw_data: &[u8], element_name: &String) -> ArtifactMetadata { + let data = std::str::from_utf8(raw_data) + .unwrap_or_else(|_| panic!("Unable to stringify raw metadata for {}", element_name)); + serde_json::from_str(data) + .unwrap_or_else(|_| panic!("Unable to deserialize metadata for {}", element_name)) +} + +/// Convert resource bytes to a WorldMetadata object. +/// +/// # Arguments +/// +/// * `raw_data` - resource data as bytes. +/// * `element_name` - name of the element linked to this resource. +/// +/// # Returns +/// +/// A [`WorldMetadata`] object. 
+fn resource_bytes_to_world_metadata(raw_data: &[u8], element_name: &String) -> WorldMetadata { + let data = std::str::from_utf8(raw_data) + .unwrap_or_else(|_| panic!("Unable to stringify raw metadata for {}", element_name)); + serde_json::from_str(data) + .unwrap_or_else(|_| panic!("Unable to deserialize metadata for {}", element_name)) +} + +/// Read the content of a resource identified by its IPFS URI. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `element_name` - the name of the element (model or contract) linked to this artifact. +/// * `uri` - the IPFS resource URI. +/// +/// # Returns +/// +/// A [`Vec`] containing the resource content as bytes. +async fn get_ipfs_resource_data( + client: &HyperBackend, + element_name: &String, + uri: &String, +) -> Vec { + let hash = get_hash_from_uri(uri); + + let res = client.cat(&hash).map_ok(|chunk| chunk.to_vec()).try_concat().await; + assert!(res.is_ok(), "Unable to read the IPFS artifact {} for {}", uri, element_name); + + res.unwrap() +} + +/// Check the validity of artifact metadata fields. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `metadata` - the metadata to check. +/// * `expected_metadata` - the metadata values coming from local Dojo metadata. +/// * `element_name` - the name of the element linked to this metadata. 
+async fn check_artifact_fields( + client: &HyperBackend, + metadata: &ArtifactMetadata, + expected_metadata: &ArtifactMetadata, + element_name: &String, +) { + assert!(metadata.abi.is_some(), "'abi' field not set for {}", element_name); + let abi = metadata.abi.as_ref().unwrap(); + let expected_abi = expected_metadata.abi.as_ref().unwrap(); + check_file_field(client, abi, expected_abi, "abi".to_string(), element_name).await; + + assert!(metadata.source.is_some(), "'source' field not set for {}", element_name); + let source = metadata.source.as_ref().unwrap(); + let expected_source = expected_metadata.source.as_ref().unwrap(); + check_file_field(client, source, expected_source, "source".to_string(), element_name).await; +} + +/// Check the validity of a IPFS artifact metadata. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `element_name` - the fully qualified name of the element linked to the artifact. +/// * `uri` - the full metadata URI. +/// * `expected_metadata` - the expected metadata values coming from local Dojo metadata. +async fn check_ipfs_metadata( + client: &HyperBackend, + element_name: &String, + uri: &String, + expected_metadata: &ArtifactMetadata, +) { + let resource_bytes = get_ipfs_resource_data(client, element_name, uri).await; + let metadata = resource_bytes_to_metadata(&resource_bytes, element_name); + + check_artifact_fields(client, &metadata, expected_metadata, element_name).await; +} + +/// Rebuild the full metadata URI from an array of 3 FieldElement. +/// +/// # Arguments +/// +/// * `element_name` - name of the element (model or contract) linked to the metadata URI. +/// * `uri` - uri as an array of 3 FieldElement. +/// +/// # Returns +/// +/// A [`String`] containing the full metadata URI. 
+fn get_and_check_metadata_uri(element_name: &String, uri: &Vec) -> String { + assert!(uri.len() == 3, "bad metadata URI length for {} ({:#?})", element_name, uri); + + let mut i = 0; + let mut full_uri = "".to_string(); + + while i < uri.len() && uri[i] != FieldElement::ZERO { + let uri_str = parse_cairo_short_string(&uri[i]); + assert!( + uri_str.is_ok(), + "unable to parse the part {} of the metadata URI for {}", + i + 1, + element_name + ); + + full_uri = format!("{}{}", full_uri, uri_str.unwrap()); + + i += 1; + } + + assert!(!full_uri.is_empty(), "metadata URI is empty for {}", element_name); + + assert!( + full_uri.starts_with("ipfs://"), + "metadata URI for {} is not an IPFS artifact", + element_name + ); + + full_uri +} + +/// Check an artifact metadata read from the resource registry against its value +/// in the local Dojo metadata. +/// +/// # Arguments +/// +/// * `client` - a IPFS client. +/// * `world_reader` - a world reader object. +/// * `resource_id` - the resource ID in the resource registry. +/// * `element_name` - the fully qualified name of the element linked to this metadata. +/// * `dojo_metadata` - local Dojo metadata. +async fn check_artifact_metadata( + client: &HyperBackend, + world_reader: &WorldContractReader

, + resource_id: FieldElement, + element_name: &String, + dojo_metadata: &DojoMetadata, +) { + let resource = world_reader.metadata(&resource_id).call().await.unwrap(); + + let expected_artifact = dojo_metadata.artifacts.get(element_name); + assert!( + expected_artifact.is_some(), + "Unable to find local artifact metadata for {}", + element_name + ); + let expected_artifact = expected_artifact.unwrap(); + + let full_uri = get_and_check_metadata_uri(element_name, &resource.metadata_uri); + check_ipfs_metadata(client, element_name, &full_uri, expected_artifact).await; +} diff --git a/crates/sozo/ops/src/tests/mod.rs b/crates/sozo/ops/src/tests/mod.rs index 25bdba5697..f754ddc5a6 100644 --- a/crates/sozo/ops/src/tests/mod.rs +++ b/crates/sozo/ops/src/tests/mod.rs @@ -1,4 +1,5 @@ pub mod auth; pub mod call; +pub mod migration; pub mod setup; pub mod utils; diff --git a/crates/sozo/ops/src/tests/setup.rs b/crates/sozo/ops/src/tests/setup.rs index 47eb424524..c55be7c1f4 100644 --- a/crates/sozo/ops/src/tests/setup.rs +++ b/crates/sozo/ops/src/tests/setup.rs @@ -3,7 +3,9 @@ use dojo_test_utils::compiler::build_test_config; use dojo_test_utils::migration::prepare_migration; use dojo_test_utils::sequencer::TestSequencer; use dojo_world::contracts::world::WorldContract; +use dojo_world::migration::strategy::MigrationStrategy; use dojo_world::migration::TxConfig; +use scarb::core::{Config, Workspace}; use scarb::ops; use starknet::accounts::SingleOwnerAccount; use starknet::core::types::{BlockId, BlockTag}; @@ -13,8 +15,47 @@ use starknet::signers::LocalWallet; use crate::migration; +/// Load the spawn-and-moves project configuration. +/// +/// # Returns +/// +/// A [`Config`] object loaded from the spawn-and-moves Scarb.toml file. +pub fn load_config() -> Config { + build_test_config("../../../examples/spawn-and-move/Scarb.toml") + .unwrap_or_else(|c| panic!("Error loading config: {c:?}")) +} + +/// Setups the workspace for the spawn-and-moves project. 
+/// +/// # Arguments +/// * `config` - the project configuration. +/// +/// # Returns +/// +/// A [`Workspace`] loaded from the spawn-and-moves project. +pub fn setup_ws(config: &Config) -> Workspace<'_> { + ops::read_workspace(config.manifest_path(), config) + .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")) +} + +/// Prepare the migration for the spawn-and-moves project. +/// +/// # Returns +/// +/// A [`MigrationStrategy`] to execute to migrate the full spawn-and-moves project. +pub fn setup_migration() -> Result { + let base_dir = "../../../examples/spawn-and-move"; + let target_dir = format!("{}/target/dev", base_dir); + + prepare_migration(base_dir.into(), target_dir.into()) +} + /// Setups the project by migrating the full spawn-and-moves project. /// +/// # Arguments +/// +/// * `sequencer` - The sequencer used for tests. +/// /// # Returns /// /// A [`WorldContract`] initialized with the migrator account, @@ -22,13 +63,10 @@ use crate::migration; pub async fn setup( sequencer: &TestSequencer, ) -> Result, LocalWallet>>> { - let config = build_test_config("../../../examples/spawn-and-move/Scarb.toml")?; - let ws = ops::read_workspace(config.manifest_path(), &config) - .unwrap_or_else(|op| panic!("Error building workspace: {op:?}")); - let base_dir = "../../../examples/spawn-and-move"; - let target_dir = format!("{}/target/dev", base_dir); + let config = load_config(); + let ws = setup_ws(&config); - let mut migration = prepare_migration(base_dir.into(), target_dir.into())?; + let mut migration = setup_migration()?; let mut account = sequencer.account(); account.set_block_id(BlockId::Tag(BlockTag::Pending)); diff --git a/crates/torii/graphql/src/tests/metadata_test.rs b/crates/torii/graphql/src/tests/metadata_test.rs index 01914abfb7..c834ea1d3c 100644 --- a/crates/torii/graphql/src/tests/metadata_test.rs +++ b/crates/torii/graphql/src/tests/metadata_test.rs @@ -1,6 +1,6 @@ #[cfg(test)] mod tests { - use dojo_world::metadata::Metadata as 
DojoMetadata; + use dojo_world::metadata::{project_to_world_metadata, ProjectMetadata}; use sqlx::SqlitePool; use starknet_crypto::FieldElement; use torii_core::sql::Sql; @@ -51,7 +51,7 @@ mod tests { let schema = build_schema(&pool).await.unwrap(); let cover_img = "QWxsIHlvdXIgYmFzZSBiZWxvbmcgdG8gdXM="; - let dojo_metadata: DojoMetadata = toml::from_str( + let project_metadata: ProjectMetadata = toml::from_str( r#" [world] name = "example" @@ -62,7 +62,7 @@ mod tests { "#, ) .unwrap(); - let world_metadata = dojo_metadata.world.unwrap(); + let world_metadata = project_to_world_metadata(project_metadata.world); db.set_metadata(&RESOURCE, URI, BLOCK_TIMESTAMP); db.update_metadata(&RESOURCE, URI, &world_metadata, &None, &Some(cover_img.to_string())) .await diff --git a/crates/torii/libp2p/src/server/mod.rs b/crates/torii/libp2p/src/server/mod.rs index 36df964fed..8806c46040 100644 --- a/crates/torii/libp2p/src/server/mod.rs +++ b/crates/torii/libp2p/src/server/mod.rs @@ -630,7 +630,7 @@ async fn validate_message( } else { return Err(Error::InvalidMessageError("Model name is missing".to_string())); }; - let model_selector = get_selector_from_name(&model_name).map_err(|e| { + let model_selector = get_selector_from_name(model_name).map_err(|e| { Error::InvalidMessageError(format!("Failed to get selector from model name: {}", e)) })?; From 995dd8dbd33627ef847d1a374a951e71db72063c Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Sat, 13 Apr 2024 23:58:55 +0800 Subject: [PATCH 6/9] refactor(katana-primitives): remove total supply from fee token genesis config (#1821) * remove total supply from genesis config * add sanity check --- crates/katana/primitives/src/genesis/json.rs | 21 +------ crates/katana/primitives/src/genesis/mod.rs | 59 ++++++++++++-------- 2 files changed, 37 insertions(+), 43 deletions(-) diff --git a/crates/katana/primitives/src/genesis/json.rs b/crates/katana/primitives/src/genesis/json.rs index a50bc4d54a..5b62c72035 100644 --- 
a/crates/katana/primitives/src/genesis/json.rs +++ b/crates/katana/primitives/src/genesis/json.rs @@ -315,10 +315,9 @@ impl TryFrom for Genesis { }) .collect::>()?; - let mut fee_token = FeeTokenConfig { + let fee_token = FeeTokenConfig { name: value.fee_token.name, symbol: value.fee_token.symbol, - total_supply: U256::ZERO, decimals: value.fee_token.decimals, address: value.fee_token.address.unwrap_or(DEFAULT_FEE_TOKEN_ADDRESS), class_hash: value.fee_token.class.unwrap_or(DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH), @@ -414,11 +413,6 @@ impl TryFrom for Genesis { } }; - // increase the total supply of the fee token if balance is given - if let Some(balance) = account.balance { - fee_token.total_supply += balance; - } - match account.private_key { Some(private_key) => allocations.insert( address, @@ -456,11 +450,6 @@ impl TryFrom for Genesis { } } - // increase the total supply of the fee token if balance is given - if let Some(balance) = contract.balance { - fee_token.total_supply += balance; - } - allocations.insert( address, GenesisAllocation::Contract(GenesisContractAlloc { @@ -803,9 +792,6 @@ mod tests { address: ContractAddress::from(felt!("0x55")), name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0xD3C21BCECCEDA1000000") - .unwrap() - .wrapping_mul(U256::from(5)), decimals: 18, class_hash: felt!("0x8"), storage: Some(HashMap::from([ @@ -939,16 +925,12 @@ mod tests { assert_eq!(actual_genesis.timestamp, expected_genesis.timestamp); assert_eq!(actual_genesis.state_root, expected_genesis.state_root); assert_eq!(actual_genesis.gas_prices, expected_genesis.gas_prices); - assert_eq!(actual_genesis.fee_token.address, expected_genesis.fee_token.address); assert_eq!(actual_genesis.fee_token.name, expected_genesis.fee_token.name); assert_eq!(actual_genesis.fee_token.symbol, expected_genesis.fee_token.symbol); assert_eq!(actual_genesis.fee_token.decimals, expected_genesis.fee_token.decimals); - 
assert_eq!(actual_genesis.fee_token.total_supply, expected_genesis.fee_token.total_supply); assert_eq!(actual_genesis.fee_token.class_hash, expected_genesis.fee_token.class_hash); - assert_eq!(actual_genesis.universal_deployer, expected_genesis.universal_deployer); - assert_eq!(actual_genesis.allocations.len(), expected_genesis.allocations.len()); for alloc in actual_genesis.allocations { @@ -1030,7 +1012,6 @@ mod tests { address: DEFAULT_FEE_TOKEN_ADDRESS, name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0xD3C21BCECCEDA1000000").unwrap(), decimals: 18, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: None, diff --git a/crates/katana/primitives/src/genesis/mod.rs b/crates/katana/primitives/src/genesis/mod.rs index c1574f660d..5774e3407c 100644 --- a/crates/katana/primitives/src/genesis/mod.rs +++ b/crates/katana/primitives/src/genesis/mod.rs @@ -42,8 +42,6 @@ pub struct FeeTokenConfig { pub address: ContractAddress, /// The decimals of the fee token. pub decimals: u8, - /// The total supply of the fee token. - pub total_supply: U256, /// The class hash of the fee token contract. 
#[serde_as(as = "UfeHex")] pub class_hash: ClassHash, @@ -186,24 +184,13 @@ impl Genesis { states.state_updates.storage_updates.insert(address, storage); } - // TODO: put this in a separate function - // insert fee token related data let mut fee_token_storage = self.fee_token.storage.clone().unwrap_or_default(); - - let name: FieldElement = cairo_short_string_to_felt(&self.fee_token.name).unwrap(); - let symbol: FieldElement = cairo_short_string_to_felt(&self.fee_token.symbol).unwrap(); - let decimals: FieldElement = self.fee_token.decimals.into(); - let (total_supply_low, total_supply_high) = split_u256(self.fee_token.total_supply); - - fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); - fee_token_storage.insert(ERC20_SYMBOL_STORAGE_SLOT, symbol); - fee_token_storage.insert(ERC20_DECIMAL_STORAGE_SLOT, decimals); - fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT, total_supply_low); - fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into(), total_supply_high); + let mut fee_token_total_supply = U256::ZERO; for (address, alloc) in &self.allocations { if let Some(balance) = alloc.balance() { + fee_token_total_supply += balance; let (low, high) = split_u256(balance); // the base storage address for a standard ERC20 contract balance @@ -219,6 +206,19 @@ impl Genesis { } } + // TODO: put this in a separate function + + let name: FieldElement = cairo_short_string_to_felt(&self.fee_token.name).unwrap(); + let symbol: FieldElement = cairo_short_string_to_felt(&self.fee_token.symbol).unwrap(); + let decimals: FieldElement = self.fee_token.decimals.into(); + let (total_supply_low, total_supply_high) = split_u256(fee_token_total_supply); + + fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); + fee_token_storage.insert(ERC20_SYMBOL_STORAGE_SLOT, symbol); + fee_token_storage.insert(ERC20_DECIMAL_STORAGE_SLOT, decimals); + fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT, total_supply_low); + 
fee_token_storage.insert(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into(), total_supply_high); + states .state_updates .contract_updates @@ -246,7 +246,6 @@ impl Default for Genesis { decimals: 18, name: "Ether".into(), symbol: "ETH".into(), - total_supply: U256::ZERO, address: DEFAULT_FEE_TOKEN_ADDRESS, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: None, @@ -352,7 +351,6 @@ mod tests { address: DEFAULT_FEE_TOKEN_ADDRESS, name: String::from("ETHER"), symbol: String::from("ETH"), - total_supply: U256::from_str("0x1a784379d99db42000000").unwrap(), decimals: 18, class_hash: DEFAULT_LEGACY_ERC20_CONTRACT_CLASS_HASH, storage: Some(HashMap::from([ @@ -420,12 +418,16 @@ mod tests { universal_deployer: Some(ud.clone()), }; - // setup expected values + // setup expected storage values let name: FieldElement = cairo_short_string_to_felt(&fee_token.name).unwrap(); let symbol: FieldElement = cairo_short_string_to_felt(&fee_token.symbol).unwrap(); let decimals: FieldElement = fee_token.decimals.into(); - let (total_supply_low, total_supply_high) = split_u256(fee_token.total_supply); + + // there are only two allocations so the total token supply is + // 0xD3C21BCECCEDA1000000 * 2 = 0x1a784379d99db42000000 + let (total_supply_low, total_supply_high) = + split_u256(U256::from_str("0x1a784379d99db42000000").unwrap()); let mut fee_token_storage = HashMap::new(); fee_token_storage.insert(ERC20_NAME_STORAGE_SLOT, name); @@ -649,7 +651,7 @@ mod tests { assert_eq!(fee_token_storage.get(&felt!("0x111")), Some(&felt!("0x1"))); assert_eq!(fee_token_storage.get(&felt!("0x222")), Some(&felt!("0x2"))); - let mut actual_total_supply = U256::ZERO; + let mut allocs_total_supply = U256::ZERO; // check for balance for (address, alloc) in &allocations { @@ -667,13 +669,24 @@ mod tests { assert_eq!(fee_token_storage.get(&low_bal_storage_var), Some(&low)); assert_eq!(fee_token_storage.get(&high_bal_storage_var), Some(&high)); - actual_total_supply += balance; + allocs_total_supply += 
balance; } } + // Check that the total supply is the sum of all balances in the allocations. + // Technically this is not necessary bcs we already checked the total supply in + // the fee token storage but it's a good sanity check. + + let (actual_total_supply_low, actual_total_supply_high) = split_u256(allocs_total_supply); assert_eq!( - actual_total_supply, fee_token.total_supply, - "total supply should match the total balances of all allocations" + fee_token_storage.get(&ERC20_TOTAL_SUPPLY_STORAGE_SLOT), + Some(&actual_total_supply_low), + "total supply must be calculated from allocations balances correctly" + ); + assert_eq!( + fee_token_storage.get(&(ERC20_TOTAL_SUPPLY_STORAGE_SLOT + 1u8.into())), + Some(&actual_total_supply_high), + "total supply must be calculated from allocations balances correctly" ); let udc_storage = From 371c121fea32430ded6429eed42b1c67a76b8336 Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Sun, 14 Apr 2024 19:22:49 +0530 Subject: [PATCH 7/9] refactor: clean up bin/sozo (#1828) * refactor: move verify_cairo_version_compatibility to utils module * refactor: move around more things to appropriate places * refactor: make bin/sozo more consistent * fix: paths in test --- bin/sozo/src/args.rs | 61 +----- bin/sozo/src/commands/auth.rs | 106 +++++----- bin/sozo/src/commands/build.rs | 2 +- bin/sozo/src/commands/clean.rs | 30 +-- bin/sozo/src/commands/completions.rs | 2 +- bin/sozo/src/commands/dev.rs | 202 +++++++++---------- bin/sozo/src/commands/events.rs | 4 +- bin/sozo/src/commands/init.rs | 2 +- bin/sozo/src/commands/migrate.rs | 118 +++++------ bin/sozo/src/commands/mod.rs | 48 ++++- bin/sozo/src/commands/model.rs | 12 +- bin/sozo/src/commands/options/account.rs | 4 +- bin/sozo/src/commands/options/transaction.rs | 2 +- bin/sozo/src/commands/test.rs | 2 +- bin/sozo/src/main.rs | 34 +--- bin/sozo/src/utils.rs | 44 +++- crates/benches/src/deployer.rs | 3 +- 17 files changed, 341 insertions(+), 335 deletions(-) 
diff --git a/bin/sozo/src/args.rs b/bin/sozo/src/args.rs index 4dbc54003d..142d1ee90f 100644 --- a/bin/sozo/src/args.rs +++ b/bin/sozo/src/args.rs @@ -1,38 +1,14 @@ use anyhow::Result; use camino::Utf8PathBuf; -use clap::{Parser, Subcommand}; +use clap::Parser; use scarb::compiler::Profile; use scarb_ui::Verbosity; use smol_str::SmolStr; use tracing::level_filters::LevelFilter; use tracing_log::AsTrace; -use crate::commands::auth::AuthArgs; -use crate::commands::build::BuildArgs; -use crate::commands::call::CallArgs; -use crate::commands::clean::CleanArgs; -use crate::commands::completions::CompletionsArgs; -use crate::commands::dev::DevArgs; -use crate::commands::events::EventsArgs; -use crate::commands::execute::ExecuteArgs; -use crate::commands::init::InitArgs; -use crate::commands::migrate::MigrateArgs; -use crate::commands::model::ModelArgs; -use crate::commands::register::RegisterArgs; -use crate::commands::test::TestArgs; - -fn generate_version() -> String { - const DOJO_VERSION: &str = env!("CARGO_PKG_VERSION"); - let scarb_version = scarb::version::get().version; - let scarb_sierra_version = scarb::version::get().sierra.version; - let scarb_cairo_version = scarb::version::get().cairo.version; - - let version_string = format!( - "{}\nscarb: {}\ncairo: {}\nsierra: {}", - DOJO_VERSION, scarb_version, scarb_cairo_version, scarb_sierra_version, - ); - version_string -} +use crate::commands::Commands; +use crate::utils::generate_version; #[derive(Parser)] #[command(author, version=generate_version(), about, long_about = None)] @@ -63,37 +39,6 @@ pub struct SozoArgs { pub command: Commands, } -#[derive(Subcommand)] -pub enum Commands { - #[command(about = "Build the world, generating the necessary artifacts for deployment")] - Build(BuildArgs), - #[command(about = "Initialize a new project")] - Init(InitArgs), - #[command(about = "Remove generated artifacts, manifests and abis")] - Clean(CleanArgs), - #[command(about = "Run a migration, declaring and deploying 
contracts as necessary to \ - update the world")] - Migrate(Box), - #[command(about = "Developer mode: watcher for building and migration")] - Dev(DevArgs), - #[command(about = "Test the project's smart contracts")] - Test(TestArgs), - #[command(about = "Execute a world's system")] - Execute(ExecuteArgs), - #[command(about = "Call a world's system")] - Call(CallArgs), - #[command(about = "Interact with a worlds models")] - Model(ModelArgs), - #[command(about = "Register new models")] - Register(RegisterArgs), - #[command(about = "Queries world events")] - Events(EventsArgs), - #[command(about = "Manage world authorization")] - Auth(AuthArgs), - #[command(about = "Generate shell completion file for specified shell")] - Completions(CompletionsArgs), -} - impl SozoArgs { pub fn ui_verbosity(&self) -> Verbosity { let filter = self.verbose.log_level_filter().as_trace(); diff --git a/bin/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs index cca991be77..434e8c0313 100644 --- a/bin/sozo/src/commands/auth.rs +++ b/bin/sozo/src/commands/auth.rs @@ -16,6 +16,59 @@ pub struct AuthArgs { pub command: AuthCommand, } +#[derive(Debug, Subcommand)] +pub enum AuthCommand { + #[command(about = "Grant an auth role.")] + Grant { + #[command(subcommand)] + kind: AuthKind, + + #[command(flatten)] + world: WorldOptions, + + #[command(flatten)] + starknet: StarknetOptions, + + #[command(flatten)] + account: AccountOptions, + + #[command(flatten)] + transaction: TransactionOptions, + }, + #[command(about = "Revoke an auth role.")] + Revoke { + #[command(subcommand)] + kind: AuthKind, + + #[command(flatten)] + world: WorldOptions, + + #[command(flatten)] + starknet: StarknetOptions, + + #[command(flatten)] + account: AccountOptions, + + #[command(flatten)] + transaction: TransactionOptions, + }, +} + +impl AuthArgs { + pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = utils::load_metadata_from_config(config)?; + + match self.command { + AuthCommand::Grant { 
kind, world, starknet, account, transaction } => config + .tokio_handle() + .block_on(grant(world, account, starknet, env_metadata, kind, transaction)), + AuthCommand::Revoke { kind, world, starknet, account, transaction } => config + .tokio_handle() + .block_on(revoke(world, account, starknet, env_metadata, kind, transaction)), + } + } +} + #[derive(Debug, Subcommand)] pub enum AuthKind { #[command(about = "Grant a contract permission to write to a model.")] @@ -82,59 +135,6 @@ pub async fn revoke( } } -#[derive(Debug, Subcommand)] -pub enum AuthCommand { - #[command(about = "Grant an auth role.")] - Grant { - #[command(subcommand)] - kind: AuthKind, - - #[command(flatten)] - world: WorldOptions, - - #[command(flatten)] - starknet: StarknetOptions, - - #[command(flatten)] - account: AccountOptions, - - #[command(flatten)] - transaction: TransactionOptions, - }, - #[command(about = "Revoke an auth role.")] - Revoke { - #[command(subcommand)] - kind: AuthKind, - - #[command(flatten)] - world: WorldOptions, - - #[command(flatten)] - starknet: StarknetOptions, - - #[command(flatten)] - account: AccountOptions, - - #[command(flatten)] - transaction: TransactionOptions, - }, -} - -impl AuthArgs { - pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = utils::load_metadata_from_config(config)?; - - match self.command { - AuthCommand::Grant { kind, world, starknet, account, transaction } => config - .tokio_handle() - .block_on(grant(world, account, starknet, env_metadata, kind, transaction)), - AuthCommand::Revoke { kind, world, starknet, account, transaction } => config - .tokio_handle() - .block_on(revoke(world, account, starknet, env_metadata, kind, transaction)), - } - } -} - #[cfg(test)] mod tests { use std::str::FromStr; diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs index bb45c1f6c4..f3347fd449 100644 --- a/bin/sozo/src/commands/build.rs +++ b/bin/sozo/src/commands/build.rs @@ -5,7 +5,7 @@ use 
dojo_lang::scarb_internal::compile_workspace; use scarb::core::{Config, TargetKind}; use scarb::ops::CompileOpts; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct BuildArgs { #[arg(long)] #[arg(help = "Generate Typescript bindings.")] diff --git a/bin/sozo/src/commands/clean.rs b/bin/sozo/src/commands/clean.rs index 42129aed84..6ed05e1837 100644 --- a/bin/sozo/src/commands/clean.rs +++ b/bin/sozo/src/commands/clean.rs @@ -20,21 +20,6 @@ pub struct CleanArgs { } impl CleanArgs { - pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf, profile_name: &str) -> Result<()> { - let dirs = vec![ - root_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), - root_dir.join(MANIFESTS_DIR).join(profile_name).join(ABIS_DIR).join(BASE_DIR), - ]; - - for d in dirs { - if d.exists() { - fs::remove_dir_all(d)?; - } - } - - Ok(()) - } - pub fn run(self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; @@ -55,4 +40,19 @@ impl CleanArgs { Ok(()) } + + pub fn clean_manifests_abis(&self, root_dir: &Utf8PathBuf, profile_name: &str) -> Result<()> { + let dirs = vec![ + root_dir.join(MANIFESTS_DIR).join(profile_name).join(BASE_DIR), + root_dir.join(MANIFESTS_DIR).join(profile_name).join(ABIS_DIR).join(BASE_DIR), + ]; + + for d in dirs { + if d.exists() { + fs::remove_dir_all(d)?; + } + } + + Ok(()) + } } diff --git a/bin/sozo/src/commands/completions.rs b/bin/sozo/src/commands/completions.rs index 65c99ac2bb..1f71098822 100644 --- a/bin/sozo/src/commands/completions.rs +++ b/bin/sozo/src/commands/completions.rs @@ -6,7 +6,7 @@ use clap_complete::{generate, Shell}; use crate::args::SozoArgs; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct CompletionsArgs { shell: Shell, } diff --git a/bin/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs index f92eda68d4..47fd5f5ba5 100644 --- a/bin/sozo/src/commands/dev.rs +++ b/bin/sozo/src/commands/dev.rs @@ -31,7 +31,7 @@ use super::options::world::WorldOptions; 
pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::dev"; -#[derive(Args)] +#[derive(Debug, Args)] pub struct DevArgs { #[arg(long)] #[arg(help = "Name of the World.")] @@ -49,6 +49,106 @@ pub struct DevArgs { pub account: AccountOptions, } +impl DevArgs { + pub fn run(self, config: &Config) -> Result<()> { + let env_metadata = if config.manifest_path().exists() { + let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; + + dojo_metadata_from_workspace(&ws).env().cloned() + } else { + None + }; + + let mut context = load_context(config)?; + let (tx, rx) = channel(); + let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?; + + debouncer.watcher().watch( + config.manifest_path().parent().unwrap().as_std_path(), + RecursiveMode::Recursive, + )?; + let name = self.name.clone(); + let mut previous_manifest: Option = Option::None; + let result = build(&mut context); + + let Some((mut world_address, account, _, _)) = context + .ws + .config() + .tokio_handle() + .block_on(setup_env( + &context.ws, + self.account, + self.starknet, + self.world, + name.as_ref(), + env_metadata.as_ref(), + )) + .ok() + else { + return Err(anyhow!("Failed to setup environment")); + }; + + match context.ws.config().tokio_handle().block_on(migrate( + world_address, + &account, + name.clone(), + &context.ws, + previous_manifest.clone(), + )) { + Ok((manifest, address)) => { + previous_manifest = Some(manifest); + world_address = address; + } + Err(error) => { + error!( + target: LOG_TARGET, + error = ?error, + address = ?world_address, + "Migrating world." 
+ ); + } + } + loop { + let action = match rx.recv() { + Ok(Ok(events)) => events + .iter() + .map(|event| process_event(event, &mut context)) + .last() + .unwrap_or(DevAction::None), + Ok(Err(_)) => DevAction::None, + Err(error) => { + error!(target: LOG_TARGET, error = ?error, "Receiving dev action."); + break; + } + }; + + if action != DevAction::None && build(&mut context).is_ok() { + match context.ws.config().tokio_handle().block_on(migrate( + world_address, + &account, + name.clone(), + &context.ws, + previous_manifest.clone(), + )) { + Ok((manifest, address)) => { + previous_manifest = Some(manifest); + world_address = address; + } + Err(error) => { + error!( + target: LOG_TARGET, + error = ?error, + address = ?world_address, + "Migrating world.", + ); + } + } + } + } + result + } +} + #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] enum DevAction { None, @@ -193,103 +293,3 @@ fn handle_reload_action(context: &mut DevContext<'_>) { let new_context = load_context(config).expect("Failed to load context"); let _ = mem::replace(context, new_context); } - -impl DevArgs { - pub fn run(self, config: &Config) -> Result<()> { - let env_metadata = if config.manifest_path().exists() { - let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - - dojo_metadata_from_workspace(&ws).env().cloned() - } else { - None - }; - - let mut context = load_context(config)?; - let (tx, rx) = channel(); - let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?; - - debouncer.watcher().watch( - config.manifest_path().parent().unwrap().as_std_path(), - RecursiveMode::Recursive, - )?; - let name = self.name.clone(); - let mut previous_manifest: Option = Option::None; - let result = build(&mut context); - - let Some((mut world_address, account, _, _)) = context - .ws - .config() - .tokio_handle() - .block_on(setup_env( - &context.ws, - self.account, - self.starknet, - self.world, - name.as_ref(), - env_metadata.as_ref(), - )) - .ok() - else { - return 
Err(anyhow!("Failed to setup environment")); - }; - - match context.ws.config().tokio_handle().block_on(migrate( - world_address, - &account, - name.clone(), - &context.ws, - previous_manifest.clone(), - )) { - Ok((manifest, address)) => { - previous_manifest = Some(manifest); - world_address = address; - } - Err(error) => { - error!( - target: LOG_TARGET, - error = ?error, - address = ?world_address, - "Migrating world." - ); - } - } - loop { - let action = match rx.recv() { - Ok(Ok(events)) => events - .iter() - .map(|event| process_event(event, &mut context)) - .last() - .unwrap_or(DevAction::None), - Ok(Err(_)) => DevAction::None, - Err(error) => { - error!(target: LOG_TARGET, error = ?error, "Receiving dev action."); - break; - } - }; - - if action != DevAction::None && build(&mut context).is_ok() { - match context.ws.config().tokio_handle().block_on(migrate( - world_address, - &account, - name.clone(), - &context.ws, - previous_manifest.clone(), - )) { - Ok((manifest, address)) => { - previous_manifest = Some(manifest); - world_address = address; - } - Err(error) => { - error!( - target: LOG_TARGET, - error = ?error, - address = ?world_address, - "Migrating world.", - ); - } - } - } - } - result - } -} diff --git a/bin/sozo/src/commands/events.rs b/bin/sozo/src/commands/events.rs index cd1aac2e13..d08a3a74d3 100644 --- a/bin/sozo/src/commands/events.rs +++ b/bin/sozo/src/commands/events.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use clap::Parser; +use clap::Args; use scarb::core::Config; use sozo_ops::events; @@ -7,7 +7,7 @@ use super::options::starknet::StarknetOptions; use super::options::world::WorldOptions; use crate::utils; -#[derive(Parser, Debug)] +#[derive(Debug, Args)] pub struct EventsArgs { #[arg(help = "List of specific events to be filtered")] #[arg(value_delimiter = ',')] diff --git a/bin/sozo/src/commands/init.rs b/bin/sozo/src/commands/init.rs index 7fffbbb2e9..f37bc70036 100644 --- a/bin/sozo/src/commands/init.rs +++ 
b/bin/sozo/src/commands/init.rs @@ -7,7 +7,7 @@ use anyhow::{ensure, Result}; use clap::Args; use scarb::core::Config; -#[derive(Args, Debug)] +#[derive(Debug, Args)] pub struct InitArgs { #[arg(help = "Target directory")] path: Option, diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index fc56cdee0e..3046c26b60 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -65,65 +65,6 @@ pub enum MigrateCommand { }, } -pub async fn setup_env<'a>( - ws: &'a Workspace<'a>, - account: AccountOptions, - starknet: StarknetOptions, - world: WorldOptions, - name: Option<&'a String>, - env: Option<&'a Environment>, -) -> Result<( - Option, - SingleOwnerAccount, LocalWallet>, - String, - String, -)> { - let ui = ws.config().ui(); - - let world_address = world.address(env).ok(); - - let (account, chain_id, rpc_url) = { - let provider = starknet.provider(env)?; - - let spec_version = provider.spec_version().await?; - - if spec_version != RPC_SPEC_VERSION { - return Err(anyhow!( - "Unsupported Starknet RPC version: {}, expected {}.", - spec_version, - RPC_SPEC_VERSION - )); - } - - let rpc_url = starknet.url(env)?; - - let chain_id = provider.chain_id().await?; - let chain_id = parse_cairo_short_string(&chain_id) - .with_context(|| "Cannot parse chain_id as string")?; - - let mut account = account.account(provider, env).await?; - account.set_block_id(BlockId::Tag(BlockTag::Pending)); - - let address = account.address(); - - ui.print(format!("\nMigration account: {address:#x}")); - if let Some(name) = name { - ui.print(format!("\nWorld name: {name}\n")); - } - - match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { - Ok(_) => Ok((account, chain_id, rpc_url)), - Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { - Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) - } - Err(e) => Err(e.into()), - } - } - .with_context(|| "Problem 
initializing account for migration.")?; - - Ok((world_address, account, chain_id, rpc_url.to_string())) -} - impl MigrateArgs { pub fn run(self, config: &Config) -> Result<()> { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; @@ -207,3 +148,62 @@ impl MigrateArgs { } } } + +pub async fn setup_env<'a>( + ws: &'a Workspace<'a>, + account: AccountOptions, + starknet: StarknetOptions, + world: WorldOptions, + name: Option<&'a String>, + env: Option<&'a Environment>, +) -> Result<( + Option, + SingleOwnerAccount, LocalWallet>, + String, + String, +)> { + let ui = ws.config().ui(); + + let world_address = world.address(env).ok(); + + let (account, chain_id, rpc_url) = { + let provider = starknet.provider(env)?; + + let spec_version = provider.spec_version().await?; + + if spec_version != RPC_SPEC_VERSION { + return Err(anyhow!( + "Unsupported Starknet RPC version: {}, expected {}.", + spec_version, + RPC_SPEC_VERSION + )); + } + + let rpc_url = starknet.url(env)?; + + let chain_id = provider.chain_id().await?; + let chain_id = parse_cairo_short_string(&chain_id) + .with_context(|| "Cannot parse chain_id as string")?; + + let mut account = account.account(provider, env).await?; + account.set_block_id(BlockId::Tag(BlockTag::Pending)); + + let address = account.address(); + + ui.print(format!("\nMigration account: {address:#x}")); + if let Some(name) = name { + ui.print(format!("\nWorld name: {name}\n")); + } + + match account.provider().get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await { + Ok(_) => Ok((account, chain_id, rpc_url)), + Err(ProviderError::StarknetError(StarknetError::ContractNotFound)) => { + Err(anyhow!("Account with address {:#x} doesn't exist.", account.address())) + } + Err(e) => Err(e.into()), + } + } + .with_context(|| "Problem initializing account for migration.")?; + + Ok((world_address, account, chain_id, rpc_url.to_string())) +} diff --git a/bin/sozo/src/commands/mod.rs b/bin/sozo/src/commands/mod.rs index 
d0f563bce4..b7b2d53b64 100644 --- a/bin/sozo/src/commands/mod.rs +++ b/bin/sozo/src/commands/mod.rs @@ -1,8 +1,7 @@ use anyhow::Result; +use clap::{command, Subcommand}; use scarb::core::Config; -use crate::args::Commands; - pub(crate) mod auth; pub(crate) mod build; pub(crate) mod call; @@ -18,6 +17,51 @@ pub(crate) mod options; pub(crate) mod register; pub(crate) mod test; +use auth::AuthArgs; +use build::BuildArgs; +use call::CallArgs; +use clean::CleanArgs; +use completions::CompletionsArgs; +use dev::DevArgs; +use events::EventsArgs; +use execute::ExecuteArgs; +use init::InitArgs; +use migrate::MigrateArgs; +use model::ModelArgs; +use register::RegisterArgs; +use test::TestArgs; + +#[derive(Subcommand)] +pub enum Commands { + #[command(about = "Build the world, generating the necessary artifacts for deployment")] + Build(BuildArgs), + #[command(about = "Initialize a new project")] + Init(InitArgs), + #[command(about = "Remove generated artifacts, manifests and abis")] + Clean(CleanArgs), + #[command(about = "Run a migration, declaring and deploying contracts as necessary to \ + update the world")] + Migrate(Box), + #[command(about = "Developer mode: watcher for building and migration")] + Dev(DevArgs), + #[command(about = "Test the project's smart contracts")] + Test(TestArgs), + #[command(about = "Execute a world's system")] + Execute(ExecuteArgs), + #[command(about = "Call a world's system")] + Call(CallArgs), + #[command(about = "Interact with a worlds models")] + Model(ModelArgs), + #[command(about = "Register new models")] + Register(RegisterArgs), + #[command(about = "Queries world events")] + Events(EventsArgs), + #[command(about = "Manage world authorization")] + Auth(AuthArgs), + #[command(about = "Generate shell completion file for specified shell")] + Completions(CompletionsArgs), +} + pub fn run(command: Commands, config: &Config) -> Result<()> { match command { Commands::Init(args) => args.run(config), diff --git a/bin/sozo/src/commands/model.rs 
b/bin/sozo/src/commands/model.rs index 1c69bbe3fe..e5e0aae244 100644 --- a/bin/sozo/src/commands/model.rs +++ b/bin/sozo/src/commands/model.rs @@ -11,11 +11,11 @@ use crate::utils; #[derive(Debug, Args)] pub struct ModelArgs { #[command(subcommand)] - command: ModelCommands, + command: ModelCommand, } #[derive(Debug, Subcommand)] -pub enum ModelCommands { +pub enum ModelCommand { #[command(about = "Retrieve the class hash of a model")] ClassHash { #[arg(help = "The name of the model")] @@ -80,22 +80,22 @@ impl ModelArgs { config.tokio_handle().block_on(async { match self.command { - ModelCommands::ClassHash { name, starknet, world } => { + ModelCommand::ClassHash { name, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_class_hash(name, world_address, provider).await } - ModelCommands::ContractAddress { name, starknet, world } => { + ModelCommand::ContractAddress { name, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_contract_address(name, world_address, provider).await } - ModelCommands::Schema { name, to_json, starknet, world } => { + ModelCommand::Schema { name, to_json, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_schema(name, world_address, provider, to_json).await } - ModelCommands::Get { name, keys, starknet, world } => { + ModelCommand::Get { name, keys, starknet, world } => { let world_address = world.address(env_metadata.as_ref()).unwrap(); let provider = starknet.provider(env_metadata.as_ref()).unwrap(); model::model_get(name, keys, world_address, provider).await diff --git a/bin/sozo/src/commands/options/account.rs b/bin/sozo/src/commands/options/account.rs index 1538e8f106..bf05c5ec11 100644 
--- a/bin/sozo/src/commands/options/account.rs +++ b/bin/sozo/src/commands/options/account.rs @@ -13,13 +13,13 @@ use super::{ DOJO_PRIVATE_KEY_ENV_VAR, }; -#[derive(Debug, Args)] -#[command(next_help_heading = "Account options")] // INVARIANT: // - For commandline: we can either specify `private_key` or `keystore_path` along with // `keystore_password`. This is enforced by Clap. // - For `Scarb.toml`: if both private_key and keystore are specified in `Scarb.toml` private_key // will take priority +#[derive(Debug, Args)] +#[command(next_help_heading = "Account options")] pub struct AccountOptions { #[arg(long, env = DOJO_ACCOUNT_ADDRESS_ENV_VAR)] pub account_address: Option, diff --git a/bin/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs index 7901c6ca6a..a54076cd44 100644 --- a/bin/sozo/src/commands/options/transaction.rs +++ b/bin/sozo/src/commands/options/transaction.rs @@ -1,7 +1,7 @@ use clap::Args; use dojo_world::migration::TxConfig; -#[derive(Debug, Args, Clone)] +#[derive(Debug, Args)] #[command(next_help_heading = "Transaction options")] pub struct TransactionOptions { #[arg(long)] diff --git a/bin/sozo/src/commands/test.rs b/bin/sozo/src/commands/test.rs index d64dde856d..9ca645c72c 100644 --- a/bin/sozo/src/commands/test.rs +++ b/bin/sozo/src/commands/test.rs @@ -21,7 +21,7 @@ use tracing::trace; pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::test"; /// Execute all unit tests of a local package. -#[derive(Args, Clone)] +#[derive(Debug, Args)] pub struct TestArgs { /// The filter for the tests, running only tests containing the filter string. 
#[arg(short, long, default_value_t = String::default())] diff --git a/bin/sozo/src/main.rs b/bin/sozo/src/main.rs index 060a9d0cf9..18da01da73 100644 --- a/bin/sozo/src/main.rs +++ b/bin/sozo/src/main.rs @@ -1,17 +1,16 @@ use std::env; use std::process::exit; -use std::str::FromStr; use anyhow::Result; -use args::{Commands, SozoArgs}; -use camino::Utf8PathBuf; +use args::SozoArgs; use clap::Parser; use dojo_lang::compiler::DojoCompiler; use dojo_lang::plugin::CairoPluginRepository; use scarb::compiler::CompilerRepository; -use scarb::core::{Config, TomlManifest}; +use scarb::core::Config; use scarb_ui::{OutputFormat, Ui}; -use semver::Version; + +use crate::commands::Commands; mod args; mod commands; @@ -41,7 +40,7 @@ fn cli_main(args: SozoArgs) -> Result<()> { let manifest_path = scarb::ops::find_manifest_path(args.manifest_path.as_deref())?; - verify_cairo_version_compatibility(&manifest_path)?; + utils::verify_cairo_version_compatibility(&manifest_path)?; let config = Config::builder(manifest_path) .log_filter_directive(env::var_os("SCARB_LOG")) @@ -54,26 +53,3 @@ fn cli_main(args: SozoArgs) -> Result<()> { commands::run(args.command, &config) } - -fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { - let scarb_cairo_version = scarb::version::get().cairo; - // When manifest file doesn't exists ignore it. 
Would be the case during `sozo init` - let Ok(manifest) = TomlManifest::read_from_path(manifest_path) else { return Ok(()) }; - - // For any kind of error, like package not specified, cairo version not specified return - // without an error - let Some(package) = manifest.package else { return Ok(()) }; - - let Some(cairo_version) = package.cairo_version else { return Ok(()) }; - - // only when cairo version is found in manifest file confirm that it matches - let version_req = cairo_version.as_defined().unwrap(); - let version = Version::from_str(scarb_cairo_version.version).unwrap(); - if !version_req.matches(&version) { - anyhow::bail!( - "Specified cairo version not supported by dojo. Please verify and update dojo." - ); - }; - - Ok(()) -} diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs index 7dbbfe28fd..d277c8d910 100644 --- a/bin/sozo/src/utils.rs +++ b/bin/sozo/src/utils.rs @@ -1,8 +1,12 @@ -use anyhow::Error; +use std::str::FromStr; + +use anyhow::{Error, Result}; +use camino::Utf8PathBuf; use dojo_world::contracts::world::WorldContract; use dojo_world::contracts::WorldContractReader; use dojo_world::metadata::{dojo_metadata_from_workspace, Environment}; -use scarb::core::Config; +use scarb::core::{Config, TomlManifest}; +use semver::Version; use starknet::accounts::SingleOwnerAccount; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; @@ -79,3 +83,39 @@ pub async fn world_reader_from_env_metadata( Ok(WorldContractReader::new(world_address, provider)) } + +pub fn verify_cairo_version_compatibility(manifest_path: &Utf8PathBuf) -> Result<()> { + let scarb_cairo_version = scarb::version::get().cairo; + // When manifest file doesn't exists ignore it. 
Would be the case during `sozo init` + let Ok(manifest) = TomlManifest::read_from_path(manifest_path) else { return Ok(()) }; + + // For any kind of error, like package not specified, cairo version not specified return + // without an error + let Some(package) = manifest.package else { return Ok(()) }; + + let Some(cairo_version) = package.cairo_version else { return Ok(()) }; + + // only when cairo version is found in manifest file confirm that it matches + let version_req = cairo_version.as_defined().unwrap(); + let version = Version::from_str(scarb_cairo_version.version).unwrap(); + if !version_req.matches(&version) { + anyhow::bail!( + "Specified cairo version not supported by dojo. Please verify and update dojo." + ); + }; + + Ok(()) +} + +pub fn generate_version() -> String { + const DOJO_VERSION: &str = env!("CARGO_PKG_VERSION"); + let scarb_version = scarb::version::get().version; + let scarb_sierra_version = scarb::version::get().sierra.version; + let scarb_cairo_version = scarb::version::get().cairo.version; + + let version_string = format!( + "{}\nscarb: {}\ncairo: {}\nsierra: {}", + DOJO_VERSION, scarb_version, scarb_cairo_version, scarb_sierra_version, + ); + version_string +} diff --git a/crates/benches/src/deployer.rs b/crates/benches/src/deployer.rs index 663d54e73a..abb0251c07 100644 --- a/crates/benches/src/deployer.rs +++ b/crates/benches/src/deployer.rs @@ -10,7 +10,8 @@ use futures::executor::block_on; use katana_runner::KatanaRunner; use scarb::compiler::CompilerRepository; use scarb::core::Config; -use sozo::args::{Commands, SozoArgs}; +use sozo::args::SozoArgs; +use sozo::commands::Commands; use starknet::core::types::FieldElement; use tokio::process::Command; From 0227d3759961acf0c71ed4bd077c53ad9e5d454d Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 15 Apr 2024 05:41:39 +0200 Subject: [PATCH 8/9] Derive `Default` for `GenesisJson` (#1830) derive Default for GenesisJson --- 
crates/katana/primitives/src/genesis/json.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/katana/primitives/src/genesis/json.rs b/crates/katana/primitives/src/genesis/json.rs index 5b62c72035..84a97e101e 100644 --- a/crates/katana/primitives/src/genesis/json.rs +++ b/crates/katana/primitives/src/genesis/json.rs @@ -99,7 +99,7 @@ pub struct GenesisClassJson { pub class_hash: Option, } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] #[serde(rename_all = "camelCase")] pub struct FeeTokenConfigJson { pub name: String, @@ -199,7 +199,7 @@ pub enum GenesisJsonError { /// (eg, using `serde_json`). /// /// The path of the class artifact are computed **relative** to the JSON file. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] #[serde(rename_all = "camelCase")] pub struct GenesisJson { pub parent_hash: BlockHash, From 017c0d14286512ec195e6b1f0cfff2cd18d4385b Mon Sep 17 00:00:00 2001 From: RareSecond Date: Mon, 15 Apr 2024 07:19:08 +0200 Subject: [PATCH 9/9] Add new TypeScript bindgen (#1783) * feat: added new bindgen with all types and utility functions * feat: removed bunch of extracted functions and extracted models to function * refactor: moved generating to main function to intertwine models and contracts * feat: generated system calls * cleanup: removed old contract generation code * feat: finished bindgen * refactor: renamed TypescriptNew to TypeScriptV2 * Updated flag naming to typescript_v2 * Fixed some build and layout issues * Fixed empty line in models * Renamed modules as well * Update crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs Co-authored-by: glihm * Fixed formatting * Updated return values * Added query function to class * Added test for the generated file * Removed full file test for now until DojoData is sorted * Added comment * fix: move 
mocks to test_data --------- Co-authored-by: glihm --- bin/sozo/src/commands/build.rs | 16 +- crates/dojo-bindgen/src/lib.rs | 2 + crates/dojo-bindgen/src/plugins/mod.rs | 3 + .../src/plugins/typescript_v2/mod.rs | 622 ++++++++++++++++++ .../src/test_data/mocks/dojo_examples.ts | 297 +++++++++ 5 files changed, 938 insertions(+), 2 deletions(-) create mode 100644 crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs create mode 100644 crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs index f3347fd449..f56ba04e51 100644 --- a/bin/sozo/src/commands/build.rs +++ b/bin/sozo/src/commands/build.rs @@ -11,6 +11,10 @@ pub struct BuildArgs { #[arg(help = "Generate Typescript bindings.")] pub typescript: bool, + #[arg(long)] + #[arg(help = "Generate Typescript bindings.")] + pub typescript_v2: bool, + #[arg(long)] #[arg(help = "Generate Unity bindings.")] pub unity: bool, @@ -32,6 +36,10 @@ impl BuildArgs { builtin_plugins.push(BuiltinPlugins::Typescript); } + if self.typescript_v2 { + builtin_plugins.push(BuiltinPlugins::TypeScriptV2); + } + if self.unity { builtin_plugins.push(BuiltinPlugins::Unity); } @@ -67,8 +75,12 @@ mod tests { fn build_example_with_typescript_and_unity_bindings() { let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); - let build_args = - BuildArgs { bindings_output: "generated".to_string(), typescript: true, unity: true }; + let build_args = BuildArgs { + bindings_output: "generated".to_string(), + typescript: true, + unity: true, + typescript_v2: true, + }; let result = build_args.run(&config); assert!(result.is_ok()); } diff --git a/crates/dojo-bindgen/src/lib.rs b/crates/dojo-bindgen/src/lib.rs index 627dc1961c..ca8e24cd79 100644 --- a/crates/dojo-bindgen/src/lib.rs +++ b/crates/dojo-bindgen/src/lib.rs @@ -12,6 +12,7 @@ use error::{BindgenResult, Error}; mod plugins; use plugins::typescript::TypescriptPlugin; +use 
plugins::typescript_v2::TypeScriptV2Plugin; use plugins::unity::UnityPlugin; use plugins::BuiltinPlugin; pub use plugins::BuiltinPlugins; @@ -85,6 +86,7 @@ impl PluginManager { let builder: Box = match plugin { BuiltinPlugins::Typescript => Box::new(TypescriptPlugin::new()), BuiltinPlugins::Unity => Box::new(UnityPlugin::new()), + BuiltinPlugins::TypeScriptV2 => Box::new(TypeScriptV2Plugin::new()), }; let files = builder.generate_code(&data).await?; diff --git a/crates/dojo-bindgen/src/plugins/mod.rs b/crates/dojo-bindgen/src/plugins/mod.rs index ab6abbcb8b..b603262e44 100644 --- a/crates/dojo-bindgen/src/plugins/mod.rs +++ b/crates/dojo-bindgen/src/plugins/mod.rs @@ -8,12 +8,14 @@ use crate::error::BindgenResult; use crate::DojoData; pub mod typescript; +pub mod typescript_v2; pub mod unity; #[derive(Debug)] pub enum BuiltinPlugins { Typescript, Unity, + TypeScriptV2, } impl fmt::Display for BuiltinPlugins { @@ -21,6 +23,7 @@ impl fmt::Display for BuiltinPlugins { match self { BuiltinPlugins::Typescript => write!(f, "typescript"), BuiltinPlugins::Unity => write!(f, "unity"), + BuiltinPlugins::TypeScriptV2 => write!(f, "typescript_v2"), } } } diff --git a/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs b/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs new file mode 100644 index 0000000000..b23549aa89 --- /dev/null +++ b/crates/dojo-bindgen/src/plugins/typescript_v2/mod.rs @@ -0,0 +1,622 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use async_trait::async_trait; +use cainome::parser::tokens::{Composite, CompositeType, Function}; +use convert_case::Casing; + +use crate::error::BindgenResult; +use crate::plugins::BuiltinPlugin; +use crate::{DojoContract, DojoData, DojoModel}; + +pub struct TypeScriptV2Plugin {} + +impl TypeScriptV2Plugin { + pub fn new() -> Self { + Self {} + } + + // Maps cairo types to TypeScript defined types + fn map_type(type_name: &str) -> String { + match type_name { + "bool" => "boolean".to_string(), + "u8" 
=> "number".to_string(), + "u16" => "number".to_string(), + "u32" => "number".to_string(), + "u64" => "bigint".to_string(), + "u128" => "bigint".to_string(), + "u256" => "bigint".to_string(), + "usize" => "number".to_string(), + "felt252" => "string".to_string(), + "ClassHash" => "string".to_string(), + "ContractAddress" => "string".to_string(), + + _ => type_name.to_string(), + } + } + + fn generate_header() -> String { + format!( + "// Generated by dojo-bindgen on {}. Do not modify this file manually.\n", + chrono::Utc::now().to_rfc2822() + ) + } + + fn generate_imports() -> String { + "import { Account } from \"starknet\"; +import { + Clause, + Client, + ModelClause, + createClient, + valueToToriiValueAndOperator, +} from \"@dojoengine/torii-client\"; +import { LOCAL_KATANA, createManifestFromJson } from \"@dojoengine/core\";" + .to_string() + } + + fn generate_query_types(models: &[&DojoModel]) -> String { + let mut query_fields = Vec::new(); + let mut result_mapping = Vec::new(); + + for model in models { + query_fields + .push(format!("{model_name}: ModelClause<{model_name}>;", model_name = model.name)); + + result_mapping.push(format!("{model_name}: {model_name};", model_name = model.name)); + } + + format!( + "type Query = Partial<{{ + {query_fields} +}}>; + +type ResultMapping = {{ + {result_mapping} +}}; + +type QueryResult = {{ + [K in keyof T]: K extends keyof ResultMapping ? ResultMapping[K] : never; +}}; + +// Only supports a single model for now, since torii doesn't support multiple models +// And inside that single model, there's only support for a single query. 
+function convertQueryToToriiClause(query: Query): Clause | undefined {{ + const [model, clause] = Object.entries(query)[0]; + + if (Object.keys(clause).length === 0) {{ + return undefined; + }} + + const clauses: Clause[] = Object.entries(clause).map(([key, value]) => {{ + return {{ + Member: {{ + model, + member: key, + ...valueToToriiValueAndOperator(value), + }}, + }} satisfies Clause; + }}); + + return clauses[0]; +}}", + query_fields = query_fields.join("\n "), + result_mapping = result_mapping.join("\n "), + ) + } + + fn generate_model_types(models: &[&DojoModel], handled_tokens: &mut Vec) -> String { + let mut out = String::new(); + + for model in models { + let tokens = &model.tokens; + + for token in &tokens.enums { + handled_tokens.push(token.to_composite().unwrap().to_owned()); + } + for token in &tokens.structs { + handled_tokens.push(token.to_composite().unwrap().to_owned()); + } + + let mut structs = tokens.structs.to_owned(); + structs.sort_by(|a, b| { + if a.to_composite() + .unwrap() + .inners + .iter() + .any(|field| field.token.type_name() == b.type_name()) + { + std::cmp::Ordering::Greater + } else { + std::cmp::Ordering::Less + } + }); + + for token in &structs { + out += TypeScriptV2Plugin::format_struct( + token.to_composite().unwrap(), + handled_tokens, + ) + .as_str(); + } + + for token in &tokens.enums { + out += TypeScriptV2Plugin::format_enum(token.to_composite().unwrap()).as_str(); + } + + out += "\n"; + } + + out + } + + fn generate_base_calls_class() -> String { + "class BaseCalls { + contractAddress: string; + account?: Account; + + constructor(contractAddress: string, account?: Account) { + this.account = account; + this.contractAddress = contractAddress; + } + + async execute(entrypoint: string, calldata: any[] = []): Promise { + if (!this.account) { + throw new Error(\"No account set to interact with dojo_starter\"); + } + + await this.account.execute( + { + contractAddress: this.contractAddress, + entrypoint, + calldata, + }, + 
undefined, + { + maxFee: 0, + } + ); + } +} +" + .to_string() + } + + fn generate_contracts(contracts: &[&DojoContract], handled_tokens: &[Composite]) -> String { + let mut out = String::new(); + + for contract in contracts { + let systems = contract + .systems + .iter() + .map(|system| { + TypeScriptV2Plugin::format_system(system.to_function().unwrap(), handled_tokens) + }) + .collect::>() + .join("\n\n "); + + out += &format!( + "class {}Calls extends BaseCalls {{ + constructor(contractAddress: string, account?: Account) {{ + super(contractAddress, account); + }} + + {} +}} +", + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal), + systems, + ); + } + + out + } + + fn generate_initial_params(contracts: &[&DojoContract]) -> String { + let system_addresses = contracts + .iter() + .map(|contract| { + format!( + "{}Address: string;", + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel) + ) + }) + .collect::>() + .join("\n "); + + format!( + "type InitialParams = GeneralParams & + ( + | {{ + rpcUrl: string; + worldAddress: string; + {system_addresses} + }} + | {{ + manifest: any; + }} + );" + ) + } + + fn generate_world_class(world_name: &String, contracts: &[&DojoContract]) -> String { + let mut out = String::new(); + + out += "type GeneralParams = { + toriiUrl: string; + relayUrl: string; + account?: Account; +};"; + + out += "\n\n"; + + out += TypeScriptV2Plugin::generate_initial_params(contracts).as_str(); + + out += "\n\n"; + + let system_properties = contracts + .iter() + .map(|contract| { + format!( + "{camel_case_name}: {pascal_case_name}Calls; + {camel_case_name}Address: string;", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + pascal_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal) + ) + 
}) + .collect::>() + .join("\n "); + + let system_address_initializations = contracts + .iter() + .map(|contract| { + format!( + "const {contract_name}Address = config.contracts.find( + (contract) => + contract.name === \"dojo_starter::systems::{contract_name}::{contract_name}\" + )?.address; + + if (!{contract_name}Address) {{ + throw new Error(\"No {contract_name} contract found in the manifest\"); + }} + + this.{contract_name}Address = {contract_name}Address;", + contract_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel) + ) + }) + .collect::>() + .join("\n "); + + let system_address_initializations_from_params = contracts + .iter() + .map(|contract| { + format!( + "this.{camel_case_name}Address = params.{camel_case_name}Address;", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + ) + }) + .collect::>() + .join("\n "); + + let system_initializations = contracts + .iter() + .map(|contract| { + format!( + "this.{camel_case_name} = new \ + {pascal_case_name}Calls(this.{camel_case_name}Address, this._account);", + camel_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Camel), + pascal_case_name = + TypeScriptV2Plugin::formatted_contract_name(&contract.qualified_path) + .to_case(convert_case::Case::Pascal) + ) + }) + .collect::>() + .join("\n "); + + let formatted_world_name = world_name.to_case(convert_case::Case::Pascal); + + out += &format!( + "export class {formatted_world_name} {{ + rpcUrl: string; + toriiUrl: string; + toriiPromise: Promise; + relayUrl: string; + worldAddress: string; + private _account?: Account; + {system_properties} + + constructor(params: InitialParams) {{ + this.rpcUrl = LOCAL_KATANA; + if (\"manifest\" in params) {{ + const config = createManifestFromJson(params.manifest); + this.worldAddress = config.world.address; + + 
{system_address_initializations} + }} else {{ + this.rpcUrl = params.rpcUrl; + this.worldAddress = params.worldAddress; + {system_address_initializations_from_params} + }} + this.toriiUrl = params.toriiUrl; + this.relayUrl = params.relayUrl; + this._account = params.account; + {system_initializations} + + this.toriiPromise = createClient([], {{ + rpcUrl: this.rpcUrl, + toriiUrl: this.toriiUrl, + worldAddress: this.worldAddress, + relayUrl: this.relayUrl, + }}); + }} + + get account(): Account | undefined {{ + return this._account; + }} + + set account(account: Account) {{ + this._account = account; + {system_initializations} + }} + + async query(query: T, limit = 10, offset = 0) {{ + const torii = await this.toriiPromise; + + return {{ + torii, + findEntities: async () => this.findEntities(query, limit, offset), + }}; + }} + + async findEntities(query: T, limit = 10, offset = 0) {{ + const torii = await this.toriiPromise; + + const clause = convertQueryToToriiClause(query); + + const toriiResult = await torii.getEntities({{ + limit, + offset, + clause, + }}); + + return toriiResult as Record>; + }} + + async findEntity(query: T) {{ + const result = await this.findEntities(query, 1); + + if (Object.values(result).length === 0) {{ + return undefined; + }} + + return Object.values(result)[0] as QueryResult; + }} +}}" + ); + + out + } + + // Token should be a struct + // This will be formatted into a TypeScript interface + // using TypeScript defined types + fn format_struct(token: &Composite, handled_tokens: &[Composite]) -> String { + let mut native_fields: Vec = Vec::new(); + + for field in &token.inners { + let mapped = TypeScriptV2Plugin::map_type(field.token.type_name().as_str()); + if mapped == field.token.type_name() { + let token = handled_tokens + .iter() + .find(|t| t.type_name() == field.token.type_name()) + .unwrap_or_else(|| panic!("Token not found: {}", field.token.type_name())); + if token.r#type == CompositeType::Enum { + 
native_fields.push(format!("{}: {};", field.name, mapped)); + } else { + native_fields.push(format!("{}: {};", field.name, field.token.type_name())); + } + } else { + native_fields.push(format!("{}: {};", field.name, mapped)); + } + } + + format!( + " +// Type definition for `{path}` struct +export interface {name} {{ + {native_fields} +}} +", + path = token.type_path, + name = token.type_name(), + native_fields = native_fields.join("\n ") + ) + } + + // Token should be an enum + // This will be formatted into a C# enum + // Enum is mapped using index of cairo enum + fn format_enum(token: &Composite) -> String { + let fields = token + .inners + .iter() + .map(|field| format!("{},", field.name,)) + .collect::>() + .join("\n "); + + format!( + " +// Type definition for `{}` enum +export enum {} {{ + {} +}} +", + token.type_path, + token.type_name(), + fields + ) + } + + // Formats a system into a JS method used by the contract class + // Handled tokens should be a list of all structs and enums used by the contract + // Such as a set of referenced tokens from a model + fn format_system(system: &Function, handled_tokens: &[Composite]) -> String { + let args = system + .inputs + .iter() + .map(|arg| { + format!( + "{}: {}", + arg.0, + if TypeScriptV2Plugin::map_type(&arg.1.type_name()) == arg.1.type_name() { + arg.1.type_name() + } else { + TypeScriptV2Plugin::map_type(&arg.1.type_name()) + } + ) + }) + .collect::>() + .join(", "); + + let calldata = system + .inputs + .iter() + .map(|arg| { + let token = &arg.1; + let type_name = &arg.0; + + match handled_tokens.iter().find(|t| t.type_name() == token.type_name()) { + Some(t) => { + // Need to flatten the struct members. 
+ match t.r#type { + CompositeType::Struct => t + .inners + .iter() + .map(|field| format!("props.{}.{}", type_name, field.name)) + .collect::>() + .join(",\n "), + _ => type_name.to_string(), + } + } + None => type_name.to_string(), + } + }) + .collect::>() + .join(",\n "); + + format!( + "async {pretty_system_name}({args}): Promise {{ + try {{ + await this.execute(\"{system_name}\", [{calldata}]) + }} catch (error) {{ + console.error(\"Error executing {pretty_system_name}:\", error); + throw error; + }} + }}", + pretty_system_name = system.name.to_case(convert_case::Case::Camel), + // formatted args to use our mapped types + args = args, + system_name = system.name, + // calldata for execute + calldata = calldata + ) + } + + // Formats a contract file path into a pretty contract name + // eg. dojo_examples::actions::actions.json -> Actions + fn formatted_contract_name(contract_file_name: &str) -> String { + let contract_name = + contract_file_name.split("::").last().unwrap().trim_end_matches(".json"); + contract_name.to_string() + } + + fn generate_code_content(data: &DojoData) -> String { + let mut handled_tokens = Vec::::new(); + let models = data.models.values().collect::>(); + let contracts = data.contracts.values().collect::>(); + + let mut code = String::new(); + code += TypeScriptV2Plugin::generate_header().as_str(); + code += TypeScriptV2Plugin::generate_imports().as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_model_types(models.as_slice(), &mut handled_tokens) + .as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_base_calls_class().as_str(); + code += "\n"; + code += + TypeScriptV2Plugin::generate_contracts(contracts.as_slice(), &handled_tokens).as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_query_types(models.as_slice()).as_str(); + code += "\n"; + code += TypeScriptV2Plugin::generate_world_class(&data.world.name, contracts.as_slice()) + .as_str(); + + code + } +} + +#[async_trait] +impl BuiltinPlugin 
for TypeScriptV2Plugin { + async fn generate_code(&self, data: &DojoData) -> BindgenResult>> { + let code: String = TypeScriptV2Plugin::generate_code_content(data); + + let mut out: HashMap> = HashMap::new(); + let output_path = Path::new(&format!("{}.ts", data.world.name)).to_owned(); + + out.insert(output_path, code.as_bytes().to_vec()); + + Ok(out) + } +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::io::Read; + + use camino::Utf8PathBuf; + + use super::*; + use crate::gather_dojo_data; + + #[test] + fn test_output() { + let mut expected_output = String::new(); + let mut file = + fs::File::open("src/test_data/mocks/dojo_examples.ts").expect("file not found"); + file.read_to_string(&mut expected_output).expect("error reading file"); + + let expected_output_without_header = + expected_output.lines().skip(1).collect::>().join("\n"); + + let data = gather_dojo_data( + &Utf8PathBuf::from("src/test_data/spawn-and-move/Scarb.toml"), + "dojo_examples", + "dev", + ) + .unwrap(); + + let actual_output = TypeScriptV2Plugin::generate_code_content(&data); + let actual_output_without_header = + actual_output.lines().skip(1).collect::>().join("\n"); + + // This test currently is very naive, but DojoData is unsorted, so the output + // can change between tests. This is a temporary solution until we have a better + // way to test this. + assert_eq!(actual_output_without_header.len(), 7479); + assert_eq!(expected_output_without_header.len(), 7479); + } +} diff --git a/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts b/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts new file mode 100644 index 0000000000..49e3805bf7 --- /dev/null +++ b/crates/dojo-bindgen/src/test_data/mocks/dojo_examples.ts @@ -0,0 +1,297 @@ +// Generated by dojo-bindgen on Fri, 12 Apr 2024 13:23:24 +0000. Do not modify this file manually. 
+import { Account } from "starknet"; +import { + Clause, + Client, + ModelClause, + createClient, + valueToToriiValueAndOperator, +} from "@dojoengine/torii-client"; +import { LOCAL_KATANA, createManifestFromJson } from "@dojoengine/core"; + +// Type definition for `dojo_examples::actions::actions::Moved` struct +export interface Moved { + player: string; + direction: Direction; +} + +// Type definition for `dojo_examples::models::Direction` enum +export enum Direction { + None, + Left, + Right, + Up, + Down, +} + + +// Type definition for `dojo_examples::models::Vec2` struct +export interface Vec2 { + x: number; + y: number; +} + +// Type definition for `dojo_examples::models::Position` struct +export interface Position { + player: string; + vec: Vec2; +} + + +// Type definition for `dojo_examples::models::Moves` struct +export interface Moves { + player: string; + remaining: number; + last_direction: Direction; +} + +// Type definition for `dojo_examples::models::Direction` enum +export enum Direction { + None, + Left, + Right, + Up, + Down, +} + + +// Type definition for `dojo_examples::models::EmoteMessage` struct +export interface EmoteMessage { + identity: string; + emote: Emote; +} + +// Type definition for `dojo_examples::models::Emote` enum +export enum Emote { + None, + Happy, + Sad, + Angry, + Love, +} + + +class BaseCalls { + contractAddress: string; + account?: Account; + + constructor(contractAddress: string, account?: Account) { + this.account = account; + this.contractAddress = contractAddress; + } + + async execute(entrypoint: string, calldata: any[] = []): Promise { + if (!this.account) { + throw new Error("No account set to interact with dojo_starter"); + } + + await this.account.execute( + { + contractAddress: this.contractAddress, + entrypoint, + calldata, + }, + undefined, + { + maxFee: 0, + } + ); + } +} + +class ActionsCalls extends BaseCalls { + constructor(contractAddress: string, account?: Account) { + super(contractAddress, account); + } 
+ + async tileTerrain(vec: Vec2): Promise { + try { + await this.execute("tile_terrain", [props.vec.x, + props.vec.y]) + } catch (error) { + console.error("Error executing tileTerrain:", error); + throw error; + } + } + + async quadrant(pos: Position): Promise { + try { + await this.execute("quadrant", [props.pos.player, + props.pos.vec]) + } catch (error) { + console.error("Error executing quadrant:", error); + throw error; + } + } + + async dojoResource(): Promise { + try { + await this.execute("dojo_resource", []) + } catch (error) { + console.error("Error executing dojoResource:", error); + throw error; + } + } + + async spawn(): Promise { + try { + await this.execute("spawn", []) + } catch (error) { + console.error("Error executing spawn:", error); + throw error; + } + } + + async move(direction: Direction): Promise { + try { + await this.execute("move", [direction]) + } catch (error) { + console.error("Error executing move:", error); + throw error; + } + } +} + +type Query = Partial<{ + Moved: ModelClause; + Position: ModelClause; + Moves: ModelClause; + EmoteMessage: ModelClause; +}>; + +type ResultMapping = { + Moved: Moved; + Position: Position; + Moves: Moves; + EmoteMessage: EmoteMessage; +}; + +type QueryResult = { + [K in keyof T]: K extends keyof ResultMapping ? ResultMapping[K] : never; +}; + +// Only supports a single model for now, since torii doesn't support multiple models +// And inside that single model, there's only support for a single query. 
+function convertQueryToToriiClause(query: Query): Clause | undefined { + const [model, clause] = Object.entries(query)[0]; + + if (Object.keys(clause).length === 0) { + return undefined; + } + + const clauses: Clause[] = Object.entries(clause).map(([key, value]) => { + return { + Member: { + model, + member: key, + ...valueToToriiValueAndOperator(value), + }, + } satisfies Clause; + }); + + return clauses[0]; +} +type GeneralParams = { + toriiUrl: string; + relayUrl: string; + account?: Account; +}; + +type InitialParams = GeneralParams & + ( + | { + rpcUrl: string; + worldAddress: string; + actionsAddress: string; + } + | { + manifest: any; + } + ); + +export class DojoExamples { + rpcUrl: string; + toriiUrl: string; + toriiPromise: Promise; + relayUrl: string; + worldAddress: string; + private _account?: Account; + actions: ActionsCalls; + actionsAddress: string; + + constructor(params: InitialParams) { + this.rpcUrl = LOCAL_KATANA; + if ("manifest" in params) { + const config = createManifestFromJson(params.manifest); + this.worldAddress = config.world.address; + + const actionsAddress = config.contracts.find( + (contract) => + contract.name === "dojo_starter::systems::actions::actions" + )?.address; + + if (!actionsAddress) { + throw new Error("No actions contract found in the manifest"); + } + + this.actionsAddress = actionsAddress; + } else { + this.rpcUrl = params.rpcUrl; + this.worldAddress = params.worldAddress; + this.actionsAddress = params.actionsAddress; + } + this.toriiUrl = params.toriiUrl; + this.relayUrl = params.relayUrl; + this._account = params.account; + this.actions = new ActionsCalls(this.actionsAddress, this._account); + + this.toriiPromise = createClient([], { + rpcUrl: this.rpcUrl, + toriiUrl: this.toriiUrl, + worldAddress: this.worldAddress, + relayUrl: this.relayUrl, + }); + } + + get account(): Account | undefined { + return this._account; + } + + set account(account: Account) { + this._account = account; + this.actions = new 
ActionsCalls(this.actionsAddress, this._account); + } + + async query(query: T, limit = 10, offset = 0) { + const torii = await this.toriiPromise; + + return { + torii, + findEntities: async () => this.findEntities(query, limit, offset), + }; + } + + async findEntities(query: T, limit = 10, offset = 0) { + const torii = await this.toriiPromise; + + const clause = convertQueryToToriiClause(query); + + const toriiResult = await torii.getEntities({ + limit, + offset, + clause, + }); + + return toriiResult as Record>; + } + + async findEntity(query: T) { + const result = await this.findEntities(query, 1); + + if (Object.values(result).length === 0) { + return undefined; + } + + return Object.values(result)[0] as QueryResult; + } +} \ No newline at end of file