From 5eb8cdc7e6166e4df3593e762d2abb20de0c6486 Mon Sep 17 00:00:00 2001 From: dorimedini-starkware Date: Thu, 25 Jul 2024 18:13:08 +0300 Subject: [PATCH 01/12] fix: global file changes trigger specific package CIs (#77) Signed-off-by: Dori Medini --- scripts/run_tests.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 9bbe0ed2d1..619b6542fa 100755 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -19,6 +19,10 @@ DEPENDENCY_PATTERN = r"([a-zA-Z0-9_]+) [^(]* \(([^)]+)\)" +# Set of files which - if changed - should trigger tests for all packages. +ALL_TEST_TRIGGERS: Set[str] = {"Cargo.toml", "Cargo.lock"} + + def get_workspace_tree() -> Dict[str, str]: tree = dict() res = subprocess.check_output("cargo tree --depth 0".split()).decode("utf-8").splitlines() @@ -65,6 +69,12 @@ def get_package_dependencies(package_name: str) -> Set[str]: return deps +def packages_to_test_due_to_global_changes(files: List[str]) -> Set[str]: + if len(set(files).intersection(ALL_TEST_TRIGGERS)) > 0: + return set(get_workspace_tree().keys()) + return set() + + def run_test(changes_only: bool, commit_id: Optional[str], concurrency: bool): local_changes = get_local_changes(".", commit_id=commit_id) modified_packages = get_modified_packages(local_changes) @@ -73,17 +83,26 @@ def run_test(changes_only: bool, commit_id: Optional[str], concurrency: bool): if changes_only: for p in modified_packages: deps = get_package_dependencies(p) - print(f"Running tests for {deps}") tested_packages.update(deps) - if len(args) == 0: + print(f"Running tests for {tested_packages} (due to modifications in {modified_packages}).") + # Add global-triggered packages. + extra_packages = packages_to_test_due_to_global_changes(files=local_changes) + print(f"Running tests for global-triggered packages {extra_packages}") + tested_packages.update(extra_packages) + if len(tested_packages) == 0: print("No changes detected.") return for package in tested_packages: args.extend(["--package", package]) + # If tested_packages is empty (i.e. changes_only is False), all packages will be tested (no + # args). cmd = ["cargo", "test"] + args + # TODO: Less specific handling of active feature combinations in tests (which combos should be + # tested and which shouldn't?). + # If blockifier is to be tested, add the concurrency flag if requested. 
if concurrency and "blockifier" in tested_packages: cmd.extend(["--features", "concurrency"]) From 6c916db4172f8ccd415ad9250108917a6db3d7d0 Mon Sep 17 00:00:00 2001 From: dan-starkware <56217775+dan-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 11:58:29 +0300 Subject: [PATCH 02/12] chore(ci): allow warning during cargo doc (#127) --- .github/workflows/papyrus_ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/papyrus_ci.yml b/.github/workflows/papyrus_ci.yml index b3b8b1f7c6..5f7c6cf708 100644 --- a/.github/workflows/papyrus_ci.yml +++ b/.github/workflows/papyrus_ci.yml @@ -93,8 +93,8 @@ jobs: doc: runs-on: ubuntu-latest - env: - RUSTDOCFLAGS: "-D warnings" + # env: + # RUSTDOCFLAGS: "-D warnings" steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable From 3f13669e5402008e5cb746740aaeca6720b8b6bc Mon Sep 17 00:00:00 2001 From: dan-starkware <56217775+dan-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 12:12:51 +0300 Subject: [PATCH 03/12] chore: fix some clippy doc issues (#122) --- crates/sequencing/papyrus_consensus/src/state_machine.rs | 9 ++++----- crates/starknet_api/src/crypto/patricia_hash.rs | 6 +++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/crates/sequencing/papyrus_consensus/src/state_machine.rs b/crates/sequencing/papyrus_consensus/src/state_machine.rs index f0a706d54f..083ccc97c9 100644 --- a/crates/sequencing/papyrus_consensus/src/state_machine.rs +++ b/crates/sequencing/papyrus_consensus/src/state_machine.rs @@ -19,17 +19,16 @@ use crate::types::Round; pub enum StateMachineEvent { /// StartRound is effective 2 questions: /// 1. Is the local node the proposer for this round? - /// 2. If so, what value should be proposed? - /// While waiting for the response to this event, the state machine will buffer all other - /// events. + /// 2. If so, what value should be proposed? While waiting for the response to this event, the + /// state machine will buffer all other events. /// /// How should the caller handle this event? /// 1. If the local node is not the proposer, the caller responds with with `None` as the block /// hash. /// 2. If the local node is the proposer and a block hash was supplied by the state machine, - /// the caller responds with the supplied block hash. + /// the caller responds with the supplied block hash. /// 3. If the local node is the proposer and no block hash was supplied by the state machine, - /// the caller must find/build a block to respond with. + /// the caller must find/build a block to respond with. StartRound(Option, Round), /// Consensus message, can be both sent from and to the state machine. Proposal(BlockHash, Round), diff --git a/crates/starknet_api/src/crypto/patricia_hash.rs b/crates/starknet_api/src/crypto/patricia_hash.rs index 41acf4e22f..c5b28b457e 100644 --- a/crates/starknet_api/src/crypto/patricia_hash.rs +++ b/crates/starknet_api/src/crypto/patricia_hash.rs @@ -10,10 +10,10 @@ //! //! The edges coming out of an internal node with a key `K` are: //! - If there are input keys that start with 'K0...' and 'K1...', then two edges come out, marked -//! with '0' and '1' bits. +//! with '0' and '1' bits. //! - Otherwise, a single edge mark with 'Z' is coming out. 'Z' is the longest string, such that all -//! the input keys that start with 'K...' start with 'KZ...' as well. Note, the order of the input -//! keys in this implementation forces 'Z' to be a zeros string. +//! the input keys that start with 'K...' start with 'KZ...' 
as well. Note, the order of the input +//! keys in this implementation forces 'Z' to be a zeros string. //! //! Hash of a node depends on the number of edges coming out of it: //! - A leaf: The hash is the input value of its key. From a848403b2b31023c2728fd8ef991a24c84a4ab07 Mon Sep 17 00:00:00 2001 From: dan-starkware <56217775+dan-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 12:12:58 +0300 Subject: [PATCH 04/12] chore(ci): fix package selection (#123) --- .github/workflows/papyrus_ci.yml | 4 ++-- .github/workflows/papyrus_nightly-tests.yml | 12 ++++++++---- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/.github/workflows/papyrus_ci.yml b/.github/workflows/papyrus_ci.yml index 5f7c6cf708..4e2c23ff29 100644 --- a/.github/workflows/papyrus_ci.yml +++ b/.github/workflows/papyrus_ci.yml @@ -35,7 +35,7 @@ jobs: - name: Build node run: | mkdir data - cargo build -r + cargo build -r -p papyrus_node - name: Run executable run: > @@ -54,7 +54,7 @@ jobs: - name: Build node run: | mkdir data - cargo build -r --no-default-features + cargo build -r -p papyrus_node --no-default-features - name: Run executable run: > diff --git a/.github/workflows/papyrus_nightly-tests.yml b/.github/workflows/papyrus_nightly-tests.yml index 3ee41bb567..5c4a038b6f 100644 --- a/.github/workflows/papyrus_nightly-tests.yml +++ b/.github/workflows/papyrus_nightly-tests.yml @@ -45,7 +45,7 @@ jobs: - run: brew install protobuf@$PROTOC_VERSION - name: Build node - run: cargo build -r + run: cargo build -r -p papyrus_node - name: Run executable run: > @@ -64,7 +64,7 @@ jobs: - run: brew install protobuf@$PROTOC_VERSION - run: | - cargo test -r + cargo test -r -p papyrus_node env: SEED: 0 @@ -85,10 +85,14 @@ jobs: - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 - run: > - cargo test -r --test '*' -- --include-ignored --skip test_gw_integration_testnet; + cargo test -r + --test latency_histogram + --test gateway_integration_test + --test feeder_gateway_integration_test + -- --include-ignored --skip test_gw_integration_testnet; cargo run -r -p papyrus_node --bin central_source_integration_test --features="futures-util tokio-stream" - # TODO(dvir): make this run only if the path 'crates/papyrus_storage/src/db/**' (same path as in the CI) was changed on the + # TODO(dvir): make this run only if the path 'crates/papyrus_storage/src/db/**' (same path as in the CI) was changed on the # last day and increase the number of repetitions. 
random-table-test: runs-on: ubuntu-latest From e12ba1cc01b0b2a5e88edc23ddd118534e4b1144 Mon Sep 17 00:00:00 2001 From: dan-starkware <56217775+dan-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 12:15:50 +0300 Subject: [PATCH 05/12] chore(ci): add triggers to papyrus ci (#121) --- .github/workflows/papyrus_ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/papyrus_ci.yml b/.github/workflows/papyrus_ci.yml index 4e2c23ff29..3dabe31fe3 100644 --- a/.github/workflows/papyrus_ci.yml +++ b/.github/workflows/papyrus_ci.yml @@ -4,6 +4,9 @@ on: push: branches: [main] paths: + - '.github/workflows/papyrus_ci.yml' + - 'Cargo.toml' + - 'Cargo.lock' - 'crates/papyrus**/**' pull_request: @@ -14,6 +17,9 @@ on: - auto_merge_enabled - edited # for when the PR title is edited paths: + - '.github/workflows/papyrus_ci.yml' + - 'Cargo.toml' + - 'Cargo.lock' - 'crates/papyrus**/**' merge_group: From a8c358ea553478bb39205525d33eb762ab07d2db Mon Sep 17 00:00:00 2001 From: dan-starkware <56217775+dan-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 12:44:29 +0300 Subject: [PATCH 06/12] ci: add triggers for papyrus CI (#135) --- .github/workflows/papyrus_ci.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/papyrus_ci.yml b/.github/workflows/papyrus_ci.yml index 3dabe31fe3..2321b6939d 100644 --- a/.github/workflows/papyrus_ci.yml +++ b/.github/workflows/papyrus_ci.yml @@ -5,9 +5,13 @@ on: branches: [main] paths: - '.github/workflows/papyrus_ci.yml' + - 'Dockerfile' + - 'papyrus_utilities.Dockerfile' - 'Cargo.toml' - 'Cargo.lock' - 'crates/papyrus**/**' + - 'crates/sequencing/**' + - 'crates/starknet_client/**' pull_request: types: @@ -18,9 +22,13 @@ on: - edited # for when the PR title is edited paths: - '.github/workflows/papyrus_ci.yml' + - 'Dockerfile' + - 'papyrus_utilities.Dockerfile' - 'Cargo.toml' - 'Cargo.lock' - 'crates/papyrus**/**' + - 'crates/sequencing/**' + - 'crates/starknet_client/**' merge_group: types: [checks_requested] From cd76a9f85f26b59e0a4a90dd172ff52a72a559c5 Mon Sep 17 00:00:00 2001 From: dorimedini-starkware Date: Sun, 28 Jul 2024 12:48:02 +0300 Subject: [PATCH 07/12] chore(ci): fix merge paths CI (#116) Signed-off-by: Dori Medini --- .github/workflows/merge_paths_ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/merge_paths_ci.yml b/.github/workflows/merge_paths_ci.yml index 8db9363de8..454b784323 100644 --- a/.github/workflows/merge_paths_ci.yml +++ b/.github/workflows/merge_paths_ci.yml @@ -28,6 +28,7 @@ on: - 'scripts/merge_paths_test.py' - 'scripts/merge_status.py' +jobs: merge-paths-test: runs-on: ubuntu-20.04 steps: From d3a61b4ce565cfe99a0e35da433735506b013906 Mon Sep 17 00:00:00 2001 From: aner-starkware <147302140+aner-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 15:23:01 +0300 Subject: [PATCH 08/12] refactor: move test files to dedicated directory (#105) --- .github/workflows/committer_ci.yml | 8 ++++---- crates/committer_cli/benches/committer_bench.rs | 4 ++-- crates/committer_cli/benches/committer_flow_inputs.json | 1 - crates/committer_cli/src/tests/regression_tests.rs | 8 ++++---- .../committer_cli/test_inputs/committer_flow_inputs.json | 1 + .../{benches => test_inputs}/tree_flow_inputs.json | 0 6 files changed, 11 insertions(+), 11 deletions(-) delete mode 100644 crates/committer_cli/benches/committer_flow_inputs.json create mode 100644 crates/committer_cli/test_inputs/committer_flow_inputs.json rename crates/committer_cli/{benches => 
test_inputs}/tree_flow_inputs.json (100%) diff --git a/.github/workflows/committer_ci.yml b/.github/workflows/committer_ci.yml index 7d40303f82..2517ba7e60 100644 --- a/.github/workflows/committer_ci.yml +++ b/.github/workflows/committer_ci.yml @@ -42,7 +42,7 @@ jobs: credentials_json: ${{ secrets.COMMITER_PRODUCTS_EXT_WRITER_JSON }} - uses: 'google-github-actions/setup-gcloud@v2' - run: echo "BENCH_INPUT_FILES_PREFIX=$(cat ./crates/committer_cli/src/tests/flow_test_files_prefix)" >> $GITHUB_ENV - - run: gcloud storage cp -r gs://committer-testing-artifacts/$BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/benches + - run: gcloud storage cp -r gs://committer-testing-artifacts/$BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/test_inputs - run: cargo test -p committer_cli --release -- --include-ignored test_regression benchmarking: @@ -83,13 +83,13 @@ jobs: # Input files didn't change. - if: env.OLD_BENCH_INPUT_FILES_PREFIX == env.NEW_BENCH_INPUT_FILES_PREFIX run: | - mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/benches/tree_flow_inputs.json - mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/benches/committer_flow_inputs.json + mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/test_inputs/tree_flow_inputs.json + mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/test_inputs/committer_flow_inputs.json # Input files did change, download new inputs. - if: env.OLD_BENCH_INPUT_FILES_PREFIX != env.NEW_BENCH_INPUT_FILES_PREFIX run: | - gcloud storage cp -r gs://committer-testing-artifacts/$NEW_BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/benches + gcloud storage cp -r gs://committer-testing-artifacts/$NEW_BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/test_inputs # Benchmark the new code, splitting the benchmarks, and prepare the results for posting a comment. - run: bash ./crates/committer_cli/benches/bench_split_and_prepare_post.sh benchmarks_list.txt bench_new.txt diff --git a/crates/committer_cli/benches/committer_bench.rs b/crates/committer_cli/benches/committer_bench.rs index 90796c7cb8..cc5ef55480 100644 --- a/crates/committer_cli/benches/committer_bench.rs +++ b/crates/committer_cli/benches/committer_bench.rs @@ -19,8 +19,8 @@ use committer_cli::tests::utils::parse_from_python::TreeFlowInput; use criterion::{criterion_group, criterion_main, Criterion}; const CONCURRENCY_MODE: bool = true; -const SINGLE_TREE_FLOW_INPUT: &str = include_str!("tree_flow_inputs.json"); -const FLOW_TEST_INPUT: &str = include_str!("committer_flow_inputs.json"); +const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../test_inputs/tree_flow_inputs.json"); +const FLOW_TEST_INPUT: &str = include_str!("../test_inputs/committer_flow_inputs.json"); const OUTPUT_PATH: &str = "benchmark_output.txt"; pub fn single_tree_flow_benchmark(criterion: &mut Criterion) { diff --git a/crates/committer_cli/benches/committer_flow_inputs.json b/crates/committer_cli/benches/committer_flow_inputs.json deleted file mode 100644 index c42fd23ef5..0000000000 --- a/crates/committer_cli/benches/committer_flow_inputs.json +++ /dev/null @@ -1 +0,0 @@ -This file is a placeholder for inputs to single_tree_flow regression test and benchmark. 
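
Editorial note on the include_str! path changes above: include_str! resolves its argument relative to the source file that invokes it, so a benchmark living under benches/ has to reach the relocated inputs via ../test_inputs/. A minimal sketch of the mechanism, using hypothetical file names (not taken from this patch) and assuming the referenced JSON file exists at compile time:

// benches/example_bench.rs -- hypothetical file, shown only to illustrate path semantics.
// include_str! embeds the file relative to *this* source file, so "../test_inputs/..."
// points at <crate root>/test_inputs/ when invoked from benches/.
const EXAMPLE_INPUT: &str = include_str!("../test_inputs/example_inputs.json");

fn main() {
    // The file contents are embedded into the binary as a &'static str at compile time.
    println!("embedded {} bytes of benchmark input", EXAMPLE_INPUT.len());
}
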
diff --git a/crates/committer_cli/src/tests/regression_tests.rs b/crates/committer_cli/src/tests/regression_tests.rs index 1b8c75bbeb..8bbf8c59ca 100644 --- a/crates/committer_cli/src/tests/regression_tests.rs +++ b/crates/committer_cli/src/tests/regression_tests.rs @@ -16,8 +16,8 @@ use crate::tests::utils::parse_from_python::TreeFlowInput; // 2. Fix the max time threshold to be the expected time for the benchmark test. const MAX_TIME_FOR_SINGLE_TREE_BECHMARK_TEST: f64 = 5.0; const MAX_TIME_FOR_COMMITTER_FLOW_BECHMARK_TEST: f64 = 5.0; -const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../../benches/tree_flow_inputs.json"); -const FLOW_TEST_INPUT: &str = include_str!("../../benches/committer_flow_inputs.json"); +const SINGLE_TREE_FLOW_INPUT: &str = include_str!("../../test_inputs/tree_flow_inputs.json"); +const FLOW_TEST_INPUT: &str = include_str!("../../test_inputs/committer_flow_inputs.json"); const OUTPUT_PATH: &str = "benchmark_output.txt"; const EXPECTED_NUMBER_OF_FILES: usize = 100; @@ -163,10 +163,10 @@ pub async fn test_regression_committer_flow() { #[tokio::test(flavor = "multi_thread")] pub async fn test_regression_committer_all_files() { assert_eq!( - fs::read_dir("./benches/regression_files").unwrap().count(), + fs::read_dir("./test_inputs/regression_files").unwrap().count(), EXPECTED_NUMBER_OF_FILES ); - let dir_path = fs::read_dir("./benches/regression_files").unwrap(); + let dir_path = fs::read_dir("./test_inputs/regression_files").unwrap(); for file_path in dir_path { // TODO(Aner, 23/07/24): multi-thread the test. test_single_committer_flow( diff --git a/crates/committer_cli/test_inputs/committer_flow_inputs.json b/crates/committer_cli/test_inputs/committer_flow_inputs.json new file mode 100644 index 0000000000..5b9de40994 --- /dev/null +++ b/crates/committer_cli/test_inputs/committer_flow_inputs.json @@ -0,0 +1 @@ +This file is a placeholder for inputs to committer_flow regression test and benchmark. 
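
Editorial note on the directory scan in the regression test above: cargo runs a package's test binaries with the package root as the working directory, which is why the relative ./test_inputs/... paths resolve against the crate directory. A small sketch of the same counting idea, with hypothetical names rather than the crate's actual constants:

use std::fs;

// Hypothetical helper mirroring the directory-count check in the test above.
fn count_input_files(dir: &str) -> usize {
    // Each Ok entry is one file or subdirectory inside `dir`.
    fs::read_dir(dir)
        .expect("input directory should exist")
        .filter_map(Result::ok)
        .count()
}

fn main() {
    // Illustrative path only; the real test asserts the count against an expected constant.
    println!("{} input files", count_input_files("./test_inputs/regression_files"));
}
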
diff --git a/crates/committer_cli/benches/tree_flow_inputs.json b/crates/committer_cli/test_inputs/tree_flow_inputs.json similarity index 100% rename from crates/committer_cli/benches/tree_flow_inputs.json rename to crates/committer_cli/test_inputs/tree_flow_inputs.json From 5d63dc5fa2ce3cc990219ba0ed45300e682e6714 Mon Sep 17 00:00:00 2001 From: dorimedini-starkware Date: Sun, 28 Jul 2024 15:39:24 +0300 Subject: [PATCH 09/12] chore: move (workspace) doc test to main (workspace) CI (#106) Signed-off-by: Dori Medini --- .github/workflows/main.yml | 13 +++++++++++++ .github/workflows/papyrus_ci.yml | 13 ------------- .../updated_skeleton_tree/hash_function.rs | 8 ++++---- crates/committer_cli/src/parse_input/raw_input.rs | 4 ++-- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3efec85a49..74545671a9 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -77,6 +77,19 @@ jobs: - run: scripts/clippy.sh + doc: + runs-on: ubuntu-latest + # env: + # RUSTDOCFLAGS: "-D warnings" + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - uses: Noelware/setup-protoc@1.1.0 + with: + version: ${{env.PROTOC_VERSION}} + - run: cargo doc --workspace -r --document-private-items --no-deps + run-tests: runs-on: ubuntu-20.04 steps: diff --git a/.github/workflows/papyrus_ci.yml b/.github/workflows/papyrus_ci.yml index 2321b6939d..ad79a6784e 100644 --- a/.github/workflows/papyrus_ci.yml +++ b/.github/workflows/papyrus_ci.yml @@ -105,19 +105,6 @@ jobs: env: SEED: 0 - doc: - runs-on: ubuntu-latest - # env: - # RUSTDOCFLAGS: "-D warnings" - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - - uses: Noelware/setup-protoc@1.1.0 - with: - version: ${{env.PROTOC_VERSION}} - - run: cargo doc --workspace -r --document-private-items --no-deps - codecov: runs-on: ubuntu-latest steps: diff --git a/crates/committer/src/patricia_merkle_tree/updated_skeleton_tree/hash_function.rs b/crates/committer/src/patricia_merkle_tree/updated_skeleton_tree/hash_function.rs index 0c2d17de95..8c1b38fd4d 100644 --- a/crates/committer/src/patricia_merkle_tree/updated_skeleton_tree/hash_function.rs +++ b/crates/committer/src/patricia_merkle_tree/updated_skeleton_tree/hash_function.rs @@ -46,7 +46,7 @@ pub(crate) trait TreeHashFunction { fn compute_node_hash(node_data: &NodeData) -> HashOutput; /// The default implementation for internal nodes is based on the following reference: - /// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction + /// fn compute_node_hash_with_inner_hash_function( node_data: &NodeData, ) -> HashOutput { @@ -76,7 +76,7 @@ impl TreeHashFunctionImpl { /// Implementation of TreeHashFunction for contracts trie. /// The implementation is based on the following reference: -/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction +/// impl TreeHashFunction for TreeHashFunctionImpl { fn compute_leaf_hash(contract_state: &ContractState) -> HashOutput { HashOutput( @@ -100,7 +100,7 @@ impl TreeHashFunction for TreeHashFunctionImpl { /// Implementation of TreeHashFunction for the classes trie. 
/// The implementation is based on the following reference: -/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction +/// impl TreeHashFunction for TreeHashFunctionImpl { fn compute_leaf_hash(compiled_class_hash: &CompiledClassHash) -> HashOutput { let contract_class_leaf_version: Felt = Felt::from_hex(Self::CONTRACT_CLASS_LEAF_V0) @@ -119,7 +119,7 @@ impl TreeHashFunction for TreeHashFunctionImpl { /// Implementation of TreeHashFunction for the storage trie. /// The implementation is based on the following reference: -/// https://docs.starknet.io/documentation/architecture_and_concepts/Network_Architecture/starknet-state/#trie_construction +/// impl TreeHashFunction for TreeHashFunctionImpl { fn compute_leaf_hash(storage_value: &StarknetStorageValue) -> HashOutput { HashOutput(storage_value.0) diff --git a/crates/committer_cli/src/parse_input/raw_input.rs b/crates/committer_cli/src/parse_input/raw_input.rs index d58dc863de..c27b754b1c 100644 --- a/crates/committer_cli/src/parse_input/raw_input.rs +++ b/crates/committer_cli/src/parse_input/raw_input.rs @@ -7,7 +7,7 @@ type RawFelt = [u8; 32]; #[derive(Deserialize, Debug)] /// Input to the committer. pub(crate) struct RawInput { - /// Storage. Will be casted to HashMap, Vec> to simulate DB access. + /// Storage. Will be casted to `HashMap, Vec>` to simulate DB access. pub storage: Vec, pub state_diff: RawStateDiff, pub contracts_trie_root_hash: RawFelt, @@ -30,7 +30,7 @@ pub(crate) struct RawConfigImpl { #[derive(Deserialize_repr, Debug, Default, Serialize)] #[repr(usize)] -/// Describes a log level https://docs.python.org/3/library/logging.html#logging-levels +/// Describes a log level pub(crate) enum PythonLogLevel { NotSet = 0, Info = 20, From 794869b72d25c053b1a76f26faa8ceb658127ac5 Mon Sep 17 00:00:00 2001 From: aner-starkware <147302140+aner-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 15:43:23 +0300 Subject: [PATCH 10/12] fix: restore ci (#107) --- .github/workflows/committer_ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/committer_ci.yml b/.github/workflows/committer_ci.yml index 2517ba7e60..d82054465a 100644 --- a/.github/workflows/committer_ci.yml +++ b/.github/workflows/committer_ci.yml @@ -61,7 +61,7 @@ jobs: credentials_json: ${{ secrets.COMMITER_PRODUCTS_EXT_WRITER_JSON }} - uses: 'google-github-actions/setup-gcloud@v2' - run: echo "OLD_BENCH_INPUT_FILES_PREFIX=$(cat ./crates/committer_cli/src/tests/flow_test_files_prefix)" >> $GITHUB_ENV - - run: gcloud storage cp -r gs://committer-testing-artifacts/$OLD_BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/benches + - run: gcloud storage cp -r gs://committer-testing-artifacts/$OLD_BENCH_INPUT_FILES_PREFIX/* ./crates/committer_cli/test_inputs # List the existing benchmarks. - run: | @@ -71,8 +71,8 @@ jobs: - run: cargo bench -p committer_cli # Backup the downloaded files to avoid re-downloading them if they didn't change (overwritten by checkout). 
- - run: mv ./crates/committer_cli/benches/tree_flow_inputs.json ./crates/committer_cli/benches/tree_flow_inputs.json_bu - - run: mv ./crates/committer_cli/benches/committer_flow_inputs.json ./crates/committer_cli/benches/committer_flow_inputs.json_bu + - run: mv ./crates/committer_cli/test_inputs/tree_flow_inputs.json ./crates/committer_cli/test_inputs/tree_flow_inputs.json_bu + - run: mv ./crates/committer_cli/test_inputs/committer_flow_inputs.json ./crates/committer_cli/test_inputs/committer_flow_inputs.json_bu # Checkout the new code. - uses: actions/checkout@v4 @@ -83,8 +83,8 @@ jobs: # Input files didn't change. - if: env.OLD_BENCH_INPUT_FILES_PREFIX == env.NEW_BENCH_INPUT_FILES_PREFIX run: | - mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/test_inputs/tree_flow_inputs.json - mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/test_inputs/committer_flow_inputs.json + mv ./crates/committer_cli/test_inputs/tree_flow_inputs.json_bu ./crates/committer_cli/test_inputs/tree_flow_inputs.json + mv ./crates/committer_cli/test_inputs/committer_flow_inputs.json_bu ./crates/committer_cli/test_inputs/committer_flow_inputs.json # Input files did change, download new inputs. - if: env.OLD_BENCH_INPUT_FILES_PREFIX != env.NEW_BENCH_INPUT_FILES_PREFIX From 7ee04dbc7e666cfa9e8b7c1a8769970fbea5b6fe Mon Sep 17 00:00:00 2001 From: Meshi Peled <141231558+meship-starkware@users.noreply.github.com> Date: Sun, 28 Jul 2024 17:27:57 +0300 Subject: [PATCH 11/12] fix(execution): forbid calling cairo0 contract with cairo1 only builtins (#128) --- .../deprecated_entry_point_execution.rs | 23 ++++++++++++++++++- crates/blockifier/src/execution/errors.rs | 5 ++++ crates/papyrus_monitoring_gateway/src/lib.rs | 2 +- 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/crates/blockifier/src/execution/deprecated_entry_point_execution.rs b/crates/blockifier/src/execution/deprecated_entry_point_execution.rs index 878c2e276b..c5021fe5fb 100644 --- a/crates/blockifier/src/execution/deprecated_entry_point_execution.rs +++ b/crates/blockifier/src/execution/deprecated_entry_point_execution.rs @@ -1,3 +1,5 @@ +use std::collections::HashSet; + use cairo_vm::types::builtin_name::BuiltinName; use cairo_vm::types::layout_name::LayoutName; use cairo_vm::types::relocatable::{MaybeRelocatable, Relocatable}; @@ -29,6 +31,15 @@ pub struct VmExecutionContext<'a> { pub entry_point_pc: usize, } +pub const CAIRO0_BUILTINS_NAMES: [BuiltinName; 6] = [ + BuiltinName::range_check, + BuiltinName::pedersen, + BuiltinName::ecdsa, + BuiltinName::bitwise, + BuiltinName::ec_op, + BuiltinName::poseidon, +]; + /// Executes a specific call to a contract entry point and returns its output. pub fn execute_entry_point_call( call: CallEntryPoint, @@ -71,9 +82,19 @@ pub fn initialize_execution_context<'a>( resources: &'a mut ExecutionResources, context: &'a mut EntryPointExecutionContext, ) -> Result, PreExecutionError> { + // Verify use of cairo0 builtins only. + let program_builtins: HashSet<&BuiltinName> = + HashSet::from_iter(contract_class.program.iter_builtins()); + let unsupported_builtins = + &program_builtins - &HashSet::from_iter(CAIRO0_BUILTINS_NAMES.iter()); + if !unsupported_builtins.is_empty() { + return Err(PreExecutionError::UnsupportedCairo0Builtin( + unsupported_builtins.iter().map(|&item| *item).collect(), + )); + } + // Resolve initial PC from EP indicator. let entry_point_pc = resolve_entry_point_pc(call, &contract_class)?; - // Instantiate Cairo runner. 
let proof_mode = false; let trace_enabled = false; diff --git a/crates/blockifier/src/execution/errors.rs b/crates/blockifier/src/execution/errors.rs index 1fa6a1d5b2..cc0e87a753 100644 --- a/crates/blockifier/src/execution/errors.rs +++ b/crates/blockifier/src/execution/errors.rs @@ -1,3 +1,6 @@ +use std::collections::HashSet; + +use cairo_vm::types::builtin_name::BuiltinName; use cairo_vm::types::errors::math_errors::MathError; use cairo_vm::vm::errors::cairo_run_errors::CairoRunError; use cairo_vm::vm::errors::memory_errors::MemoryError; @@ -42,6 +45,8 @@ pub enum PreExecutionError { StateError(#[from] StateError), #[error("Requested contract address {:#064x} is not deployed.", .0.key())] UninitializedStorageAddress(ContractAddress), + #[error("Called builtins: {0:?} are unsupported in a Cairo0 contract")] + UnsupportedCairo0Builtin(HashSet), } impl From for PreExecutionError { diff --git a/crates/papyrus_monitoring_gateway/src/lib.rs b/crates/papyrus_monitoring_gateway/src/lib.rs index 823b2044f2..1f7e068c91 100644 --- a/crates/papyrus_monitoring_gateway/src/lib.rs +++ b/crates/papyrus_monitoring_gateway/src/lib.rs @@ -313,7 +313,7 @@ async fn node_config_by_secret( async fn metrics(prometheus_handle: Option) -> Response { match prometheus_handle { Some(handle) => { - Collector::default().prefix(PROCESS_METRICS_PREFIX).collect(); + Collector::new(PROCESS_METRICS_PREFIX).collect(); handle.render().into_response() } None => StatusCode::METHOD_NOT_ALLOWED.into_response(), From 443e7178c88b80c3fc6ad9d21b7fd86f0e3ea289 Mon Sep 17 00:00:00 2001 From: Dori Medini Date: Mon, 29 Jul 2024 13:38:34 +0300 Subject: [PATCH 12/12] fix: conflict resolution Signed-off-by: Dori Medini --- .github/workflows/committer_ci.yml | 8 ----- .../papyrus_consensus/src/state_machine.rs | 31 ------------------- 2 files changed, 39 deletions(-) diff --git a/.github/workflows/committer_ci.yml b/.github/workflows/committer_ci.yml index bd96e1275f..d82054465a 100644 --- a/.github/workflows/committer_ci.yml +++ b/.github/workflows/committer_ci.yml @@ -83,16 +83,8 @@ jobs: # Input files didn't change. - if: env.OLD_BENCH_INPUT_FILES_PREFIX == env.NEW_BENCH_INPUT_FILES_PREFIX run: | -<<<<<<< HEAD - mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/test_inputs/tree_flow_inputs.json - mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/test_inputs/committer_flow_inputs.json -||||||| a9dc431b - mv ./crates/committer_cli/benches/tree_flow_inputs.json_bu ./crates/committer_cli/benches/tree_flow_inputs.json - mv ./crates/committer_cli/benches/committer_flow_inputs.json_bu ./crates/committer_cli/benches/committer_flow_inputs.json -======= mv ./crates/committer_cli/test_inputs/tree_flow_inputs.json_bu ./crates/committer_cli/test_inputs/tree_flow_inputs.json mv ./crates/committer_cli/test_inputs/committer_flow_inputs.json_bu ./crates/committer_cli/test_inputs/committer_flow_inputs.json ->>>>>>> origin/main-v0.13.2 # Input files did change, download new inputs. - if: env.OLD_BENCH_INPUT_FILES_PREFIX != env.NEW_BENCH_INPUT_FILES_PREFIX diff --git a/crates/sequencing/papyrus_consensus/src/state_machine.rs b/crates/sequencing/papyrus_consensus/src/state_machine.rs index a934bce0e8..07627acbfe 100644 --- a/crates/sequencing/papyrus_consensus/src/state_machine.rs +++ b/crates/sequencing/papyrus_consensus/src/state_machine.rs @@ -17,42 +17,11 @@ use crate::types::{Round, ValidatorId}; /// Events which the state machine sends/receives. 
 #[derive(Debug, Clone, PartialEq)]
 pub enum StateMachineEvent {
-<<<<<<< HEAD
     /// Sent by the state machine when a block is required to propose (BlockHash is always None).
     /// While waiting for the response of GetProposal, the state machine will buffer all other
     /// events. The caller must respond with a valid block hash for this height to the state
     /// machine, and the same round sent out.
     GetProposal(Option<BlockHash>, Round),
-||||||| a9dc431b
-    /// StartRound is effective 2 questions:
-    /// 1. Is the local node the proposer for this round?
-    /// 2. If so, what value should be proposed?
-    /// While waiting for the response to this event, the state machine will buffer all other
-    /// events.
-    ///
-    /// How should the caller handle this event?
-    /// 1. If the local node is not the proposer, the caller responds with with `None` as the block
-    /// hash.
-    /// 2. If the local node is the proposer and a block hash was supplied by the state machine,
-    /// the caller responds with the supplied block hash.
-    /// 3. If the local node is the proposer and no block hash was supplied by the state machine,
-    /// the caller must find/build a block to respond with.
-    StartRound(Option<BlockHash>, Round),
-=======
-    /// StartRound is effective 2 questions:
-    /// 1. Is the local node the proposer for this round?
-    /// 2. If so, what value should be proposed? While waiting for the response to this event, the
-    /// state machine will buffer all other events.
-    ///
-    /// How should the caller handle this event?
-    /// 1. If the local node is not the proposer, the caller responds with with `None` as the block
-    /// hash.
-    /// 2. If the local node is the proposer and a block hash was supplied by the state machine,
-    /// the caller responds with the supplied block hash.
-    /// 3. If the local node is the proposer and no block hash was supplied by the state machine,
-    /// the caller must find/build a block to respond with.
-    StartRound(Option<BlockHash>, Round),
->>>>>>> origin/main-v0.13.2
     /// Consensus message, can be both sent from and to the state machine.
     Proposal(BlockHash, Round),
     /// Consensus message, can be both sent from and to the state machine.
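
Editorial note on the resolved GetProposal variant kept above: its doc comment says the caller must answer with a valid block hash for this height and echo the same round. A minimal sketch of that contract with stand-in types; only the names StateMachineEvent, GetProposal, Proposal, BlockHash, and Round echo the patch, everything else is hypothetical:

// Stand-in definitions so the sketch is self-contained; the real crate defines
// BlockHash and Round in its own types modules.
#[derive(Debug, Clone, Copy)]
struct BlockHash(u64);
type Round = u32;

#[derive(Debug)]
enum StateMachineEvent {
    GetProposal(Option<BlockHash>, Round),
    Proposal(BlockHash, Round),
}

// Hypothetical caller-side handler: answer GetProposal with a block hash for this
// height and the same round that was sent out, as the doc comment requires.
fn respond_to_get_proposal(
    event: StateMachineEvent,
    build_block: impl FnOnce() -> BlockHash,
) -> Option<StateMachineEvent> {
    match event {
        StateMachineEvent::GetProposal(None, round) => {
            Some(StateMachineEvent::GetProposal(Some(build_block()), round))
        }
        // Other events (including Proposal) are not answered here.
        _ => None,
    }
}

fn main() {
    let reply = respond_to_get_proposal(StateMachineEvent::GetProposal(None, 0), || BlockHash(7));
    println!("{reply:?}");
}
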