From f6cf1065c162e21f740db38c8521bbfd271d9557 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Wed, 4 Dec 2024 11:43:31 +0800 Subject: [PATCH 01/54] added blocks verification logic tested on single mmr batch --- Cargo.lock | 1 - crates/guest-mmr/Cargo.toml | 1 - crates/guest-mmr/src/core.rs | 18 ++---- crates/guest-types/src/lib.rs | 19 ++++-- crates/methods/blocks-validity/src/main.rs | 3 +- crates/methods/mmr-append/src/main.rs | 2 + crates/publisher/Cargo.toml | 8 ++- .../publisher/bin/verify_block_inclusion.rs | 1 - crates/publisher/bin/verify_blocks.rs | 47 ++++++++++++++ crates/publisher/src/api/operations.rs | 5 +- crates/publisher/src/core/batch_processor.rs | 14 +++- crates/publisher/src/errors.rs | 4 ++ crates/publisher/src/validator/validator.rs | 64 +++++++++++++++++-- 13 files changed, 152 insertions(+), 35 deletions(-) delete mode 100644 crates/publisher/bin/verify_block_inclusion.rs create mode 100644 crates/publisher/bin/verify_blocks.rs diff --git a/Cargo.lock b/Cargo.lock index f794b1a..e638c52 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2610,7 +2610,6 @@ dependencies = [ "mmr-utils", "num-bigint", "num-traits", - "serde", "sha2 0.10.8 (git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0)", "thiserror 2.0.3", "tokio", diff --git a/crates/guest-mmr/Cargo.toml b/crates/guest-mmr/Cargo.toml index 56cfa3a..95037f2 100644 --- a/crates/guest-mmr/Cargo.toml +++ b/crates/guest-mmr/Cargo.toml @@ -6,7 +6,6 @@ edition = "2021" [dependencies] guest-types = { path = "../guest-types" } -serde = { version = "1.0.215", features = ["derive"] } hex = "0.4" sha2 = { git = 'https://github.com/risc0/RustCrypto-hashes.git', tag = 'sha2-v0.10.8-risczero.0', features = ['compress'] } num-bigint = "0.4.4" diff --git a/crates/guest-mmr/src/core.rs b/crates/guest-mmr/src/core.rs index 8e965ea..dd21044 100644 --- a/crates/guest-mmr/src/core.rs +++ b/crates/guest-mmr/src/core.rs @@ -1,5 +1,4 @@ -use guest_types::AppendResult; -use 
serde::{Deserialize, Serialize}; +use guest_types::{AppendResult, GuestProof}; use std::collections::{HashMap, VecDeque}; use thiserror::Error; @@ -31,15 +30,6 @@ pub enum MMRError { InvalidPeaksCount, } -#[derive(Debug, Serialize, Deserialize)] -pub struct Proof { - element_index: usize, - element_hash: String, - siblings_hashes: Vec, - peaks_hashes: Vec, - elements_count: usize, -} - #[derive(Debug)] pub struct GuestMMR { hashes: HashMap, @@ -120,7 +110,7 @@ impl GuestMMR { )) } - pub fn get_proof(&self, element_index: usize) -> Result { + pub fn get_proof(&self, element_index: usize) -> Result { if element_index == 0 { return Err(MMRError::InvalidElementIndex); } @@ -144,7 +134,7 @@ impl GuestMMR { .get(&element_index) .ok_or(MMRError::NoHashFoundForIndex(element_index))?; - Ok(Proof { + Ok(GuestProof { element_index, element_hash: element_hash.clone(), siblings_hashes, @@ -155,7 +145,7 @@ impl GuestMMR { pub fn verify_proof( &self, - mut proof: Proof, + mut proof: GuestProof, element_value: String, options: Option, ) -> Result { diff --git a/crates/guest-types/src/lib.rs b/crates/guest-types/src/lib.rs index 2d17410..d57bc63 100644 --- a/crates/guest-types/src/lib.rs +++ b/crates/guest-types/src/lib.rs @@ -214,18 +214,27 @@ impl FinalHash { } } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GuestProof { + pub element_index: usize, + pub element_hash: String, + pub siblings_hashes: Vec, + pub peaks_hashes: Vec, + pub elements_count: usize, +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct BlocksValidityInput { headers: Vec, mmr_input: MMRInput, - hash_indexes: Vec, + proofs: Vec, } impl BlocksValidityInput { - pub fn new(headers: Vec, mmr_input: MMRInput, hash_indexes: Vec) -> Self { + pub fn new(headers: Vec, mmr_input: MMRInput, proofs: Vec) -> Self { Self { headers, mmr_input, - hash_indexes, + proofs, } } @@ -233,8 +242,8 @@ impl BlocksValidityInput { &self.headers } - pub fn hash_indexes(&self) -> &Vec { - &self.hash_indexes + 
pub fn proofs(&self) -> &Vec { + &self.proofs } pub fn mmr_input(&self) -> &MMRInput { diff --git a/crates/methods/blocks-validity/src/main.rs b/crates/methods/blocks-validity/src/main.rs index 7999a5a..b9b8bd0 100644 --- a/crates/methods/blocks-validity/src/main.rs +++ b/crates/methods/blocks-validity/src/main.rs @@ -21,7 +21,8 @@ fn main() { // Append block hashes to MMR for (i, header) in input.headers().iter().enumerate() { let block_hash = header.block_hash.clone(); - let proof = mmr.get_proof(input.hash_indexes()[i]).unwrap(); + let proof = input.proofs()[i].clone(); + if !mmr.verify_proof(proof, block_hash, None).unwrap() { env::commit(&false); } diff --git a/crates/methods/mmr-append/src/main.rs b/crates/methods/mmr-append/src/main.rs index c8b905f..75cebd7 100644 --- a/crates/methods/mmr-append/src/main.rs +++ b/crates/methods/mmr-append/src/main.rs @@ -34,6 +34,8 @@ fn main() { let root_hash = mmr.calculate_root_hash(mmr.get_elements_count()).unwrap(); + eprintln!("input size: {:?}", input.headers().len()); + let first_header = input.headers().first().unwrap(); let last_header = input.headers().last().unwrap(); diff --git a/crates/publisher/Cargo.toml b/crates/publisher/Cargo.toml index dc60201..c1f8411 100644 --- a/crates/publisher/Cargo.toml +++ b/crates/publisher/Cargo.toml @@ -4,13 +4,17 @@ version = "0.1.0" edition = "2021" [[bin]] -name = "update_mmr" +name = "update-mmr" path = "bin/update_mmr.rs" [[bin]] -name = "build_mmr" +name = "build-mmr" path = "bin/build_mmr.rs" +[[bin]] +name = "verify-blocks" +path = "bin/verify_blocks.rs" + [dependencies] guest-types = { path = "../guest-types" } methods = { path = "../methods" } diff --git a/crates/publisher/bin/verify_block_inclusion.rs b/crates/publisher/bin/verify_block_inclusion.rs deleted file mode 100644 index 8b13789..0000000 --- a/crates/publisher/bin/verify_block_inclusion.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/crates/publisher/bin/verify_blocks.rs b/crates/publisher/bin/verify_blocks.rs 
new file mode 100644 index 0000000..309aba9 --- /dev/null +++ b/crates/publisher/bin/verify_blocks.rs @@ -0,0 +1,47 @@ +use clap::Parser; +use publisher::{db::DbConnection, prove_headers_validity_and_inclusion}; +use tokio; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + /// Start block number + #[arg(long, short)] + start_block: u64, + + /// End block number + #[arg(long, short)] + end_block: u64, + + /// Skip proof generation + #[arg(long)] + skip_proof: bool, +} + +#[tokio::main] +async fn main() -> Result<(), Box> { + let args = Args::parse(); + + // Fetch block headers + let db_connection = DbConnection::new().await?; + let headers = db_connection + .get_block_headers_by_block_range(args.start_block, args.end_block) + .await?; + + // Verify blocks + match prove_headers_validity_and_inclusion(&headers, Some(args.skip_proof)).await { + Ok(result) => { + println!("Verification result: {}", result); + if result { + println!("All blocks are valid!"); + } else { + println!("Some blocks failed verification!"); + } + } + Err(e) => { + eprintln!("Error during verification: {:?}", e); + } + } + + Ok(()) +} diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index e31f4f3..9d6df80 100644 --- a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -1,5 +1,7 @@ use crate::{core::AccumulatorBuilder, errors::PublisherError, validator::ValidatorBuilder}; +const DEFAULT_BATCH_SIZE: u64 = 1024; + pub async fn prove_mmr_update( rpc_url: &String, verifier_address: &String, @@ -40,11 +42,10 @@ pub async fn prove_mmr_update( pub async fn prove_headers_validity_and_inclusion( headers: &Vec, - batch_size: u64, skip_proof_verification: Option, ) -> Result { let skip_proof = skip_proof_verification.unwrap_or(false); - let validator = ValidatorBuilder::new(batch_size, skip_proof).await?; + let validator = ValidatorBuilder::new(DEFAULT_BATCH_SIZE, 
skip_proof).await?; let result = validator .verify_blocks_validity_and_inclusion(headers) diff --git a/crates/publisher/src/core/batch_processor.rs b/crates/publisher/src/core/batch_processor.rs index ea7bf9b..9991dee 100644 --- a/crates/publisher/src/core/batch_processor.rs +++ b/crates/publisher/src/core/batch_processor.rs @@ -6,7 +6,7 @@ use common::get_or_create_db_path; use guest_types::{CombinedInput, GuestOutput, MMRInput}; use mmr::PeaksOptions; use mmr_utils::initialize_mmr; -use tracing::{debug, info}; +use tracing::{debug, error, info}; pub struct BatchProcessor { batch_size: u64, @@ -68,6 +68,18 @@ impl BatchProcessor { .get_block_headers_by_block_range(start_block, adjusted_end_block) .await?; + // Check if headers array is empty + if headers.is_empty() { + error!( + "No headers found for block range {} to {}", + start_block, adjusted_end_block + ); + return Err(AccumulatorError::EmptyHeaders { + start_block, + end_block: adjusted_end_block, + }); + } + // Prepare MMR input let current_peaks = mmr.get_peaks(PeaksOptions::default()).await?; let current_elements_count = mmr.elements_count.get().await?; diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index 46095eb..dfea818 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -43,6 +43,8 @@ pub enum AccumulatorError { InStoreTable(#[from] InStoreTableError), #[error("StarknetHandler error: {0}")] StarknetHandler(#[from] starknet_handler::StarknetHandlerError), + #[error("No headers found for block range {start_block} to {end_block}")] + EmptyHeaders { start_block: u64, end_block: u64 }, } #[derive(thiserror::Error, Debug)] @@ -59,4 +61,6 @@ pub enum ValidatorError { MMRError(#[from] MMRError), #[error("ProofGenerator error: {0}")] ProofGenerator(#[from] ProofGeneratorError), + #[error("Invalid proofs count {expected} != {actual}")] + InvalidProofsCount { expected: usize, actual: usize }, } diff --git a/crates/publisher/src/validator/validator.rs 
b/crates/publisher/src/validator/validator.rs index 50d3e03..bbd1ae8 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -1,6 +1,7 @@ use crate::core::ProofGenerator; use crate::errors::ValidatorError; -use guest_types::{BlocksValidityInput, MMRInput}; +use common::get_or_create_db_path; +use guest_types::{BlocksValidityInput, GuestProof, MMRInput}; use methods::{BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID}; use mmr::{PeaksOptions, MMR}; use mmr_utils::{initialize_mmr, StoreManager}; @@ -36,11 +37,12 @@ impl ValidatorBuilder { // Calculate batch index for the block let block_number = header.number; let batch_index = block_number as u64 / self.batch_size; + println!("batch index: {}", batch_index); // Get or initialize MMR for the batch if !mmrs.contains_key(&batch_index) { // Determine batch file name - let batch_file_name = format!("batch_{}.db", batch_index); + let batch_file_name = get_or_create_db_path(&format!("batch_{}.db", batch_index))?; // Check if batch file exists if !std::path::Path::new(&batch_file_name).exists() { return Err(ValidatorError::Store(store::StoreError::GetError)); @@ -71,10 +73,20 @@ impl ValidatorBuilder { .map(|(index, _)| *index) .collect(); + let batch_proofs = mmr.get_proofs(batch_block_indexes.clone(), None).await?; + // Convert MMR proofs to GuestProofs + let guest_proofs: Vec = batch_proofs + .into_iter() + .map(|proof| LocalGuestProof::from(proof).into()) + .collect(); + // Get and verify current MMR state let current_peaks = mmr.get_peaks(PeaksOptions::default()).await?; + println!("current peaks: {:?}", current_peaks); let current_elements_count = mmr.elements_count.get().await?; + println!("current elements count: {}", current_elements_count); let current_leaves_count = mmr.leaves_count.get().await?; + println!("current leaves count: {}", current_leaves_count); // Prepare MMR input let mmr_input = MMRInput::new( @@ -91,12 +103,15 @@ impl ValidatorBuilder { .cloned() 
.collect(); + if batch_headers.len() != guest_proofs.len() { + return Err(ValidatorError::InvalidProofsCount { + expected: batch_headers.len(), + actual: guest_proofs.len(), + }); + } // Prepare guest input - let blocks_validity_input = BlocksValidityInput::new( - batch_headers.clone(), - mmr_input.clone(), - batch_block_indexes, - ); + let blocks_validity_input = + BlocksValidityInput::new(batch_headers.clone(), mmr_input, guest_proofs); // Generate proof for this batch let proof = self @@ -117,3 +132,38 @@ impl ValidatorBuilder { Ok(all_valid) } } + +// Add this wrapper struct +pub struct LocalGuestProof { + pub element_index: usize, + pub element_hash: String, + pub siblings_hashes: Vec, + pub peaks_hashes: Vec, + pub elements_count: usize, +} + +// Implement From for the local wrapper type +impl From for LocalGuestProof { + fn from(proof: mmr::Proof) -> Self { + Self { + element_index: proof.element_index, + element_hash: proof.element_hash, + siblings_hashes: proof.siblings_hashes, + peaks_hashes: proof.peaks_hashes, + elements_count: proof.elements_count, + } + } +} + +// Add conversion from LocalGuestProof to GuestProof +impl From for GuestProof { + fn from(local: LocalGuestProof) -> Self { + Self { + element_index: local.element_index, + element_hash: local.element_hash, + siblings_hashes: local.siblings_hashes, + peaks_hashes: local.peaks_hashes, + elements_count: local.elements_count, + } + } +} From 103160dcf4fc4c061a5d0036c5a0f562e6040ee9 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Wed, 4 Dec 2024 14:50:07 +0800 Subject: [PATCH 02/54] feat(publisher): implement block validity verification using STARK proofs Add core functionality to verify L2 block headers using STARK proofs: - Implement proof generation and verification logic - Add error handling for image_id conversions - Support optional proof generation skipping for testing - Include test binary for verification flow validation This change provides the foundational logic for verifying block header 
validity and inclusion using zero-knowledge proofs, to be used by other components of the system. --- crates/guest-mmr/src/core.rs | 1 - crates/guest-mmr/src/helper.rs | 2 -- crates/publisher/bin/verify_blocks.rs | 16 ++++++++++------ crates/publisher/src/api/operations.rs | 6 ++++-- crates/publisher/src/core/proof_generator.rs | 4 ---- crates/publisher/src/errors.rs | 2 ++ crates/publisher/src/utils/types.rs | 19 ++++++++++++++++++- crates/publisher/src/validator/validator.rs | 20 +++++--------------- 8 files changed, 39 insertions(+), 31 deletions(-) diff --git a/crates/guest-mmr/src/core.rs b/crates/guest-mmr/src/core.rs index dd21044..5584e22 100644 --- a/crates/guest-mmr/src/core.rs +++ b/crates/guest-mmr/src/core.rs @@ -341,7 +341,6 @@ mod tests { mmr.append(APPEND_VALUE.to_string()).unwrap(); let proof = mmr.get_proof(1).unwrap(); - println!("proof: {:?}", proof); let is_valid = mmr .verify_proof(proof, INITIAL_PEAK_VALUE.to_string(), None) .unwrap(); diff --git a/crates/guest-mmr/src/helper.rs b/crates/guest-mmr/src/helper.rs index f1948e1..1b92018 100644 --- a/crates/guest-mmr/src/helper.rs +++ b/crates/guest-mmr/src/helper.rs @@ -85,14 +85,12 @@ pub fn hasher(data: Vec) -> Result { pub fn find_siblings(element_index: usize, elements_count: usize) -> Result, MMRError> { let mut leaf_index = element_index_to_leaf_index(element_index)?; - println!("leaf index: {}", leaf_index); let mut height = 0; let mut siblings = Vec::new(); let mut current_element_index = element_index; while current_element_index <= elements_count { let siblings_offset = (2 << height) - 1; - println!("siblings offset: {}", siblings_offset); if leaf_index % 2 == 1 { // right child siblings.push(current_element_index - siblings_offset); diff --git a/crates/publisher/bin/verify_blocks.rs b/crates/publisher/bin/verify_blocks.rs index 309aba9..852ce81 100644 --- a/crates/publisher/bin/verify_blocks.rs +++ b/crates/publisher/bin/verify_blocks.rs @@ -1,4 +1,5 @@ use clap::Parser; +use 
common::initialize_logger_and_env; use publisher::{db::DbConnection, prove_headers_validity_and_inclusion}; use tokio; @@ -20,6 +21,8 @@ struct Args { #[tokio::main] async fn main() -> Result<(), Box> { + initialize_logger_and_env()?; + let args = Args::parse(); // Fetch block headers @@ -31,17 +34,18 @@ async fn main() -> Result<(), Box> { // Verify blocks match prove_headers_validity_and_inclusion(&headers, Some(args.skip_proof)).await { Ok(result) => { - println!("Verification result: {}", result); - if result { - println!("All blocks are valid!"); - } else { - println!("Some blocks failed verification!"); + for proof in result { + proof.receipt().verify(proof.image_id()?)?; + let result = proof.journal().decode::()?; + tracing::info!("result: {}", result); } } Err(e) => { - eprintln!("Error during verification: {:?}", e); + tracing::error!("Error during verification: {:?}", e); } } + tracing::info!("All blocks are valid!"); + Ok(()) } diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index 9d6df80..1c8fbdb 100644 --- a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -1,4 +1,6 @@ -use crate::{core::AccumulatorBuilder, errors::PublisherError, validator::ValidatorBuilder}; +use crate::{ + core::AccumulatorBuilder, errors::PublisherError, utils::Stark, validator::ValidatorBuilder, +}; const DEFAULT_BATCH_SIZE: u64 = 1024; @@ -43,7 +45,7 @@ pub async fn prove_mmr_update( pub async fn prove_headers_validity_and_inclusion( headers: &Vec, skip_proof_verification: Option, -) -> Result { +) -> Result, PublisherError> { let skip_proof = skip_proof_verification.unwrap_or(false); let validator = ValidatorBuilder::new(DEFAULT_BATCH_SIZE, skip_proof).await?; diff --git a/crates/publisher/src/core/proof_generator.rs b/crates/publisher/src/core/proof_generator.rs index dae26b8..97e4469 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ 
-165,8 +165,6 @@ where Default::default() }; - println!("journal: {:?}", journal); - debug!("Generating calldata"); let calldata = if !skip_proof_verification { get_groth16_calldata(&groth16_proof, &get_risc0_vk(), CurveID::BN254).map_err( @@ -179,8 +177,6 @@ where vec![Felt::ZERO] }; - println!("calldata len: {:?}", calldata.len()); - info!("Successfully generated Groth16 proof"); Ok(Groth16::new(receipt, calldata)) }) diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index dfea818..9b7f34c 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -17,6 +17,8 @@ pub enum PublisherError { MMRUtils(#[from] MMRUtilsError), #[error("Headers Validator error: {0}")] Validator(#[from] ValidatorError), + #[error("Receipt error: invalid Stark proof receipt")] + ReceiptError, } #[derive(Error, Debug)] diff --git a/crates/publisher/src/utils/types.rs b/crates/publisher/src/utils/types.rs index 020bf28..ca52dc5 100644 --- a/crates/publisher/src/utils/types.rs +++ b/crates/publisher/src/utils/types.rs @@ -1,8 +1,10 @@ -use risc0_zkvm::Receipt; +use risc0_zkvm::{Journal, Receipt}; use serde::{Deserialize, Serialize}; use starknet_crypto::Felt; use starknet_handler::MmrState; +use crate::PublisherError; + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Groth16 { receipt: Receipt, @@ -38,6 +40,21 @@ impl Stark { method_id, } } + + pub fn receipt(&self) -> Receipt { + self.receipt.clone() + } + + pub fn journal(&self) -> Journal { + self.receipt.journal.clone() + } + + pub fn image_id(&self) -> Result<[u8; 32], PublisherError> { + self.image_id + .clone() + .try_into() + .map_err(|_| PublisherError::ReceiptError) + } } #[derive(Debug, Clone)] diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index bbd1ae8..2b20dfe 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -1,5 +1,5 @@ -use 
crate::core::ProofGenerator; use crate::errors::ValidatorError; +use crate::{core::ProofGenerator, utils::Stark}; use common::get_or_create_db_path; use guest_types::{BlocksValidityInput, GuestProof, MMRInput}; use methods::{BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID}; @@ -27,7 +27,7 @@ impl ValidatorBuilder { pub async fn verify_blocks_validity_and_inclusion( &self, headers: &Vec, - ) -> Result { + ) -> Result, ValidatorError> { // Map to store MMRs per batch index let mut mmrs: HashMap = HashMap::new(); let mut block_indexes = Vec::new(); @@ -37,7 +37,6 @@ impl ValidatorBuilder { // Calculate batch index for the block let block_number = header.number; let batch_index = block_number as u64 / self.batch_size; - println!("batch index: {}", batch_index); // Get or initialize MMR for the batch if !mmrs.contains_key(&batch_index) { @@ -82,11 +81,8 @@ impl ValidatorBuilder { // Get and verify current MMR state let current_peaks = mmr.get_peaks(PeaksOptions::default()).await?; - println!("current peaks: {:?}", current_peaks); let current_elements_count = mmr.elements_count.get().await?; - println!("current elements count: {}", current_elements_count); let current_leaves_count = mmr.leaves_count.get().await?; - println!("current leaves count: {}", current_leaves_count); // Prepare MMR input let mmr_input = MMRInput::new( @@ -116,20 +112,14 @@ impl ValidatorBuilder { // Generate proof for this batch let proof = self .proof_generator - .generate_groth16_proof(blocks_validity_input) + .generate_stark_proof(blocks_validity_input) .await?; - let guest_output: bool = self.proof_generator.decode_journal(&proof)?; - // Collect proofs or results - proofs.push(guest_output); + proofs.push(proof); } - // Combine results - // Assuming we need all proofs to be true - let all_valid = proofs.iter().all(|&result| result); - - Ok(all_valid) + Ok(proofs) } } From def00679b50ddd237b5374e10be89fafdfaedaf2 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 03:18:01 +0800 Subject: 
[PATCH 03/54] Improve error messages for better clarity and context - Enhanced error descriptions in PublisherError, AccumulatorError, and ValidatorError - Added more detailed context and potential causes for errors - Improved consistency in error message formatting - Added debugging suggestions for critical errors - Included potential causes for EmptyHeaders and InvalidProofsCount errors The improved error messages provide better context for debugging and make it easier to understand and resolve issues when they occur. Each error now includes more specific information about what went wrong and, where applicable, suggestions about potential causes or solutions. --- Cargo.lock | 304 ++++++++++---------- Cargo.toml | 2 +- crates/publisher/src/errors.rs | 54 ++-- crates/publisher/src/validator/validator.rs | 147 ++++++---- 4 files changed, 274 insertions(+), 233 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e638c52..4e59fa3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -52,9 +52,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy" @@ -450,7 +450,7 @@ dependencies = [ "foldhash", "hashbrown 0.15.2", "hex-literal", - "indexmap 2.6.0", + "indexmap 2.7.0", "itoa", "k256", "keccak-asm", @@ -580,7 +580,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -787,7 +787,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -800,11 +800,11 @@ dependencies = [ "alloy-sol-macro-input", "const-hex", "heck 0.5.0", - "indexmap 2.6.0", + "indexmap 2.7.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 
2.0.89", + "syn 2.0.90", "syn-solidity", "tiny-keccak", ] @@ -822,7 +822,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.89", + "syn 2.0.90", "syn-solidity", ] @@ -946,7 +946,7 @@ dependencies = [ "alloy-pubsub", "alloy-transport 0.6.4", "futures", - "http 1.1.0", + "http 1.2.0", "rustls 0.23.19", "serde_json", "tokio", @@ -1021,9 +1021,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "ark-bls12-381" @@ -1296,7 +1296,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1307,7 +1307,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1338,7 +1338,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1512,7 +1512,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1544,7 +1544,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1611,9 +1611,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" +checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc" dependencies = [ "shlex", ] @@ -1655,9 +1655,9 @@ dependencies = [ [[package]] name = 
"clap" -version = "4.5.21" +version = "4.5.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "69371e34337c4c984bbe322360c2547210bf632eb2814bbe78a6e87a2935bd2b" dependencies = [ "clap_builder", "clap_derive", @@ -1665,9 +1665,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "6e24c1b4099818523236a8ca881d2b45db98dadfb4625cf6608c12069fcbbde1" dependencies = [ "anstream", "anstyle", @@ -1684,7 +1684,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1704,7 +1704,7 @@ dependencies = [ "publisher", "starknet", "starknet-handler", - "thiserror 2.0.3", + "thiserror 2.0.4", "tokio", "tracing", ] @@ -1723,9 +1723,9 @@ dependencies = [ "dotenv", "ruint", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "thiserror 2.0.3", + "thiserror 2.0.4", "tracing", - "tracing-subscriber 0.3.18", + "tracing-subscriber 0.3.19", ] [[package]] @@ -1908,7 +1908,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1919,7 +1919,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1991,7 +1991,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "unicode-xid", ] @@ -2045,7 +2045,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2211,7 +2211,7 @@ dependencies = [ "rlp 0.6.1", "serde", "sha3", - "thiserror 
2.0.3", + "thiserror 2.0.4", "uint 0.10.0", ] @@ -2397,7 +2397,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2494,7 +2494,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2611,7 +2611,7 @@ dependencies = [ "num-bigint", "num-traits", "sha2 0.10.8 (git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0)", - "thiserror 2.0.3", + "thiserror 2.0.4", "tokio", ] @@ -2636,7 +2636,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.6.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -2683,7 +2683,7 @@ dependencies = [ [[package]] name = "hasher" version = "0.1.0" -source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat%2Fsha2-hasher#2e1c70db54fd319edd8ab87494cab574c3647b0f" +source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat%2Fsha2-hasher#a01329e4332ee3598ffc95538f3cf25352acf649" dependencies = [ "hex", "num-bigint", @@ -2693,7 +2693,7 @@ dependencies = [ "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "strum", "strum_macros", - "thiserror 2.0.3", + "thiserror 2.0.4", "tiny-keccak", ] @@ -2779,9 +2779,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -2806,7 +2806,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -2817,7 
+2817,7 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] @@ -2867,7 +2867,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "httparse", "itoa", @@ -2898,7 +2898,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", + "http 1.2.0", "hyper 1.5.1", "hyper-util", "rustls 0.23.19", @@ -2934,7 +2934,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "hyper 1.5.1", "pin-project-lite", @@ -3082,7 +3082,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3174,7 +3174,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3196,9 +3196,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", "hashbrown 0.15.2", @@ -3273,10 +3273,11 @@ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705" dependencies = [ + "once_cell", "wasm-bindgen", 
] @@ -3327,7 +3328,7 @@ dependencies = [ [[package]] name = "lambdaworks-crypto" version = "0.11.0" -source = "git+https://github.com/lambdaclass/lambdaworks.git#d016a730e106fa2ff61aa15596676fee615ba0aa" +source = "git+https://github.com/lambdaclass/lambdaworks.git#fc33967375fd4d9981455c2e1b4af521cf61913b" dependencies = [ "lambdaworks-math 0.11.0", "serde", @@ -3348,7 +3349,7 @@ dependencies = [ [[package]] name = "lambdaworks-math" version = "0.11.0" -source = "git+https://github.com/lambdaclass/lambdaworks.git#d016a730e106fa2ff61aa15596676fee615ba0aa" +source = "git+https://github.com/lambdaclass/lambdaworks.git#fc33967375fd4d9981455c2e1b4af521cf61913b" dependencies = [ "rayon", "serde", @@ -3375,7 +3376,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3389,9 +3390,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.166" +version = "0.2.167" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36" +checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" [[package]] name = "libm" @@ -3483,7 +3484,7 @@ checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3556,11 +3557,10 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi", "libc", "wasi", "windows-sys 0.52.0", @@ -3569,11 +3569,11 @@ dependencies = [ [[package]] name = "mmr" version = "0.1.0" -source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat%2Fsha2-hasher#2e1c70db54fd319edd8ab87494cab574c3647b0f" +source = 
"git+https://github.com/ametel01/rust-accumulators.git?branch=feat%2Fsha2-hasher#a01329e4332ee3598ffc95538f3cf25352acf649" dependencies = [ "hasher", "store", - "thiserror 2.0.3", + "thiserror 2.0.4", "uuid 1.11.0", ] @@ -3585,7 +3585,7 @@ dependencies = [ "mmr", "sqlx", "store", - "thiserror 2.0.3", + "thiserror 2.0.4", "tokio", "uuid 1.11.0", ] @@ -3717,7 +3717,7 @@ checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3767,7 +3767,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3802,29 +3802,28 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parity-scale-codec" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arrayvec", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", - "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.7.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 1.0.109", ] [[package]] @@ -3924,7 +3923,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4065,7 +4064,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] 
[[package]] @@ -4117,7 +4116,7 @@ dependencies = [ "itertools 0.13.0", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4143,7 +4142,7 @@ dependencies = [ "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-handler", "store", - "thiserror 2.0.3", + "thiserror 2.0.4", "tokio", "tracing", ] @@ -4196,7 +4195,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4209,7 +4208,7 @@ dependencies = [ "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4231,7 +4230,7 @@ dependencies = [ "rustc-hash", "rustls 0.23.19", "socket2", - "thiserror 2.0.3", + "thiserror 2.0.4", "tokio", "tracing", ] @@ -4250,7 +4249,7 @@ dependencies = [ "rustls 0.23.19", "rustls-pki-types", "slab", - "thiserror 2.0.3", + "thiserror 2.0.4", "tinyvec", "tracing", "web-time", @@ -4424,7 +4423,7 @@ dependencies = [ "alloy-sol-types", "common", "eyre", - "thiserror 2.0.3", + "thiserror 2.0.4", "tokio", "tracing", ] @@ -4481,7 +4480,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -4555,7 +4554,7 @@ dependencies = [ "risc0-zkp", "risc0-zkvm-platform", "serde", - "syn 2.0.89", + "syn 2.0.90", "tracing", ] @@ -4806,9 +4805,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" [[package]] name = "rustc-hex" @@ -5089,7 +5088,7 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5137,7 +5136,7 @@ 
dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_derive", "serde_json", @@ -5154,7 +5153,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5334,7 +5333,7 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.6.0", + "indexmap 2.7.0", "log", "memchr", "native-tls", @@ -5363,7 +5362,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5386,7 +5385,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.89", + "syn 2.0.90", "tempfile", "tokio", "url", @@ -5502,7 +5501,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac" dependencies = [ "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5514,7 +5513,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "starknet" version = "0.12.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "starknet-accounts", "starknet-contract", @@ -5529,7 +5528,7 @@ dependencies = [ [[package]] name = "starknet-accounts" version = "0.11.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "async-trait", "auto_impl", @@ -5543,7 +5542,7 @@ dependencies = [ [[package]] name = "starknet-contract" version = "0.11.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = 
"git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "serde", "serde_json", @@ -5557,7 +5556,7 @@ dependencies = [ [[package]] name = "starknet-core" version = "0.12.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "base64 0.21.7", "crypto-bigint", @@ -5577,11 +5576,11 @@ dependencies = [ [[package]] name = "starknet-core-derive" version = "0.1.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5606,7 +5605,7 @@ dependencies = [ [[package]] name = "starknet-crypto" version = "0.7.3" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "crypto-bigint", "hex", @@ -5633,7 +5632,7 @@ dependencies = [ [[package]] name = "starknet-curve" version = "0.5.1" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "starknet-types-core", ] @@ -5646,23 +5645,23 @@ dependencies = [ "crypto-bigint", "starknet", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "thiserror 2.0.3", + "thiserror 2.0.4", "url", ] [[package]] name = "starknet-macros" version = "0.2.1" -source = 
"git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "starknet-core", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "starknet-providers" version = "0.12.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "async-trait", "auto_impl", @@ -5682,7 +5681,7 @@ dependencies = [ [[package]] name = "starknet-signers" version = "0.10.0" -source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" +source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#093d5ecbbd0929720db38a1ca34d516ccb807398" dependencies = [ "async-trait", "auto_impl", @@ -5718,13 +5717,13 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "store" version = "0.1.0" -source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat%2Fsha2-hasher#2e1c70db54fd319edd8ab87494cab574c3647b0f" +source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat%2Fsha2-hasher#a01329e4332ee3598ffc95538f3cf25352acf649" dependencies = [ "async-trait", "futures", "parking_lot", "sqlx", - "thiserror 2.0.3", + "thiserror 2.0.4", ] [[package]] @@ -5763,7 +5762,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5785,9 +5784,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.89" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = 
"919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2", "quote", @@ -5803,7 +5802,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5829,7 +5828,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5889,11 +5888,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.3" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" +checksum = "2f49a1853cf82743e3b7950f77e0f4d622ca36cf4317cba00c767838bac8d490" dependencies = [ - "thiserror-impl 2.0.3", + "thiserror-impl 2.0.4", ] [[package]] @@ -5904,18 +5903,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "thiserror-impl" -version = "2.0.3" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" +checksum = "8381894bb3efe0c4acac3ded651301ceee58a15d47c2e34885ed1908ad667061" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5939,9 +5938,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -5960,9 +5959,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -6004,9 +6003,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.41.1" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", @@ -6028,7 +6027,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6092,9 +6091,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -6115,7 +6114,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "toml_datetime", "winnow", ] @@ -6166,7 +6165,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6201,9 +6200,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" 
dependencies = [ "matchers", "nu-ansi-term", @@ -6232,7 +6231,7 @@ dependencies = [ "byteorder", "bytes", "data-encoding", - "http 1.1.0", + "http 1.2.0", "httparse", "log", "rand", @@ -6440,9 +6439,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c" dependencies = [ "cfg-if", "once_cell", @@ -6451,36 +6450,37 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "9dfaf8f50e5f293737ee323940c7d8b08a66a95a419223d9f41610ca08b0833d" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6488,22 +6488,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.97" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49" [[package]] name = "wasm-streams" @@ -6534,9 +6534,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "a98bc3c33f0fe7e59ad7cd041b89034fa82a7c2d4365ca538dda6cdaf513863c" dependencies = [ "js-sys", "wasm-bindgen", @@ -6871,7 +6871,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "synstructure", ] @@ -6893,7 +6893,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6913,7 +6913,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", "synstructure", ] @@ -6934,7 +6934,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6956,5 +6956,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.90", ] diff --git 
a/Cargo.toml b/Cargo.toml index 8b3c4d0..f393603 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,7 +31,7 @@ sqlx = { version = "0.8.2", features = [ "runtime-tokio-native-tls", ] } eyre = "0.6" -thiserror = "2.0.3" +thiserror = "2.0.4" tracing = "0.1.40" tokio = "1.41.1" dotenv = "0.15" diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index 9b7f34c..c5191a0 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -7,62 +7,66 @@ use crate::core::ProofGeneratorError; #[derive(Error, Debug)] pub enum PublisherError { - #[error("Verification result is empty")] + #[error("Verification failed: no verification result was produced")] VerificationError, - #[error("Accumulator error: {0}")] + #[error("Accumulator operation failed: {0}")] Accumulator(#[from] AccumulatorError), - #[error("StarknetHandler error: {0}")] + #[error("Starknet interaction failed: {0}")] StarknetHandler(#[from] starknet_handler::StarknetHandlerError), - #[error("MMRUtils error: {0}")] + #[error("MMR utilities operation failed: {0}")] MMRUtils(#[from] MMRUtilsError), - #[error("Headers Validator error: {0}")] + #[error("Header validation failed: {0}")] Validator(#[from] ValidatorError), - #[error("Receipt error: invalid Stark proof receipt")] + #[error("Invalid Stark proof receipt: receipt format or signature verification failed")] ReceiptError, } #[derive(Error, Debug)] pub enum AccumulatorError { - #[error("Invalid state transition: elements count decreased")] + #[error( + "Invalid state transition detected: total elements count decreased from previous state" + )] InvalidStateTransition, - #[error("Failed to verify stored peaks after update")] + #[error( + "Peak verification failed: stored peaks hash doesn't match computed peaks after update" + )] PeaksVerificationError, - #[error("MMR root is not a valid Starknet field element: {0}")] + #[error("Invalid MMR root format: value '{0}' cannot be converted to a valid Starknet field element")] 
InvalidU256Hex(String), - #[error("SQLx error: {0}")] + #[error("Database operation failed: {0}")] Sqlx(#[from] sqlx::Error), - #[error("Utils error: {0}")] + #[error("Utility operation failed: {0}")] Utils(#[from] UtilsError), - #[error("MMR error: {0}")] + #[error("MMR operation failed: {0}")] MMRError(#[from] MMRError), - #[error("Store error: {0}")] + #[error("Storage operation failed: {0}")] Store(#[from] StoreError), - #[error("ProofGenerator error: {0}")] + #[error("Proof generation failed: {0}")] ProofGenerator(#[from] ProofGeneratorError), - #[error("MMRUtils error: {0}")] + #[error("MMR utilities operation failed: {0}")] MMRUtils(#[from] MMRUtilsError), - #[error("InStoreTable error: {0}")] + #[error("In-store table operation failed: {0}")] InStoreTable(#[from] InStoreTableError), - #[error("StarknetHandler error: {0}")] + #[error("Starknet interaction failed: {0}")] StarknetHandler(#[from] starknet_handler::StarknetHandlerError), - #[error("No headers found for block range {start_block} to {end_block}")] + #[error("No headers available for block range {start_block} to {end_block}. 
The range might be invalid or the data might not be synced")] EmptyHeaders { start_block: u64, end_block: u64 }, } #[derive(thiserror::Error, Debug)] pub enum ValidatorError { - #[error("Utils error: {0}")] + #[error("Utility operation failed: {0}")] Utils(#[from] common::UtilsError), - #[error("MMR error: {0}")] + #[error("MMR utilities operation failed: {0}")] MMRUtils(#[from] mmr_utils::MMRUtilsError), - #[error("Store error: {0}")] + #[error("Database operation failed: {0}")] Sqlx(#[from] sqlx::Error), - #[error("Store error: {0}")] + #[error("Storage operation failed: {0}")] Store(#[from] store::StoreError), - #[error("MMR error: {0}")] + #[error("MMR operation failed: {0}")] MMRError(#[from] MMRError), - #[error("ProofGenerator error: {0}")] + #[error("Proof generation failed: {0}")] ProofGenerator(#[from] ProofGeneratorError), - #[error("Invalid proofs count {expected} != {actual}")] + #[error("Proof count mismatch: expected {expected} proofs but found {actual}. This might indicate data corruption or synchronization issues")] InvalidProofsCount { expected: usize, actual: usize }, } diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index 2b20dfe..ca76ba9 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -28,76 +28,70 @@ impl ValidatorBuilder { &self, headers: &Vec, ) -> Result, ValidatorError> { - // Map to store MMRs per batch index - let mut mmrs: HashMap = HashMap::new(); - let mut block_indexes = Vec::new(); + let mmrs = self.initialize_mmrs_for_headers(headers).await?; + let block_indexes = self.collect_block_indexes(headers, &mmrs).await?; + self.generate_proofs_for_batches(headers, &mmrs, &block_indexes) + .await + } + + async fn initialize_mmrs_for_headers( + &self, + headers: &[eth_rlp_types::BlockHeader], + ) -> Result, ValidatorError> { + let mut mmrs = HashMap::new(); - // For each header, determine its batch index and process 
accordingly - for header in headers.iter() { - // Calculate batch index for the block - let block_number = header.number; - let batch_index = block_number as u64 / self.batch_size; + for header in headers { + let batch_index = header.number as u64 / self.batch_size; - // Get or initialize MMR for the batch if !mmrs.contains_key(&batch_index) { - // Determine batch file name let batch_file_name = get_or_create_db_path(&format!("batch_{}.db", batch_index))?; - // Check if batch file exists if !std::path::Path::new(&batch_file_name).exists() { return Err(ValidatorError::Store(store::StoreError::GetError)); } - // Initialize MMR for the batch - let (store_manager, mmr, pool) = initialize_mmr(&batch_file_name).await?; - mmrs.insert(batch_index, (store_manager, mmr, pool)); + let mmr_components = initialize_mmr(&batch_file_name).await?; + mmrs.insert(batch_index, mmr_components); } + } + + Ok(mmrs) + } - // Retrieve the MMR and store manager for the batch + async fn collect_block_indexes( + &self, + headers: &[eth_rlp_types::BlockHeader], + mmrs: &HashMap, + ) -> Result, ValidatorError> { + let mut block_indexes = Vec::new(); + + for header in headers { + let batch_index = header.number as u64 / self.batch_size; let (store_manager, _, pool) = mmrs.get(&batch_index).unwrap(); - // Get the index of the block hash in the MMR let index = store_manager .get_element_index_for_value(pool, &header.block_hash) .await? 
.ok_or(ValidatorError::Store(store::StoreError::GetError))?; + block_indexes.push((index, batch_index)); } - // For each batch, prepare MMR inputs and generate proofs + Ok(block_indexes) + } + + async fn generate_proofs_for_batches( + &self, + headers: &[eth_rlp_types::BlockHeader], + mmrs: &HashMap, + block_indexes: &[(usize, u64)], + ) -> Result, ValidatorError> { let mut proofs = Vec::new(); - for (batch_index, (_store_manager, mmr, _pool)) in mmrs.iter() { - // Get block indexes for this batch - let batch_block_indexes: Vec = block_indexes - .iter() - .filter(|(_, idx)| idx == batch_index) - .map(|(index, _)| *index) - .collect(); - - let batch_proofs = mmr.get_proofs(batch_block_indexes.clone(), None).await?; - // Convert MMR proofs to GuestProofs - let guest_proofs: Vec = batch_proofs - .into_iter() - .map(|proof| LocalGuestProof::from(proof).into()) - .collect(); - - // Get and verify current MMR state - let current_peaks = mmr.get_peaks(PeaksOptions::default()).await?; - let current_elements_count = mmr.elements_count.get().await?; - let current_leaves_count = mmr.leaves_count.get().await?; - - // Prepare MMR input - let mmr_input = MMRInput::new( - current_peaks.clone(), - current_elements_count, - current_leaves_count, - vec![], // No new leaves to append - ); - - // Get headers for this batch - let batch_headers: Vec = headers - .iter() - .filter(|header| header.number as u64 / self.batch_size == *batch_index) - .cloned() - .collect(); + + for (batch_index, (_, mmr, _)) in mmrs { + let batch_block_indexes = self.get_batch_block_indexes(block_indexes, *batch_index); + let batch_headers = self.get_batch_headers(headers, *batch_index); + + let batch_proofs = mmr.get_proofs(&batch_block_indexes, None).await?; + let guest_proofs = self.convert_to_guest_proofs(batch_proofs); if batch_headers.len() != guest_proofs.len() { return Err(ValidatorError::InvalidProofsCount { @@ -105,22 +99,65 @@ impl ValidatorBuilder { actual: guest_proofs.len(), }); } - // Prepare 
guest input + + let mmr_input = self.prepare_mmr_input(mmr).await?; let blocks_validity_input = - BlocksValidityInput::new(batch_headers.clone(), mmr_input, guest_proofs); + BlocksValidityInput::new(batch_headers, mmr_input, guest_proofs); - // Generate proof for this batch let proof = self .proof_generator .generate_stark_proof(blocks_validity_input) .await?; - // Collect proofs or results proofs.push(proof); } Ok(proofs) } + + fn get_batch_block_indexes( + &self, + block_indexes: &[(usize, u64)], + batch_index: u64, + ) -> Vec { + block_indexes + .iter() + .filter(|(_, idx)| *idx == batch_index) + .map(|(index, _)| *index) + .collect() + } + + fn get_batch_headers( + &self, + headers: &[eth_rlp_types::BlockHeader], + batch_index: u64, + ) -> Vec { + headers + .iter() + .filter(|header| header.number as u64 / self.batch_size == batch_index) + .cloned() + .collect() + } + + fn convert_to_guest_proofs(&self, batch_proofs: Vec) -> Vec { + batch_proofs + .into_iter() + .map(|proof| LocalGuestProof::from(proof).into()) + .collect() + } + + async fn prepare_mmr_input(&self, mmr: &MMR) -> Result { + let current_peaks = mmr.get_peaks(PeaksOptions::default()).await?; + let current_elements_count = mmr.elements_count.get().await?; + let current_leaves_count = mmr.leaves_count.get().await?; + + Ok(MMRInput::new( + current_peaks, + current_elements_count, + current_leaves_count, + vec![], + )) + } } // Add this wrapper struct From 6fd59e85564df3f2d5f7f3e60fd04a9837c6021e Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Thu, 5 Dec 2024 03:21:04 +0800 Subject: [PATCH 04/54] chore: cleaup logs --- contracts/starknet/verifier/src/fossil_verifier.cairo | 1 - crates/methods/mmr-append/src/main.rs | 7 ------- 2 files changed, 8 deletions(-) diff --git a/contracts/starknet/verifier/src/fossil_verifier.cairo b/contracts/starknet/verifier/src/fossil_verifier.cairo index 173e90b..e513d9c 100644 --- a/contracts/starknet/verifier/src/fossil_verifier.cairo +++ 
b/contracts/starknet/verifier/src/fossil_verifier.cairo @@ -48,7 +48,6 @@ mod FossilVerifier { #[external(v0)] fn verify_mmr_proof(ref self: ContractState, proof: Span,) -> bool { let (verified, journal) = self.bn254_verifier.read().verify_groth16_proof_bn254(proof); - println!("journal: {:?}", journal); let (new_mmr_root, new_leaves_count, batch_index, latest_mmr_block) = decode_journal( journal diff --git a/crates/methods/mmr-append/src/main.rs b/crates/methods/mmr-append/src/main.rs index 75cebd7..7e31401 100644 --- a/crates/methods/mmr-append/src/main.rs +++ b/crates/methods/mmr-append/src/main.rs @@ -34,8 +34,6 @@ fn main() { let root_hash = mmr.calculate_root_hash(mmr.get_elements_count()).unwrap(); - eprintln!("input size: {:?}", input.headers().len()); - let first_header = input.headers().first().unwrap(); let last_header = input.headers().last().unwrap(); @@ -47,11 +45,6 @@ fn main() { assert!(first_batch_index == last_batch_index, "Batch index mismatch"); - eprintln!("root hash: {:?}", root_hash); - eprintln!("leaves count: {:?}", mmr.get_leaves_count()); - eprintln!("batch index: {:?}", first_batch_index); - eprintln!("latest mmr block: {:?}", last_block_number); - // Create output let output = GuestOutput::new( root_hash, From d792d6a41b513a228fb2de3e8536f9f0443ecb8e Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 03:29:00 +0800 Subject: [PATCH 05/54] feat(logging): enhance tracing across proof generation pipeline - Add structured logging with spans for better context tracking - Improve error logging with detailed context and error messages - Add high-level progress tracking for proof generation - Implement consistent logging patterns across MMR state management - Optimize log verbosity to focus on key operational events - Add debug logs for detailed troubleshooting capabilities --- crates/publisher/src/api/operations.rs | 47 +++++--- crates/publisher/src/core/accumulator.rs | 114 ++++++++++++++++-- 
crates/publisher/src/core/batch_processor.rs | 86 +++++++++---- .../publisher/src/core/mmr_state_manager.rs | 88 ++++++++++++-- crates/publisher/src/core/proof_generator.rs | 46 ++++--- 5 files changed, 300 insertions(+), 81 deletions(-) diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index 1c8fbdb..151e9d6 100644 --- a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -1,6 +1,7 @@ use crate::{ core::AccumulatorBuilder, errors::PublisherError, utils::Stark, validator::ValidatorBuilder, }; +use tracing::{span, Level}; const DEFAULT_BATCH_SIZE: u64 = 1024; @@ -14,6 +15,9 @@ pub async fn prove_mmr_update( end_block: u64, skip_proof_verification: bool, ) -> Result<(), PublisherError> { + let span = span!(Level::INFO, "prove_mmr_update", start_block, end_block); + let _enter = span.enter(); + let mut builder = AccumulatorBuilder::new( rpc_url, verifier_address, @@ -22,22 +26,23 @@ pub async fn prove_mmr_update( batch_size, skip_proof_verification, ) - .await?; + .await + .map_err(|e| { + tracing::error!(error = %e, "Failed to create AccumulatorBuilder"); + e + })?; - tracing::debug!( - start_block, - end_block, - "Starting MMR update and proof generation" - ); + tracing::info!("Starting MMR update and proof generation"); builder .update_mmr_with_new_headers(start_block, end_block) - .await?; - tracing::debug!( - start_block, - end_block, - "Successfully generated proof for block range" - ); + .await + .map_err(|e| { + tracing::error!(error = %e, "Failed to update MMR with new headers"); + e + })?; + + tracing::info!("Successfully generated proof for block range"); Ok(()) } @@ -46,12 +51,26 @@ pub async fn prove_headers_validity_and_inclusion( headers: &Vec, skip_proof_verification: Option, ) -> Result, PublisherError> { + let span = span!(Level::INFO, "prove_headers_validity_and_inclusion"); + let _enter = span.enter(); + let skip_proof = skip_proof_verification.unwrap_or(false); - let 
validator = ValidatorBuilder::new(DEFAULT_BATCH_SIZE, skip_proof).await?; + let validator = ValidatorBuilder::new(DEFAULT_BATCH_SIZE, skip_proof) + .await + .map_err(|e| { + tracing::error!(error = %e, "Failed to create ValidatorBuilder"); + e + })?; let result = validator .verify_blocks_validity_and_inclusion(headers) - .await?; + .await + .map_err(|e| { + tracing::error!(error = %e, "Failed to verify blocks validity and inclusion"); + e + })?; + + tracing::info!("Successfully verified blocks validity and inclusion"); Ok(result) } diff --git a/crates/publisher/src/core/accumulator.rs b/crates/publisher/src/core/accumulator.rs index f71b758..33df994 100644 --- a/crates/publisher/src/core/accumulator.rs +++ b/crates/publisher/src/core/accumulator.rs @@ -5,7 +5,7 @@ use ethereum::get_finalized_block_hash; use methods::{MMR_APPEND_ELF, MMR_APPEND_ID}; use starknet_crypto::Felt; use starknet_handler::{account::StarknetAccount, provider::StarknetProvider}; -use tracing::{debug, info}; +use tracing::{debug, error, info, span, warn, Level}; pub struct AccumulatorBuilder<'a> { rpc_url: &'a String, @@ -26,6 +26,15 @@ impl<'a> AccumulatorBuilder<'a> { batch_size: u64, skip_proof_verification: bool, ) -> Result { + let span = span!( + Level::INFO, + "accumulator_builder_new", + batch_size, + skip_proof_verification + ); + let _enter = span.enter(); + + info!("Initializing AccumulatorBuilder"); let proof_generator = ProofGenerator::new(MMR_APPEND_ELF, MMR_APPEND_ID, skip_proof_verification); @@ -49,33 +58,60 @@ impl<'a> AccumulatorBuilder<'a> { &mut self, num_batches: u64, ) -> Result<(), AccumulatorError> { - let (finalized_block_number, _) = get_finalized_block_hash().await?; - info!("Building MMR..."); + let span = span!(Level::INFO, "build_with_num_batches", num_batches); + let _enter = span.enter(); + + let (finalized_block_number, _) = get_finalized_block_hash().await.map_err(|e| { + error!(error = %e, "Failed to get finalized block hash"); + e + })?; + + info!( + 
finalized_block_number, + num_batches, "Starting MMR build with specified number of batches" + ); self.total_batches = num_batches; self.current_batch = 0; - let mut current_end = finalized_block_number; - for _ in 0..num_batches { + for batch_num in 0..num_batches { if current_end == 0 { + warn!("Reached block 0 before completing all batches"); break; } let start_block = self.batch_processor.calculate_start_block(current_end); + debug!(batch_num, start_block, current_end, "Processing batch"); + let result = self .batch_processor .process_batch(start_block, current_end) - .await?; + .await + .map_err(|e| { + error!( + error = %e, + batch_num, + start_block, + current_end, + "Failed to process batch" + ); + e + })?; if let Some(batch_result) = result { self.handle_batch_result(&batch_result).await?; self.current_batch += 1; + info!( + progress = format!("{}/{}", self.current_batch, self.total_batches), + "Batch processed successfully" + ); } current_end = start_block.saturating_sub(1); } + info!("MMR build completed successfully"); Ok(()) } @@ -111,12 +147,20 @@ impl<'a> AccumulatorBuilder<'a> { start_block: u64, end_block: u64, ) -> Result<(), AccumulatorError> { + let span = span!( + Level::INFO, + "update_mmr_with_new_headers", + start_block, + end_block + ); + let _enter = span.enter(); + let mut current_end = end_block; let mut batch_results = Vec::new(); - debug!( - "Updating MMR with blocks from {} to {}", - start_block, end_block + info!( + start_block, + end_block, "Starting MMR update with new headers" ); while current_end >= start_block { @@ -124,21 +168,46 @@ impl<'a> AccumulatorBuilder<'a> { .batch_processor .calculate_batch_range(current_end, start_block); + debug!( + batch_start = batch_range.start, + batch_end = batch_range.end, + "Processing batch range" + ); + if let Some(result) = self .batch_processor .process_batch(batch_range.start, batch_range.end) - .await? 
+ .await + .map_err(|e| { + error!( + error = %e, + batch_start = batch_range.start, + batch_end = batch_range.end, + "Failed to process batch" + ); + e + })? { self.handle_batch_result(&result).await?; batch_results.push((result.proof().calldata(), result.new_mmr_state())); + info!( + batch_start = batch_range.start, + batch_end = batch_range.end, + "Batch processed successfully" + ); } current_end = batch_range.start.saturating_sub(1); } if batch_results.is_empty() { + error!(start_block, end_block, "No batch results generated"); Err(AccumulatorError::InvalidStateTransition) } else { + info!( + total_batches = batch_results.len(), + "MMR update completed successfully" + ); Ok(()) } } @@ -154,17 +223,36 @@ impl<'a> AccumulatorBuilder<'a> { } async fn verify_proof(&self, calldata: Vec) -> Result<(), AccumulatorError> { - let starknet_provider = StarknetProvider::new(&self.rpc_url)?; + let span = span!(Level::DEBUG, "verify_proof", calldata_len = calldata.len()); + let _enter = span.enter(); + + debug!("Initializing Starknet provider"); + let starknet_provider = StarknetProvider::new(&self.rpc_url).map_err(|e| { + error!(error = %e, "Failed to initialize Starknet provider"); + e + })?; + + debug!("Creating Starknet account"); let starknet_account = StarknetAccount::new( starknet_provider.provider(), &self.account_private_key, &self.account_address, - )?; + ) + .map_err(|e| { + error!(error = %e, "Failed to create Starknet account"); + e + })?; + debug!("Verifying MMR proof"); starknet_account .verify_mmr_proof(&self.verifier_address, calldata) - .await?; + .await + .map_err(|e| { + error!(error = %e, "Failed to verify MMR proof"); + e + })?; + debug!("MMR proof verified successfully"); Ok(()) } } diff --git a/crates/publisher/src/core/batch_processor.rs b/crates/publisher/src/core/batch_processor.rs index 9991dee..063f403 100644 --- a/crates/publisher/src/core/batch_processor.rs +++ b/crates/publisher/src/core/batch_processor.rs @@ -6,7 +6,7 @@ use 
common::get_or_create_db_path; use guest_types::{CombinedInput, GuestOutput, MMRInput}; use mmr::PeaksOptions; use mmr_utils::initialize_mmr; -use tracing::{debug, error, info}; +use tracing::{debug, error, info, span, Level}; pub struct BatchProcessor { batch_size: u64, @@ -40,6 +40,9 @@ impl BatchProcessor { start_block: u64, end_block: u64, ) -> Result, AccumulatorError> { + let span = span!(Level::INFO, "process_batch", start_block, end_block); + let _enter = span.enter(); + let batch_index = start_block / self.batch_size; let (_, batch_end) = self.calculate_batch_bounds(batch_index); let adjusted_end_block = std::cmp::min(end_block, batch_end); @@ -49,26 +52,40 @@ impl BatchProcessor { batch_index, start_block, adjusted_end_block ); - // Initialize MMR for this batch - let batch_file_name = get_or_create_db_path(&format!("batch_{}.db", batch_index))?; + let batch_file_name = + get_or_create_db_path(&format!("batch_{}.db", batch_index)).map_err(|e| { + error!(error = %e, "Failed to get or create DB path"); + e + })?; debug!("Using batch file: {}", batch_file_name); - let (store_manager, mut mmr, pool) = initialize_mmr(&batch_file_name).await?; + let (store_manager, mut mmr, pool) = + initialize_mmr(&batch_file_name).await.map_err(|e| { + error!(error = %e, "Failed to initialize MMR"); + e + })?; - // Check if batch is already complete - let current_leaves_count = mmr.leaves_count.get().await?; + let current_leaves_count = mmr.leaves_count.get().await.map_err(|e| { + error!(error = %e, "Failed to get current leaves count"); + e + })?; if current_leaves_count as u64 >= self.batch_size { debug!("Batch {} is already complete", batch_index); return Ok(None); } - // Fetch block headers - let db_connection = DbConnection::new().await?; + let db_connection = DbConnection::new().await.map_err(|e| { + error!(error = %e, "Failed to create DB connection"); + e + })?; let headers = db_connection .get_block_headers_by_block_range(start_block, adjusted_end_block) - .await?; + 
.await + .map_err(|e| { + error!(error = %e, "Failed to fetch block headers"); + e + })?; - // Check if headers array is empty if headers.is_empty() { error!( "No headers found for block range {} to {}", @@ -80,14 +97,21 @@ impl BatchProcessor { }); } - // Prepare MMR input - let current_peaks = mmr.get_peaks(PeaksOptions::default()).await?; - let current_elements_count = mmr.elements_count.get().await?; - let current_leaves_count = mmr.leaves_count.get().await?; + let current_peaks = mmr.get_peaks(PeaksOptions::default()).await.map_err(|e| { + error!(error = %e, "Failed to get current peaks"); + e + })?; + let current_elements_count = mmr.elements_count.get().await.map_err(|e| { + error!(error = %e, "Failed to get current elements count"); + e + })?; + let current_leaves_count = mmr.leaves_count.get().await.map_err(|e| { + error!(error = %e, "Failed to get current leaves count"); + e + })?; let new_headers: Vec = headers.iter().map(|h| h.block_hash.clone()).collect(); - // Create MMR input let mmr_input = MMRInput::new( current_peaks, current_elements_count, @@ -95,27 +119,31 @@ impl BatchProcessor { new_headers.clone(), ); - // Create combined input for proof generation let combined_input = CombinedInput::new(headers.clone(), mmr_input, self.skip_proof_verification); - // Generate proof let proof = self .proof_generator .generate_groth16_proof(combined_input) - .await?; + .await + .map_err(|e| { + error!(error = %e, "Failed to generate proof"); + e + })?; debug!("Generated proof with {} elements", proof.calldata().len()); - // Decode guest output - let guest_output: GuestOutput = self.proof_generator.decode_journal(&proof)?; + let guest_output: GuestOutput = + self.proof_generator.decode_journal(&proof).map_err(|e| { + error!(error = %e, "Failed to decode guest output"); + e + })?; debug!( "Guest output - root_hash: {}, leaves_count: {}", guest_output.root_hash(), guest_output.leaves_count() ); - // Update MMR state let new_mmr_state = 
MMRStateManager::update_state( store_manager, &mut mmr, @@ -124,14 +152,20 @@ impl BatchProcessor { &guest_output, &new_headers, ) - .await?; - - // Check if batch is now complete - let new_leaves_count = mmr.leaves_count.get().await?; + .await + .map_err(|e| { + error!(error = %e, "Failed to update MMR state"); + e + })?; + + let new_leaves_count = mmr.leaves_count.get().await.map_err(|e| { + error!(error = %e, "Failed to get new leaves count"); + e + })?; let batch_is_complete = new_leaves_count as u64 >= self.batch_size; if batch_is_complete { - debug!("Batch {} is now complete", batch_index); + info!("Batch {} is now complete", batch_index); } Ok(Some(BatchResult::new( diff --git a/crates/publisher/src/core/mmr_state_manager.rs b/crates/publisher/src/core/mmr_state_manager.rs index 6ee93b8..aa84ad9 100644 --- a/crates/publisher/src/core/mmr_state_manager.rs +++ b/crates/publisher/src/core/mmr_state_manager.rs @@ -5,6 +5,7 @@ use mmr::MMR; use mmr_utils::StoreManager; use starknet_handler::{u256_from_hex, MmrState}; use store::SqlitePool; +use tracing::{debug, error, info, span, Level}; pub struct MMRStateManager; @@ -17,9 +18,33 @@ impl MMRStateManager { guest_output: &GuestOutput, headers: &Vec, ) -> Result { - Self::append_headers(store_manager, mmr, pool, headers).await?; - Self::verify_mmr_state(mmr, guest_output).await?; - let new_mmr_state = Self::create_new_state(latest_block_number, guest_output).await?; + let span = span!(Level::INFO, "update_state", latest_block_number); + let _enter = span.enter(); + + info!("Updating MMR state"); + + Self::append_headers(store_manager, mmr, pool, headers) + .await + .map_err(|e| { + error!(error = %e, "Failed to append headers"); + e + })?; + + Self::verify_mmr_state(mmr, guest_output) + .await + .map_err(|e| { + error!(error = %e, "Failed to verify MMR state"); + e + })?; + + let new_mmr_state = Self::create_new_state(latest_block_number, guest_output) + .await + .map_err(|e| { + error!(error = %e, "Failed to 
create new MMR state"); + e + })?; + + info!("MMR state updated successfully"); Ok(new_mmr_state) } @@ -30,12 +55,21 @@ impl MMRStateManager { pool: &SqlitePool, headers: &Vec, ) -> Result<(), AccumulatorError> { + debug!("Appending headers to MMR"); for hash in headers { - let append_result = mmr.append(hash.clone()).await?; + let append_result = mmr.append(hash.clone()).await.map_err(|e| { + error!(error = %e, "Failed to append hash to MMR"); + e + })?; store_manager .insert_value_index_mapping(&pool, &hash, append_result.element_index) - .await?; + .await + .map_err(|e| { + error!(error = %e, "Failed to insert value index mapping"); + e + })?; } + debug!("Headers appended successfully"); Ok(()) } @@ -43,20 +77,42 @@ impl MMRStateManager { mmr: &MMR, guest_output: &GuestOutput, ) -> Result<(), AccumulatorError> { - if mmr.leaves_count.get().await? != guest_output.leaves_count() as usize { + debug!("Verifying MMR state"); + if mmr.leaves_count.get().await.map_err(|e| { + error!(error = %e, "Failed to get leaves count"); + e + })? 
!= guest_output.leaves_count() as usize + { + error!("Leaves count mismatch"); return Err(AccumulatorError::InvalidStateTransition); } - let new_element_count = mmr.elements_count.get().await?; - let bag = mmr.bag_the_peaks(None).await?; - let new_root_hash = mmr.calculate_root_hash(&bag, new_element_count)?; + let new_element_count = mmr.elements_count.get().await.map_err(|e| { + error!(error = %e, "Failed to get elements count"); + e + })?; + let bag = mmr.bag_the_peaks(None).await.map_err(|e| { + error!(error = %e, "Failed to bag the peaks"); + e + })?; + let new_root_hash = mmr + .calculate_root_hash(&bag, new_element_count) + .map_err(|e| { + error!(error = %e, "Failed to calculate root hash"); + e + })?; if new_root_hash != guest_output.root_hash() { + error!("Root hash mismatch"); return Err(AccumulatorError::InvalidStateTransition); } - validate_u256_hex(&new_root_hash)?; + validate_u256_hex(&new_root_hash).map_err(|e| { + error!(error = %e, "Invalid root hash format"); + e + })?; + debug!("MMR state verified successfully"); Ok(()) } @@ -64,10 +120,16 @@ impl MMRStateManager { latest_block_number: u64, guest_output: &GuestOutput, ) -> Result { - Ok(MmrState::new( + debug!("Creating new MMR state"); + let new_state = MmrState::new( latest_block_number, - u256_from_hex(guest_output.root_hash().trim_start_matches("0x"))?, + u256_from_hex(guest_output.root_hash().trim_start_matches("0x")).map_err(|e| { + error!(error = %e, "Failed to convert root hash from hex"); + e + })?, guest_output.leaves_count() as u64, - )) + ); + debug!("New MMR state created successfully"); + Ok(new_state) } } diff --git a/crates/publisher/src/core/proof_generator.rs b/crates/publisher/src/core/proof_generator.rs index 97e4469..1ae97aa 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ -10,7 +10,7 @@ use serde::Deserialize; use starknet_crypto::Felt; use thiserror::Error; use tokio::task; -use tracing::{debug, info, warn}; 
+use tracing::{debug, error, info, span, Level}; use crate::utils::{Groth16, Stark}; @@ -60,6 +60,9 @@ where /// Generate a standard Stark proof for intermediate batches pub async fn generate_stark_proof(&self, input: T) -> Result { + let span = span!(Level::INFO, "generate_stark_proof"); + let _enter = span.enter(); + info!("Generating STARK proof for intermediate batch"); debug!("Input size: {} bytes", std::mem::size_of_val(&input)); @@ -73,12 +76,12 @@ where let env = ExecutorEnv::builder() .write(&input) .map_err(|e| { - warn!("Failed to write input to executor env: {}", e); + error!("Failed to write input to executor env: {}", e); ProofGeneratorError::ExecutorEnvError(e.to_string()) })? .build() .map_err(|e| { - warn!("Failed to build executor env: {}", e); + error!("Failed to build executor env: {}", e); ProofGeneratorError::ExecutorEnvError(e.to_string()) })?; @@ -86,27 +89,35 @@ where let receipt = default_prover() .prove(env, method_elf) .map_err(|e| { - warn!("Failed to generate STARK proof: {}", e); + error!("Failed to generate STARK proof: {}", e); ProofGeneratorError::ReceiptError(e.to_string()) })? .receipt; debug!("Computing image ID"); - let image_id = compute_image_id(method_elf) - .map_err(|e| ProofGeneratorError::ImageIdError(e.to_string()))?; + let image_id = compute_image_id(method_elf).map_err(|e| { + error!("Failed to compute image ID: {}", e); + ProofGeneratorError::ImageIdError(e.to_string()) + })?; info!("Successfully generated STARK proof"); Ok(Stark::new(receipt, image_id.as_bytes().to_vec(), method_id)) } }) .await? 
- .map_err(|e| ProofGeneratorError::SpawnBlocking(e.to_string()))?; + .map_err(|e| { + error!("Failed to spawn blocking task: {}", e); + ProofGeneratorError::SpawnBlocking(e.to_string()) + })?; Ok(proof) } /// Generate a Groth16 proof for the final batch pub async fn generate_groth16_proof(&self, input: T) -> Result { + let span = span!(Level::INFO, "generate_groth16_proof"); + let _enter = span.enter(); + info!("Generating Groth16 proof for final batch"); debug!("Input size: {} bytes", std::mem::size_of_val(&input)); @@ -119,12 +130,12 @@ where let env = ExecutorEnv::builder() .write(&input) .map_err(|e| { - warn!("Failed to write input to executor env: {}", e); + error!("Failed to write input to executor env: {}", e); ProofGeneratorError::ExecutorEnvError(e.to_string()) })? .build() .map_err(|e| { - warn!("Failed to build executor env: {}", e); + error!("Failed to build executor env: {}", e); ProofGeneratorError::ExecutorEnvError(e.to_string()) })?; @@ -137,20 +148,22 @@ where &ProverOpts::groth16(), ) .map_err(|e| { - warn!("Failed to generate Groth16 proof: {}", e); + error!("Failed to generate Groth16 proof: {}", e); ProofGeneratorError::ReceiptError(e.to_string()) })? 
.receipt; debug!("Encoding seal"); let encoded_seal = encode_seal(&receipt).map_err(|e| { - warn!("Failed to encode seal: {}", e); + error!("Failed to encode seal: {}", e); ProofGeneratorError::SealError(e.to_string()) })?; debug!("Computing image ID"); - let image_id = compute_image_id(method_elf) - .map_err(|e| ProofGeneratorError::ImageIdError(e.to_string()))?; + let image_id = compute_image_id(method_elf).map_err(|e| { + error!("Failed to compute image ID: {}", e); + ProofGeneratorError::ImageIdError(e.to_string()) + })?; let journal = receipt.journal.bytes.clone(); @@ -169,7 +182,7 @@ where let calldata = if !skip_proof_verification { get_groth16_calldata(&groth16_proof, &get_risc0_vk(), CurveID::BN254).map_err( |e| { - warn!("Failed to generate calldata: {}", e); + error!("Failed to generate calldata: {}", e); ProofGeneratorError::CalldataError(e.to_string()) }, )? @@ -181,7 +194,10 @@ where Ok(Groth16::new(receipt, calldata)) }) .await? - .map_err(|e| ProofGeneratorError::SpawnBlocking(e.to_string()))?; + .map_err(|e| { + error!("Failed to spawn blocking task: {}", e); + ProofGeneratorError::SpawnBlocking(e.to_string()) + })?; Ok(proof) } From 6eaa473a6738addecad05d2a366450c6448fb842 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 03:54:28 +0800 Subject: [PATCH 06/54] Enhance input validation, error handling, and tracing - Added input validation to prevent invalid states and improve robustness. - Improved error handling by providing specific error messages and using error variants. - Enhanced tracing with spans and detailed error messages for better debugging. - Applied changes to ValidatorBuilder, BatchProcessor, MMRStateManager, and ProofGenerator. - Ensured no panics occur from invalid inputs and all error cases are properly handled. 
--- Cargo.lock | 2 - crates/publisher/Cargo.toml | 2 - crates/publisher/bin/build_mmr.rs | 3 +- crates/publisher/bin/update_mmr.rs | 5 +- crates/publisher/bin/verify_blocks.rs | 4 +- crates/publisher/src/api/operations.rs | 7 +- crates/publisher/src/core/accumulator.rs | 59 +++++++++- crates/publisher/src/core/batch_processor.rs | 104 +++++++++++++++--- .../publisher/src/core/mmr_state_manager.rs | 29 ++++- crates/publisher/src/core/proof_generator.rs | 68 +++++++----- crates/publisher/src/errors.rs | 30 ++++- crates/publisher/src/lib.rs | 5 +- crates/publisher/src/validator/validator.rs | 66 +++++++++-- 13 files changed, 302 insertions(+), 82 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4e59fa3..921d788 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4128,14 +4128,12 @@ dependencies = [ "dotenv", "eth-rlp-types", "ethereum", - "eyre", "garaga_rs", "guest-types", "methods", "mmr", "mmr-utils", "risc0-ethereum-contracts", - "risc0-groth16", "risc0-zkvm", "serde", "sqlx", diff --git a/crates/publisher/Cargo.toml b/crates/publisher/Cargo.toml index c1f8411..5837197 100644 --- a/crates/publisher/Cargo.toml +++ b/crates/publisher/Cargo.toml @@ -29,7 +29,6 @@ garaga_rs = { git = "https://github.com/ametel01/garaga.git" } mmr = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher" } store = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher" } -eyre = { workspace = true } thiserror = { workspace = true } tracing = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread"] } @@ -42,5 +41,4 @@ risc0-zkvm = { version = "1.1.3" } # risc0-zkvm-platform = { version = "1.1.3" } serde = "1.0" risc0-ethereum-contracts = { git = "https://github.com/risc0/risc0-ethereum", tag = "v1.1.4" } -risc0-groth16 = "1.1.2" diff --git a/crates/publisher/bin/build_mmr.rs b/crates/publisher/bin/build_mmr.rs index 14dd9ef..c4b94f2 100644 --- a/crates/publisher/bin/build_mmr.rs +++ 
b/crates/publisher/bin/build_mmr.rs @@ -1,6 +1,5 @@ use clap::Parser; use common::{get_env_var, initialize_logger_and_env}; -use eyre::Result; use publisher::core::AccumulatorBuilder; use tracing::info; @@ -21,7 +20,7 @@ struct Args { } #[tokio::main] -async fn main() -> Result<()> { +async fn main() -> Result<(), Box> { initialize_logger_and_env()?; let rpc_url = get_env_var("STARKNET_RPC_URL")?; diff --git a/crates/publisher/bin/update_mmr.rs b/crates/publisher/bin/update_mmr.rs index e72a532..a67f3c5 100644 --- a/crates/publisher/bin/update_mmr.rs +++ b/crates/publisher/bin/update_mmr.rs @@ -1,8 +1,5 @@ use clap::Parser; use common::{get_env_var, initialize_logger_and_env}; -use eyre::Result; -// use methods::{MMR_APPEND_ELF, MMR_APPEND_ID}; -// use publisher::{AccumulatorBuilder, ProofGenerator}; use tracing::info; const BATCH_SIZE: u64 = 1024; @@ -24,7 +21,7 @@ struct Args { } #[tokio::main] -async fn main() -> Result<()> { +async fn main() -> Result<(), Box> { initialize_logger_and_env()?; let rpc_url = get_env_var("STARKNET_RPC_URL")?; diff --git a/crates/publisher/bin/verify_blocks.rs b/crates/publisher/bin/verify_blocks.rs index 852ce81..2fe98df 100644 --- a/crates/publisher/bin/verify_blocks.rs +++ b/crates/publisher/bin/verify_blocks.rs @@ -1,6 +1,6 @@ use clap::Parser; use common::initialize_logger_and_env; -use publisher::{db::DbConnection, prove_headers_validity_and_inclusion}; +use publisher::{db::DbConnection, prove_headers_integrity_and_inclusion}; use tokio; #[derive(Parser, Debug)] @@ -32,7 +32,7 @@ async fn main() -> Result<(), Box> { .await?; // Verify blocks - match prove_headers_validity_and_inclusion(&headers, Some(args.skip_proof)).await { + match prove_headers_integrity_and_inclusion(&headers, Some(args.skip_proof)).await { Ok(result) => { for proof in result { proof.receipt().verify(proof.image_id()?)?; diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index 151e9d6..2d8004d 100644 --- 
a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -47,14 +47,15 @@ pub async fn prove_mmr_update( Ok(()) } -pub async fn prove_headers_validity_and_inclusion( +pub async fn prove_headers_integrity_and_inclusion( headers: &Vec, skip_proof_verification: Option, ) -> Result, PublisherError> { - let span = span!(Level::INFO, "prove_headers_validity_and_inclusion"); + let span = span!(Level::INFO, "prove_headers_integrity_and_inclusion"); let _enter = span.enter(); let skip_proof = skip_proof_verification.unwrap_or(false); + let validator = ValidatorBuilder::new(DEFAULT_BATCH_SIZE, skip_proof) .await .map_err(|e| { @@ -63,7 +64,7 @@ pub async fn prove_headers_validity_and_inclusion( })?; let result = validator - .verify_blocks_validity_and_inclusion(headers) + .verify_blocks_integrity_and_inclusion(headers) .await .map_err(|e| { tracing::error!(error = %e, "Failed to verify blocks validity and inclusion"); diff --git a/crates/publisher/src/core/accumulator.rs b/crates/publisher/src/core/accumulator.rs index 33df994..235c7ef 100644 --- a/crates/publisher/src/core/accumulator.rs +++ b/crates/publisher/src/core/accumulator.rs @@ -36,7 +36,31 @@ impl<'a> AccumulatorBuilder<'a> { info!("Initializing AccumulatorBuilder"); let proof_generator = - ProofGenerator::new(MMR_APPEND_ELF, MMR_APPEND_ID, skip_proof_verification); + ProofGenerator::new(MMR_APPEND_ELF, MMR_APPEND_ID, skip_proof_verification)?; + + if rpc_url.trim().is_empty() { + return Err(AccumulatorError::InvalidInput("RPC URL cannot be empty")); + } + if verifier_address.trim().is_empty() { + return Err(AccumulatorError::InvalidInput( + "Verifier address cannot be empty", + )); + } + if account_private_key.trim().is_empty() { + return Err(AccumulatorError::InvalidInput( + "Account private key cannot be empty", + )); + } + if account_address.trim().is_empty() { + return Err(AccumulatorError::InvalidInput( + "Account address cannot be empty", + )); + } + if batch_size == 0 { + 
return Err(AccumulatorError::InvalidInput( + "Batch size must be greater than 0", + )); + } Ok(Self { rpc_url, @@ -47,7 +71,7 @@ impl<'a> AccumulatorBuilder<'a> { batch_size, proof_generator, skip_proof_verification, - ), + )?, current_batch: 0, total_batches: 0, }) @@ -61,9 +85,15 @@ impl<'a> AccumulatorBuilder<'a> { let span = span!(Level::INFO, "build_with_num_batches", num_batches); let _enter = span.enter(); + if num_batches == 0 { + return Err(AccumulatorError::InvalidInput( + "Number of batches must be greater than 0", + )); + } + let (finalized_block_number, _) = get_finalized_block_hash().await.map_err(|e| { error!(error = %e, "Failed to get finalized block hash"); - e + AccumulatorError::BlockchainError(format!("Failed to get finalized block: {}", e)) })?; info!( @@ -81,7 +111,7 @@ impl<'a> AccumulatorBuilder<'a> { break; } - let start_block = self.batch_processor.calculate_start_block(current_end); + let start_block = self.batch_processor.calculate_start_block(current_end)?; debug!(batch_num, start_block, current_end, "Processing batch"); let result = self @@ -126,7 +156,7 @@ impl<'a> AccumulatorBuilder<'a> { let mut current_end = finalized_block_number; while current_end > 0 { - let start_block = self.batch_processor.calculate_start_block(current_end); + let start_block = self.batch_processor.calculate_start_block(current_end)?; let batch_result = self .batch_processor .process_batch(start_block, current_end) @@ -155,6 +185,23 @@ impl<'a> AccumulatorBuilder<'a> { ); let _enter = span.enter(); + if end_block < start_block { + return Err(AccumulatorError::InvalidInput( + "End block cannot be less than start block", + )); + } + + let (finalized_block_number, _) = get_finalized_block_hash().await.map_err(|e| { + error!(error = %e, "Failed to get finalized block hash"); + AccumulatorError::BlockchainError(format!("Failed to get finalized block: {}", e)) + })?; + + if end_block > finalized_block_number { + return Err(AccumulatorError::InvalidInput( + "End 
block cannot be greater than finalized block", + )); + } + let mut current_end = end_block; let mut batch_results = Vec::new(); @@ -166,7 +213,7 @@ impl<'a> AccumulatorBuilder<'a> { while current_end >= start_block { let batch_range = self .batch_processor - .calculate_batch_range(current_end, start_block); + .calculate_batch_range(current_end, start_block)?; debug!( batch_start = batch_range.start, diff --git a/crates/publisher/src/core/batch_processor.rs b/crates/publisher/src/core/batch_processor.rs index 063f403..8c79634 100644 --- a/crates/publisher/src/core/batch_processor.rs +++ b/crates/publisher/src/core/batch_processor.rs @@ -19,12 +19,18 @@ impl BatchProcessor { batch_size: u64, proof_generator: ProofGenerator, skip_proof_verification: bool, - ) -> Self { - Self { + ) -> Result { + if batch_size == 0 { + return Err(AccumulatorError::InvalidInput( + "Batch size must be greater than 0", + )); + } + + Ok(Self { batch_size, proof_generator, skip_proof_verification, - } + }) } pub fn batch_size(&self) -> u64 { @@ -40,11 +46,24 @@ impl BatchProcessor { start_block: u64, end_block: u64, ) -> Result, AccumulatorError> { + if end_block < start_block { + return Err(AccumulatorError::InvalidInput( + "End block cannot be less than start block", + )); + } + let span = span!(Level::INFO, "process_batch", start_block, end_block); let _enter = span.enter(); let batch_index = start_block / self.batch_size; - let (_, batch_end) = self.calculate_batch_bounds(batch_index); + let (batch_start, batch_end) = self.calculate_batch_bounds(batch_index)?; + + if start_block < batch_start { + return Err(AccumulatorError::InvalidInput( + "Start block is before batch start", + )); + } + let adjusted_end_block = std::cmp::min(end_block, batch_end); info!( @@ -176,25 +195,69 @@ impl BatchProcessor { ))) } - pub fn calculate_batch_bounds(&self, batch_index: u64) -> (u64, u64) { - let batch_start = batch_index * self.batch_size; - let batch_end = batch_start + self.batch_size - 1; - 
(batch_start, batch_end) + pub fn calculate_batch_bounds(&self, batch_index: u64) -> Result<(u64, u64), AccumulatorError> { + let batch_start = batch_index + .checked_mul(self.batch_size) + .ok_or(AccumulatorError::InvalidInput("Batch index too large"))?; + + let batch_end = batch_start + .checked_add(self.batch_size) + .ok_or(AccumulatorError::InvalidInput( + "Batch end calculation overflow", + ))? + .saturating_sub(1); + + Ok((batch_start, batch_end)) } - pub fn calculate_start_block(&self, current_end: u64) -> u64 { - current_end.saturating_sub(current_end % self.batch_size) + pub fn calculate_start_block(&self, current_end: u64) -> Result { + if current_end == 0 { + return Err(AccumulatorError::InvalidInput( + "Current end block cannot be 0", + )); + } + + Ok(current_end.saturating_sub(current_end % self.batch_size)) } - pub fn calculate_batch_range(&self, current_end: u64, start_block: u64) -> BatchRange { - let batch_start = current_end - (current_end % self.batch_size); + pub fn calculate_batch_range( + &self, + current_end: u64, + start_block: u64, + ) -> Result { + if current_end < start_block { + return Err(AccumulatorError::InvalidInput( + "Current end block cannot be less than start block", + )); + } + + if current_end == 0 { + return Err(AccumulatorError::InvalidInput( + "Current end block cannot be 0", + )); + } + + let batch_start = current_end.saturating_sub(current_end % self.batch_size); let effective_start = batch_start.max(start_block); - let effective_end = std::cmp::min(current_end, batch_start + self.batch_size - 1); - BatchRange { + let batch_size_minus_one = self + .batch_size + .checked_sub(1) + .ok_or(AccumulatorError::InvalidInput("Invalid batch size"))?; + + let max_end = + batch_start + .checked_add(batch_size_minus_one) + .ok_or(AccumulatorError::InvalidInput( + "Batch end calculation overflow", + ))?; + + let effective_end = std::cmp::min(current_end, max_end); + + Ok(BatchRange { start: effective_start, end: effective_end, - } + }) 
} } @@ -202,3 +265,14 @@ pub struct BatchRange { pub start: u64, pub end: u64, } + +impl BatchRange { + pub fn new(start: u64, end: u64) -> Result { + if end < start { + return Err(AccumulatorError::InvalidInput( + "End block cannot be less than start block", + )); + } + Ok(Self { start, end }) + } +} diff --git a/crates/publisher/src/core/mmr_state_manager.rs b/crates/publisher/src/core/mmr_state_manager.rs index aa84ad9..a290fe8 100644 --- a/crates/publisher/src/core/mmr_state_manager.rs +++ b/crates/publisher/src/core/mmr_state_manager.rs @@ -18,6 +18,12 @@ impl MMRStateManager { guest_output: &GuestOutput, headers: &Vec, ) -> Result { + if headers.is_empty() { + return Err(AccumulatorError::InvalidInput( + "Headers list cannot be empty", + )); + } + let span = span!(Level::INFO, "update_state", latest_block_number); let _enter = span.enter(); @@ -56,7 +62,14 @@ impl MMRStateManager { headers: &Vec, ) -> Result<(), AccumulatorError> { debug!("Appending headers to MMR"); + for hash in headers { + if hash.trim().is_empty() { + return Err(AccumulatorError::InvalidInput( + "Header hash cannot be empty", + )); + } + let append_result = mmr.append(hash.clone()).await.map_err(|e| { error!(error = %e, "Failed to append hash to MMR"); e @@ -78,11 +91,12 @@ impl MMRStateManager { guest_output: &GuestOutput, ) -> Result<(), AccumulatorError> { debug!("Verifying MMR state"); - if mmr.leaves_count.get().await.map_err(|e| { + + let leaves_count = mmr.leaves_count.get().await.map_err(|e| { error!(error = %e, "Failed to get leaves count"); e - })? 
!= guest_output.leaves_count() as usize - { + })?; + if leaves_count != guest_output.leaves_count() as usize { error!("Leaves count mismatch"); return Err(AccumulatorError::InvalidStateTransition); } @@ -121,14 +135,21 @@ impl MMRStateManager { guest_output: &GuestOutput, ) -> Result { debug!("Creating new MMR state"); + + let root_hash = guest_output.root_hash().trim_start_matches("0x"); + if root_hash.is_empty() { + return Err(AccumulatorError::InvalidInput("Root hash cannot be empty")); + } + let new_state = MmrState::new( latest_block_number, - u256_from_hex(guest_output.root_hash().trim_start_matches("0x")).map_err(|e| { + u256_from_hex(root_hash).map_err(|e| { error!(error = %e, "Failed to convert root hash from hex"); e })?, guest_output.leaves_count() as u64, ); + debug!("New MMR state created successfully"); Ok(new_state) } diff --git a/crates/publisher/src/core/proof_generator.rs b/crates/publisher/src/core/proof_generator.rs index 1ae97aa..7dcdc89 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ -8,31 +8,13 @@ use risc0_ethereum_contracts::encode_seal; use risc0_zkvm::{compute_image_id, default_prover, ExecutorEnv, ProverOpts, VerifierContext}; use serde::Deserialize; use starknet_crypto::Felt; -use thiserror::Error; use tokio::task; use tracing::{debug, error, info, span, Level}; -use crate::utils::{Groth16, Stark}; - -#[derive(Error, Debug)] -pub enum ProofGeneratorError { - #[error("Failed to write input to executor env: {0}")] - ExecutorEnvError(String), - #[error("Failed to generate receipt: {0}")] - ReceiptError(String), - #[error("Failed to compute image id: {0}")] - ImageIdError(String), - #[error("Failed to encode seal: {0}")] - SealError(String), - #[error("Failed to generate StarkNet calldata: {0}")] - CalldataError(String), - #[error("Failed to spawn blocking task: {0}")] - SpawnBlocking(String), - #[error("Tokio task join error: {0}")] - Join(#[from] tokio::task::JoinError), - 
#[error("Risc0 serde error: {0}")] - Risc0Serde(#[from] risc0_zkvm::serde::Error), -} +use crate::{ + errors::ProofGeneratorError, + utils::{Groth16, Stark}, +}; pub struct ProofGenerator { method_elf: &'static [u8], @@ -49,13 +31,25 @@ where method_elf: &'static [u8], method_id: [u32; 8], skip_proof_verification: bool, - ) -> Self { - Self { + ) -> Result { + if method_elf.is_empty() { + return Err(ProofGeneratorError::InvalidInput( + "Method ELF cannot be empty", + )); + } + + if method_id.iter().all(|&x| x == 0) { + return Err(ProofGeneratorError::InvalidInput( + "Method ID cannot be all zeros", + )); + } + + Ok(Self { method_elf, method_id, skip_proof_verification, _phantom: std::marker::PhantomData, - } + }) } /// Generate a standard Stark proof for intermediate batches @@ -63,15 +57,20 @@ where let span = span!(Level::INFO, "generate_stark_proof"); let _enter = span.enter(); + let input_size = std::mem::size_of_val(&input); + if input_size == 0 { + return Err(ProofGeneratorError::InvalidInput("Input cannot be empty")); + } + info!("Generating STARK proof for intermediate batch"); - debug!("Input size: {} bytes", std::mem::size_of_val(&input)); + debug!("Input size: {} bytes", input_size); let proof = task::spawn_blocking({ let method_elf = self.method_elf; let method_id = self.method_id; let input = input.clone(); - move || -> eyre::Result { + move || -> Result { debug!("Building executor environment"); let env = ExecutorEnv::builder() .write(&input) @@ -118,14 +117,19 @@ where let span = span!(Level::INFO, "generate_groth16_proof"); let _enter = span.enter(); + let input_size = std::mem::size_of_val(&input); + if input_size == 0 { + return Err(ProofGeneratorError::InvalidInput("Input cannot be empty")); + } + info!("Generating Groth16 proof for final batch"); - debug!("Input size: {} bytes", std::mem::size_of_val(&input)); + debug!("Input size: {} bytes", input_size); let method_elf = self.method_elf; let input = input.clone(); let skip_proof_verification = 
self.skip_proof_verification; - let proof = task::spawn_blocking(move || -> eyre::Result { + let proof = task::spawn_blocking(move || -> Result { debug!("Building executor environment"); let env = ExecutorEnv::builder() .write(&input) @@ -206,6 +210,12 @@ where &self, proof: &Groth16, ) -> Result { + if proof.receipt().journal.bytes.is_empty() { + return Err(ProofGeneratorError::InvalidInput( + "Proof journal cannot be empty", + )); + } + let receipt = proof.receipt(); Ok(receipt.journal.decode()?) } diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index c5191a0..abea24d 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -3,8 +3,6 @@ use mmr::{InStoreTableError, MMRError, StoreError}; use mmr_utils::MMRUtilsError; use thiserror::Error; -use crate::core::ProofGeneratorError; - #[derive(Error, Debug)] pub enum PublisherError { #[error("Verification failed: no verification result was produced")] @@ -51,6 +49,10 @@ pub enum AccumulatorError { StarknetHandler(#[from] starknet_handler::StarknetHandlerError), #[error("No headers available for block range {start_block} to {end_block}. The range might be invalid or the data might not be synced")] EmptyHeaders { start_block: u64, end_block: u64 }, + #[error("Invalid input: {0}")] + InvalidInput(&'static str), + #[error("Blockchain operation failed: {0}")] + BlockchainError(String), } #[derive(thiserror::Error, Debug)] @@ -69,4 +71,28 @@ pub enum ValidatorError { ProofGenerator(#[from] ProofGeneratorError), #[error("Proof count mismatch: expected {expected} proofs but found {actual}. 
This might indicate data corruption or synchronization issues")] InvalidProofsCount { expected: usize, actual: usize }, + #[error("Invalid input: {0}")] + InvalidInput(&'static str), +} + +#[derive(Error, Debug)] +pub enum ProofGeneratorError { + #[error("Invalid input: {0}")] + InvalidInput(&'static str), + #[error("Failed to write input to executor env: {0}")] + ExecutorEnvError(String), + #[error("Failed to generate receipt: {0}")] + ReceiptError(String), + #[error("Failed to compute image id: {0}")] + ImageIdError(String), + #[error("Failed to encode seal: {0}")] + SealError(String), + #[error("Failed to generate StarkNet calldata: {0}")] + CalldataError(String), + #[error("Failed to spawn blocking task: {0}")] + SpawnBlocking(String), + #[error("Tokio task join error: {0}")] + Join(#[from] tokio::task::JoinError), + #[error("Risc0 serde error: {0}")] + Risc0Serde(#[from] risc0_zkvm::serde::Error), } diff --git a/crates/publisher/src/lib.rs b/crates/publisher/src/lib.rs index 54c553e..798bde3 100644 --- a/crates/publisher/src/lib.rs +++ b/crates/publisher/src/lib.rs @@ -1,8 +1,5 @@ #![deny(unused_crate_dependencies)] use clap as _; -use common as _; -use risc0_groth16 as _; -use tracing as _; pub mod api; pub mod core; @@ -11,5 +8,5 @@ pub mod errors; pub mod utils; pub mod validator; -pub use api::operations::{prove_headers_validity_and_inclusion, prove_mmr_update}; +pub use api::operations::{prove_headers_integrity_and_inclusion, prove_mmr_update}; pub use errors::{PublisherError, ValidatorError}; diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index ca76ba9..0abc4d7 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -7,6 +7,7 @@ use mmr::{PeaksOptions, MMR}; use mmr_utils::{initialize_mmr, StoreManager}; use std::collections::HashMap; use store::SqlitePool; +use tracing::{error, span, Level}; pub struct ValidatorBuilder { proof_generator: 
ProofGenerator, @@ -15,8 +16,14 @@ pub struct ValidatorBuilder { impl ValidatorBuilder { pub async fn new(batch_size: u64, skip_proof: bool) -> Result { + if batch_size == 0 { + return Err(ValidatorError::InvalidInput( + "Batch size must be greater than 0", + )); + } + let proof_generator = - ProofGenerator::new(BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID, skip_proof); + ProofGenerator::new(BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID, skip_proof)?; Ok(Self { proof_generator, @@ -24,10 +31,17 @@ impl ValidatorBuilder { }) } - pub async fn verify_blocks_validity_and_inclusion( + pub async fn verify_blocks_integrity_and_inclusion( &self, headers: &Vec, ) -> Result, ValidatorError> { + if headers.is_empty() { + return Err(ValidatorError::InvalidInput("Headers list cannot be empty")); + } + + let span = span!(Level::INFO, "verify_blocks_integrity_and_inclusion"); + let _enter = span.enter(); + let mmrs = self.initialize_mmrs_for_headers(headers).await?; let block_indexes = self.collect_block_indexes(headers, &mmrs).await?; self.generate_proofs_for_batches(headers, &mmrs, &block_indexes) @@ -38,17 +52,28 @@ impl ValidatorBuilder { &self, headers: &[eth_rlp_types::BlockHeader], ) -> Result, ValidatorError> { + let span = span!(Level::INFO, "initialize_mmrs_for_headers"); + let _enter = span.enter(); + let mut mmrs = HashMap::new(); for header in headers { let batch_index = header.number as u64 / self.batch_size; if !mmrs.contains_key(&batch_index) { - let batch_file_name = get_or_create_db_path(&format!("batch_{}.db", batch_index))?; + let batch_file_name = get_or_create_db_path(&format!("batch_{}.db", batch_index)) + .map_err(|e| { + error!(error = %e, "Failed to get or create DB path"); + ValidatorError::Store(store::StoreError::GetError) + })?; if !std::path::Path::new(&batch_file_name).exists() { + error!("Batch file does not exist: {}", batch_file_name); return Err(ValidatorError::Store(store::StoreError::GetError)); } - let mmr_components = 
initialize_mmr(&batch_file_name).await?; + let mmr_components = initialize_mmr(&batch_file_name).await.map_err(|e| { + error!(error = %e, "Failed to initialize MMR"); + ValidatorError::Store(store::StoreError::GetError) + })?; mmrs.insert(batch_index, mmr_components); } } @@ -61,16 +86,28 @@ impl ValidatorBuilder { headers: &[eth_rlp_types::BlockHeader], mmrs: &HashMap, ) -> Result, ValidatorError> { + let span = span!(Level::INFO, "collect_block_indexes"); + let _enter = span.enter(); + let mut block_indexes = Vec::new(); for header in headers { let batch_index = header.number as u64 / self.batch_size; - let (store_manager, _, pool) = mmrs.get(&batch_index).unwrap(); + let (store_manager, _, pool) = mmrs.get(&batch_index).ok_or_else(|| { + error!("MMR not found for batch index: {}", batch_index); + ValidatorError::Store(store::StoreError::GetError) + })?; let index = store_manager .get_element_index_for_value(pool, &header.block_hash) .await? - .ok_or(ValidatorError::Store(store::StoreError::GetError))?; + .ok_or_else(|| { + error!( + "Element index not found for block hash: {}", + header.block_hash + ); + ValidatorError::Store(store::StoreError::GetError) + })?; block_indexes.push((index, batch_index)); } @@ -84,16 +121,31 @@ impl ValidatorBuilder { mmrs: &HashMap, block_indexes: &[(usize, u64)], ) -> Result, ValidatorError> { + let span = span!(Level::INFO, "generate_proofs_for_batches"); + let _enter = span.enter(); + let mut proofs = Vec::new(); for (batch_index, (_, mmr, _)) in mmrs { let batch_block_indexes = self.get_batch_block_indexes(block_indexes, *batch_index); let batch_headers = self.get_batch_headers(headers, *batch_index); - let batch_proofs = mmr.get_proofs(&batch_block_indexes, None).await?; + let batch_proofs = mmr + .get_proofs(&batch_block_indexes, None) + .await + .map_err(|e| { + error!(error = %e, "Failed to get proofs for batch index: {}", batch_index); + ValidatorError::Store(store::StoreError::GetError) + })?; let guest_proofs = 
self.convert_to_guest_proofs(batch_proofs); if batch_headers.len() != guest_proofs.len() { + error!( + "Proofs count mismatch for batch index: {}. Expected: {}, Actual: {}", + batch_index, + batch_headers.len(), + guest_proofs.len() + ); return Err(ValidatorError::InvalidProofsCount { expected: batch_headers.len(), actual: guest_proofs.len(), From 1ab1e7267db54408fd09e9cde2f1398574d820c3 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 04:02:03 +0800 Subject: [PATCH 07/54] feat(starknet-handler): enhance tracing and logging - Add structured logging with tracing macros across all modules - Implement span instrumentation for key operations - Add debug and info level logs for important state changes - Include meaningful context in log messages - Skip sensitive data in instrumentation - Add logging for: - MMR state operations - Account creation and verification - Provider RPC calls - Hex conversions This improves observability and debugging capabilities across the starknet-handler crate. 
--- Cargo.lock | 1 + crates/starknet-handler/Cargo.toml | 1 + crates/starknet-handler/src/account.rs | 34 +++++++++++++++---- crates/starknet-handler/src/lib.rs | 17 ++++++++-- crates/starknet-handler/src/provider.rs | 44 ++++++++++--------------- 5 files changed, 62 insertions(+), 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 921d788..8733ab4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5644,6 +5644,7 @@ dependencies = [ "starknet", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror 2.0.4", + "tracing", "url", ] diff --git a/crates/starknet-handler/Cargo.toml b/crates/starknet-handler/Cargo.toml index 9c78a9c..ab9ae9b 100644 --- a/crates/starknet-handler/Cargo.toml +++ b/crates/starknet-handler/Cargo.toml @@ -9,6 +9,7 @@ common = { path = "../common" } thiserror = { workspace = true } starknet = { workspace = true } starknet-crypto = { workspace = true } +tracing = { workspace = true } crypto-bigint = "0.5.5" url = "2.5.4" diff --git a/crates/starknet-handler/src/account.rs b/crates/starknet-handler/src/account.rs index ebf1af6..a569e94 100644 --- a/crates/starknet-handler/src/account.rs +++ b/crates/starknet-handler/src/account.rs @@ -7,6 +7,7 @@ use starknet::{ }; use starknet_crypto::Felt; use std::sync::Arc; +use tracing::{debug, info, instrument, warn}; use common::felt; @@ -17,38 +18,53 @@ pub struct StarknetAccount { } impl StarknetAccount { + #[instrument(skip(provider, account_private_key), fields(address = %account_address), level = "debug")] pub fn new( provider: Arc>, account_private_key: &str, account_address: &str, ) -> Result { + debug!("Creating new Starknet account"); + let private_key = felt(account_private_key)?; + debug!("Private key converted to felt"); + let signer = LocalWallet::from(SigningKey::from_secret_scalar(private_key)); - let address = felt(account_address)?; + + debug!( + chain_id = ?chain_id::SEPOLIA, + encoding = ?ExecutionEncoding::New, + "Initializing SingleOwnerAccount" + 
); let account = SingleOwnerAccount::new( - provider, // Use `Arc` directly + provider, signer, address, chain_id::SEPOLIA, ExecutionEncoding::New, ); + info!("Starknet account successfully created"); Ok(Self { account }) } - pub fn account(&self) -> SingleOwnerAccount>, LocalWallet> { - self.account.clone() - } - + #[instrument(skip(self), level = "debug")] pub async fn verify_mmr_proof( &self, verifier_address: &str, proof: Vec, ) -> Result { - let selector = selector!("verify_mmr_proof"); + debug!( + verifier_address = %verifier_address, + proof_length = proof.len(), + "Verifying MMR proof" + ); + let selector = selector!("verify_mmr_proof"); + + debug!("Executing verification transaction"); let tx = self .account .execute_v1(vec![starknet::core::types::Call { @@ -59,6 +75,10 @@ impl StarknetAccount { .send() .await?; + info!( + tx_hash = ?tx.transaction_hash, + "MMR proof verification transaction sent" + ); Ok(tx.transaction_hash) } } diff --git a/crates/starknet-handler/src/lib.rs b/crates/starknet-handler/src/lib.rs index 98d5a2f..bc2bb50 100644 --- a/crates/starknet-handler/src/lib.rs +++ b/crates/starknet-handler/src/lib.rs @@ -9,6 +9,7 @@ use starknet::core::codec::{Decode, Encode}; use starknet::core::types::U256; use starknet::signers::local_wallet::SignError as LocalWalletSignError; use thiserror::Error; +use tracing::{debug, instrument, span, Level}; #[derive(Error, Debug)] pub enum StarknetHandlerError { @@ -41,12 +42,18 @@ pub struct MmrState { } impl MmrState { + #[instrument(skip(root_hash), level = "debug")] pub fn new( latest_block_number: u64, root_hash: U256, // elements_count: u64, leaves_count: u64, ) -> Self { + debug!( + latest_block_number, + leaves_count, + "Creating new MMR state" + ); Self { latest_block_number, root_hash, @@ -72,10 +79,16 @@ impl MmrState { } } +#[instrument(level = "debug")] pub fn u256_from_hex(hex: &str) -> Result { + let _span = span!(Level::DEBUG, "hex_conversion").entered(); + debug!(input_hex = hex, "Converting 
hex to U256"); + let crypto_bigint = CryptoBigIntU256::from_be_hex(hex); - - Ok(U256::from(crypto_bigint)) + let result = U256::from(crypto_bigint); + + debug!(result = ?result, "Hex conversion completed"); + Ok(result) } #[cfg(test)] diff --git a/crates/starknet-handler/src/provider.rs b/crates/starknet-handler/src/provider.rs index b9da22d..fb0c05c 100644 --- a/crates/starknet-handler/src/provider.rs +++ b/crates/starknet-handler/src/provider.rs @@ -1,5 +1,6 @@ use starknet::providers::Provider; use std::sync::Arc; +use tracing::{debug, info, instrument, warn}; use crate::{MmrState, StarknetHandlerError}; use starknet::macros::selector; @@ -18,8 +19,13 @@ pub struct StarknetProvider { } impl StarknetProvider { + #[instrument(level = "debug", fields(rpc_url = %rpc_url))] pub fn new(rpc_url: &str) -> Result { + debug!("Initializing StarknetProvider"); + let parsed_url = Url::parse(rpc_url)?; + info!("Parsed RPC URL successfully"); + Ok(Self { provider: Arc::new(JsonRpcClient::new(HttpTransport::new(parsed_url))), rpc_url: rpc_url.to_string(), @@ -34,36 +40,13 @@ impl StarknetProvider { self.provider.clone() } - // pub async fn verify_groth16_proof_onchain( - // &self, - // verifier_address: &str, - // calldata: &[Felt], - // ) -> Result, StarknetHandlerError> { - // tracing::info!("Verifying Groth16 proof onchain..."); - // let contract_address = felt(verifier_address)?; - - // let entry_point_selector = selector!("verify_groth16_proof_bn254"); - - // let result = self - // .provider - // .call( - // FunctionCall { - // contract_address, - // entry_point_selector, - // calldata: calldata.to_vec(), - // }, - // BlockId::Tag(BlockTag::Latest), - // ) - // .await - // .map_err(|e| StarknetHandlerError::TransactionError(e.to_string()))?; - - // Ok(result) - // } - + #[instrument(skip(self), level = "debug")] pub async fn get_latest_mmr_block( &self, l2_store_address: &Felt, ) -> Result { + debug!("Fetching latest MMR block"); + let entry_point_selector = 
selector!("get_latest_mmr_block"); let data = self @@ -79,15 +62,19 @@ impl StarknetProvider { .await?; let mmr_block = u64::decode(&data)?; + info!(mmr_block, "Retrieved latest MMR block"); Ok(mmr_block) } + #[instrument(skip(self), level = "debug")] pub async fn get_mmr_state( &self, l2_store_address: &Felt, batch_index: u64, ) -> Result { + debug!(batch_index, "Fetching MMR state"); + let entry_point_selector = selector!("get_mmr_state"); let data = self @@ -103,14 +90,18 @@ impl StarknetProvider { .await?; let mmr_state = MmrState::decode(&data)?; + info!("Retrieved MMR state"); Ok(mmr_state) } + #[instrument(skip(self), level = "debug")] pub async fn get_latest_relayed_block( &self, l2_store_address: &Felt, ) -> Result { + debug!("Fetching latest relayed block"); + let entry_point_selector = selector!("get_latest_blockhash_from_l1"); let data = self @@ -127,6 +118,7 @@ impl StarknetProvider { let block_number = u64::from_str_radix(data[0].to_hex_string().trim_start_matches("0x"), 16)?; + info!(block_number, "Retrieved latest relayed block"); Ok(block_number) } From 6bc1926d43fba2bc53ede630627276231b9e136e Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Thu, 5 Dec 2024 04:45:39 +0800 Subject: [PATCH 08/54] fmt --- crates/starknet-handler/Cargo.toml | 2 +- crates/starknet-handler/src/account.rs | 8 ++++---- crates/starknet-handler/src/lib.rs | 12 +++++------- crates/starknet-handler/src/provider.rs | 4 ++-- 4 files changed, 12 insertions(+), 14 deletions(-) diff --git a/crates/starknet-handler/Cargo.toml b/crates/starknet-handler/Cargo.toml index ab9ae9b..1c88c67 100644 --- a/crates/starknet-handler/Cargo.toml +++ b/crates/starknet-handler/Cargo.toml @@ -12,4 +12,4 @@ starknet-crypto = { workspace = true } tracing = { workspace = true } crypto-bigint = "0.5.5" -url = "2.5.4" +url = "2.5.4" \ No newline at end of file diff --git a/crates/starknet-handler/src/account.rs b/crates/starknet-handler/src/account.rs index a569e94..7fd0adb 100644 --- 
a/crates/starknet-handler/src/account.rs +++ b/crates/starknet-handler/src/account.rs @@ -25,13 +25,13 @@ impl StarknetAccount { account_address: &str, ) -> Result { debug!("Creating new Starknet account"); - + let private_key = felt(account_private_key)?; debug!("Private key converted to felt"); - + let signer = LocalWallet::from(SigningKey::from_secret_scalar(private_key)); let address = felt(account_address)?; - + debug!( chain_id = ?chain_id::SEPOLIA, encoding = ?ExecutionEncoding::New, @@ -63,7 +63,7 @@ impl StarknetAccount { ); let selector = selector!("verify_mmr_proof"); - + debug!("Executing verification transaction"); let tx = self .account diff --git a/crates/starknet-handler/src/lib.rs b/crates/starknet-handler/src/lib.rs index bc2bb50..b67be92 100644 --- a/crates/starknet-handler/src/lib.rs +++ b/crates/starknet-handler/src/lib.rs @@ -31,6 +31,8 @@ pub enum StarknetHandlerError { ParseIntError(#[from] std::num::ParseIntError), #[error("Provider error: {0}")] Provider(#[from] starknet::providers::ProviderError), + #[error("Felt conversion error: {0}")] + FeltConversion(#[from] starknet::core::types::FromStrError), } #[derive(Clone, Debug, Encode, Decode)] @@ -49,11 +51,7 @@ impl MmrState { // elements_count: u64, leaves_count: u64, ) -> Self { - debug!( - latest_block_number, - leaves_count, - "Creating new MMR state" - ); + debug!(latest_block_number, leaves_count, "Creating new MMR state"); Self { latest_block_number, root_hash, @@ -83,10 +81,10 @@ impl MmrState { pub fn u256_from_hex(hex: &str) -> Result { let _span = span!(Level::DEBUG, "hex_conversion").entered(); debug!(input_hex = hex, "Converting hex to U256"); - + let crypto_bigint = CryptoBigIntU256::from_be_hex(hex); let result = U256::from(crypto_bigint); - + debug!(result = ?result, "Hex conversion completed"); Ok(result) } diff --git a/crates/starknet-handler/src/provider.rs b/crates/starknet-handler/src/provider.rs index fb0c05c..fd6b33a 100644 --- 
a/crates/starknet-handler/src/provider.rs +++ b/crates/starknet-handler/src/provider.rs @@ -22,7 +22,7 @@ impl StarknetProvider { #[instrument(level = "debug", fields(rpc_url = %rpc_url))] pub fn new(rpc_url: &str) -> Result { debug!("Initializing StarknetProvider"); - + let parsed_url = Url::parse(rpc_url)?; info!("Parsed RPC URL successfully"); @@ -46,7 +46,7 @@ impl StarknetProvider { l2_store_address: &Felt, ) -> Result { debug!("Fetching latest MMR block"); - + let entry_point_selector = selector!("get_latest_mmr_block"); let data = self From f1bff1662c1cabd355d34bb9bc5405fc569e0f57 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 04:46:49 +0800 Subject: [PATCH 09/54] feat(validator): implement onchain MMR root verification - Add verification of MMR roots against onchain state - Extract MMR root verification into separate methods for better modularity: - verify_mmr_roots: orchestrates the verification process - verify_single_mmr_root: handles individual MMR root verification - Create mapping between batch indexes and onchain roots for efficient lookup - Add skip_proof option to bypass verification during testing - Add tracing info for both verification and skip scenarios This change ensures MMR roots match their onchain counterparts, maintaining data integrity between L1 and L2. The skip_proof option facilitates testing and development workflows. 
--- Cargo.lock | 1 + crates/publisher/Cargo.toml | 2 +- crates/publisher/bin/verify_blocks.rs | 13 +- crates/publisher/src/api/operations.rs | 15 +- crates/publisher/src/errors.rs | 7 + crates/publisher/src/validator/validator.rs | 234 +++++++++++++++----- 6 files changed, 205 insertions(+), 67 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8733ab4..49ed847 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4139,6 +4139,7 @@ dependencies = [ "sqlx", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-handler", + "starknet-types-core", "store", "thiserror 2.0.4", "tokio", diff --git a/crates/publisher/Cargo.toml b/crates/publisher/Cargo.toml index 5837197..8d3dcac 100644 --- a/crates/publisher/Cargo.toml +++ b/crates/publisher/Cargo.toml @@ -38,7 +38,7 @@ starknet-crypto = { workspace = true } clap = { workspace = true, features = ["derive"] } risc0-zkvm = { version = "1.1.3" } -# risc0-zkvm-platform = { version = "1.1.3" } +starknet-types-core = "0.1.7" serde = "1.0" risc0-ethereum-contracts = { git = "https://github.com/risc0/risc0-ethereum", tag = "v1.1.4" } diff --git a/crates/publisher/bin/verify_blocks.rs b/crates/publisher/bin/verify_blocks.rs index 2fe98df..2f023f9 100644 --- a/crates/publisher/bin/verify_blocks.rs +++ b/crates/publisher/bin/verify_blocks.rs @@ -1,5 +1,5 @@ use clap::Parser; -use common::initialize_logger_and_env; +use common::{get_env_var, initialize_logger_and_env}; use publisher::{db::DbConnection, prove_headers_integrity_and_inclusion}; use tokio; @@ -22,6 +22,8 @@ struct Args { #[tokio::main] async fn main() -> Result<(), Box> { initialize_logger_and_env()?; + let rpc_url = get_env_var("STARKNET_RPC_URL")?; + let l2_store_address = get_env_var("FOSSIL_STORE")?; let args = Args::parse(); @@ -32,7 +34,14 @@ async fn main() -> Result<(), Box> { .await?; // Verify blocks - match prove_headers_integrity_and_inclusion(&headers, Some(args.skip_proof)).await { + match 
prove_headers_integrity_and_inclusion( + &rpc_url, + &l2_store_address, + &headers, + Some(args.skip_proof), + ) + .await + { Ok(result) => { for proof in result { proof.receipt().verify(proof.image_id()?)?; diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index 2d8004d..0797e0c 100644 --- a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -48,6 +48,8 @@ pub async fn prove_mmr_update( } pub async fn prove_headers_integrity_and_inclusion( + rpc_url: &String, + l2_store_address: &String, headers: &Vec, skip_proof_verification: Option, ) -> Result, PublisherError> { @@ -56,12 +58,13 @@ pub async fn prove_headers_integrity_and_inclusion( let skip_proof = skip_proof_verification.unwrap_or(false); - let validator = ValidatorBuilder::new(DEFAULT_BATCH_SIZE, skip_proof) - .await - .map_err(|e| { - tracing::error!(error = %e, "Failed to create ValidatorBuilder"); - e - })?; + let validator = + ValidatorBuilder::new(rpc_url, l2_store_address, DEFAULT_BATCH_SIZE, skip_proof) + .await + .map_err(|e| { + tracing::error!(error = %e, "Failed to create ValidatorBuilder"); + e + })?; let result = validator .verify_blocks_integrity_and_inclusion(headers) diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index abea24d..241cf76 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -1,6 +1,7 @@ use common::UtilsError; use mmr::{InStoreTableError, MMRError, StoreError}; use mmr_utils::MMRUtilsError; +use starknet_types_core::felt::FromStrError; use thiserror::Error; #[derive(Error, Debug)] @@ -73,6 +74,12 @@ pub enum ValidatorError { InvalidProofsCount { expected: usize, actual: usize }, #[error("Invalid input: {0}")] InvalidInput(&'static str), + #[error("Starknet provider error: {0}")] + StarknetProvider(#[from] starknet_handler::StarknetHandlerError), + #[error("Invalid MMR root: expected {expected} but found {actual}")] + InvalidMmrRoot { 
expected: String, actual: String }, + #[error("Failed to parse Felt value: {0}")] + FeltParsing(#[from] FromStrError), } #[derive(Error, Debug)] diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index 0abc4d7..eb07d8c 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -5,17 +5,27 @@ use guest_types::{BlocksValidityInput, GuestProof, MMRInput}; use methods::{BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID}; use mmr::{PeaksOptions, MMR}; use mmr_utils::{initialize_mmr, StoreManager}; +use starknet_crypto::Felt; +use starknet_handler::provider::StarknetProvider; use std::collections::HashMap; use store::SqlitePool; use tracing::{error, span, Level}; pub struct ValidatorBuilder { + rpc_url: String, + l2_store_address: Felt, proof_generator: ProofGenerator, batch_size: u64, + skip_proof: bool, } impl ValidatorBuilder { - pub async fn new(batch_size: u64, skip_proof: bool) -> Result { + pub async fn new( + rpc_url: &String, + l2_store_address: &String, + batch_size: u64, + skip_proof: bool, + ) -> Result { if batch_size == 0 { return Err(ValidatorError::InvalidInput( "Batch size must be greater than 0", @@ -26,8 +36,11 @@ impl ValidatorBuilder { ProofGenerator::new(BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID, skip_proof)?; Ok(Self { + rpc_url: rpc_url.clone(), + l2_store_address: Felt::from_hex(l2_store_address)?, proof_generator, batch_size, + skip_proof, }) } @@ -35,17 +48,174 @@ impl ValidatorBuilder { &self, headers: &Vec, ) -> Result, ValidatorError> { + self.validate_headers(headers)?; + let _span = span!(Level::INFO, "verify_blocks_integrity_and_inclusion").entered(); + + let mmrs = self.initialize_mmrs_for_headers(headers).await?; + + if self.skip_proof { + tracing::info!("Skipping MMR root verification as skip_proof is enabled"); + } else { + tracing::info!("Verifying MMR roots against onchain state..."); + self.verify_mmr_roots(&mmrs).await?; + } + + let 
block_indexes = self.collect_block_indexes(headers, &mmrs).await?; + self.generate_proofs_for_batches(headers, &mmrs, &block_indexes) + .await + } + + fn validate_headers( + &self, + headers: &[eth_rlp_types::BlockHeader], + ) -> Result<(), ValidatorError> { if headers.is_empty() { return Err(ValidatorError::InvalidInput("Headers list cannot be empty")); } + Ok(()) + } - let span = span!(Level::INFO, "verify_blocks_integrity_and_inclusion"); - let _enter = span.enter(); + async fn verify_mmr_roots( + &self, + mmrs: &HashMap, + ) -> Result<(), ValidatorError> { + let batch_indexes: Vec = mmrs.keys().cloned().collect(); + let onchain_mmr_roots = self.get_onchain_mmr_root(&batch_indexes).await?; - let mmrs = self.initialize_mmrs_for_headers(headers).await?; - let block_indexes = self.collect_block_indexes(headers, &mmrs).await?; - self.generate_proofs_for_batches(headers, &mmrs, &block_indexes) + let onchain_roots_map: HashMap = batch_indexes + .iter() + .map(|&index| (index, &onchain_mmr_roots[index as usize])) + .collect(); + + for (batch_index, (_, mmr, _)) in mmrs.iter() { + self.verify_single_mmr_root(batch_index, mmr, &onchain_roots_map) + .await?; + } + + Ok(()) + } + + async fn verify_single_mmr_root( + &self, + batch_index: &u64, + mmr: &MMR, + onchain_roots_map: &HashMap, + ) -> Result<(), ValidatorError> { + let mmr_elements_count = mmr.elements_count.get().await?; + let bag = mmr.bag_the_peaks(Some(mmr_elements_count)).await?; + let mmr_root = mmr.calculate_root_hash(&bag, mmr_elements_count)?; + + let onchain_root = onchain_roots_map + .get(batch_index) + .ok_or_else(|| ValidatorError::InvalidInput("Missing onchain MMR root for batch"))?; + + if mmr_root != **onchain_root { + return Err(ValidatorError::InvalidMmrRoot { + expected: mmr_root, + actual: (*onchain_root).clone(), + }); + } + + Ok(()) + } + + async fn generate_proofs_for_batches( + &self, + headers: &[eth_rlp_types::BlockHeader], + mmrs: &HashMap, + block_indexes: &[(usize, u64)], + ) -> 
Result, ValidatorError> { + let _span = span!(Level::INFO, "generate_proofs_for_batches").entered(); + let mut proofs = Vec::new(); + + for (batch_index, (_, mmr, _)) in mmrs { + let proof = self + .generate_batch_proof(headers, mmr, block_indexes, *batch_index) + .await?; + proofs.push(proof); + } + + Ok(proofs) + } + + async fn generate_batch_proof( + &self, + headers: &[eth_rlp_types::BlockHeader], + mmr: &MMR, + block_indexes: &[(usize, u64)], + batch_index: u64, + ) -> Result { + let batch_block_indexes = self.get_batch_block_indexes(block_indexes, batch_index); + let batch_headers = self.get_batch_headers(headers, batch_index); + + let batch_proofs = self + .get_batch_proofs(mmr, &batch_block_indexes, batch_index) + .await?; + let guest_proofs = self.convert_to_guest_proofs(batch_proofs); + + self.validate_proofs_count(&batch_headers, &guest_proofs, batch_index)?; + + let mmr_input = self.prepare_mmr_input(mmr).await?; + let blocks_validity_input = + BlocksValidityInput::new(batch_headers, mmr_input, guest_proofs); + + Ok(self + .proof_generator + .generate_stark_proof(blocks_validity_input) + .await?) + } + + async fn get_batch_proofs( + &self, + mmr: &MMR, + batch_block_indexes: &Vec, + batch_index: u64, + ) -> Result, ValidatorError> { + mmr.get_proofs(batch_block_indexes, None) .await + .map_err(|e| { + error!(error = %e, "Failed to get proofs for batch index: {}", batch_index); + ValidatorError::Store(store::StoreError::GetError) + }) + } + + fn validate_proofs_count( + &self, + batch_headers: &[eth_rlp_types::BlockHeader], + guest_proofs: &[GuestProof], + batch_index: u64, + ) -> Result<(), ValidatorError> { + if batch_headers.len() != guest_proofs.len() { + error!( + "Proofs count mismatch for batch index: {}. 
Expected: {}, Actual: {}", + batch_index, + batch_headers.len(), + guest_proofs.len() + ); + return Err(ValidatorError::InvalidProofsCount { + expected: batch_headers.len(), + actual: guest_proofs.len(), + }); + } + Ok(()) + } + + async fn get_onchain_mmr_root( + &self, + batch_indexs: &Vec, + ) -> Result, ValidatorError> { + let provider = StarknetProvider::new(&self.rpc_url)?; + + let mut mmr_roots = Vec::new(); + + for batch_index in batch_indexs { + let mmr_state = provider + .get_mmr_state(&self.l2_store_address, *batch_index) + .await?; + mmr_roots.push(mmr_state.root_hash().to_string()); + } + + Ok(mmr_roots) } async fn initialize_mmrs_for_headers( @@ -115,58 +285,6 @@ impl ValidatorBuilder { Ok(block_indexes) } - async fn generate_proofs_for_batches( - &self, - headers: &[eth_rlp_types::BlockHeader], - mmrs: &HashMap, - block_indexes: &[(usize, u64)], - ) -> Result, ValidatorError> { - let span = span!(Level::INFO, "generate_proofs_for_batches"); - let _enter = span.enter(); - - let mut proofs = Vec::new(); - - for (batch_index, (_, mmr, _)) in mmrs { - let batch_block_indexes = self.get_batch_block_indexes(block_indexes, *batch_index); - let batch_headers = self.get_batch_headers(headers, *batch_index); - - let batch_proofs = mmr - .get_proofs(&batch_block_indexes, None) - .await - .map_err(|e| { - error!(error = %e, "Failed to get proofs for batch index: {}", batch_index); - ValidatorError::Store(store::StoreError::GetError) - })?; - let guest_proofs = self.convert_to_guest_proofs(batch_proofs); - - if batch_headers.len() != guest_proofs.len() { - error!( - "Proofs count mismatch for batch index: {}. 
Expected: {}, Actual: {}", - batch_index, - batch_headers.len(), - guest_proofs.len() - ); - return Err(ValidatorError::InvalidProofsCount { - expected: batch_headers.len(), - actual: guest_proofs.len(), - }); - } - - let mmr_input = self.prepare_mmr_input(mmr).await?; - let blocks_validity_input = - BlocksValidityInput::new(batch_headers, mmr_input, guest_proofs); - - let proof = self - .proof_generator - .generate_stark_proof(blocks_validity_input) - .await?; - - proofs.push(proof); - } - - Ok(proofs) - } - fn get_batch_block_indexes( &self, block_indexes: &[(usize, u64)], From 800ce02b53660872a6e4c660caf2d3841e671aba Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 05:45:26 +0800 Subject: [PATCH 10/54] fix(publisher): correct InvalidBlockRange error construction - Update error construction to use struct variant syntax with named fields - Align with AccumulatorError enum definition where InvalidBlockRange expects {start_block, end_block} fields - Fix compiler error in get_block_headers_by_block_range method --- crates/publisher/src/db/db_access.rs | 6 ++++++ crates/publisher/src/errors.rs | 2 ++ 2 files changed, 8 insertions(+) diff --git a/crates/publisher/src/db/db_access.rs b/crates/publisher/src/db/db_access.rs index a6413bb..d084fef 100644 --- a/crates/publisher/src/db/db_access.rs +++ b/crates/publisher/src/db/db_access.rs @@ -37,6 +37,12 @@ impl DbConnection { start_block: u64, end_block: u64, ) -> Result, AccumulatorError> { + if start_block > end_block { + return Err(AccumulatorError::InvalidBlockRange { + start_block, + end_block, + }); + } let temp_headers = sqlx::query_as!( TempBlockHeader, r#" diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index 241cf76..722267b 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -54,6 +54,8 @@ pub enum AccumulatorError { InvalidInput(&'static str), #[error("Blockchain operation failed: {0}")] BlockchainError(String), + #[error("Invalid block 
range: start block {start_block} is greater than end block {end_block}")] + InvalidBlockRange { start_block: u64, end_block: u64 }, } #[derive(thiserror::Error, Debug)] From c8e80bdf9e308c2e478c313d98ab37afc8441d3d Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 07:15:47 +0800 Subject: [PATCH 11/54] fix: improve block integrity verification - Fix hex conversion handling in starknet-handler for U256 values - Update dependencies in common and ethereum crates - Enhance proof generator validation checks - Improve accumulator state handling - Update messaging configuration --- Cargo.lock | 3 +- config/anvil.messaging.json | 2 +- crates/client/src/client.rs | 7 ++-- crates/common/Cargo.toml | 1 - crates/common/src/lib.rs | 29 ++++++++++---- crates/ethereum/Cargo.toml | 2 + crates/ethereum/src/lib.rs | 40 +++++++++++++++----- crates/publisher/Cargo.toml | 1 + crates/publisher/src/core/accumulator.rs | 11 ------ crates/publisher/src/core/proof_generator.rs | 2 +- crates/publisher/src/errors.rs | 7 +++- crates/publisher/src/validator/validator.rs | 24 +++++++----- crates/starknet-handler/src/lib.rs | 5 ++- crates/starknet-handler/src/provider.rs | 2 +- 14 files changed, 89 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 49ed847..02d6e24 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1724,7 +1724,6 @@ dependencies = [ "ruint", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror 2.0.4", - "tracing", "tracing-subscriber 0.3.19", ] @@ -2247,6 +2246,7 @@ version = "0.1.0" dependencies = [ "alloy 0.6.4", "common", + "tokio", ] [[package]] @@ -4137,6 +4137,7 @@ dependencies = [ "risc0-zkvm", "serde", "sqlx", + "starknet", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-handler", "starknet-types-core", diff --git a/config/anvil.messaging.json b/config/anvil.messaging.json index 0dd98d5..d7ebe9d 100644 --- a/config/anvil.messaging.json +++ 
b/config/anvil.messaging.json @@ -5,5 +5,5 @@ "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", "interval": 2, - "from_block": 21320622 + "from_block": 21331911 } \ No newline at end of file diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 166f5a4..a9d4b11 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -7,7 +7,7 @@ use starknet::{ }; use starknet_handler::provider::StarknetProvider; use tokio::time::{self, Duration}; -use tracing::{error, info, instrument}; +use tracing::{error, info, instrument, warn}; const BATCH_SIZE: u64 = 1024; @@ -191,9 +191,10 @@ impl LightClient { latest_relayed_block: u64, ) -> Result<(), LightClientError> { if latest_mmr_block >= latest_relayed_block { - error!( + warn!( latest_mmr_block, - latest_relayed_block, "Latest MMR block is greater than the latest relayed block" + latest_relayed_block, + "Latest MMR block is greater than the latest relayed block, skipping proof verification" ); return Err(LightClientError::StateError( latest_mmr_block, diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 9ad078b..a1e0513 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -7,7 +7,6 @@ edition = "2021" thiserror = { workspace = true } dotenv = { workspace = true } starknet-crypto = { workspace = true } -tracing = { workspace = true } tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } alloy-contract = { version = "0.6.4" } diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index 6da1289..b2eb909 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -27,6 +27,8 @@ pub enum UtilsError { FeltError(String), #[error("IO error: {0}")] IoError(#[from] std::io::Error), + #[error("Retry exhausted after {0} attempts: {1}")] + RetryExhausted(u32, String), } /// Retrieves an environment variable or returns an error 
if not set. @@ -50,19 +52,32 @@ pub fn initialize_logger_and_env() -> Result<(), UtilsError> { dotenv::dotenv().ok(); let filter = tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| { - let directive = match "sqlx=off".parse() { - Ok(d) => d, - Err(e) => { - tracing::warn!("Failed to parse sqlx filter directive: {}", e); - Default::default() + // Define default filter directives - adjust these based on your needs + let directives = [ + "sqlx=off", + "info", + "handle_events=warn", // Reduce verbosity of handle_events + "publisher=info", // Keep publisher at info level + ]; + + let mut filter = tracing_subscriber::EnvFilter::new(""); + for directive in directives { + if let Ok(d) = directive.parse() { + filter = filter.add_directive(d); } - }; - tracing_subscriber::EnvFilter::new("info").add_directive(directive) + } + filter }); tracing_subscriber::fmt() .with_env_filter(filter) + .with_target(false) // Removes module path from output + .with_thread_ids(false) // Removes thread IDs + .with_thread_names(false) // Removes thread names .with_file(true) + .with_line_number(true) + .with_span_events(tracing_subscriber::fmt::format::FmtSpan::NONE) // Reduces span noise + .compact() // Uses more compact format .init(); Ok(()) } diff --git a/crates/ethereum/Cargo.toml b/crates/ethereum/Cargo.toml index 3186e3b..b0b123c 100644 --- a/crates/ethereum/Cargo.toml +++ b/crates/ethereum/Cargo.toml @@ -6,5 +6,7 @@ edition = "2021" [dependencies] common = { path = "../common" } +tokio = { workspace = true } + alloy = { version = "0.6.4", features = ["full", "node-bindings"] } diff --git a/crates/ethereum/src/lib.rs b/crates/ethereum/src/lib.rs index 7d6172b..0361a03 100644 --- a/crates/ethereum/src/lib.rs +++ b/crates/ethereum/src/lib.rs @@ -2,6 +2,7 @@ use alloy::{providers::ProviderBuilder, sol}; use common::{get_env_var, UtilsError}; +use tokio::time::{sleep, Duration}; // Codegen from embedded Solidity code and precompiled bytecode. sol! 
{ @@ -21,17 +22,38 @@ sol! { #[allow(dead_code)] pub async fn get_finalized_block_hash() -> Result<(u64, String), UtilsError> { let rpc_url = get_env_var("ETH_RPC_URL")?; - let provider = ProviderBuilder::new() - .with_recommended_fillers() - .on_anvil_with_wallet_and_config(|anvil| anvil.fork(rpc_url)); + const MAX_RETRIES: u32 = 3; + const RETRY_DELAY: Duration = Duration::from_secs(1); - let contract = BlockHashFetcher::deploy(&provider).await?; + let mut attempts = 0; + loop { + attempts += 1; + let result: Result<(u64, String), UtilsError> = async { + let provider = ProviderBuilder::new() + .with_recommended_fillers() + .on_anvil_with_wallet_and_config(|anvil| anvil.fork(rpc_url.clone())); - let builder = contract.getBlockHash(); - let result = builder.call().await?; + let contract = BlockHashFetcher::deploy(&provider).await?; + let result = contract.getBlockHash().call().await?; - let block_number: u64 = result.blockNumber.try_into()?; - let block_hash = result.blockHash.to_string(); + let block_number: u64 = result.blockNumber.try_into()?; + let block_hash = result.blockHash.to_string(); - Ok((block_number, block_hash)) + Ok((block_number, block_hash)) + } + .await; + + match result { + Ok(value) => return Ok(value), + Err(_) => { + if attempts >= MAX_RETRIES { + return Err(UtilsError::RetryExhausted( + MAX_RETRIES, + "get_finalized_block_hash".to_string(), + )); + } + sleep(RETRY_DELAY).await; + } + } + } } diff --git a/crates/publisher/Cargo.toml b/crates/publisher/Cargo.toml index 8d3dcac..91fa525 100644 --- a/crates/publisher/Cargo.toml +++ b/crates/publisher/Cargo.toml @@ -35,6 +35,7 @@ tokio = { workspace = true, features = ["rt-multi-thread"] } sqlx = { workspace = true } dotenv = { workspace = true } starknet-crypto = { workspace = true } +starknet = { workspace = true } clap = { workspace = true, features = ["derive"] } risc0-zkvm = { version = "1.1.3" } diff --git a/crates/publisher/src/core/accumulator.rs b/crates/publisher/src/core/accumulator.rs 
index 235c7ef..dc26611 100644 --- a/crates/publisher/src/core/accumulator.rs +++ b/crates/publisher/src/core/accumulator.rs @@ -191,17 +191,6 @@ impl<'a> AccumulatorBuilder<'a> { )); } - let (finalized_block_number, _) = get_finalized_block_hash().await.map_err(|e| { - error!(error = %e, "Failed to get finalized block hash"); - AccumulatorError::BlockchainError(format!("Failed to get finalized block: {}", e)) - })?; - - if end_block > finalized_block_number { - return Err(AccumulatorError::InvalidInput( - "End block cannot be greater than finalized block", - )); - } - let mut current_end = end_block; let mut batch_results = Vec::new(); diff --git a/crates/publisher/src/core/proof_generator.rs b/crates/publisher/src/core/proof_generator.rs index 7dcdc89..5032f16 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ -122,7 +122,7 @@ where return Err(ProofGeneratorError::InvalidInput("Input cannot be empty")); } - info!("Generating Groth16 proof for final batch"); + info!("Generating Groth16 proof..."); debug!("Input size: {} bytes", input_size); let method_elf = self.method_elf; diff --git a/crates/publisher/src/errors.rs b/crates/publisher/src/errors.rs index 722267b..99da855 100644 --- a/crates/publisher/src/errors.rs +++ b/crates/publisher/src/errors.rs @@ -1,6 +1,7 @@ use common::UtilsError; use mmr::{InStoreTableError, MMRError, StoreError}; use mmr_utils::MMRUtilsError; +use starknet::core::types::U256; use starknet_types_core::felt::FromStrError; use thiserror::Error; @@ -54,7 +55,9 @@ pub enum AccumulatorError { InvalidInput(&'static str), #[error("Blockchain operation failed: {0}")] BlockchainError(String), - #[error("Invalid block range: start block {start_block} is greater than end block {end_block}")] + #[error( + "Invalid block range: start block {start_block} is greater than end block {end_block}" + )] InvalidBlockRange { start_block: u64, end_block: u64 }, } @@ -79,7 +82,7 @@ pub enum 
ValidatorError { #[error("Starknet provider error: {0}")] StarknetProvider(#[from] starknet_handler::StarknetHandlerError), #[error("Invalid MMR root: expected {expected} but found {actual}")] - InvalidMmrRoot { expected: String, actual: String }, + InvalidMmrRoot { expected: U256, actual: U256 }, #[error("Failed to parse Felt value: {0}")] FeltParsing(#[from] FromStrError), } diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index eb07d8c..7b53013 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -5,8 +5,10 @@ use guest_types::{BlocksValidityInput, GuestProof, MMRInput}; use methods::{BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID}; use mmr::{PeaksOptions, MMR}; use mmr_utils::{initialize_mmr, StoreManager}; +use starknet::core::types::U256; use starknet_crypto::Felt; use starknet_handler::provider::StarknetProvider; +use starknet_handler::u256_from_hex; use std::collections::HashMap; use store::SqlitePool; use tracing::{error, span, Level}; @@ -82,9 +84,10 @@ impl ValidatorBuilder { let batch_indexes: Vec = mmrs.keys().cloned().collect(); let onchain_mmr_roots = self.get_onchain_mmr_root(&batch_indexes).await?; - let onchain_roots_map: HashMap = batch_indexes + let onchain_roots_map: HashMap = batch_indexes .iter() - .map(|&index| (index, &onchain_mmr_roots[index as usize])) + .zip(onchain_mmr_roots.iter()) + .map(|(&index, root)| (index, root.clone())) .collect(); for (batch_index, (_, mmr, _)) in mmrs.iter() { @@ -99,20 +102,23 @@ impl ValidatorBuilder { &self, batch_index: &u64, mmr: &MMR, - onchain_roots_map: &HashMap, + onchain_roots_map: &HashMap, ) -> Result<(), ValidatorError> { let mmr_elements_count = mmr.elements_count.get().await?; let bag = mmr.bag_the_peaks(Some(mmr_elements_count)).await?; - let mmr_root = mmr.calculate_root_hash(&bag, mmr_elements_count)?; + let mmr_root = u256_from_hex( + &mmr.calculate_root_hash(&bag, mmr_elements_count)? 
+ .to_string(), + )?; let onchain_root = onchain_roots_map .get(batch_index) .ok_or_else(|| ValidatorError::InvalidInput("Missing onchain MMR root for batch"))?; - if mmr_root != **onchain_root { + if onchain_root.clone() != mmr_root { return Err(ValidatorError::InvalidMmrRoot { - expected: mmr_root, - actual: (*onchain_root).clone(), + expected: onchain_root.clone(), + actual: mmr_root, }); } @@ -203,7 +209,7 @@ impl ValidatorBuilder { async fn get_onchain_mmr_root( &self, batch_indexs: &Vec, - ) -> Result, ValidatorError> { + ) -> Result, ValidatorError> { let provider = StarknetProvider::new(&self.rpc_url)?; let mut mmr_roots = Vec::new(); @@ -212,7 +218,7 @@ impl ValidatorBuilder { let mmr_state = provider .get_mmr_state(&self.l2_store_address, *batch_index) .await?; - mmr_roots.push(mmr_state.root_hash().to_string()); + mmr_roots.push(mmr_state.root_hash()); } Ok(mmr_roots) diff --git a/crates/starknet-handler/src/lib.rs b/crates/starknet-handler/src/lib.rs index b67be92..ca5585c 100644 --- a/crates/starknet-handler/src/lib.rs +++ b/crates/starknet-handler/src/lib.rs @@ -82,7 +82,10 @@ pub fn u256_from_hex(hex: &str) -> Result { let _span = span!(Level::DEBUG, "hex_conversion").entered(); debug!(input_hex = hex, "Converting hex to U256"); - let crypto_bigint = CryptoBigIntU256::from_be_hex(hex); + // Trim "0x" prefix if present + let hex_clean = hex.strip_prefix("0x").unwrap_or(hex); + + let crypto_bigint = CryptoBigIntU256::from_be_hex(hex_clean); let result = U256::from(crypto_bigint); debug!(result = ?result, "Hex conversion completed"); diff --git a/crates/starknet-handler/src/provider.rs b/crates/starknet-handler/src/provider.rs index fd6b33a..eb7a730 100644 --- a/crates/starknet-handler/src/provider.rs +++ b/crates/starknet-handler/src/provider.rs @@ -24,7 +24,7 @@ impl StarknetProvider { debug!("Initializing StarknetProvider"); let parsed_url = Url::parse(rpc_url)?; - info!("Parsed RPC URL successfully"); + debug!("Parsed RPC URL successfully"); Ok(Self 
{ provider: Arc::new(JsonRpcClient::new(HttpTransport::new(parsed_url))), From 6843224359ace830570115ff6dee181d705bba82 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 10:21:44 +0800 Subject: [PATCH 12/54] refactor: reduce verbosity in light client tracing logs - Simplify log messages to focus on essential information - Remove redundant context from log statements - Maintain critical state information in error scenarios --- config/anvil.messaging.json | 2 +- crates/client/src/client.rs | 4 ++- crates/publisher/src/api/operations.rs | 9 +----- crates/publisher/src/core/accumulator.rs | 32 +++---------------- crates/publisher/src/core/batch_processor.rs | 10 +++--- .../publisher/src/core/mmr_state_manager.rs | 7 ++-- crates/publisher/src/core/proof_generator.rs | 10 ++---- crates/publisher/src/validator/validator.rs | 11 +------ crates/starknet-handler/src/account.rs | 4 +-- crates/starknet-handler/src/lib.rs | 6 +--- 10 files changed, 22 insertions(+), 73 deletions(-) diff --git a/config/anvil.messaging.json b/config/anvil.messaging.json index d7ebe9d..8b9321f 100644 --- a/config/anvil.messaging.json +++ b/config/anvil.messaging.json @@ -5,5 +5,5 @@ "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", "interval": 2, - "from_block": 21331911 + "from_block": 21332916 } \ No newline at end of file diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index a9d4b11..09f55c8 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -173,7 +173,9 @@ impl LightClient { info!( latest_relayed_block, - latest_mmr_block, "State fetched from Starknet" + latest_mmr_block, + num_blocks = latest_relayed_block - latest_mmr_block, + "State fetched from Starknet" ); // Update MMR and verify proofs diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index 0797e0c..26b1b12 100644 --- 
a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -1,7 +1,6 @@ use crate::{ core::AccumulatorBuilder, errors::PublisherError, utils::Stark, validator::ValidatorBuilder, }; -use tracing::{span, Level}; const DEFAULT_BATCH_SIZE: u64 = 1024; @@ -15,9 +14,6 @@ pub async fn prove_mmr_update( end_block: u64, skip_proof_verification: bool, ) -> Result<(), PublisherError> { - let span = span!(Level::INFO, "prove_mmr_update", start_block, end_block); - let _enter = span.enter(); - let mut builder = AccumulatorBuilder::new( rpc_url, verifier_address, @@ -42,7 +38,7 @@ pub async fn prove_mmr_update( e })?; - tracing::info!("Successfully generated proof for block range"); + tracing::debug!("Successfully generated proof for block range"); Ok(()) } @@ -53,9 +49,6 @@ pub async fn prove_headers_integrity_and_inclusion( headers: &Vec, skip_proof_verification: Option, ) -> Result, PublisherError> { - let span = span!(Level::INFO, "prove_headers_integrity_and_inclusion"); - let _enter = span.enter(); - let skip_proof = skip_proof_verification.unwrap_or(false); let validator = diff --git a/crates/publisher/src/core/accumulator.rs b/crates/publisher/src/core/accumulator.rs index dc26611..cbbe5ab 100644 --- a/crates/publisher/src/core/accumulator.rs +++ b/crates/publisher/src/core/accumulator.rs @@ -5,7 +5,7 @@ use ethereum::get_finalized_block_hash; use methods::{MMR_APPEND_ELF, MMR_APPEND_ID}; use starknet_crypto::Felt; use starknet_handler::{account::StarknetAccount, provider::StarknetProvider}; -use tracing::{debug, error, info, span, warn, Level}; +use tracing::{debug, error, info, warn}; pub struct AccumulatorBuilder<'a> { rpc_url: &'a String, @@ -26,14 +26,6 @@ impl<'a> AccumulatorBuilder<'a> { batch_size: u64, skip_proof_verification: bool, ) -> Result { - let span = span!( - Level::INFO, - "accumulator_builder_new", - batch_size, - skip_proof_verification - ); - let _enter = span.enter(); - info!("Initializing AccumulatorBuilder"); let 
proof_generator = ProofGenerator::new(MMR_APPEND_ELF, MMR_APPEND_ID, skip_proof_verification)?; @@ -82,9 +74,6 @@ impl<'a> AccumulatorBuilder<'a> { &mut self, num_batches: u64, ) -> Result<(), AccumulatorError> { - let span = span!(Level::INFO, "build_with_num_batches", num_batches); - let _enter = span.enter(); - if num_batches == 0 { return Err(AccumulatorError::InvalidInput( "Number of batches must be greater than 0", @@ -177,14 +166,6 @@ impl<'a> AccumulatorBuilder<'a> { start_block: u64, end_block: u64, ) -> Result<(), AccumulatorError> { - let span = span!( - Level::INFO, - "update_mmr_with_new_headers", - start_block, - end_block - ); - let _enter = span.enter(); - if end_block < start_block { return Err(AccumulatorError::InvalidInput( "End block cannot be less than start block", @@ -195,8 +176,8 @@ impl<'a> AccumulatorBuilder<'a> { let mut batch_results = Vec::new(); info!( - start_block, - end_block, "Starting MMR update with new headers" + total_blocks = end_block - start_block, + "Starting MMR update with new headers" ); while current_end >= start_block { @@ -226,7 +207,7 @@ impl<'a> AccumulatorBuilder<'a> { { self.handle_batch_result(&result).await?; batch_results.push((result.proof().calldata(), result.new_mmr_state())); - info!( + debug!( batch_start = batch_range.start, batch_end = batch_range.end, "Batch processed successfully" @@ -240,7 +221,7 @@ impl<'a> AccumulatorBuilder<'a> { error!(start_block, end_block, "No batch results generated"); Err(AccumulatorError::InvalidStateTransition) } else { - info!( + debug!( total_batches = batch_results.len(), "MMR update completed successfully" ); @@ -259,9 +240,6 @@ impl<'a> AccumulatorBuilder<'a> { } async fn verify_proof(&self, calldata: Vec) -> Result<(), AccumulatorError> { - let span = span!(Level::DEBUG, "verify_proof", calldata_len = calldata.len()); - let _enter = span.enter(); - debug!("Initializing Starknet provider"); let starknet_provider = StarknetProvider::new(&self.rpc_url).map_err(|e| { 
error!(error = %e, "Failed to initialize Starknet provider"); diff --git a/crates/publisher/src/core/batch_processor.rs b/crates/publisher/src/core/batch_processor.rs index 8c79634..d42a43b 100644 --- a/crates/publisher/src/core/batch_processor.rs +++ b/crates/publisher/src/core/batch_processor.rs @@ -6,7 +6,7 @@ use common::get_or_create_db_path; use guest_types::{CombinedInput, GuestOutput, MMRInput}; use mmr::PeaksOptions; use mmr_utils::initialize_mmr; -use tracing::{debug, error, info, span, Level}; +use tracing::{debug, error, info}; pub struct BatchProcessor { batch_size: u64, @@ -52,9 +52,6 @@ impl BatchProcessor { )); } - let span = span!(Level::INFO, "process_batch", start_block, end_block); - let _enter = span.enter(); - let batch_index = start_block / self.batch_size; let (batch_start, batch_end) = self.calculate_batch_bounds(batch_index)?; @@ -67,8 +64,9 @@ impl BatchProcessor { let adjusted_end_block = std::cmp::min(end_block, batch_end); info!( - "Processing batch {} (blocks {} to {})", - batch_index, start_block, adjusted_end_block + batch_index, + num_blocks = adjusted_end_block - start_block, + "Processing batch" ); let batch_file_name = diff --git a/crates/publisher/src/core/mmr_state_manager.rs b/crates/publisher/src/core/mmr_state_manager.rs index a290fe8..0336214 100644 --- a/crates/publisher/src/core/mmr_state_manager.rs +++ b/crates/publisher/src/core/mmr_state_manager.rs @@ -5,7 +5,7 @@ use mmr::MMR; use mmr_utils::StoreManager; use starknet_handler::{u256_from_hex, MmrState}; use store::SqlitePool; -use tracing::{debug, error, info, span, Level}; +use tracing::{debug, error, info}; pub struct MMRStateManager; @@ -24,10 +24,7 @@ impl MMRStateManager { )); } - let span = span!(Level::INFO, "update_state", latest_block_number); - let _enter = span.enter(); - - info!("Updating MMR state"); + info!("Updating MMR state..."); Self::append_headers(store_manager, mmr, pool, headers) .await diff --git a/crates/publisher/src/core/proof_generator.rs 
b/crates/publisher/src/core/proof_generator.rs index 5032f16..9e91d5f 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ -9,7 +9,7 @@ use risc0_zkvm::{compute_image_id, default_prover, ExecutorEnv, ProverOpts, Veri use serde::Deserialize; use starknet_crypto::Felt; use tokio::task; -use tracing::{debug, error, info, span, Level}; +use tracing::{debug, error, info}; use crate::{ errors::ProofGeneratorError, @@ -54,9 +54,6 @@ where /// Generate a standard Stark proof for intermediate batches pub async fn generate_stark_proof(&self, input: T) -> Result { - let span = span!(Level::INFO, "generate_stark_proof"); - let _enter = span.enter(); - let input_size = std::mem::size_of_val(&input); if input_size == 0 { return Err(ProofGeneratorError::InvalidInput("Input cannot be empty")); @@ -114,9 +111,6 @@ where /// Generate a Groth16 proof for the final batch pub async fn generate_groth16_proof(&self, input: T) -> Result { - let span = span!(Level::INFO, "generate_groth16_proof"); - let _enter = span.enter(); - let input_size = std::mem::size_of_val(&input); if input_size == 0 { return Err(ProofGeneratorError::InvalidInput("Input cannot be empty")); @@ -194,7 +188,7 @@ where vec![Felt::ZERO] }; - info!("Successfully generated Groth16 proof"); + info!("Successfully generated Groth16 proof and calldata."); Ok(Groth16::new(receipt, calldata)) }) .await? 
diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index 7b53013..dbd2c6d 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -11,7 +11,7 @@ use starknet_handler::provider::StarknetProvider; use starknet_handler::u256_from_hex; use std::collections::HashMap; use store::SqlitePool; -use tracing::{error, span, Level}; +use tracing::error; pub struct ValidatorBuilder { rpc_url: String, @@ -51,7 +51,6 @@ impl ValidatorBuilder { headers: &Vec, ) -> Result, ValidatorError> { self.validate_headers(headers)?; - let _span = span!(Level::INFO, "verify_blocks_integrity_and_inclusion").entered(); let mmrs = self.initialize_mmrs_for_headers(headers).await?; @@ -131,9 +130,7 @@ impl ValidatorBuilder { mmrs: &HashMap, block_indexes: &[(usize, u64)], ) -> Result, ValidatorError> { - let _span = span!(Level::INFO, "generate_proofs_for_batches").entered(); let mut proofs = Vec::new(); - for (batch_index, (_, mmr, _)) in mmrs { let proof = self .generate_batch_proof(headers, mmr, block_indexes, *batch_index) @@ -228,9 +225,6 @@ impl ValidatorBuilder { &self, headers: &[eth_rlp_types::BlockHeader], ) -> Result, ValidatorError> { - let span = span!(Level::INFO, "initialize_mmrs_for_headers"); - let _enter = span.enter(); - let mut mmrs = HashMap::new(); for header in headers { @@ -262,9 +256,6 @@ impl ValidatorBuilder { headers: &[eth_rlp_types::BlockHeader], mmrs: &HashMap, ) -> Result, ValidatorError> { - let span = span!(Level::INFO, "collect_block_indexes"); - let _enter = span.enter(); - let mut block_indexes = Vec::new(); for header in headers { diff --git a/crates/starknet-handler/src/account.rs b/crates/starknet-handler/src/account.rs index 7fd0adb..3ad5fe2 100644 --- a/crates/starknet-handler/src/account.rs +++ b/crates/starknet-handler/src/account.rs @@ -46,7 +46,7 @@ impl StarknetAccount { ExecutionEncoding::New, ); - info!("Starknet account successfully created"); + 
debug!("Starknet account successfully created"); Ok(Self { account }) } @@ -77,7 +77,7 @@ impl StarknetAccount { info!( tx_hash = ?tx.transaction_hash, - "MMR proof verification transaction sent" + "MMR proof onchain verification successful." ); Ok(tx.transaction_hash) } diff --git a/crates/starknet-handler/src/lib.rs b/crates/starknet-handler/src/lib.rs index ca5585c..ab49984 100644 --- a/crates/starknet-handler/src/lib.rs +++ b/crates/starknet-handler/src/lib.rs @@ -9,7 +9,7 @@ use starknet::core::codec::{Decode, Encode}; use starknet::core::types::U256; use starknet::signers::local_wallet::SignError as LocalWalletSignError; use thiserror::Error; -use tracing::{debug, instrument, span, Level}; +use tracing::{debug, instrument}; #[derive(Error, Debug)] pub enum StarknetHandlerError { @@ -79,10 +79,6 @@ impl MmrState { #[instrument(level = "debug")] pub fn u256_from_hex(hex: &str) -> Result { - let _span = span!(Level::DEBUG, "hex_conversion").entered(); - debug!(input_hex = hex, "Converting hex to U256"); - - // Trim "0x" prefix if present let hex_clean = hex.strip_prefix("0x").unwrap_or(hex); let crypto_bigint = CryptoBigIntU256::from_be_hex(hex_clean); From ac573382ac86377b21c68ac825b32decbf562fab Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 10:29:56 +0800 Subject: [PATCH 13/54] refactor: remove unused docker images for anvil, ethereum, katana, and starknet --- docker/images/anvil/Dockerfile | 10 ---- docker/images/anvil/start-anvil.sh | 16 ------ docker/images/ethereum/Dockerfile | 39 -------------- docker/images/katana/Dockerfile | 35 ------------ docker/images/starknet/Dockerfile | 87 ------------------------------ 5 files changed, 187 deletions(-) delete mode 100644 docker/images/anvil/Dockerfile delete mode 100755 docker/images/anvil/start-anvil.sh delete mode 100644 docker/images/ethereum/Dockerfile delete mode 100644 docker/images/katana/Dockerfile delete mode 100644 docker/images/starknet/Dockerfile diff --git 
a/docker/images/anvil/Dockerfile b/docker/images/anvil/Dockerfile deleted file mode 100644 index d6efcac..0000000 --- a/docker/images/anvil/Dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -FROM ghcr.io/foundry-rs/foundry:latest - -RUN apk add --no-cache curl bash - -COPY docker/images/anvil/start-anvil.sh /usr/local/bin/ -RUN chmod +x /usr/local/bin/start-anvil.sh - -EXPOSE 8545 - -ENTRYPOINT ["/usr/local/bin/start-anvil.sh"] \ No newline at end of file diff --git a/docker/images/anvil/start-anvil.sh b/docker/images/anvil/start-anvil.sh deleted file mode 100755 index ffce6e7..0000000 --- a/docker/images/anvil/start-anvil.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -echo "Environment variables:" -echo "ETH_RPC_URL: ${ETH_RPC_URL}" -echo "FOUNDRY_EVM_VERSION: ${FOUNDRY_EVM_VERSION}" - -if [ -z "${ETH_RPC_URL}" ]; then - echo "Error: ETH_RPC_URL is not set" - exit 1 -fi - -exec anvil \ - --fork-url "${ETH_RPC_URL}" \ - --block-time 12 \ - --host 0.0.0.0 \ - --port 8545 \ \ No newline at end of file diff --git a/docker/images/ethereum/Dockerfile b/docker/images/ethereum/Dockerfile deleted file mode 100644 index a4312aa..0000000 --- a/docker/images/ethereum/Dockerfile +++ /dev/null @@ -1,39 +0,0 @@ -FROM ghcr.io/foundry-rs/foundry - -WORKDIR /app - -# Install curl for health checks -RUN apk add --no-cache curl bash - -# Create deployment script -COPY <<'EOF' /app/deploy.sh -#!/bin/bash -set -e - -echo "=== Deployment Environment ===" -echo "ETH_RPC_URL: ${ETH_RPC_URL}" -echo "FOUNDRY_EVM_VERSION: ${FOUNDRY_EVM_VERSION}" -echo "PRIVATE_KEY: ${PRIVATE_KEY}" - -# Test RPC connection directly -echo "Testing RPC connection..." 
-curl -X POST \ - -H "Content-Type: application/json" \ - -d '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \ - "${ETH_RPC_URL}" - -echo "=== Checking File Structure ===" -ls -la /app/contracts/ethereum/script/ -echo "=== File Contents ===" -cat /app/contracts/ethereum/script/LocalTesting.s.sol - -echo "=== Running Deployment ===" -cd /app/contracts/ethereum && forge script script/LocalTesting.s.sol:LocalSetup \ - --rpc-url "${ETH_RPC_URL}" \ - --private-key "${PRIVATE_KEY}" \ - --broadcast -EOF - -RUN chmod +x /app/deploy.sh - -CMD ["/app/deploy.sh"] \ No newline at end of file diff --git a/docker/images/katana/Dockerfile b/docker/images/katana/Dockerfile deleted file mode 100644 index 903ba91..0000000 --- a/docker/images/katana/Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM ubuntu:22.04 - -# Install basic dependencies -RUN apt-get update && \ - apt-get install -y curl ca-certificates git build-essential && \ - rm -rf /var/lib/apt/lists/* - -# Set up PATH environment variable -ENV PATH="/root/.local/bin:/root/.dojo/bin:/root/.cargo/bin:$PATH" - -# Create necessary directories -RUN mkdir -p /root/.local/bin - -# Install Dojo and Katana -RUN curl -L https://install.dojoengine.org | bash && \ - . /root/.bashrc && \ - dojoup -v 1.0.0-alpha.16 - -# Install Scarb with proper PATH setup -RUN curl --proto '=https' --tlsv1.2 -sSf https://docs.swmansion.com/scarb/install.sh | bash && \ - echo 'export PATH="/root/.local/bin:$PATH"' >> /root/.bashrc && \ - . /root/.bashrc - -# Source the environment and verify installations -RUN . /root/.bashrc && \ - echo "Testing Scarb installation:" && scarb --version && \ - echo "Testing Katana installation:" && katana --version - -EXPOSE 5050 - -WORKDIR /app - -# Add environment setup to every shell session -RUN echo '. 
/root/.bashrc' >> /root/.profile -# SHELL ["/bin/bash", "-c"] \ No newline at end of file diff --git a/docker/images/starknet/Dockerfile b/docker/images/starknet/Dockerfile deleted file mode 100644 index 58d8fa3..0000000 --- a/docker/images/starknet/Dockerfile +++ /dev/null @@ -1,87 +0,0 @@ -FROM ubuntu:22.04 - -# Install necessary tools -RUN apt-get update && \ - apt-get install -y \ - curl \ - ca-certificates \ - git \ - build-essential \ - pkg-config \ - libssl-dev \ - && rm -rf /var/lib/apt/lists/* - -# Configure git to allow cloning from HTTPS -RUN git config --global credential.helper store && \ - git config --global url."https://".insteadOf git:// && \ - git config --global http.sslVerify false - -# Install Rust -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y -ENV PATH="/root/.cargo/bin:${PATH}" - -# Install Foundry -RUN curl -L https://foundry.paradigm.xyz | bash && \ - /root/.foundry/bin/foundryup -ENV PATH="/root/.foundry/bin:${PATH}" - -# Install Scarb manually -RUN mkdir -p /root/.local/bin && \ - mkdir -p /root/.local/share/scarb-install/latest && \ - curl -L https://github.com/software-mansion/scarb/releases/download/v2.8.4/scarb-v2.8.4-x86_64-unknown-linux-gnu.tar.gz -o scarb.tar.gz && \ - tar -xzf scarb.tar.gz -C /root/.local/share/scarb-install/latest --strip-components=1 && \ - ln -s /root/.local/share/scarb-install/latest/bin/scarb /root/.local/bin/scarb && \ - rm scarb.tar.gz - -# Set up PATH -ENV PATH="/root/.local/bin:${PATH}" - -# Install starkli manually -RUN mkdir -p /root/.starkli/bin && \ - curl -L https://github.com/xJonathanLEI/starkli/releases/download/v0.3.5/starkli-x86_64-unknown-linux-gnu.tar.gz -o starkli.tar.gz && \ - tar -xzf starkli.tar.gz -C /root/.starkli/bin && \ - rm starkli.tar.gz && \ - chmod +x /root/.starkli/bin/starkli - -# Add starkli to PATH -ENV PATH="/root/.starkli/bin:${PATH}" - -# Set up PATH for all tools -ENV 
PATH="/root/.foundry/bin:/root/.local/bin:/root/.dojo/bin:/root/.cargo/bin:${PATH}" - -# Verify installations -RUN forge --version && \ - cast --version && \ - scarb --version && \ - starkli --version - -# Create and set permissions for Scarb cache directory -RUN mkdir -p /root/.cache/scarb && \ - chmod -R 777 /root/.cache/scarb - -WORKDIR /app - -# Copy scripts directory first and verify it exists -COPY scripts/katana/deploy.sh /app/scripts/katana/ -RUN ls -la /app/scripts/katana/deploy.sh || echo "Script not found!" - -# Make the deployment script executable -RUN chmod +x /app/scripts/katana/deploy.sh - -# Copy the rest of the project -COPY . . - -# Debug: List contents of relevant directories -RUN echo "Listing /app directory:" && \ - ls -la /app && \ - echo "Listing /app/scripts directory:" && \ - ls -la /app/scripts && \ - echo "Listing /app/scripts/katana directory:" && \ - ls -la /app/scripts/katana - -# Initialize directories for Starkli -RUN mkdir -p /root/.starkli/accounts && \ - mkdir -p /root/.starkli/keystores - -# Try using bash to execute the script -ENTRYPOINT ["/bin/bash", "/app/scripts/katana/deploy.sh"] From b0644061affcef0b1b5f4fa1c41b821550ef4f7e Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Thu, 5 Dec 2024 15:01:27 +0800 Subject: [PATCH 14/54] clippy --- crates/ethereum/src/lib.rs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/crates/ethereum/src/lib.rs b/crates/ethereum/src/lib.rs index 0361a03..35f0edb 100644 --- a/crates/ethereum/src/lib.rs +++ b/crates/ethereum/src/lib.rs @@ -43,17 +43,16 @@ pub async fn get_finalized_block_hash() -> Result<(u64, String), UtilsError> { } .await; - match result { - Ok(value) => return Ok(value), - Err(_) => { - if attempts >= MAX_RETRIES { - return Err(UtilsError::RetryExhausted( - MAX_RETRIES, - "get_finalized_block_hash".to_string(), - )); - } - sleep(RETRY_DELAY).await; + if let Ok(value) = result { + return Ok(value); + } else { + if attempts >= MAX_RETRIES { 
+ return Err(UtilsError::RetryExhausted( + MAX_RETRIES, + "get_finalized_block_hash".to_string(), + )); } + sleep(RETRY_DELAY).await; } } } From 9b552bae3e426aad9b6477af14d8a04364d8c323 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Thu, 5 Dec 2024 15:09:55 +0800 Subject: [PATCH 15/54] ci: add Cairo workflow --- .github/workflows/cairo.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .github/workflows/cairo.yml diff --git a/.github/workflows/cairo.yml b/.github/workflows/cairo.yml new file mode 100644 index 0000000..a311e8e --- /dev/null +++ b/.github/workflows/cairo.yml @@ -0,0 +1,17 @@ +name: Cairo fmt +on: + push: + pull_request: +jobs: + check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + # - uses: foundry-rs/setup-snfoundry@v3 + - uses: software-mansion/setup-scarb@v1 + with: + scarb-version: "2.8.5" + - run: cd contracts/starknet && scarb fmt --check + - run: scarb fmt --check + - run: scarb build + # - run: snforge test From 4998cd8efc3a3e491beff0443976a20922bb3b75 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Thu, 5 Dec 2024 15:11:34 +0800 Subject: [PATCH 16/54] cairo ci fix --- .github/workflows/cairo.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/cairo.yml b/.github/workflows/cairo.yml index a311e8e..4559992 100644 --- a/.github/workflows/cairo.yml +++ b/.github/workflows/cairo.yml @@ -12,6 +12,5 @@ jobs: with: scarb-version: "2.8.5" - run: cd contracts/starknet && scarb fmt --check - - run: scarb fmt --check - - run: scarb build + - run: cd contracts/starknet && scarb build # - run: snforge test From 04a585c34ad8313ad35fe90d584c2be896e47378 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Thu, 5 Dec 2024 15:12:16 +0800 Subject: [PATCH 17/54] ci fix --- .github/workflows/cairo.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/cairo.yml b/.github/workflows/cairo.yml index 4559992..22bda50 100644 --- a/.github/workflows/cairo.yml +++ 
b/.github/workflows/cairo.yml @@ -1,6 +1,5 @@ name: Cairo fmt on: - push: pull_request: jobs: check: From 0bdff67af68ff709f559f672d4e26eb08c36a62c Mon Sep 17 00:00:00 2001 From: ametel01 Date: Fri, 6 Dec 2024 13:12:48 +0800 Subject: [PATCH 18/54] feat(docker): add Dockerfiles for client and server - Add Dockerfile.client for building the client binary - Configure vendored dependencies for offline builds - Set up multi-stage build for minimal runtime image - Add proper file permissions and ownership handling --- docker/Dockerfile.anvil | 31 ++++++++++ docker/Dockerfile.client | 118 ++++++++++++++++++++++++++++++++++++++ docker/Dockerfile.deploy | 36 ++++++++++++ docker/Dockerfile.katana | 31 ++++++++++ docker/Dockerfile.relayer | 87 ++++++++++++++++++++++++++++ 5 files changed, 303 insertions(+) create mode 100644 docker/Dockerfile.anvil create mode 100644 docker/Dockerfile.client create mode 100644 docker/Dockerfile.deploy create mode 100644 docker/Dockerfile.katana create mode 100644 docker/Dockerfile.relayer diff --git a/docker/Dockerfile.anvil b/docker/Dockerfile.anvil new file mode 100644 index 0000000..103f05f --- /dev/null +++ b/docker/Dockerfile.anvil @@ -0,0 +1,31 @@ +FROM ubuntu:22.04 + +# Avoid prompts from apt +ENV DEBIAN_FRONTEND=noninteractive + +# Install basic dependencies +RUN apt-get update && apt-get install -y \ + curl \ + git \ + ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +# Install Foundry +RUN curl -L https://foundry.paradigm.xyz | bash +ENV PATH="/root/.foundry/bin:${PATH}" +RUN foundryup + +# Verify installation +RUN forge --version + +# Create entrypoint script +RUN echo '#!/bin/bash\n\ +if [ -z "$ETH_RPC_URL" ]; then\n\ + echo "Error: ETH_RPC_URL environment variable is not set"\n\ + exit 1\n\ +fi\n\ +anvil --fork-url "$ETH_RPC_URL" --block-time 12 --host 0.0.0.0\n\ +' > /entrypoint.sh && chmod +x /entrypoint.sh + +# Default command to run Anvil +ENTRYPOINT ["/entrypoint.sh"] \ No newline at end of file diff --git 
a/docker/Dockerfile.client b/docker/Dockerfile.client new file mode 100644 index 0000000..c647eb0 --- /dev/null +++ b/docker/Dockerfile.client @@ -0,0 +1,118 @@ +# Stage 1: Build the application +FROM rust:latest AS builder + +WORKDIR /app + +# Install the RISC Zero toolchain +RUN curl -L https://risczero.com/install | bash && \ + /root/.risc0/bin/rzup install || true && \ + mkdir -p /root/.cargo/bin && \ + (ln -sf /root/.risc0/bin/cargo-risczero /root/.cargo/bin/cargo-risczero || echo "Symlink creation failed, checking directories..." && ls -la /root/.risc0/bin && ls -la /root/.cargo) + +ENV PATH="/root/.risc0/bin:/root/.cargo/bin:$PATH" + +# Step 1: Copy Cargo.toml and Cargo.lock +COPY Cargo.toml Cargo.lock ./ + +# Step 2: Copy the workspace crates first +COPY crates ./crates + +# Step 3: Create necessary directories and copy pre-fetched dependencies +RUN mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/target && \ + mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/windows && \ + mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/parallel + +# Debug the build context +RUN echo "=== Build Context ===" && \ + pwd && \ + ls -la && \ + echo "=== Vendor Directory (if exists) ===" && \ + ls -R vendor || echo "No vendor directory found!" 
+ +# Only proceed with copy if directory exists +RUN mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/{target,windows,parallel} + +# Create the destination directory with correct permissions +RUN mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 && \ + chmod 755 /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 + +# Copy the vendor directory with permissions preserved +COPY --chown=root:root vendor/ /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/ + +# Debug and verify the copy +RUN echo "=== Debug: Directory structure after copy ===" && \ + find /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 -type d && \ + echo "=== Debug: All files in cc-1.2.2 ===" && \ + find /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2 -type f && \ + echo "=== Debug: Setting final permissions ===" && \ + chmod -R u+r,g+r /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 + +# Create cargo config for vendored dependencies +RUN mkdir -p /usr/local/cargo/registry && \ + echo '[source.crates-io]\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/risc0/RustCrypto-hashes.git"]\n\ +git = "https://github.com/risc0/RustCrypto-hashes.git"\n\ +tag = "sha2-v0.10.8-risczero.0"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/xJonathanLEI/starknet-rs.git"]\n\ +git = "https://github.com/xJonathanLEI/starknet-rs.git"\n\ +branch = "master"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/rust-accumulators.git"]\n\ +git = "https://github.com/ametel01/rust-accumulators.git"\n\ +branch = "feat/sha2-hasher"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/eth-rlp-verify.git"]\n\ +git = "https://github.com/ametel01/eth-rlp-verify.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/garaga.git"]\n\ +git = 
"https://github.com/ametel01/garaga.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/risc0/risc0-ethereum.git"]\n\ +git = "https://github.com/risc0/risc0-ethereum"\n\ +tag = "v1.1.4"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/lambdaclass/lambdaworks.git"]\n\ +git = "https://github.com/lambdaclass/lambdaworks.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source.vendored-sources]\n\ +directory = "/usr/local/cargo/registry/src/github.com-1ecc6299db9ec823"\n\ +\n\ +[net]\n\ +offline = true\n\ +\n\ +[patch.crates-io]\n\ +sha2 = { git = "https://github.com/risc0/RustCrypto-hashes.git", tag = "sha2-v0.10.8-risczero.0" }' > /usr/local/cargo/config.toml + +# Step 4: Build the application +ARG DATABASE_URL +ENV DATABASE_URL=${DATABASE_URL} +RUN cargo build --release --package client + +# Stage 2: Create a minimal runtime image +FROM debian:bookworm-slim + +# Set up the working directory +WORKDIR /app + +# Install runtime dependencies with OpenSSL 3 +RUN apt-get update && apt-get install -y \ + ca-certificates \ + openssl \ + libssl3 \ + && rm -rf /var/lib/apt/lists/* + +# Copy the compiled binary from the builder stage +COPY --from=builder /app/target/release/client . 
+ +# Define the entrypoint +ENTRYPOINT ["./client"] diff --git a/docker/Dockerfile.deploy b/docker/Dockerfile.deploy new file mode 100644 index 0000000..b8ccf1e --- /dev/null +++ b/docker/Dockerfile.deploy @@ -0,0 +1,36 @@ +FROM rust:slim-bookworm + +# Avoid prompts from apt +ENV DEBIAN_FRONTEND=noninteractive + +# Install basic dependencies +RUN apt-get update && apt-get install -y \ + curl \ + git \ + ca-certificates \ + pkg-config \ + libssl-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install Foundry +RUN curl -L https://foundry.paradigm.xyz | bash +ENV PATH="/root/.foundry/bin:${PATH}" +RUN foundryup + +# Set up directories and PATH for Scarb +RUN mkdir -p /root/.local/bin +ENV PATH="/root/.local/bin:${PATH}" + +# Install Scarb +RUN curl --proto '=https' --tlsv1.2 -sSf https://docs.swmansion.com/scarb/install.sh | sh -s -- -v 2.8.5 + +# Install Starkli +RUN curl https://get.starkli.sh | sh && \ + /root/.starkli/bin/starkliup -v 0.3.5 +ENV PATH="/root/.starkli/bin:${PATH}" + +# Create Starkli directory structure +RUN mkdir -p /root/.starkli/accounts /root/.starkli/keystores + +WORKDIR /app +CMD ["bash"] \ No newline at end of file diff --git a/docker/Dockerfile.katana b/docker/Dockerfile.katana new file mode 100644 index 0000000..f29fb20 --- /dev/null +++ b/docker/Dockerfile.katana @@ -0,0 +1,31 @@ +FROM ubuntu:22.04 + +# Avoid prompts from apt +ENV DEBIAN_FRONTEND=noninteractive + +# Install basic dependencies +RUN apt-get update && apt-get install -y \ + curl \ + git \ + ca-certificates \ + build-essential \ + pkg-config \ + libssl-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install Dojo +RUN curl -L https://install.dojoengine.org | bash +ENV PATH="/root/.dojo/bin:${PATH}" +RUN dojoup -v 1.0.0-alpha.16 + +# Create config directory +RUN mkdir -p /app/config + +# Verify installation +RUN katana --version + +# Expose default Katana port +EXPOSE 5050 + +# Default command to run Katana +CMD ["katana", "--messaging", "/app/config/anvil.messaging.json", 
"--disable-fee", "--disable-validate", "--host", "0.0.0.0"] \ No newline at end of file diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer new file mode 100644 index 0000000..3a2f65f --- /dev/null +++ b/docker/Dockerfile.relayer @@ -0,0 +1,87 @@ +# Stage 1: Build the application +FROM rust:latest AS builder + +# Set up the working directory +WORKDIR /app + +# Step 1: Copy the workspace Cargo.toml and Cargo.lock +COPY Cargo.toml Cargo.lock ./ + +# Step 2: Copy pre-fetched dependencies and vendor directory +COPY .cargo /usr/local/cargo +COPY vendor /usr/local/cargo/registry + +# Create cargo config for vendored dependencies +RUN mkdir -p /usr/local/cargo/registry && \ + echo '[source.crates-io]\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master"]\n\ +git = "https://github.com/xJonathanLEI/starknet-rs.git"\n\ +branch = "master"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0"]\n\ +git = "https://github.com/risc0/RustCrypto-hashes.git"\n\ +tag = "sha2-v0.10.8-risczero.0"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/eth-rlp-verify.git"]\n\ +git = "https://github.com/ametel01/eth-rlp-verify.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/garaga.git"]\n\ +git = "https://github.com/ametel01/garaga.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher"]\n\ +git = "https://github.com/ametel01/rust-accumulators.git"\n\ +branch = "feat/sha2-hasher"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/risc0/risc0-ethereum?tag=v1.1.4"]\n\ +git = "https://github.com/risc0/risc0-ethereum"\n\ +tag = "v1.1.4"\n\ +replace-with = "vendored-sources"\n\ +\n\ 
+[source."git+https://github.com/lambdaclass/lambdaworks.git"]\n\ +git = "https://github.com/lambdaclass/lambdaworks.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/OpenZeppelin/openzeppelin-contracts.git"]\n\ +git = "https://github.com/OpenZeppelin/openzeppelin-contracts.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source.vendored-sources]\n\ +directory = "/usr/local/cargo/registry"\n\ +\n\ +[net]\n\ +git-fetch-with-cli = true' > /usr/local/cargo/config.toml + +# Step 3: Copy the workspace crates +COPY crates ./crates + +# Step 4: Copy OpenZeppelin contracts to the expected location +COPY vendor/openzeppelin-contracts /usr/local/cargo/registry/openzeppelin-contracts + +# Step 5: Build the application +RUN cargo build --release --package relayer + +# Stage 2: Create a minimal runtime image +FROM debian:bookworm-slim + +# Set up the working directory +WORKDIR /app + +# Install runtime dependencies with OpenSSL 3 +RUN apt-get update && apt-get install -y \ + ca-certificates \ + openssl \ + libssl3 \ + && rm -rf /var/lib/apt/lists/* + +# Copy the compiled binary from the builder stage +COPY --from=builder /app/target/release/relayer . 
+ +# Define the entrypoint +ENTRYPOINT ["./relayer"] From e27c2b6461dea2a622cafa1c56121e7b2d9093ab Mon Sep 17 00:00:00 2001 From: ametel01 Date: Fri, 6 Dec 2024 13:56:41 +0800 Subject: [PATCH 19/54] feat(starknet): add garaga as git submodule --- .gitmodules | 3 +++ contracts/starknet/garaga | 1 + 2 files changed, 4 insertions(+) create mode 160000 contracts/starknet/garaga diff --git a/.gitmodules b/.gitmodules index b571bd0..7926146 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,3 +2,6 @@ path = contracts/ethereum/lib/forge-std url = https://github.com/foundry-rs/forge-std.git shallow = true +[submodule "contracts/starknet/garaga"] + path = contracts/starknet/garaga + url = https://github.com/keep-starknet-strange/garaga.git diff --git a/contracts/starknet/garaga b/contracts/starknet/garaga new file mode 160000 index 0000000..8023ef6 --- /dev/null +++ b/contracts/starknet/garaga @@ -0,0 +1 @@ +Subproject commit 8023ef6c7b8e0d8bd805695539aa1186e730d9db From a1833443e8fe81708ee59a4da4fca38ffee0aafe Mon Sep 17 00:00:00 2001 From: ametel01 Date: Fri, 6 Dec 2024 13:58:27 +0800 Subject: [PATCH 20/54] chore(docker): setup local deployment infrastructure WIP: client Dockerfile needs fixing for vendored dependencies --- .cargo/config.toml | 44 ++++++ .dockerignore | 69 ++++++++- .env.local | 26 ++++ .gitignore | 2 + Cargo.lock | 95 ++++-------- Cargo.toml | 5 +- config/anvil.messaging.json | 4 +- contracts/starknet/Scarb.lock | 1 - contracts/starknet/verifier/Scarb.toml | 2 +- crates/guest-mmr/Cargo.toml | 2 +- crates/starknet-handler/Cargo.toml | 2 +- docker-compose.build.yml | 15 ++ docker-compose.yaml | 2 - docker-compose.yml | 100 +++++++++++++ docker/compose/docker-compose.yml | 165 --------------------- docker/images/client/Dockerfile | 61 -------- docker/images/relayer/Dockerfile | 36 ----- scripts/{deploy.sh => deploy-contracts.sh} | 99 ++++++------- scripts/run_local_workflow.sh | 69 --------- scripts/start.sh | 29 ++++ 20 files changed, 369 insertions(+), 459 
deletions(-) create mode 100644 .env.local create mode 100644 docker-compose.build.yml delete mode 100644 docker-compose.yaml create mode 100644 docker-compose.yml delete mode 100644 docker/compose/docker-compose.yml delete mode 100644 docker/images/client/Dockerfile delete mode 100644 docker/images/relayer/Dockerfile rename scripts/{deploy.sh => deploy-contracts.sh} (65%) delete mode 100755 scripts/run_local_workflow.sh create mode 100755 scripts/start.sh diff --git a/.cargo/config.toml b/.cargo/config.toml index e69de29..dd63cac 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -0,0 +1,44 @@ +[source.crates-io] +replace-with = "vendored-sources" + +[source."git+https://github.com/ametel01/eth-rlp-verify.git"] +git = "https://github.com/ametel01/eth-rlp-verify.git" +replace-with = "vendored-sources" + +[source."git+https://github.com/ametel01/garaga.git"] +git = "https://github.com/ametel01/garaga.git" +replace-with = "vendored-sources" + +[source."git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher"] +git = "https://github.com/ametel01/rust-accumulators.git" +branch = "feat/sha2-hasher" +replace-with = "vendored-sources" + +[source."git+https://github.com/lambdaclass/lambdaworks.git"] +git = "https://github.com/lambdaclass/lambdaworks.git" +replace-with = "vendored-sources" + +[source."git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0"] +git = "https://github.com/risc0/RustCrypto-hashes.git" +tag = "sha2-v0.10.8-risczero.0" +replace-with = "vendored-sources" + +[source."git+https://github.com/risc0/risc0-ethereum?tag=v1.1.4"] +git = "https://github.com/risc0/risc0-ethereum" +tag = "v1.1.4" +replace-with = "vendored-sources" + +[source."git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master"] +git = "https://github.com/xJonathanLEI/starknet-rs.git" +branch = "master" +replace-with = "vendored-sources" + +[source."git+https://github.com/OpenZeppelin/openzeppelin-contracts.git"] +git = 
"https://github.com/OpenZeppelin/openzeppelin-contracts.git" +replace-with = "vendored-sources" + +[source.vendored-sources] +directory = "vendor" + +[net] +git-fetch-with-cli = true diff --git a/.dockerignore b/.dockerignore index 399ed94..8a0a86d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,3 +1,68 @@ +# Build artifacts target/ -Dockerfile -.dockerignore \ No newline at end of file +/target/ +dist/ + +# Docker files +Dockerfile* +.dockerignore +docker/ + +# Version control +.git/ +.gitignore +.github/ + +# IDE and editor files +.vscode/ +.idea/ +*.swp +*.swo +.DS_Store + +# Debug files +*.log +*.log.* +debug/ + +# Development configs +.env* +*.local +.editorconfig +.eslintrc* +.prettierrc* + +# Documentation +docs/ +*.md +LICENSE + +# Test files +tests/ +*_test.go +*.test +coverage/ + +# Dependencies +node_modules/ + +# Temporary files +tmp/ +temp/ +*.tmp + +# Cargo specific +Cargo.lock # unless it's a binary project +.cargo/ +!vendor/ # Explicitly allow the vendor directory +!vendor/**/target/ # Explicitly allow target directories under vendor + +# Foundry specific +cache/ +out/ +broadcast/ + +# Misc development files +.husky/ +.lint* +.format* diff --git a/.env.local b/.env.local new file mode 100644 index 0000000..4a0b9f2 --- /dev/null +++ b/.env.local @@ -0,0 +1,26 @@ +BONSAI_API_KEY=ZLj670Kwrw57AiufKDjoF1uoozSjZwiba6IPtDgE +BONSAI_API_URL=https://api.bonsai.xyz/ + +DATABASE_URL=postgresql://postgres:Z36P56hzJScKssD@fossil-eth-blockheaders.cnc4gyc2mcb3.us-east-1.rds.amazonaws.com:5432/postgres.bak?sslmode=require + +ETH_RPC_URL=http://38.154.254.162:8545 +ANVIL_URL=http://anvil:8545 +ACCOUNT_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 +ETH_ACCOUNT_ADDRESS=0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 +SN_MESSAGING=0xF62eEc897fa5ef36a957702AA4a45B58fE8Fe312 +L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B + +export FOUNDRY_EVM_VERSION=cancun +ETH_WHALE=0x40B38765696e3d5d8d9d834D8AaD4bB6e418E489 + 
+STARKNET_RPC_URL=http://katana:5050 +STARKNET_PRIVATE_KEY=0x2bbf4f9fd0bbb2e60b0316c1fe0b76cf7a4d0198bd493ced9b8df2a3a24d68a +STARKNET_ACCOUNT_ADDRESS=0xb3ff441a68610b30fd5e2abbf3a1548eb6ba6f3559f2862bf2dc757e5828ca + +L2_MSG_PROXY=0x072ed50f70665e9c0b879d61a6c158293e02f52ee60ba22def223dfd172fcccd +FOSSIL_STORE=0x0416d8aaca654d83be44a8536fa5c06e0484d16aa3202efcbb49c2fb788d39cc +STARKNET_VERIFIER=0x00d06b387e3f0d64cb0762e068f3cc8134550afff936425d79828f2c98e3fc2a +FOSSIL_VERIFIER=0x0403e648dee1ac2612fe1d3758773645755194b22cd0cdca32bf9e645bcf385f + +export STARKNET_ACCOUNT=katana-0 +export STARKNET_RPC=http://katana:5050 diff --git a/.gitignore b/.gitignore index 012000a..1c653e8 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,5 @@ scripts/katana/deploy.log scripts/katana/katana.log *.pb + +vendor/ \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 02d6e24..476a396 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -266,7 +266,7 @@ dependencies = [ "derive_more", "once_cell", "serde", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", ] [[package]] @@ -284,7 +284,7 @@ dependencies = [ "derive_more", "once_cell", "serde", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", ] [[package]] @@ -1063,7 +1063,7 @@ dependencies = [ "blake2", "derivative", "digest 0.10.7", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", ] [[package]] @@ -1722,7 +1722,7 @@ dependencies = [ "alloy-contract 0.6.4", "dotenv", "ruint", - "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "starknet-crypto", "thiserror 2.0.4", "tracing-subscriber 0.3.19", ] @@ -2193,7 +2193,7 @@ dependencies = [ "scrypt", "serde", "serde_json", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "sha3", "thiserror 1.0.69", "uuid 0.8.2", @@ -2548,8 +2548,8 @@ dependencies = [ "num-bigint", "num-traits", "pyo3", - "sha2 0.10.8 
(registry+https://github.com/rust-lang/crates.io-index)", - "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", + "starknet-crypto", "wasm-bindgen", ] @@ -2610,7 +2610,7 @@ dependencies = [ "mmr-utils", "num-bigint", "num-traits", - "sha2 0.10.8 (git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0)", + "sha2", "thiserror 2.0.4", "tokio", ] @@ -2688,9 +2688,9 @@ dependencies = [ "hex", "num-bigint", "num-traits", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "starknet-core", - "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "starknet-crypto", "strum", "strum_macros", "thiserror 2.0.4", @@ -3291,7 +3291,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "once_cell", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", ] [[package]] @@ -3321,7 +3321,7 @@ checksum = "bbc2a4da0d9e52ccfe6306801a112e81a8fc0c76aa3e4449fefeda7fef72bb34" dependencies = [ "lambdaworks-math 0.10.0", "serde", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "sha3", ] @@ -3332,7 +3332,7 @@ source = "git+https://github.com/lambdaclass/lambdaworks.git#fc33967375fd4d99814 dependencies = [ "lambdaworks-math 0.11.0", "serde", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "sha3", ] @@ -4138,7 +4138,7 @@ dependencies = [ "serde", "sqlx", "starknet", - "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "starknet-crypto", "starknet-handler", "starknet-types-core", "store", @@ -4668,7 +4668,7 @@ dependencies = [ "risc0-core", "risc0-zkvm-platform", "serde", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "tracing", ] @@ -4699,7 +4699,7 @@ dependencies = [ "rrs-lib", "semver 1.0.23", "serde", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "stability", "tempfile", 
"tracing", @@ -4988,7 +4988,7 @@ dependencies = [ "hmac", "pbkdf2", "salsa20", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", ] [[package]] @@ -5167,17 +5167,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - [[package]] name = "sha2" version = "0.10.8" @@ -5342,7 +5331,7 @@ dependencies = [ "percent-encoding", "serde", "serde_json", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "smallvec", "sqlformat", "thiserror 1.0.69", @@ -5380,7 +5369,7 @@ dependencies = [ "quote", "serde", "serde_json", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "sqlx-core", "sqlx-mysql", "sqlx-postgres", @@ -5424,7 +5413,7 @@ dependencies = [ "rsa", "serde", "sha1", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "smallvec", "sqlx-core", "stringprep", @@ -5462,7 +5451,7 @@ dependencies = [ "rand", "serde", "serde_json", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2", "smallvec", "sqlx-core", "stringprep", @@ -5519,7 +5508,7 @@ dependencies = [ "starknet-contract", "starknet-core", "starknet-core-derive", - "starknet-crypto 0.7.3 (git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master)", + "starknet-crypto", "starknet-macros", "starknet-providers", "starknet-signers", @@ -5533,7 +5522,7 @@ dependencies = [ "async-trait", "auto_impl", "starknet-core", - "starknet-crypto 0.7.3 (git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master)", + "starknet-crypto", "starknet-providers", "starknet-signers", "thiserror 1.0.69", @@ -5569,7 +5558,7 @@ dependencies = [ "serde_with", "sha3", "starknet-core-derive", - "starknet-crypto 0.7.3 
(git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master)", + "starknet-crypto", "starknet-types-core", ] @@ -5583,25 +5572,6 @@ dependencies = [ "syn 2.0.90", ] -[[package]] -name = "starknet-crypto" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded22ccf4cb9e572ce3f77de6066af53560cd2520d508876c83bb1e6b29d5cbc" -dependencies = [ - "crypto-bigint", - "hex", - "hmac", - "num-bigint", - "num-integer", - "num-traits", - "rfc6979", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", - "starknet-curve 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "starknet-types-core", - "zeroize", -] - [[package]] name = "starknet-crypto" version = "0.7.3" @@ -5614,21 +5584,12 @@ dependencies = [ "num-integer", "num-traits", "rfc6979", - "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", - "starknet-curve 0.5.1 (git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master)", + "sha2", + "starknet-curve", "starknet-types-core", "zeroize", ] -[[package]] -name = "starknet-curve" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcde6bd74269b8161948190ace6cf069ef20ac6e79cd2ba09b320efa7500b6de" -dependencies = [ - "starknet-types-core", -] - [[package]] name = "starknet-curve" version = "0.5.1" @@ -5644,7 +5605,7 @@ dependencies = [ "common", "crypto-bigint", "starknet", - "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "starknet-crypto", "thiserror 2.0.4", "tracing", "url", @@ -5691,7 +5652,7 @@ dependencies = [ "getrandom", "rand", "starknet-core", - "starknet-crypto 0.7.3 (git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master)", + "starknet-crypto", "thiserror 1.0.69", ] diff --git a/Cargo.toml b/Cargo.toml index f393603..3c978f4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ lto = true eth-rlp-types = { git = 
"https://github.com/ametel01/eth-rlp-verify.git" } eth-rlp-verify = { git = "https://github.com/ametel01/eth-rlp-verify.git" } starknet = { git = "https://github.com/xJonathanLEI/starknet-rs.git", branch = "master" } +starknet-crypto = { git = "https://github.com/xJonathanLEI/starknet-rs.git", branch = "master" } sqlx = { version = "0.8.2", features = [ "postgres", @@ -35,6 +36,8 @@ thiserror = "2.0.4" tracing = "0.1.40" tokio = "1.41.1" dotenv = "0.15" -starknet-crypto = "0.7.3" clap = "4.5" +[patch.crates-io] +sha2 = { git = "https://github.com/risc0/RustCrypto-hashes.git", tag = "sha2-v0.10.8-risczero.0" } +starknet-crypto = { git = "https://github.com/xJonathanLEI/starknet-rs.git", branch = "master" } \ No newline at end of file diff --git a/config/anvil.messaging.json b/config/anvil.messaging.json index 8b9321f..1aa70c2 100644 --- a/config/anvil.messaging.json +++ b/config/anvil.messaging.json @@ -1,9 +1,9 @@ { "chain": "ethereum", - "rpc_url": "http://127.0.0.1:8545", + "rpc_url": "http://anvil:8545", "contract_address": "0xF62eEc897fa5ef36a957702AA4a45B58fE8Fe312", "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", "interval": 2, - "from_block": 21332916 + "from_block": 21340770 } \ No newline at end of file diff --git a/contracts/starknet/Scarb.lock b/contracts/starknet/Scarb.lock index ff588ad..e93d860 100644 --- a/contracts/starknet/Scarb.lock +++ b/contracts/starknet/Scarb.lock @@ -11,7 +11,6 @@ dependencies = [ [[package]] name = "garaga" version = "0.14.0" -source = "git+https://github.com/keep-starknet-strange/garaga.git?rev=5f3b232#5f3b23252a04f1714838415e69e318ed8e097c15" [[package]] name = "l1_message_proxy" diff --git a/contracts/starknet/verifier/Scarb.toml b/contracts/starknet/verifier/Scarb.toml index 4692350..84b377a 100644 --- a/contracts/starknet/verifier/Scarb.toml +++ b/contracts/starknet/verifier/Scarb.toml @@ -6,7 +6,7 @@ edition = "2023_11" 
# See more keys and their definitions at https://docs.swmansion.com/scarb/docs/reference/manifest.html [dependencies] -garaga = { git = "https://github.com/keep-starknet-strange/garaga.git", rev = "5f3b232" } +garaga = { path = "../garaga/src" } starknet.workspace = true fossil_store = { path = "../store" } diff --git a/crates/guest-mmr/Cargo.toml b/crates/guest-mmr/Cargo.toml index 95037f2..d3f76b1 100644 --- a/crates/guest-mmr/Cargo.toml +++ b/crates/guest-mmr/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" guest-types = { path = "../guest-types" } hex = "0.4" -sha2 = { git = 'https://github.com/risc0/RustCrypto-hashes.git', tag = 'sha2-v0.10.8-risczero.0', features = ['compress'] } +sha2 = { git = "https://github.com/risc0/RustCrypto-hashes.git", tag = "sha2-v0.10.8-risczero.0", features = ['compress'] } num-bigint = "0.4.4" num-traits = "0.2.19" thiserror = "2.0.3" diff --git a/crates/starknet-handler/Cargo.toml b/crates/starknet-handler/Cargo.toml index 1c88c67..488e2d1 100644 --- a/crates/starknet-handler/Cargo.toml +++ b/crates/starknet-handler/Cargo.toml @@ -8,8 +8,8 @@ common = { path = "../common" } thiserror = { workspace = true } starknet = { workspace = true } -starknet-crypto = { workspace = true } tracing = { workspace = true } +starknet-crypto = { workspace = true } crypto-bigint = "0.5.5" url = "2.5.4" \ No newline at end of file diff --git a/docker-compose.build.yml b/docker-compose.build.yml new file mode 100644 index 0000000..0daee6c --- /dev/null +++ b/docker-compose.build.yml @@ -0,0 +1,15 @@ +services: + anvil: + build: + context: . + dockerfile: docker/Dockerfile.anvil + + katana: + build: + context: . + dockerfile: docker/Dockerfile.katana + + deployer: + build: + context: . 
+ dockerfile: docker/Dockerfile.deploy \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml deleted file mode 100644 index e70ba1a..0000000 --- a/docker-compose.yaml +++ /dev/null @@ -1,2 +0,0 @@ -include: - - docker/compose/docker-compose.yml \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e7b625b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,100 @@ +services: + anvil: + image: fossil-anvil:latest + networks: + - fossil + ports: + - "8545:8545" + env_file: + - ${ENV_FILE:-.env.local} + healthcheck: + test: ["CMD", "cast", "block-number"] + interval: 5s + timeout: 5s + retries: 5 + + katana: + image: fossil-katana:latest + networks: + - fossil + ports: + - "5050:5050" + volumes: + - ./config:/app/config + command: ["katana", "--messaging", "/app/config/anvil.messaging.json", "--disable-fee", "--disable-validate", "--host", "0.0.0.0"] + depends_on: + anvil: + condition: service_healthy + healthcheck: + test: ["CMD", "katana", "--version"] + interval: 5s + timeout: 5s + retries: 5 + + deployer: + image: fossil-deploy:latest + networks: + - fossil + env_file: + - ${ENV_FILE:-.env.local} + volumes: + - .:/app + depends_on: + anvil: + condition: service_healthy + katana: + condition: service_healthy + command: > + /bin/bash -c ' + source ${ENV_FILE} && + if [ "$ENV_FILE" = ".env.local" ]; then + ./scripts/deploy-contracts.sh local + elif [ "$ENV_FILE" = ".env.sepolia" ]; then + ./scripts/deploy-contracts.sh sepolia + elif [ "$ENV_FILE" = ".env.mainnet" ]; then + ./scripts/deploy-contracts.sh mainnet + else + echo "Unknown environment" + exit 1 + fi' + + mmr-builder: + image: fossil-core-client:latest + networks: + - fossil + env_file: + - ${ENV_FILE:-.env.local} + volumes: + - .:/app + entrypoint: ["cargo", "run", "--bin", "build-mmr", "--", "-n", "1"] + depends_on: + deployer: + condition: service_completed_successfully + + client: + image: fossil-core-client:latest + 
networks: + - fossil + env_file: + - ${ENV_FILE:-.env.local} + volumes: + - .:/app + depends_on: + mmr-builder: + condition: service_completed_successfully + deployer: + condition: service_completed_successfully + + relayer: + image: fossil-core-relayer:latest + networks: + - fossil + env_file: + - ${ENV_FILE:-.env.local} + depends_on: + deployer: + condition: service_completed_successfully + +networks: + fossil: + name: fossil-network \ No newline at end of file diff --git a/docker/compose/docker-compose.yml b/docker/compose/docker-compose.yml deleted file mode 100644 index 6ed81dd..0000000 --- a/docker/compose/docker-compose.yml +++ /dev/null @@ -1,165 +0,0 @@ -services: - anvil: - build: - context: ../.. - dockerfile: docker/images/anvil/Dockerfile - ports: - - "8545:8545" - env_file: - - ../../.env - environment: - - ETH_RPC_URL=${ETH_RPC_URL} - - FOUNDRY_EVM_VERSION=${FOUNDRY_EVM_VERSION} - healthcheck: - test: ["CMD", - "curl", - "-X", - "POST", - "-H", - "Content-Type: application/json", - "-d", '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}', - "http://localhost:8545" - ] - interval: 10s - retries: 5 - start_period: 10s - timeout: 5s - networks: - fossil_net: - aliases: - - anvil - - katana: - build: - context: ../.. 
- dockerfile: docker/images/katana/Dockerfile - ports: - - "5050:5050" - volumes: - - ../../config:/app/config - environment: - - STARKNET_RPC_URL=${STARKNET_RPC_URL} - - STARKNET_PRIVATE_KEY=${STARKNET_PRIVATE_KEY} - - STARKNET_ACCOUNT_ADDRESS=${STARKNET_ACCOUNT_ADDRESS} - healthcheck: - test: ["CMD", - "curl", - "-X", - "POST", - "-H", "Content-Type: application/json", - "-d", "{\"jsonrpc\":\"2.0\",\"method\":\"starknet_getBlockWithTxHashes\",\"params\":[\"latest\"],\"id\":1}", - "http://localhost:5050" - ] - interval: 10s - retries: 5 - start_period: 20s - timeout: 5s - depends_on: - anvil: - condition: service_healthy - command: > - katana - --messaging /app/config/anvil.messaging.json - --disable-fee - --disable-validate - --host 0.0.0.0 - networks: - fossil_net: - aliases: - - katana - - ethereum: - build: - context: ../.. - dockerfile: docker/images/ethereum/Dockerfile - env_file: - - ../../.env - environment: - - FOUNDRY_EVM_VERSION=${FOUNDRY_EVM_VERSION} - - ETH_RPC_URL=http://anvil:8545 - - PRIVATE_KEY=${ACCOUNT_PRIVATE_KEY} - volumes: - - type: bind - source: ../../contracts - target: /app/contracts - - type: bind - source: ../../scripts - target: /app/scripts - - type: bind - source: ../../contracts/ethereum/lib - target: /app/lib - - type: bind - source: ../../contracts/ethereum/foundry.toml - target: /app/foundry.toml - depends_on: - anvil: - condition: service_healthy - networks: - fossil_net: - aliases: - - ethereum-deployer - - starknet: - build: - context: ../.. - dockerfile: docker/images/starknet/Dockerfile - env_file: - - ../../.env - environment: - - STARKNET_ACCOUNT=katana-0 - - STARKNET_RPC=http://katana:5050 - - STARKNET_RPC_URL=http://katana:5050 - volumes: - - ../..:/app - depends_on: - katana: - condition: service_healthy - networks: - fossil_net: - aliases: - - starknet-deployer - - client: - build: - context: ../.. 
- dockerfile: docker/images/client/Dockerfile - platform: linux/amd64 - image: fossil-client:latest - env_file: - - ../../.env - environment: - - STARKNET_RPC_URL=http://katana:5050 - networks: - fossil_net: - aliases: - - light-client - - relayer: - build: - context: ../.. - dockerfile: docker/images/relayer/Dockerfile - image: fossil-relayer:latest - env_file: - - ../../.env - depends_on: - - client - networks: - fossil_net: - aliases: - - relayer - -networks: - fossil_net: - driver: bridge - ipam: - driver: default - config: - - subnet: 172.20.0.0/16 - gateway: 172.20.0.1 - driver_opts: - com.docker.network.bridge.enable_icc: "true" - com.docker.network.bridge.enable_ip_masquerade: "true" - com.docker.network.bridge.host_binding_ipv4: "0.0.0.0" - -volumes: - scarb-cache: diff --git a/docker/images/client/Dockerfile b/docker/images/client/Dockerfile deleted file mode 100644 index bd9b59f..0000000 --- a/docker/images/client/Dockerfile +++ /dev/null @@ -1,61 +0,0 @@ -FROM rust:latest AS builder - -RUN apt-get update && apt-get install -y \ - pkg-config \ - libssl-dev \ - curl \ - git \ - && rm -rf /var/lib/apt/lists/* - -# Configure git for better reliability -RUN git config --global http.postBuffer 524288000 && \ - git config --global http.lowSpeedLimit 1000 && \ - git config --global http.lowSpeedTime 60 && \ - git config --global core.compression 0 && \ - git config --global http.sslVerify false && \ - git config --global submodule.recurse true - -# Create cargo config directory and add git-fetch-with-cli config -RUN mkdir -p /usr/local/cargo/ -COPY < Cargo.toml - -# Copy only the needed crates and Cargo.lock -COPY Cargo.lock ./ -COPY crates/relayer ./crates/relayer -COPY crates/common ./crates/common - -# Build with verbose output -RUN cargo build --release -v && \ - ls -la target/release/relayer && \ - chmod +x target/release/relayer - -# Use a newer Debian version that has OpenSSL 3 -FROM debian:bookworm -RUN apt-get update \ - && apt-get install -y 
--no-install-recommends \ - libssl3 \ - ca-certificates \ - && rm -rf /var/lib/apt/lists/* -COPY --from=builder /app/target/release/relayer /usr/local/bin/relayer - -# Add wrapper script -RUN echo '#!/bin/sh\nwhile true; do\n relayer\n sleep 60\ndone' > /usr/local/bin/run-relayer.sh \ - && chmod +x /usr/local/bin/run-relayer.sh - -CMD ["/usr/local/bin/run-relayer.sh"] diff --git a/scripts/deploy.sh b/scripts/deploy-contracts.sh similarity index 65% rename from scripts/deploy.sh rename to scripts/deploy-contracts.sh index ccad1c9..39a2e94 100755 --- a/scripts/deploy.sh +++ b/scripts/deploy-contracts.sh @@ -3,35 +3,49 @@ # Ensure the script stops on the first error set -e -source .env - -ETHEREUM_DIR="contracts/ethereum" - -cd $ETHEREUM_DIR && forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL +# Store the original directory (now inside container at /app) +ORIGINAL_DIR="/app" + +# Check if environment argument is provided +if [ -z "$1" ]; then + echo "Usage: $0 " + echo "Available environments: local, sepolia, mainnet" + exit 1 +fi + +# Validate environment argument +ENV_TYPE="$1" +case "$ENV_TYPE" in + "local"|"sepolia"|"mainnet") + ENV_FILE="$ORIGINAL_DIR/.env.$ENV_TYPE" + echo "Using environment: $ENV_TYPE ($ENV_FILE)" + ;; + *) + echo "Invalid environment. Must be one of: local, sepolia, mainnet" + exit 1 + ;; +esac +# Check if environment file exists +if [ ! -f "$ENV_FILE" ]; then + echo "Error: Environment file $ENV_FILE not found" + exit 1 +fi + +# Source the appropriate environment file +source "$ENV_FILE" + +ETHEREUM_DIR="/app/contracts/ethereum" +STARKNET_DIR="/app/contracts/starknet" + +# Deploy Ethereum contracts +cd "$ETHEREUM_DIR" +forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B -# Function to wait for Katana to be ready -# wait_for_katana() { -# echo "Waiting for Katana to be ready..." -# while ! 
curl -s -X POST -H "Content-Type: application/json" \ -# -d '{"jsonrpc":"2.0","method":"starknet_chainId","params":[],"id":1}' \ -# http://0.0.0.0:5050 > /dev/null; do -# echo "Katana is not ready yet. Waiting..." -# sleep 5 -# done -# echo "Katana is ready!" -# } - -# # Wait for Katana to be ready -# wait_for_katana - -# # Set absolute paths -STARKNET_DIR="../starknet" - # Now deploy Starknet contracts echo "Deploying Starknet contracts..." -cd $STARKNET_DIR +cd "$STARKNET_DIR" scarb build @@ -77,46 +91,31 @@ echo "Contract deployed at: $FOSSIL_VERIFIER_ADDRESS" echo "All contracts deployed!" -# # Fetch the current Ethereum block number using `cast` -# ETH_BLOCK=$(cast block-number) -# echo "Current Ethereum block number: $ETH_BLOCK" - -# # Ensure `ETH_BLOCK` is a valid number before performing arithmetic -# if [[ $ETH_BLOCK =~ ^[0-9]+$ ]]; then -# # Subtract 256 from the current block number -# ETH_BLOCK=$((ETH_BLOCK - 256)) -# echo "Updated Ethereum block number: $ETH_BLOCK" - -# # Run the Starkli command with the updated block number -# starkli invoke $FOSSILSTORE_ADDRESS update_mmr_state $ETH_BLOCK 0x0 0x0 0x0 0x0 -# echo "Updated MMR state on Starknet for testing with block number: $ETH_BLOCK" -# else -# echo "Failed to retrieve a valid block number from 'cast'." -# fi - -# Path to the .env file -ENV_FILE="../../.env" - -# Function to update or append an environment variable in the .env file +# Update the environment file with new addresses update_env_var() { local var_name=$1 local var_value=$2 + if grep -q "^$var_name=" "$ENV_FILE"; then - echo "$var_name already exists, replacing..." + echo "$var_name already exists, replacing in $ENV_FILE..." sed -i "s|^$var_name=.*|$var_name=$var_value|" "$ENV_FILE" else echo "Appending $var_name to $ENV_FILE..." 
- echo "$var_name=$var_value" >>"$ENV_FILE" + echo "$var_name=$var_value" >> "$ENV_FILE" fi } -# Update the .env file with the new addresses +# Update the environment file with the new addresses update_env_var "L2_MSG_PROXY" "$L1MESSAGEPROXY_ADDRESS" update_env_var "FOSSIL_STORE" "$FOSSILSTORE_ADDRESS" update_env_var "STARKNET_VERIFIER" "$VERIFIER_ADDRESS" update_env_var "FOSSIL_VERIFIER" "$FOSSIL_VERIFIER_ADDRESS" -pwd -source ../../.env +# Return to original directory +cd "$ORIGINAL_DIR" + +# Source the updated environment file +source "$ENV_FILE" echo "Environment variables successfully updated in $ENV_FILE" + diff --git a/scripts/run_local_workflow.sh b/scripts/run_local_workflow.sh deleted file mode 100755 index 3234694..0000000 --- a/scripts/run_local_workflow.sh +++ /dev/null @@ -1,69 +0,0 @@ -# Function to wait for a specific log message in a given log file -wait_for_log() { - local log_file=$1 - local search_string=$2 - - echo "Waiting for '$search_string' in $log_file..." - while ! grep -q "$search_string" "$log_file"; do - sleep 1 - done - echo "Found '$search_string' in $log_file." 
-} - -# Start Terminal 1: Start Anvil Ethereum Devnet -gnome-terminal -- bash -c " -cd config; -source anvil.env; -anvil --fork-url \$ETH_RPC_URL --auto-impersonate --block-time 12 | tee anvil.log; -exec bash" - -# Wait for Anvil to be ready -wait_for_log "config/anvil.log" "Listening on" - -# Start Terminal 2: Deploy L1MessageSender.sol -gnome-terminal -- bash -c " -cd contracts/ethereum; -cp ../../config/anvil.env .env; -source .env; -forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url \$ANVIL_URL; -exec bash" - -# Wait for the contract deployment to finish -sleep 10 # Adjust as needed - -# Start Terminal 3: Start Katana Starknet Devnet -gnome-terminal -- bash -c " -cd scripts/katana; -source ../../config/katana.env; -katana --messaging ../../config/anvil.messaging.json --disable-fee | tee katana.log; -exec bash" - -# Wait for Katana to be ready -wait_for_log "scripts/katana/katana.log" "RPC server started" - -# Start Terminal 4: Deploy Starknet Contracts -gnome-terminal -- bash -c " -cd scripts/katana; -./deploy.sh | tee deploy.log; -exec bash" - -# Wait for the "Environment variables successfully updated" message -wait_for_log "scripts/katana/deploy.log" "Environment variables successfully updated" - -# Start Terminal 5: Run the Rust Relayer -gnome-terminal -- bash -c " -cp config/anvil.env .env -cd relayer; -cargo run; -exec bash" - -# Wait for the Rust Relayer to start -sleep 5 # Adjust as needed - -# Start Terminal 6: Run the Rust Light Client -gnome-terminal -- bash -c " -cd client; -cargo run; -exec bash" - -echo "Local testing setup complete. All services are running." 
diff --git a/scripts/start.sh b/scripts/start.sh new file mode 100755 index 0000000..2caf374 --- /dev/null +++ b/scripts/start.sh @@ -0,0 +1,29 @@ +#!/bin/bash +set -e + +ENV_FILE=$1 +BUILD=$2 # Optional second parameter to trigger build + +if [ "$ENV_FILE" != ".env.local" ] && [ "$ENV_FILE" != ".env.sepolia" ] && [ "$ENV_FILE" != ".env.mainnet" ]; then + echo "Usage: $0 { .env.local | .env.sepolia | .env.mainnet } [build]" + exit 1 +fi + +export ENV_FILE=$ENV_FILE + +# Clean up any existing containers +docker-compose down + +if [ "$BUILD" == "build" ]; then + # Build images + echo "Building images..." + docker-compose -f docker-compose.yml -f docker-compose.build.yml build +fi + +if [ "$ENV_FILE" == ".env.local" ]; then + # Local development setup + docker-compose up -d +else + # Production setup (sepolia/mainnet) + docker-compose up -d client relayer +fi From 0f6d57016f57d3e0f9dfeb5cafaca1688d414c84 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Fri, 6 Dec 2024 17:25:53 +0800 Subject: [PATCH 21/54] feat: add build-mmr container - Add new Dockerfile.build-mmr for building and running build-mmr binary - Install Foundry and required dependencies in container - Update docker-compose.yml to use new build-mmr container - Add .env.local.docker to gitignore - Configure container to use existing Anvil instance --- .gitignore | 6 +- docker-compose.yml | 10 ++- docker/Dockerfile.build-mmr | 141 ++++++++++++++++++++++++++++++++++++ docker/Dockerfile.client | 2 +- 4 files changed, 153 insertions(+), 6 deletions(-) create mode 100644 docker/Dockerfile.build-mmr diff --git a/.gitignore b/.gitignore index 1c653e8..7190a5b 100644 --- a/.gitignore +++ b/.gitignore @@ -25,4 +25,8 @@ scripts/katana/katana.log *.pb -vendor/ \ No newline at end of file +vendor/ + +.env.local +.env.sepolia +.env.mainnet \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index e7b625b..51e86a3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -59,15 +59,17 @@ 
services: fi' mmr-builder: - image: fossil-core-client:latest + image: fossil-build-mmr:latest networks: - fossil env_file: - ${ENV_FILE:-.env.local} - volumes: - - .:/app - entrypoint: ["cargo", "run", "--bin", "build-mmr", "--", "-n", "1"] + environment: + - RUST_BACKTRACE=1 + - ANVIL_URL=http://anvil:8545 depends_on: + anvil: + condition: service_healthy deployer: condition: service_completed_successfully diff --git a/docker/Dockerfile.build-mmr b/docker/Dockerfile.build-mmr new file mode 100644 index 0000000..c9e93ea --- /dev/null +++ b/docker/Dockerfile.build-mmr @@ -0,0 +1,141 @@ +# Stage 1: Build the application +FROM rust:latest AS builder + +WORKDIR /app + +# Install Foundry (includes Anvil) +RUN curl -L https://foundry.paradigm.xyz | bash && \ + /root/.foundry/bin/foundryup + +# Add foundry binaries to PATH +ENV PATH="/root/.foundry/bin:$PATH" + +# Set DATABASE_URL as build arg +ARG DATABASE_URL +ENV DATABASE_URL=${DATABASE_URL} + +# Install the RISC Zero toolchain +RUN curl -L https://risczero.com/install | bash && \ + /root/.risc0/bin/rzup install || true && \ + mkdir -p /root/.cargo/bin && \ + (ln -sf /root/.risc0/bin/cargo-risczero /root/.cargo/bin/cargo-risczero || echo "Symlink creation failed, checking directories..." 
&& ls -la /root/.risc0/bin && ls -la /root/.cargo) + +ENV PATH="/root/.risc0/bin:/root/.cargo/bin:$PATH" + +# Step 1: Copy Cargo.toml and Cargo.lock +COPY Cargo.toml Cargo.lock ./ + +# Step 2: Copy the workspace crates first +COPY crates ./crates + +# Step 3: Create necessary directories and copy pre-fetched dependencies +RUN mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/target && \ + mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/windows && \ + mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/parallel + +# Debug the build context +RUN echo "=== Build Context ===" && \ + pwd && \ + ls -la && \ + echo "=== Vendor Directory (if exists) ===" && \ + ls -R vendor || echo "No vendor directory found!" + +# Only proceed with copy if directory exists +RUN mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2/src/{target,windows,parallel} + +# Create the destination directory with correct permissions +RUN mkdir -p /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 && \ + chmod 755 /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 + +# Copy the vendor directory with permissions preserved +COPY --chown=root:root vendor/ /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/ + +# Debug and verify the copy +RUN echo "=== Debug: Directory structure after copy ===" && \ + find /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 -type d && \ + echo "=== Debug: All files in cc-1.2.2 ===" && \ + find /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823/cc-1.2.2 -type f && \ + echo "=== Debug: Setting final permissions ===" && \ + chmod -R u+r,g+r /usr/local/cargo/registry/src/github.com-1ecc6299db9ec823 + +# Create cargo config for vendored dependencies +RUN mkdir -p /usr/local/cargo/registry && \ + echo '[source.crates-io]\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/risc0/RustCrypto-hashes.git"]\n\ 
+git = "https://github.com/risc0/RustCrypto-hashes.git"\n\ +tag = "sha2-v0.10.8-risczero.0"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/xJonathanLEI/starknet-rs.git"]\n\ +git = "https://github.com/xJonathanLEI/starknet-rs.git"\n\ +branch = "master"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/rust-accumulators.git"]\n\ +git = "https://github.com/ametel01/rust-accumulators.git"\n\ +branch = "feat/sha2-hasher"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/eth-rlp-verify.git"]\n\ +git = "https://github.com/ametel01/eth-rlp-verify.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/ametel01/garaga.git"]\n\ +git = "https://github.com/ametel01/garaga.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/risc0/risc0-ethereum.git"]\n\ +git = "https://github.com/risc0/risc0-ethereum"\n\ +tag = "v1.1.4"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source."git+https://github.com/lambdaclass/lambdaworks.git"]\n\ +git = "https://github.com/lambdaclass/lambdaworks.git"\n\ +replace-with = "vendored-sources"\n\ +\n\ +[source.vendored-sources]\n\ +directory = "/usr/local/cargo/registry/src/github.com-1ecc6299db9ec823"\n\ +\n\ +[net]\n\ +offline = true\n\ +\n\ +[patch.crates-io]\n\ +sha2 = { git = "https://github.com/risc0/RustCrypto-hashes.git", tag = "sha2-v0.10.8-risczero.0" }' > /usr/local/cargo/config.toml + +# Step 4: Prepare sqlx before building +# RUN cargo install sqlx-cli --no-default-features --features native-tls,postgres +# RUN cargo sqlx prepare --database-url="${DATABASE_URL}" -- --package publisher --bin build-mmr + +# Step 5: Build the application +RUN cargo build --release --package publisher --bin build-mmr + +# Stage 2: Create a minimal runtime image +FROM debian:bookworm-slim + +WORKDIR /app + +# Install runtime dependencies and Foundry +RUN apt-get update && apt-get install -y \ 
+ ca-certificates \ + openssl \ + libssl3 \ + curl \ + git \ + && rm -rf /var/lib/apt/lists/* \ + && curl -L https://foundry.paradigm.xyz | bash \ + && /root/.foundry/bin/foundryup + +# Add foundry binaries to PATH +ENV PATH="/root/.foundry/bin:$PATH" + +# Copy the compiled binary from the builder stage +COPY --from=builder /app/target/release/build-mmr . + +# Make the binary executable +RUN chmod +x /app/build-mmr + +# Define the entrypoint +ENTRYPOINT ["/app/build-mmr"] +CMD ["-n", "1"] \ No newline at end of file diff --git a/docker/Dockerfile.client b/docker/Dockerfile.client index c647eb0..b963e58 100644 --- a/docker/Dockerfile.client +++ b/docker/Dockerfile.client @@ -115,4 +115,4 @@ RUN apt-get update && apt-get install -y \ COPY --from=builder /app/target/release/client . # Define the entrypoint -ENTRYPOINT ["./client"] +CMD ["./client"] From 6e586c218aec3a17bd5f9164ce7f4916a0c7c356 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Sat, 7 Dec 2024 09:55:45 +0800 Subject: [PATCH 22/54] misc fixes --- .env.local | 4 +- .../verifier/src/fossil_verifier.cairo | 18 ++-- .../verifier/src/groth16_verifier.cairo | 34 +++++--- .../verifier/src/universal_ecip.cairo | 14 +-- crates/publisher/src/core/proof_generator.rs | 1 + docker-compose.yml | 2 + scripts/deploy-contracts.sh | 87 +++++++++++-------- 7 files changed, 93 insertions(+), 67 deletions(-) diff --git a/.env.local b/.env.local index 4a0b9f2..93aa02a 100644 --- a/.env.local +++ b/.env.local @@ -19,8 +19,8 @@ STARKNET_ACCOUNT_ADDRESS=0xb3ff441a68610b30fd5e2abbf3a1548eb6ba6f3559f2862bf2dc7 L2_MSG_PROXY=0x072ed50f70665e9c0b879d61a6c158293e02f52ee60ba22def223dfd172fcccd FOSSIL_STORE=0x0416d8aaca654d83be44a8536fa5c06e0484d16aa3202efcbb49c2fb788d39cc -STARKNET_VERIFIER=0x00d06b387e3f0d64cb0762e068f3cc8134550afff936425d79828f2c98e3fc2a -FOSSIL_VERIFIER=0x0403e648dee1ac2612fe1d3758773645755194b22cd0cdca32bf9e645bcf385f +STARKNET_VERIFIER=0x03f5b5644e4a476353f8c4d41e8595285daa5c540eacad2245cf326c8e154c86 
+FOSSIL_VERIFIER=0x027f828ad56c71904e8a9317793eb5d9ecdf4a1f25a978e095a50a82db1253df export STARKNET_ACCOUNT=katana-0 export STARKNET_RPC=http://katana:5050 diff --git a/contracts/starknet/verifier/src/fossil_verifier.cairo b/contracts/starknet/verifier/src/fossil_verifier.cairo index e513d9c..6a3a65e 100644 --- a/contracts/starknet/verifier/src/fossil_verifier.cairo +++ b/contracts/starknet/verifier/src/fossil_verifier.cairo @@ -1,6 +1,6 @@ #[starknet::interface] pub trait IFossilVerifier { - fn verify_mmr_proof(ref self: TContractState, proof: Span,) -> bool; + fn verify_mmr_proof(ref self: TContractState, proof: Span,); fn get_verifier_address(self: @TContractState) -> starknet::ContractAddress; fn get_fossil_store_address(self: @TContractState) -> starknet::ContractAddress; } @@ -46,25 +46,21 @@ mod FossilVerifier { } #[external(v0)] - fn verify_mmr_proof(ref self: ContractState, proof: Span,) -> bool { - let (verified, journal) = self.bn254_verifier.read().verify_groth16_proof_bn254(proof); + fn verify_mmr_proof(ref self: ContractState, proof: Span,) { + let journal = self.bn254_verifier.read().verify_groth16_proof_bn254(proof).expect('Proof verification failed'); let (new_mmr_root, new_leaves_count, batch_index, latest_mmr_block) = decode_journal( journal ); - if verified { - self - .fossil_store - .read() - .update_mmr_state(batch_index, latest_mmr_block, new_leaves_count, new_mmr_root); - } + self + .fossil_store + .read() + .update_mmr_state(batch_index, latest_mmr_block, new_leaves_count, new_mmr_root); self .emit( MmrProofVerified { batch_index, latest_mmr_block, new_leaves_count, new_mmr_root } ); - - verified } } diff --git a/contracts/starknet/verifier/src/groth16_verifier.cairo b/contracts/starknet/verifier/src/groth16_verifier.cairo index e743d47..384ca6d 100644 --- a/contracts/starknet/verifier/src/groth16_verifier.cairo +++ b/contracts/starknet/verifier/src/groth16_verifier.cairo @@ -1,23 +1,26 @@ use 
super::groth16_verifier_constants::{N_FREE_PUBLIC_INPUTS, vk, ic, precomputed_lines, T}; #[starknet::interface] -pub trait IRisc0Groth16VerifierBN254 { +pub(crate) trait IRisc0Groth16VerifierBN254 { fn verify_groth16_proof_bn254( self: @TContractState, full_proof_with_hints: Span, - ) -> (bool, Span); + ) -> Option>; } #[starknet::contract] mod Risc0Groth16VerifierBN254 { + use starknet::SyscallResultTrait; use garaga::definitions::{G1Point, G1G2Pair}; + use garaga::groth16::{multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result}; use garaga::ec_ops::{G1PointTrait, ec_safe_add}; use garaga::ec_ops_g2::{G2PointTrait}; - use garaga::groth16::{multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result}; - use garaga::utils::calldata::deserialize_full_proof_with_hints_risc0; use garaga::utils::risc0::{compute_receipt_claim, journal_sha256}; - use starknet::SyscallResultTrait; + use garaga::utils::calldata::deserialize_full_proof_with_hints_risc0; use super::{N_FREE_PUBLIC_INPUTS, vk, ic, precomputed_lines, T}; + const ECIP_OPS_CLASS_HASH: felt252 = + 0x2f2a107cee3e12d1fb6070d2ae30d18c1e412efdf8ef5c8dd278fc00862f952; + #[storage] struct Storage { ecip_ops_class_hash: felt252, @@ -32,7 +35,7 @@ mod Risc0Groth16VerifierBN254 { impl IRisc0Groth16VerifierBN254 of super::IRisc0Groth16VerifierBN254 { fn verify_groth16_proof_bn254( self: @ContractState, full_proof_with_hints: Span, - ) -> (bool, Span) { + ) -> Option> { // DO NOT EDIT THIS FUNCTION UNLESS YOU KNOW WHAT YOU ARE DOING. // This function returns an Option for the public inputs if the proof is valid. // If the proof is invalid, the execution will either fail or return None. @@ -68,32 +71,37 @@ mod Risc0Groth16VerifierBN254 { // Complete with the curve indentifier (0 for BN254): msm_calldata.append(0); + println!("msm_calldata: {:?}", msm_calldata); + // Call the multi scalar multiplication endpoint on the Garaga ECIP ops contract // to obtain claim0 * IC[3] + claim1 * IC[4]. 
let mut _msm_result_serialized = core::starknet::syscalls::library_call_syscall( self.ecip_ops_class_hash.read().try_into().unwrap(), selector!("msm_g1_u128"), - msm_calldata.span() + msm_calldata.span(), ) .unwrap_syscall(); // Finalize vk_x computation by adding the precomputed T point. let vk_x = ec_safe_add( - T, Serde::::deserialize(ref _msm_result_serialized).unwrap(), 0 + T, Serde::::deserialize(ref _msm_result_serialized).unwrap(), 0, ); // Perform the pairing check. - let result = multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result( + let check = multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result( G1G2Pair { p: vk_x, q: vk.gamma_g2 }, G1G2Pair { p: groth16_proof.c, q: vk.delta_g2 }, G1G2Pair { p: groth16_proof.a.negate(0), q: groth16_proof.b }, vk.alpha_beta_miller_loop_result, precomputed_lines.span(), mpcheck_hint, - small_Q + small_Q, ); - - (result, journal) + if check == true { + return Option::Some(journal); + } else { + return Option::None; + } } } -} +} \ No newline at end of file diff --git a/contracts/starknet/verifier/src/universal_ecip.cairo b/contracts/starknet/verifier/src/universal_ecip.cairo index 20f9c94..c31a38f 100644 --- a/contracts/starknet/verifier/src/universal_ecip.cairo +++ b/contracts/starknet/verifier/src/universal_ecip.cairo @@ -10,7 +10,7 @@ trait IUniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize + curve_index: usize, ) -> G1Point; fn msm_g1_u128( @@ -20,7 +20,7 @@ trait IUniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize + curve_index: usize, ) -> G1Point; } @@ -41,7 +41,7 @@ mod UniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize + curve_index: usize, ) -> G1Point { msm_g1( scalars_digits_decompositions, @@ -49,7 +49,7 @@ mod UniversalECIP { derive_point_from_x_hint, points, scalars, - curve_index + curve_index, ) } @@ 
-60,7 +60,7 @@ mod UniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize + curve_index: usize, ) -> G1Point { msm_g1_u128( scalars_digits_decompositions, @@ -68,8 +68,8 @@ mod UniversalECIP { derive_point_from_x_hint, points, scalars, - curve_index + curve_index, ) } } -} +} \ No newline at end of file diff --git a/crates/publisher/src/core/proof_generator.rs b/crates/publisher/src/core/proof_generator.rs index 9e91d5f..892bb79 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ -187,6 +187,7 @@ where } else { vec![Felt::ZERO] }; + println!("calldata: {:?}", calldata); info!("Successfully generated Groth16 proof and calldata."); Ok(Groth16::new(receipt, calldata)) diff --git a/docker-compose.yml b/docker-compose.yml index 51e86a3..0890700 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -96,6 +96,8 @@ services: depends_on: deployer: condition: service_completed_successfully + mmr-builder: + condition: service_completed_successfully networks: fossil: diff --git a/scripts/deploy-contracts.sh b/scripts/deploy-contracts.sh index 39a2e94..bf86405 100755 --- a/scripts/deploy-contracts.sh +++ b/scripts/deploy-contracts.sh @@ -16,14 +16,14 @@ fi # Validate environment argument ENV_TYPE="$1" case "$ENV_TYPE" in - "local"|"sepolia"|"mainnet") - ENV_FILE="$ORIGINAL_DIR/.env.$ENV_TYPE" - echo "Using environment: $ENV_TYPE ($ENV_FILE)" - ;; - *) - echo "Invalid environment. Must be one of: local, sepolia, mainnet" - exit 1 - ;; +"local" | "sepolia" | "mainnet") + ENV_FILE="$ORIGINAL_DIR/.env.$ENV_TYPE" + echo "Using environment: $ENV_TYPE ($ENV_FILE)" + ;; +*) + echo "Invalid environment. Must be one of: local, sepolia, mainnet" + exit 1 + ;; esac # Check if environment file exists if [ ! 
-f "$ENV_FILE" ]; then @@ -37,71 +37,89 @@ source "$ENV_FILE" ETHEREUM_DIR="/app/contracts/ethereum" STARKNET_DIR="/app/contracts/starknet" +# Define colors +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color +BOLD='\033[1m' + # Deploy Ethereum contracts cd "$ETHEREUM_DIR" +echo -e "${BLUE}${BOLD}Deploying Ethereum contracts...${NC}" forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B # Now deploy Starknet contracts -echo "Deploying Starknet contracts..." +echo -e "\n${BLUE}${BOLD}Building Starknet contracts...${NC}" cd "$STARKNET_DIR" -scarb build +scarb build --quiet +echo -e "\n${BLUE}${BOLD}Deploying Starknet contracts...${NC}" # Declare and deploy Fossil Store contract -echo "Declaring Fossil Store contract..." +echo -e "\n${YELLOW}Declaring Fossil Store contract...${NC}" FOSSILSTORE_HASH=$(starkli declare ./target/dev/fossil_store_Store.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Class hash declared: $FOSSILSTORE_HASH" +echo -e "${GREEN}Class hash declared: ${BOLD}$FOSSILSTORE_HASH${NC}" +echo -echo "Deploying Fossil Store contract..." +echo -e "${YELLOW}Deploying Fossil Store contract...${NC}" FOSSILSTORE_ADDRESS=$(starkli deploy $FOSSILSTORE_HASH --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Contract address: $FOSSILSTORE_ADDRESS" +echo -e "${GREEN}Contract address: ${BOLD}$FOSSILSTORE_ADDRESS${NC}" +echo # Declare and deploy Fossil L1MessageProxy contract -echo "Declaring Fossil L1MessageProxy contract..." 
+echo -e "${YELLOW}Declaring Fossil L1MessageProxy contract...${NC}" L1MESSAGEPROXY_HASH=$(starkli declare ./target/dev/l1_message_proxy_L1MessageProxy.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Class hash declared: $L1MESSAGEPROXY_HASH" +echo -e "${GREEN}Class hash declared: ${BOLD}$L1MESSAGEPROXY_HASH${NC}" +echo -echo "Deploying Fossil L1MessageProxy contract..." +echo -e "${YELLOW}Deploying Fossil L1MessageProxy contract...${NC}" L1MESSAGEPROXY_ADDRESS=$(starkli deploy $L1MESSAGEPROXY_HASH $L1_MESSAGE_SENDER $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Contract address: $L1MESSAGEPROXY_ADDRESS" +echo -e "${GREEN}Contract address: ${BOLD}$L1MESSAGEPROXY_ADDRESS${NC}" +echo # Declare and deploy Universal ECIP contract -echo "Declaring Universal ECIP contract..." +echo -e "${YELLOW}Declaring Universal ECIP contract...${NC}" ECIP_HASH=$(starkli declare ./target/dev/verifier_UniversalECIP.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Class hash declared: $ECIP_HASH" +echo -e "${GREEN}Class hash declared: ${BOLD}$ECIP_HASH${NC}" +echo # Declare and deploy Groth16 Verifier contract -echo "Declaring Groth16 Verifier contract..." +echo -e "${YELLOW}Declaring Groth16 Verifier contract...${NC}" VERIFIER_HASH=$(starkli declare ./target/dev/verifier_Risc0Groth16VerifierBN254.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Class hash declared: $VERIFIER_HASH" +echo -e "${GREEN}Class hash declared: ${BOLD}$VERIFIER_HASH${NC}" +echo -echo "Deploying Groth16 Verifier contract..." 
+echo -e "${YELLOW}Deploying Groth16 Verifier contract...${NC}" VERIFIER_ADDRESS=$(starkli deploy $VERIFIER_HASH $ECIP_HASH --salt 1 | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Contract deployed at: $VERIFIER_ADDRESS" +echo -e "${GREEN}Contract deployed at: ${BOLD}$VERIFIER_ADDRESS${NC}" +echo -echo "Declaring Fossil Verifier contract..." +echo -e "${YELLOW}Declaring Fossil Verifier contract...${NC}" FOSSIL_VERIFIER_HASH=$(starkli declare ./target/dev/verifier_FossilVerifier.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Class hash declared: $FOSSIL_VERIFIER_HASH" +echo -e "${GREEN}Class hash declared: ${BOLD}$FOSSIL_VERIFIER_HASH${NC}" +echo -echo "Deploying Fossil Verifier contract..." +echo -e "${YELLOW}Deploying Fossil Verifier contract...${NC}" FOSSIL_VERIFIER_ADDRESS=$(starkli deploy $FOSSIL_VERIFIER_HASH $VERIFIER_ADDRESS $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo "Contract deployed at: $FOSSIL_VERIFIER_ADDRESS" +echo -e "${GREEN}Contract deployed at: ${BOLD}$FOSSIL_VERIFIER_ADDRESS${NC}" +echo -echo "All contracts deployed!" +echo -e "\n${GREEN}${BOLD}All contracts deployed!${NC}" # Update the environment file with new addresses update_env_var() { local var_name=$1 local var_value=$2 - + if grep -q "^$var_name=" "$ENV_FILE"; then - echo "$var_name already exists, replacing in $ENV_FILE..." + echo -e "${BLUE}$var_name already exists, replacing in $ENV_FILE...${NC}" sed -i "s|^$var_name=.*|$var_name=$var_value|" "$ENV_FILE" else - echo "Appending $var_name to $ENV_FILE..." 
- echo "$var_name=$var_value" >> "$ENV_FILE" + echo -e "${BLUE}Appending $var_name to $ENV_FILE...${NC}" + echo "$var_name=$var_value" >>"$ENV_FILE" fi } @@ -117,5 +135,6 @@ cd "$ORIGINAL_DIR" # Source the updated environment file source "$ENV_FILE" -echo "Environment variables successfully updated in $ENV_FILE" +sleep 2 +echo -e "${GREEN}${BOLD}Environment variables successfully updated in $ENV_FILE${NC}" From baaeb60b7a700655fe90656ad426c8f0a475b866 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Sat, 7 Dec 2024 09:59:57 +0800 Subject: [PATCH 23/54] rolled back starknet contracts to prev state --- contracts/starknet/Scarb.lock | 1 + contracts/starknet/verifier/Scarb.toml | 4 +-- .../verifier/src/fossil_verifier.cairo | 20 +++++++----- .../verifier/src/groth16_verifier.cairo | 32 +++++++------------ .../verifier/src/universal_ecip.cairo | 12 +++---- 5 files changed, 33 insertions(+), 36 deletions(-) diff --git a/contracts/starknet/Scarb.lock b/contracts/starknet/Scarb.lock index e93d860..ff588ad 100644 --- a/contracts/starknet/Scarb.lock +++ b/contracts/starknet/Scarb.lock @@ -11,6 +11,7 @@ dependencies = [ [[package]] name = "garaga" version = "0.14.0" +source = "git+https://github.com/keep-starknet-strange/garaga.git?rev=5f3b232#5f3b23252a04f1714838415e69e318ed8e097c15" [[package]] name = "l1_message_proxy" diff --git a/contracts/starknet/verifier/Scarb.toml b/contracts/starknet/verifier/Scarb.toml index 84b377a..b3a97d5 100644 --- a/contracts/starknet/verifier/Scarb.toml +++ b/contracts/starknet/verifier/Scarb.toml @@ -6,7 +6,7 @@ edition = "2023_11" # See more keys and their definitions at https://docs.swmansion.com/scarb/docs/reference/manifest.html [dependencies] -garaga = { path = "../garaga/src" } +garaga = { git = "https://github.com/keep-starknet-strange/garaga.git", rev = "5f3b232" } starknet.workspace = true fossil_store = { path = "../store" } @@ -22,4 +22,4 @@ fmt.workspace = true casm = true casm-add-pythonic-hints = true -[lib] +[lib] \ No 
newline at end of file diff --git a/contracts/starknet/verifier/src/fossil_verifier.cairo b/contracts/starknet/verifier/src/fossil_verifier.cairo index 6a3a65e..5cdf75a 100644 --- a/contracts/starknet/verifier/src/fossil_verifier.cairo +++ b/contracts/starknet/verifier/src/fossil_verifier.cairo @@ -1,6 +1,6 @@ #[starknet::interface] pub trait IFossilVerifier { - fn verify_mmr_proof(ref self: TContractState, proof: Span,); + fn verify_mmr_proof(ref self: TContractState, proof: Span,) -> bool; fn get_verifier_address(self: @TContractState) -> starknet::ContractAddress; fn get_fossil_store_address(self: @TContractState) -> starknet::ContractAddress; } @@ -46,21 +46,25 @@ mod FossilVerifier { } #[external(v0)] - fn verify_mmr_proof(ref self: ContractState, proof: Span,) { - let journal = self.bn254_verifier.read().verify_groth16_proof_bn254(proof).expect('Proof verification failed'); + fn verify_mmr_proof(ref self: ContractState, proof: Span,) -> bool { + let (verified, journal) = self.bn254_verifier.read().verify_groth16_proof_bn254(proof); let (new_mmr_root, new_leaves_count, batch_index, latest_mmr_block) = decode_journal( journal ); - self - .fossil_store - .read() - .update_mmr_state(batch_index, latest_mmr_block, new_leaves_count, new_mmr_root); + if verified { + self + .fossil_store + .read() + .update_mmr_state(batch_index, latest_mmr_block, new_leaves_count, new_mmr_root); + } self .emit( MmrProofVerified { batch_index, latest_mmr_block, new_leaves_count, new_mmr_root } ); + + verified } -} +} \ No newline at end of file diff --git a/contracts/starknet/verifier/src/groth16_verifier.cairo b/contracts/starknet/verifier/src/groth16_verifier.cairo index 384ca6d..c08b20d 100644 --- a/contracts/starknet/verifier/src/groth16_verifier.cairo +++ b/contracts/starknet/verifier/src/groth16_verifier.cairo @@ -1,26 +1,23 @@ use super::groth16_verifier_constants::{N_FREE_PUBLIC_INPUTS, vk, ic, precomputed_lines, T}; #[starknet::interface] -pub(crate) trait 
IRisc0Groth16VerifierBN254 { +pub trait IRisc0Groth16VerifierBN254 { fn verify_groth16_proof_bn254( self: @TContractState, full_proof_with_hints: Span, - ) -> Option>; + ) -> (bool, Span); } #[starknet::contract] mod Risc0Groth16VerifierBN254 { - use starknet::SyscallResultTrait; use garaga::definitions::{G1Point, G1G2Pair}; - use garaga::groth16::{multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result}; use garaga::ec_ops::{G1PointTrait, ec_safe_add}; use garaga::ec_ops_g2::{G2PointTrait}; - use garaga::utils::risc0::{compute_receipt_claim, journal_sha256}; + use garaga::groth16::{multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result}; use garaga::utils::calldata::deserialize_full_proof_with_hints_risc0; + use garaga::utils::risc0::{compute_receipt_claim, journal_sha256}; + use starknet::SyscallResultTrait; use super::{N_FREE_PUBLIC_INPUTS, vk, ic, precomputed_lines, T}; - const ECIP_OPS_CLASS_HASH: felt252 = - 0x2f2a107cee3e12d1fb6070d2ae30d18c1e412efdf8ef5c8dd278fc00862f952; - #[storage] struct Storage { ecip_ops_class_hash: felt252, @@ -35,7 +32,7 @@ mod Risc0Groth16VerifierBN254 { impl IRisc0Groth16VerifierBN254 of super::IRisc0Groth16VerifierBN254 { fn verify_groth16_proof_bn254( self: @ContractState, full_proof_with_hints: Span, - ) -> Option> { + ) -> (bool, Span) { // DO NOT EDIT THIS FUNCTION UNLESS YOU KNOW WHAT YOU ARE DOING. // This function returns an Option for the public inputs if the proof is valid. // If the proof is invalid, the execution will either fail or return None. @@ -71,37 +68,32 @@ mod Risc0Groth16VerifierBN254 { // Complete with the curve indentifier (0 for BN254): msm_calldata.append(0); - println!("msm_calldata: {:?}", msm_calldata); - // Call the multi scalar multiplication endpoint on the Garaga ECIP ops contract // to obtain claim0 * IC[3] + claim1 * IC[4]. 
let mut _msm_result_serialized = core::starknet::syscalls::library_call_syscall( self.ecip_ops_class_hash.read().try_into().unwrap(), selector!("msm_g1_u128"), - msm_calldata.span(), + msm_calldata.span() ) .unwrap_syscall(); // Finalize vk_x computation by adding the precomputed T point. let vk_x = ec_safe_add( - T, Serde::::deserialize(ref _msm_result_serialized).unwrap(), 0, + T, Serde::::deserialize(ref _msm_result_serialized).unwrap(), 0 ); // Perform the pairing check. - let check = multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result( + let result = multi_pairing_check_bn254_3P_2F_with_extra_miller_loop_result( G1G2Pair { p: vk_x, q: vk.gamma_g2 }, G1G2Pair { p: groth16_proof.c, q: vk.delta_g2 }, G1G2Pair { p: groth16_proof.a.negate(0), q: groth16_proof.b }, vk.alpha_beta_miller_loop_result, precomputed_lines.span(), mpcheck_hint, - small_Q, + small_Q ); - if check == true { - return Option::Some(journal); - } else { - return Option::None; - } + + (result, journal) } } } \ No newline at end of file diff --git a/contracts/starknet/verifier/src/universal_ecip.cairo b/contracts/starknet/verifier/src/universal_ecip.cairo index c31a38f..58dcbb9 100644 --- a/contracts/starknet/verifier/src/universal_ecip.cairo +++ b/contracts/starknet/verifier/src/universal_ecip.cairo @@ -10,7 +10,7 @@ trait IUniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize, + curve_index: usize ) -> G1Point; fn msm_g1_u128( @@ -20,7 +20,7 @@ trait IUniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize, + curve_index: usize ) -> G1Point; } @@ -41,7 +41,7 @@ mod UniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize, + curve_index: usize ) -> G1Point { msm_g1( scalars_digits_decompositions, @@ -49,7 +49,7 @@ mod UniversalECIP { derive_point_from_x_hint, points, scalars, - curve_index, + curve_index ) } @@ 
-60,7 +60,7 @@ mod UniversalECIP { derive_point_from_x_hint: DerivePointFromXHint, points: Span, scalars: Span, - curve_index: usize, + curve_index: usize ) -> G1Point { msm_g1_u128( scalars_digits_decompositions, @@ -68,7 +68,7 @@ mod UniversalECIP { derive_point_from_x_hint, points, scalars, - curve_index, + curve_index ) } } From 22d02372b2bbd003a7528458038a8a8f8d921ee4 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Sat, 7 Dec 2024 10:00:24 +0800 Subject: [PATCH 24/54] removed debug logging --- crates/publisher/src/core/proof_generator.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/publisher/src/core/proof_generator.rs b/crates/publisher/src/core/proof_generator.rs index 892bb79..9e91d5f 100644 --- a/crates/publisher/src/core/proof_generator.rs +++ b/crates/publisher/src/core/proof_generator.rs @@ -187,7 +187,6 @@ where } else { vec![Felt::ZERO] }; - println!("calldata: {:?}", calldata); info!("Successfully generated Groth16 proof and calldata."); Ok(Groth16::new(receipt, calldata)) From e7b7852e303d8a7396c68fa35f0dcd430e3df865 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 10:22:01 +0800 Subject: [PATCH 25/54] feat: add retry mechanism for forge script deployment - Add retry_command function with exponential backoff - Implement retries for forge script to handle Anvil startup delays - Add better error messaging for deployment attempts Fixes issue with deployment failing due to Anvil not being ready --- scripts/deploy-contracts.sh | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/scripts/deploy-contracts.sh b/scripts/deploy-contracts.sh index bf86405..c6041e3 100755 --- a/scripts/deploy-contracts.sh +++ b/scripts/deploy-contracts.sh @@ -43,11 +43,37 @@ BLUE='\033[0;34m' YELLOW='\033[1;33m' NC='\033[0m' # No Color BOLD='\033[1m' +RED='\033[0;31m' + +# Function to retry commands +retry_command() { + local retries=5 + local wait_time=5 + local command="$@" + local retry_count=0 + + until [ 
$retry_count -ge $retries ] + do + echo -e "${YELLOW}Attempting deployment (attempt $((retry_count + 1)) of $retries)...${NC}" + if eval "$command"; then + return 0 + fi + retry_count=$((retry_count + 1)) + if [ $retry_count -lt $retries ]; then + echo -e "${YELLOW}Deployment failed. Waiting ${wait_time} seconds before retrying...${NC}" + sleep $wait_time + # Increase wait time for next attempt + wait_time=$((wait_time * 2)) + fi + done + echo -e "${RED}Failed to deploy after $retries attempts${NC}" + return 1 +} # Deploy Ethereum contracts cd "$ETHEREUM_DIR" echo -e "${BLUE}${BOLD}Deploying Ethereum contracts...${NC}" -forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL +retry_command "forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL" L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B From 2a9429acac8d374a65ecbe7290de20add5f6795a Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 10:37:57 +0800 Subject: [PATCH 26/54] fix(docker): resolve cargo patch resolution in offline mode - Simplify cargo config to handle crates-io and vendored sources - Update patch section to explicitly target crates.io-index - Fix client binary path and volume mount conflicts - Add cargo fetch step for git dependencies Resolves build error with sha2 patch resolution and client binary location --- docker-compose.yml | 1 + docker/Dockerfile.client | 9 ++++++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 0890700..6892f25 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -81,6 +81,7 @@ services: - ${ENV_FILE:-.env.local} volumes: - .:/app + command: ["client"] depends_on: mmr-builder: condition: service_completed_successfully diff --git a/docker/Dockerfile.client b/docker/Dockerfile.client index b963e58..ecaf1bd 100644 --- a/docker/Dockerfile.client +++ b/docker/Dockerfile.client @@ -111,8 +111,11 @@ RUN apt-get update && apt-get 
install -y \ libssl3 \ && rm -rf /var/lib/apt/lists/* -# Copy the compiled binary from the builder stage -COPY --from=builder /app/target/release/client . +# Copy the compiled binary from the builder stage to /usr/local/bin +COPY --from=builder /app/target/release/client /usr/local/bin/client + +# Make the binary executable +RUN chmod +x /usr/local/bin/client # Define the entrypoint -CMD ["./client"] +CMD ["client"] From cbd92464307ea25cab5d77a2b7284cd6cfeb7d40 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 10:49:23 +0800 Subject: [PATCH 27/54] fix(ethereum): handle Anvil provider timeout gracefully - Replace unwrapping provider setup with proper error handling - Use try_on_anvil_with_wallet_and_config to return Result - Add detailed error messages for retry mechanism - Improve error logging for debugging purposes Resolves panic from Anvil timeout errors by implementing proper retry logic and error handling. --- .cargo/config.toml | 88 ++++++++++++++++++------------------- .env.local | 26 ----------- Cargo.lock | 1 + config/anvil.messaging.json | 2 +- crates/ethereum/Cargo.toml | 1 + crates/ethereum/src/lib.rs | 31 ++++++++----- 6 files changed, 68 insertions(+), 81 deletions(-) delete mode 100644 .env.local diff --git a/.cargo/config.toml b/.cargo/config.toml index dd63cac..2821e96 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,44 +1,44 @@ -[source.crates-io] -replace-with = "vendored-sources" - -[source."git+https://github.com/ametel01/eth-rlp-verify.git"] -git = "https://github.com/ametel01/eth-rlp-verify.git" -replace-with = "vendored-sources" - -[source."git+https://github.com/ametel01/garaga.git"] -git = "https://github.com/ametel01/garaga.git" -replace-with = "vendored-sources" - -[source."git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher"] -git = "https://github.com/ametel01/rust-accumulators.git" -branch = "feat/sha2-hasher" -replace-with = "vendored-sources" - 
-[source."git+https://github.com/lambdaclass/lambdaworks.git"] -git = "https://github.com/lambdaclass/lambdaworks.git" -replace-with = "vendored-sources" - -[source."git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0"] -git = "https://github.com/risc0/RustCrypto-hashes.git" -tag = "sha2-v0.10.8-risczero.0" -replace-with = "vendored-sources" - -[source."git+https://github.com/risc0/risc0-ethereum?tag=v1.1.4"] -git = "https://github.com/risc0/risc0-ethereum" -tag = "v1.1.4" -replace-with = "vendored-sources" - -[source."git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master"] -git = "https://github.com/xJonathanLEI/starknet-rs.git" -branch = "master" -replace-with = "vendored-sources" - -[source."git+https://github.com/OpenZeppelin/openzeppelin-contracts.git"] -git = "https://github.com/OpenZeppelin/openzeppelin-contracts.git" -replace-with = "vendored-sources" - -[source.vendored-sources] -directory = "vendor" - -[net] -git-fetch-with-cli = true +# [source.crates-io] +# replace-with = "vendored-sources" + +# [source."git+https://github.com/ametel01/eth-rlp-verify.git"] +# git = "https://github.com/ametel01/eth-rlp-verify.git" +# replace-with = "vendored-sources" + +# [source."git+https://github.com/ametel01/garaga.git"] +# git = "https://github.com/ametel01/garaga.git" +# replace-with = "vendored-sources" + +# [source."git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher"] +# git = "https://github.com/ametel01/rust-accumulators.git" +# branch = "feat/sha2-hasher" +# replace-with = "vendored-sources" + +# [source."git+https://github.com/lambdaclass/lambdaworks.git"] +# git = "https://github.com/lambdaclass/lambdaworks.git" +# replace-with = "vendored-sources" + +# [source."git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0"] +# git = "https://github.com/risc0/RustCrypto-hashes.git" +# tag = "sha2-v0.10.8-risczero.0" +# replace-with = "vendored-sources" + +# 
[source."git+https://github.com/risc0/risc0-ethereum?tag=v1.1.4"] +# git = "https://github.com/risc0/risc0-ethereum" +# tag = "v1.1.4" +# replace-with = "vendored-sources" + +# [source."git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master"] +# git = "https://github.com/xJonathanLEI/starknet-rs.git" +# branch = "master" +# replace-with = "vendored-sources" + +# [source."git+https://github.com/OpenZeppelin/openzeppelin-contracts.git"] +# git = "https://github.com/OpenZeppelin/openzeppelin-contracts.git" +# replace-with = "vendored-sources" + +# [source.vendored-sources] +# directory = "vendor" + +# [net] +# git-fetch-with-cli = true diff --git a/.env.local b/.env.local deleted file mode 100644 index 93aa02a..0000000 --- a/.env.local +++ /dev/null @@ -1,26 +0,0 @@ -BONSAI_API_KEY=ZLj670Kwrw57AiufKDjoF1uoozSjZwiba6IPtDgE -BONSAI_API_URL=https://api.bonsai.xyz/ - -DATABASE_URL=postgresql://postgres:Z36P56hzJScKssD@fossil-eth-blockheaders.cnc4gyc2mcb3.us-east-1.rds.amazonaws.com:5432/postgres.bak?sslmode=require - -ETH_RPC_URL=http://38.154.254.162:8545 -ANVIL_URL=http://anvil:8545 -ACCOUNT_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 -ETH_ACCOUNT_ADDRESS=0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 -SN_MESSAGING=0xF62eEc897fa5ef36a957702AA4a45B58fE8Fe312 -L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B - -export FOUNDRY_EVM_VERSION=cancun -ETH_WHALE=0x40B38765696e3d5d8d9d834D8AaD4bB6e418E489 - -STARKNET_RPC_URL=http://katana:5050 -STARKNET_PRIVATE_KEY=0x2bbf4f9fd0bbb2e60b0316c1fe0b76cf7a4d0198bd493ced9b8df2a3a24d68a -STARKNET_ACCOUNT_ADDRESS=0xb3ff441a68610b30fd5e2abbf3a1548eb6ba6f3559f2862bf2dc757e5828ca - -L2_MSG_PROXY=0x072ed50f70665e9c0b879d61a6c158293e02f52ee60ba22def223dfd172fcccd -FOSSIL_STORE=0x0416d8aaca654d83be44a8536fa5c06e0484d16aa3202efcbb49c2fb788d39cc -STARKNET_VERIFIER=0x03f5b5644e4a476353f8c4d41e8595285daa5c540eacad2245cf326c8e154c86 
-FOSSIL_VERIFIER=0x027f828ad56c71904e8a9317793eb5d9ecdf4a1f25a978e095a50a82db1253df - -export STARKNET_ACCOUNT=katana-0 -export STARKNET_RPC=http://katana:5050 diff --git a/Cargo.lock b/Cargo.lock index 476a396..941ec95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2247,6 +2247,7 @@ dependencies = [ "alloy 0.6.4", "common", "tokio", + "tracing", ] [[package]] diff --git a/config/anvil.messaging.json b/config/anvil.messaging.json index 1aa70c2..6957685 100644 --- a/config/anvil.messaging.json +++ b/config/anvil.messaging.json @@ -1,7 +1,7 @@ { "chain": "ethereum", "rpc_url": "http://anvil:8545", - "contract_address": "0xF62eEc897fa5ef36a957702AA4a45B58fE8Fe312", + "contract_address": "0x364C7188028348566E38D762f6095741c49f492B", "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", "interval": 2, diff --git a/crates/ethereum/Cargo.toml b/crates/ethereum/Cargo.toml index b0b123c..da0e9e8 100644 --- a/crates/ethereum/Cargo.toml +++ b/crates/ethereum/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" common = { path = "../common" } tokio = { workspace = true } +tracing = { workspace = true } alloy = { version = "0.6.4", features = ["full", "node-bindings"] } diff --git a/crates/ethereum/src/lib.rs b/crates/ethereum/src/lib.rs index 35f0edb..c6ff83c 100644 --- a/crates/ethereum/src/lib.rs +++ b/crates/ethereum/src/lib.rs @@ -31,7 +31,11 @@ pub async fn get_finalized_block_hash() -> Result<(u64, String), UtilsError> { let result: Result<(u64, String), UtilsError> = async { let provider = ProviderBuilder::new() .with_recommended_fillers() - .on_anvil_with_wallet_and_config(|anvil| anvil.fork(rpc_url.clone())); + .try_on_anvil_with_wallet_and_config(|anvil| anvil.fork(rpc_url.clone())) + .map_err(|e| UtilsError::RetryExhausted( + attempts, + format!("Failed to setup Anvil provider: {}", e), + ))?; let contract = BlockHashFetcher::deploy(&provider).await?; let result = 
contract.getBlockHash().call().await?; @@ -43,16 +47,23 @@ pub async fn get_finalized_block_hash() -> Result<(u64, String), UtilsError> { } .await; - if let Ok(value) = result { - return Ok(value); - } else { - if attempts >= MAX_RETRIES { - return Err(UtilsError::RetryExhausted( - MAX_RETRIES, - "get_finalized_block_hash".to_string(), - )); + match result { + Ok(value) => return Ok(value), + Err(e) => { + if attempts >= MAX_RETRIES { + return Err(UtilsError::RetryExhausted( + MAX_RETRIES, + format!("get_finalized_block_hash failed: {}", e), + )); + } + tracing::error!( + attempts = attempts, + max_retries = MAX_RETRIES, + error = %e.to_string(), + "Attempt failed" + ); + sleep(RETRY_DELAY).await; } - sleep(RETRY_DELAY).await; } } } From 214f7c79bd157512cbfd4b9b351b29cd3dda3275 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 11:20:57 +0800 Subject: [PATCH 28/54] refactor(docker): switch relayer to Alpine Linux base image - Replace Ubuntu base image with Alpine Linux 3.19 - Simplify package installation using apk - Remove GPG key management steps - Use Alpine's SSL libraries for runtime dependencies --- docker/Dockerfile.relayer | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer index 3a2f65f..5e3e514 100644 --- a/docker/Dockerfile.relayer +++ b/docker/Dockerfile.relayer @@ -8,7 +8,6 @@ WORKDIR /app COPY Cargo.toml Cargo.lock ./ # Step 2: Copy pre-fetched dependencies and vendor directory -COPY .cargo /usr/local/cargo COPY vendor /usr/local/cargo/registry # Create cargo config for vendored dependencies @@ -61,24 +60,20 @@ git-fetch-with-cli = true' > /usr/local/cargo/config.toml # Step 3: Copy the workspace crates COPY crates ./crates -# Step 4: Copy OpenZeppelin contracts to the expected location -COPY vendor/openzeppelin-contracts /usr/local/cargo/registry/openzeppelin-contracts - # Step 5: Build the application RUN cargo build --release --package relayer # Stage 2: 
Create a minimal runtime image -FROM debian:bookworm-slim +FROM alpine:3.19 # Set up the working directory WORKDIR /app -# Install runtime dependencies with OpenSSL 3 -RUN apt-get update && apt-get install -y \ +# Install SSL libraries and certificates +RUN apk add --no-cache \ ca-certificates \ openssl \ - libssl3 \ - && rm -rf /var/lib/apt/lists/* + libssl3 # Copy the compiled binary from the builder stage COPY --from=builder /app/target/release/relayer . From 79cd5b94f6f63c1f35198c7632880434f5b1ed4a Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 11:31:16 +0800 Subject: [PATCH 29/54] refactor(docker): align relayer Dockerfile with client pattern - Copy binary to /usr/local/bin instead of working directory - Add executable permissions with chmod - Switch to CMD from ENTRYPOINT - Follow consistent pattern with client Dockerfile --- docker/Dockerfile.relayer | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer index 5e3e514..5e4d20b 100644 --- a/docker/Dockerfile.relayer +++ b/docker/Dockerfile.relayer @@ -66,17 +66,14 @@ RUN cargo build --release --package relayer # Stage 2: Create a minimal runtime image FROM alpine:3.19 -# Set up the working directory -WORKDIR /app +# Install SSL certificates (still needed for HTTPS connections) +RUN apk add --no-cache ca-certificates -# Install SSL libraries and certificates -RUN apk add --no-cache \ - ca-certificates \ - openssl \ - libssl3 +# Copy the compiled binary from the builder stage to /usr/local/bin +COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/relayer /usr/local/bin/relayer -# Copy the compiled binary from the builder stage -COPY --from=builder /app/target/release/relayer . 
+# Make the binary executable +RUN chmod +x /usr/local/bin/relayer # Define the entrypoint -ENTRYPOINT ["./relayer"] +CMD ["relayer"] From f6395f9a49b558b9fd6b24f0758e4a101455491c Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 11:31:23 +0800 Subject: [PATCH 30/54] chore(docker): verify relayer service configuration - Confirm command matches Dockerfile CMD instruction - Ensure consistent service definition with other containers - Maintain existing dependency chain --- docker-compose.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 6892f25..1342203 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -74,7 +74,7 @@ services: condition: service_completed_successfully client: - image: fossil-core-client:latest + image: fossil-client:latest networks: - fossil env_file: @@ -89,11 +89,12 @@ services: condition: service_completed_successfully relayer: - image: fossil-core-relayer:latest + image: fossil-relayer:latest networks: - fossil env_file: - ${ENV_FILE:-.env.local} + command: ["relayer"] depends_on: deployer: condition: service_completed_successfully From 08aa786bc5e4b7b9e171ac4668b273ab29f47520 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 13:55:32 +0800 Subject: [PATCH 31/54] fix(docker): optimize relayer build process - Create minimal workspace with only required crates - Configure static OpenSSL linking with openssl-libs-static - Add necessary build dependencies (gcc, pkgconfig) - Use specific Alpine mirror for reliability - Fix dependency resolution by creating minimal workspace - Optimize multi-stage build process --- docker/Dockerfile.relayer | 97 ++++++++++++++++----------------------- 1 file changed, 39 insertions(+), 58 deletions(-) diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer index 5e4d20b..85b3ace 100644 --- a/docker/Dockerfile.relayer +++ b/docker/Dockerfile.relayer @@ -1,76 +1,57 @@ # Stage 1: Build the application -FROM 
rust:latest AS builder +FROM rust:alpine AS builder # Set up the working directory WORKDIR /app -# Step 1: Copy the workspace Cargo.toml and Cargo.lock +# Install build dependencies including OpenSSL static libraries +RUN apk add --no-cache \ + musl-dev \ + git \ + openssl-dev \ + openssl-libs-static \ + pkgconfig \ + gcc + +# Set environment variables for static linking +ENV OPENSSL_STATIC=1 +ENV OPENSSL_LIB_DIR=/usr/lib +ENV OPENSSL_INCLUDE_DIR=/usr/include/openssl + +# Copy only necessary workspace files COPY Cargo.toml Cargo.lock ./ - -# Step 2: Copy pre-fetched dependencies and vendor directory COPY vendor /usr/local/cargo/registry +COPY crates/common ./crates/common +COPY crates/relayer ./crates/relayer # Create cargo config for vendored dependencies -RUN mkdir -p /usr/local/cargo/registry && \ - echo '[source.crates-io]\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master"]\n\ -git = "https://github.com/xJonathanLEI/starknet-rs.git"\n\ -branch = "master"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0"]\n\ -git = "https://github.com/risc0/RustCrypto-hashes.git"\n\ -tag = "sha2-v0.10.8-risczero.0"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/ametel01/eth-rlp-verify.git"]\n\ -git = "https://github.com/ametel01/eth-rlp-verify.git"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/ametel01/garaga.git"]\n\ -git = "https://github.com/ametel01/garaga.git"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher"]\n\ -git = "https://github.com/ametel01/rust-accumulators.git"\n\ -branch = "feat/sha2-hasher"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/risc0/risc0-ethereum?tag=v1.1.4"]\n\ -git = "https://github.com/risc0/risc0-ethereum"\n\ -tag 
= "v1.1.4"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/lambdaclass/lambdaworks.git"]\n\ -git = "https://github.com/lambdaclass/lambdaworks.git"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source."git+https://github.com/OpenZeppelin/openzeppelin-contracts.git"]\n\ -git = "https://github.com/OpenZeppelin/openzeppelin-contracts.git"\n\ -replace-with = "vendored-sources"\n\ -\n\ -[source.vendored-sources]\n\ -directory = "/usr/local/cargo/registry"\n\ -\n\ -[net]\n\ -git-fetch-with-cli = true' > /usr/local/cargo/config.toml - -# Step 3: Copy the workspace crates -COPY crates ./crates - -# Step 5: Build the application +RUN mkdir -p /usr/local/cargo/registry && echo $'[source.crates-io]\nreplace-with = "vendored-sources"\n\n[source."git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master"]\ngit = "https://github.com/xJonathanLEI/starknet-rs.git"\nbranch = "master"\nreplace-with = "vendored-sources"\n\n[source.vendored-sources]\ndirectory = "/usr/local/cargo/registry"\n\n[net]\ngit-fetch-with-cli = true' > /usr/local/cargo/config.toml + +# Create a minimal workspace with dependencies +RUN echo $'[workspace]\nmembers = ["crates/common", "crates/relayer"]\nresolver = "2"\n\n\ +[workspace.dependencies]\n\ +dotenv = "0.15.0"\n\ +thiserror = "1.0.50"\n\ +starknet-crypto = "0.7.3"\n\ +tracing = "0.1.40"\n\ +tokio = { version = "1.35.0", features = ["full"] }\n\ +eyre = "0.6.11"' > workspace.toml && \ + mv workspace.toml Cargo.toml + +# Build only relayer RUN cargo build --release --package relayer # Stage 2: Create a minimal runtime image -FROM alpine:3.19 +FROM alpine:latest -# Install SSL certificates (still needed for HTTPS connections) -RUN apk add --no-cache ca-certificates +# Update repositories and install SSL certificates +RUN echo "https://mirror.ette.biz/alpine/v3.19/main" > /etc/apk/repositories && \ + echo "https://mirror.ette.biz/alpine/v3.19/community" >> /etc/apk/repositories && \ + apk update && \ + apk add 
--no-cache ca-certificates openssl # Copy the compiled binary from the builder stage to /usr/local/bin -COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/relayer /usr/local/bin/relayer +COPY --from=builder /app/target/release/relayer /usr/local/bin/relayer # Make the binary executable RUN chmod +x /usr/local/bin/relayer From 9a3d88317c4aca674e4f521ff334b59b7d3aa77e Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Sat, 7 Dec 2024 13:55:49 +0800 Subject: [PATCH 32/54] chore ignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 7190a5b..0ae1767 100644 --- a/.gitignore +++ b/.gitignore @@ -29,4 +29,5 @@ vendor/ .env.local .env.sepolia -.env.mainnet \ No newline at end of file +.env.mainnet +docker/rebuild-all.sh From cd265913817b60e57473239cba9828d69a7800a5 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 14:31:51 +0800 Subject: [PATCH 33/54] Clean up gitmodules --- .gitmodules | 3 --- 1 file changed, 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index 7926146..b571bd0 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,6 +2,3 @@ path = contracts/ethereum/lib/forge-std url = https://github.com/foundry-rs/forge-std.git shallow = true -[submodule "contracts/starknet/garaga"] - path = contracts/starknet/garaga - url = https://github.com/keep-starknet-strange/garaga.git From b13d2f7e1a3ef14ae197770c969d20fa5564077d Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 14:32:18 +0800 Subject: [PATCH 34/54] Remove garaga from index --- contracts/starknet/garaga | 1 - 1 file changed, 1 deletion(-) delete mode 160000 contracts/starknet/garaga diff --git a/contracts/starknet/garaga b/contracts/starknet/garaga deleted file mode 160000 index 8023ef6..0000000 --- a/contracts/starknet/garaga +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 8023ef6c7b8e0d8bd805695539aa1186e730d9db From 6209ad929e77694792ccff6c09813e6ed1ef149f Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 
Dec 2024 14:39:42 +0800 Subject: [PATCH 35/54] Add garaga submodule at commit 5f3b232 --- .gitmodules | 3 +++ contracts/starknet/garaga | 1 + 2 files changed, 4 insertions(+) create mode 160000 contracts/starknet/garaga diff --git a/.gitmodules b/.gitmodules index b571bd0..1ce7c90 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,3 +2,6 @@ path = contracts/ethereum/lib/forge-std url = https://github.com/foundry-rs/forge-std.git shallow = true +[submodule "contracts/starknet/garaga"] + path = contracts/starknet/garaga + url = git@github.com:keep-starknet-strange/garaga.git diff --git a/contracts/starknet/garaga b/contracts/starknet/garaga new file mode 160000 index 0000000..5f3b232 --- /dev/null +++ b/contracts/starknet/garaga @@ -0,0 +1 @@ +Subproject commit 5f3b23252a04f1714838415e69e318ed8e097c15 From 2f6576f7de67f06d6d73277a5ed02367d146d2aa Mon Sep 17 00:00:00 2001 From: ametel01 Date: Sat, 7 Dec 2024 15:08:15 +0800 Subject: [PATCH 36/54] refactor: switch from bash to sh in Docker environment - Modified Dockerfile.relayer to use sh shell - Updated docker-compose.yml to use /bin/sh instead of /bin/bash - Converted run_relayer.sh to use POSIX-compliant shell syntax This change reduces the container size by avoiding the need to install bash and makes the setup more compatible with Alpine Linux's default shell. 
--- contracts/starknet/Scarb.lock | 1 - contracts/starknet/verifier/Scarb.toml | 2 +- docker-compose.yml | 4 +++- docker/Dockerfile.relayer | 6 ++++-- scripts/deploy-contracts.sh | 4 ++-- scripts/run_relayer.sh | 17 ++++++++++++----- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/contracts/starknet/Scarb.lock b/contracts/starknet/Scarb.lock index ff588ad..e93d860 100644 --- a/contracts/starknet/Scarb.lock +++ b/contracts/starknet/Scarb.lock @@ -11,7 +11,6 @@ dependencies = [ [[package]] name = "garaga" version = "0.14.0" -source = "git+https://github.com/keep-starknet-strange/garaga.git?rev=5f3b232#5f3b23252a04f1714838415e69e318ed8e097c15" [[package]] name = "l1_message_proxy" diff --git a/contracts/starknet/verifier/Scarb.toml b/contracts/starknet/verifier/Scarb.toml index b3a97d5..9544f16 100644 --- a/contracts/starknet/verifier/Scarb.toml +++ b/contracts/starknet/verifier/Scarb.toml @@ -6,7 +6,7 @@ edition = "2023_11" # See more keys and their definitions at https://docs.swmansion.com/scarb/docs/reference/manifest.html [dependencies] -garaga = { git = "https://github.com/keep-starknet-strange/garaga.git", rev = "5f3b232" } +garaga = { path = "../garaga/src" } starknet.workspace = true fossil_store = { path = "../store" } diff --git a/docker-compose.yml b/docker-compose.yml index 1342203..c1e16f6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -94,7 +94,9 @@ services: - fossil env_file: - ${ENV_FILE:-.env.local} - command: ["relayer"] + volumes: + - ./scripts:/app/scripts + command: ["/bin/sh", "/app/scripts/run_relayer.sh"] depends_on: deployer: condition: service_completed_successfully diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer index 85b3ace..cb64144 100644 --- a/docker/Dockerfile.relayer +++ b/docker/Dockerfile.relayer @@ -56,5 +56,7 @@ COPY --from=builder /app/target/release/relayer /usr/local/bin/relayer # Make the binary executable RUN chmod +x /usr/local/bin/relayer -# Define the entrypoint -CMD 
["relayer"] +# Define the entrypoint using SHELL instruction to use sh +SHELL ["/bin/sh", "-c"] +ENTRYPOINT ["/usr/local/bin/relayer"] +CMD [] diff --git a/scripts/deploy-contracts.sh b/scripts/deploy-contracts.sh index c6041e3..ba21e70 100755 --- a/scripts/deploy-contracts.sh +++ b/scripts/deploy-contracts.sh @@ -75,13 +75,13 @@ cd "$ETHEREUM_DIR" echo -e "${BLUE}${BOLD}Deploying Ethereum contracts...${NC}" retry_command "forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL" -L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B +L1_MESSAGE_SENDER=0x5147c5C1Cb5b5D3f56186C37a4bcFBb3Cd0bD5A7 # Now deploy Starknet contracts echo -e "\n${BLUE}${BOLD}Building Starknet contracts...${NC}" cd "$STARKNET_DIR" -scarb build --quiet +scarb build echo -e "\n${BLUE}${BOLD}Deploying Starknet contracts...${NC}" # Declare and deploy Fossil Store contract diff --git a/scripts/run_relayer.sh b/scripts/run_relayer.sh index 041f863..9f846ec 100755 --- a/scripts/run_relayer.sh +++ b/scripts/run_relayer.sh @@ -1,14 +1,21 @@ -#!/bin/bash +#!/bin/sh set -e -cd crates/relayer +# Check environment and set interval +if [ "$ENV_FILE" = ".env.local" ]; then + INTERVAL_MINUTES=10 +else + INTERVAL_MINUTES=720 # 12 hours = 720 minutes +fi while true; do - cargo run --release - echo "Waiting 10 minutes before next run..." - for ((i=10; i>0; i--)); do + /usr/local/bin/relayer + echo "Waiting $INTERVAL_MINUTES minutes before next run..." + i=$INTERVAL_MINUTES + while [ $i -gt 0 ]; do echo "Next run in $i minutes..." 
sleep 60 + i=$((i - 1)) done done From a152aa173398c12e8f92ddd49bda0695076f43d1 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Sat, 7 Dec 2024 15:08:45 +0800 Subject: [PATCH 37/54] format starknet contracts --- contracts/starknet/verifier/src/fossil_verifier.cairo | 2 +- contracts/starknet/verifier/src/groth16_verifier.cairo | 2 +- contracts/starknet/verifier/src/universal_ecip.cairo | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/contracts/starknet/verifier/src/fossil_verifier.cairo b/contracts/starknet/verifier/src/fossil_verifier.cairo index 5cdf75a..e513d9c 100644 --- a/contracts/starknet/verifier/src/fossil_verifier.cairo +++ b/contracts/starknet/verifier/src/fossil_verifier.cairo @@ -67,4 +67,4 @@ mod FossilVerifier { verified } -} \ No newline at end of file +} diff --git a/contracts/starknet/verifier/src/groth16_verifier.cairo b/contracts/starknet/verifier/src/groth16_verifier.cairo index c08b20d..e743d47 100644 --- a/contracts/starknet/verifier/src/groth16_verifier.cairo +++ b/contracts/starknet/verifier/src/groth16_verifier.cairo @@ -96,4 +96,4 @@ mod Risc0Groth16VerifierBN254 { (result, journal) } } -} \ No newline at end of file +} diff --git a/contracts/starknet/verifier/src/universal_ecip.cairo b/contracts/starknet/verifier/src/universal_ecip.cairo index 58dcbb9..20f9c94 100644 --- a/contracts/starknet/verifier/src/universal_ecip.cairo +++ b/contracts/starknet/verifier/src/universal_ecip.cairo @@ -72,4 +72,4 @@ mod UniversalECIP { ) } } -} \ No newline at end of file +} From 51a1277def8d2ae1c2370eb38393260df1ef6121 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 03:08:59 +0800 Subject: [PATCH 38/54] removed duplicate dep --- crates/starknet-handler/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/starknet-handler/Cargo.toml b/crates/starknet-handler/Cargo.toml index 9c0bfd0..488e2d1 100644 --- a/crates/starknet-handler/Cargo.toml +++ b/crates/starknet-handler/Cargo.toml @@ -10,7 +10,6 @@ 
thiserror = { workspace = true } starknet = { workspace = true } tracing = { workspace = true } starknet-crypto = { workspace = true } -tracing = { workspace = true } crypto-bigint = "0.5.5" url = "2.5.4" \ No newline at end of file From f68c2c1918fbdcab2897dd3bd4b5605f4d88ccc5 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 10:25:14 +0800 Subject: [PATCH 39/54] refactor: split deployment scripts - Split deploy-contracts.sh into separate Ethereum and Starknet deployment scripts - deploy-ethereum.sh handles L1 contract deployment and updates messaging config - deploy-starknet.sh handles L2 contract deployment - Updated docker-compose.yml to run deployments in correct order: * deploy-ethereum runs after anvil * katana runs after ethereum deployment * deploy-starknet runs after katana * other services depend on starknet deployment --- docker-compose.yml | 49 ++++++++------ scripts/deploy-ethereum.sh | 87 ++++++++++++++++++++++++ scripts/deploy-starknet.sh | 134 +++++++++++++++++++++++++++++++++++++ 3 files changed, 248 insertions(+), 22 deletions(-) create mode 100755 scripts/deploy-ethereum.sh create mode 100755 scripts/deploy-starknet.sh diff --git a/docker-compose.yml b/docker-compose.yml index c1e16f6..7a661d3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,6 +13,19 @@ services: timeout: 5s retries: 5 + deploy-ethereum: + image: fossil-deploy:latest + networks: + - fossil + env_file: + - ${ENV_FILE:-.env.local} + volumes: + - .:/app + depends_on: + anvil: + condition: service_healthy + command: ["./scripts/deploy-ethereum.sh", "local"] + katana: image: fossil-katana:latest networks: @@ -25,13 +38,15 @@ services: depends_on: anvil: condition: service_healthy + deploy-ethereum: + condition: service_completed_successfully healthcheck: test: ["CMD", "katana", "--version"] interval: 5s timeout: 5s retries: 5 - deployer: + deploy-starknet: image: fossil-deploy:latest networks: - fossil @@ -40,23 +55,9 @@ services: volumes: - .:/app 
depends_on: - anvil: - condition: service_healthy katana: condition: service_healthy - command: > - /bin/bash -c ' - source ${ENV_FILE} && - if [ "$ENV_FILE" = ".env.local" ]; then - ./scripts/deploy-contracts.sh local - elif [ "$ENV_FILE" = ".env.sepolia" ]; then - ./scripts/deploy-contracts.sh sepolia - elif [ "$ENV_FILE" = ".env.mainnet" ]; then - ./scripts/deploy-contracts.sh mainnet - else - echo "Unknown environment" - exit 1 - fi' + command: ["./scripts/deploy-starknet.sh", "local"] mmr-builder: image: fossil-build-mmr:latest @@ -68,9 +69,7 @@ services: - RUST_BACKTRACE=1 - ANVIL_URL=http://anvil:8545 depends_on: - anvil: - condition: service_healthy - deployer: + deploy-starknet: condition: service_completed_successfully client: @@ -83,9 +82,9 @@ services: - .:/app command: ["client"] depends_on: - mmr-builder: + deploy-starknet: condition: service_completed_successfully - deployer: + mmr-builder: condition: service_completed_successfully relayer: @@ -97,11 +96,17 @@ services: volumes: - ./scripts:/app/scripts command: ["/bin/sh", "/app/scripts/run_relayer.sh"] + restart: unless-stopped depends_on: - deployer: + deploy-starknet: condition: service_completed_successfully mmr-builder: condition: service_completed_successfully + healthcheck: + test: ["CMD", "pgrep", "-f", "relayer"] + interval: 30s + timeout: 10s + retries: 3 networks: fossil: diff --git a/scripts/deploy-ethereum.sh b/scripts/deploy-ethereum.sh new file mode 100755 index 0000000..6f3887f --- /dev/null +++ b/scripts/deploy-ethereum.sh @@ -0,0 +1,87 @@ +#!/bin/bash + +# Ensure the script stops on the first error +set -e + +# Check if environment argument is provided +if [ -z "$1" ]; then + echo "Usage: $0 " + echo "Available environments: local, sepolia, mainnet" + exit 1 +fi + +# Validate environment argument +ENV_TYPE="$1" +case "$ENV_TYPE" in +"local" | "sepolia" | "mainnet") + ENV_FILE="/app/.env.$ENV_TYPE" + echo "Using environment: $ENV_TYPE ($ENV_FILE)" + ;; +*) + echo "Invalid 
environment. Must be one of: local, sepolia, mainnet" + exit 1 + ;; +esac + +# Source the appropriate environment file +source "$ENV_FILE" +export ACCOUNT_PRIVATE_KEY=${ACCOUNT_PRIVATE_KEY} + +ETHEREUM_DIR="/app/contracts/ethereum" +CONFIG_DIR="/app/config" + +# Define colors +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +NC='\033[0m' +BOLD='\033[1m' +RED='\033[0;31m' + +# Function to update environment variables +update_env_var() { + local var_name=$1 + local var_value=$2 + + if grep -q "^$var_name=" "$ENV_FILE"; then + echo -e "${BLUE}$var_name already exists, replacing in $ENV_FILE...${NC}" + sed -i "s|^$var_name=.*|$var_name=$var_value|" "$ENV_FILE" + else + echo -e "${BLUE}Appending $var_name to $ENV_FILE...${NC}" + echo "$var_name=$var_value" >>"$ENV_FILE" + fi +} + +# Function to update JSON config +update_json_config() { + local json_file=$1 + local contract_address=$2 + + # Update the contract_address in the JSON file + tmp=$(mktemp) + jq --arg addr "$contract_address" '.contract_address = $addr' "$json_file" > "$tmp" && mv "$tmp" "$json_file" + echo -e "${BLUE}Updated contract address in $json_file${NC}" +} + +# Deploy Ethereum contracts +cd "$ETHEREUM_DIR" +echo -e "${BLUE}${BOLD}Deploying Ethereum contracts...${NC}" +forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL + +# Read values from the JSON file and update env vars +SN_MESSAGING=$(jq -r '.snMessaging_address' logs/local_setup.json) +L1_MESSAGE_SENDER=$(jq -r '.l1MessageSender_address' logs/local_setup.json) + +# Update the environment variables +update_env_var "SN_MESSAGING" "$SN_MESSAGING" +update_env_var "L1_MESSAGE_SENDER" "$L1_MESSAGE_SENDER" + +# Update the anvil.messaging.json config +update_json_config "$CONFIG_DIR/anvil.messaging.json" "$SN_MESSAGING" + +# Source the updated environment variables +source "$ENV_FILE" + +echo -e "${BLUE}Using L1_MESSAGE_SENDER: $L1_MESSAGE_SENDER${NC}" +echo -e "${BLUE}Using SN_MESSAGING: $SN_MESSAGING${NC}" 
+echo -e "${GREEN}${BOLD}Ethereum deployment completed successfully!${NC}" \ No newline at end of file diff --git a/scripts/deploy-starknet.sh b/scripts/deploy-starknet.sh new file mode 100755 index 0000000..d756726 --- /dev/null +++ b/scripts/deploy-starknet.sh @@ -0,0 +1,134 @@ +#!/bin/bash + +# Ensure the script stops on the first error +set -e + +# Store the original directory (now inside container at /app) +ORIGINAL_DIR="/app" + +# Update the environment file with new addresses +update_env_var() { + local var_name=$1 + local var_value=$2 + + if grep -q "^$var_name=" "$ENV_FILE"; then + echo -e "${BLUE}$var_name already exists, replacing in $ENV_FILE...${NC}" + sed -i "s|^$var_name=.*|$var_name=$var_value|" "$ENV_FILE" + else + echo -e "${BLUE}Appending $var_name to $ENV_FILE...${NC}" + echo "$var_name=$var_value" >>"$ENV_FILE" + fi +} + +# Check if environment argument is provided +if [ -z "$1" ]; then + echo "Usage: $0 " + echo "Available environments: local, sepolia, mainnet" + exit 1 +fi + +# Validate environment argument +ENV_TYPE="$1" +case "$ENV_TYPE" in +"local" | "sepolia" | "mainnet") + ENV_FILE="$ORIGINAL_DIR/.env.$ENV_TYPE" + echo "Using environment: $ENV_TYPE ($ENV_FILE)" + ;; +*) + echo "Invalid environment. Must be one of: local, sepolia, mainnet" + exit 1 + ;; +esac + +# Check if environment file exists +if [ ! 
-f "$ENV_FILE" ]; then + echo "Error: Environment file $ENV_FILE not found" + exit 1 +fi + +# Source the appropriate environment file +source "$ENV_FILE" + +STARKNET_DIR="/app/contracts/starknet" + +# Define colors +GREEN='\033[0;32m' +BLUE='\033[0;34m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color +BOLD='\033[1m' +RED='\033[0;31m' + +# Now deploy Starknet contracts +echo -e "\n${BLUE}${BOLD}Building Starknet contracts...${NC}" +cd "$STARKNET_DIR" + +scarb build + +echo -e "\n${BLUE}${BOLD}Deploying Starknet contracts...${NC}" +# Declare and deploy Fossil Store contract +echo -e "\n${YELLOW}Declaring Fossil Store contract...${NC}" +FOSSILSTORE_HASH=$(starkli declare ./target/dev/fossil_store_Store.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Class hash declared: ${BOLD}$FOSSILSTORE_HASH${NC}" +echo + +echo -e "${YELLOW}Deploying Fossil Store contract...${NC}" +FOSSILSTORE_ADDRESS=$(starkli deploy $FOSSILSTORE_HASH --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Contract address: ${BOLD}$FOSSILSTORE_ADDRESS${NC}" +echo + +# Declare and deploy Fossil L1MessageProxy contract +echo -e "${YELLOW}Declaring Fossil L1MessageProxy contract...${NC}" +L1MESSAGEPROXY_HASH=$(starkli declare ./target/dev/l1_message_proxy_L1MessageProxy.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Class hash declared: ${BOLD}$L1MESSAGEPROXY_HASH${NC}" +echo + +echo -e "${YELLOW}Deploying Fossil L1MessageProxy contract...${NC}" +L1MESSAGEPROXY_ADDRESS=$(starkli deploy $L1MESSAGEPROXY_HASH $L1_MESSAGE_SENDER $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Contract address: ${BOLD}$L1MESSAGEPROXY_ADDRESS${NC}" +echo + +# Declare and deploy Universal ECIP contract +echo -e "${YELLOW}Declaring Universal ECIP contract...${NC}" +ECIP_HASH=$(starkli declare 
./target/dev/verifier_UniversalECIP.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Class hash declared: ${BOLD}$ECIP_HASH${NC}" +echo + +# Declare and deploy Groth16 Verifier contract +echo -e "${YELLOW}Declaring Groth16 Verifier contract...${NC}" +VERIFIER_HASH=$(starkli declare ./target/dev/verifier_Risc0Groth16VerifierBN254.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Class hash declared: ${BOLD}$VERIFIER_HASH${NC}" +echo + +echo -e "${YELLOW}Deploying Groth16 Verifier contract...${NC}" +VERIFIER_ADDRESS=$(starkli deploy $VERIFIER_HASH $ECIP_HASH --salt 1 | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Contract deployed at: ${BOLD}$VERIFIER_ADDRESS${NC}" +echo + +echo -e "${YELLOW}Declaring Fossil Verifier contract...${NC}" +FOSSIL_VERIFIER_HASH=$(starkli declare ./target/dev/verifier_FossilVerifier.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Class hash declared: ${BOLD}$FOSSIL_VERIFIER_HASH${NC}" +echo + +echo -e "${YELLOW}Deploying Fossil Verifier contract...${NC}" +FOSSIL_VERIFIER_ADDRESS=$(starkli deploy $FOSSIL_VERIFIER_HASH $VERIFIER_ADDRESS $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo -e "${GREEN}Contract deployed at: ${BOLD}$FOSSIL_VERIFIER_ADDRESS${NC}" +echo + +echo -e "\n${GREEN}${BOLD}All contracts deployed!${NC}" + +# Update the environment file with the new addresses +update_env_var "L2_MSG_PROXY" "$L1MESSAGEPROXY_ADDRESS" +update_env_var "FOSSIL_STORE" "$FOSSILSTORE_ADDRESS" +update_env_var "STARKNET_VERIFIER" "$VERIFIER_ADDRESS" +update_env_var "FOSSIL_VERIFIER" "$FOSSIL_VERIFIER_ADDRESS" + +# Return to original directory +cd "$ORIGINAL_DIR" + +# Source the updated environment file +source "$ENV_FILE" + +sleep 5 + +echo -e "${GREEN}${BOLD}Environment variables successfully updated in $ENV_FILE${NC}" \ No 
newline at end of file From bee104e2edc204886013d58f03fbbccf67e500bf Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 10:27:26 +0800 Subject: [PATCH 40/54] fix: json config update in ethereum deployment - Fix anvil.messaging.json not being updated after ethereum deployment - Add proper error handling for json updates - Create temporary files in same directory to avoid permission issues --- scripts/deploy-ethereum.sh | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/scripts/deploy-ethereum.sh b/scripts/deploy-ethereum.sh index 6f3887f..b08e6dc 100755 --- a/scripts/deploy-ethereum.sh +++ b/scripts/deploy-ethereum.sh @@ -57,9 +57,21 @@ update_json_config() { local json_file=$1 local contract_address=$2 - # Update the contract_address in the JSON file - tmp=$(mktemp) - jq --arg addr "$contract_address" '.contract_address = $addr' "$json_file" > "$tmp" && mv "$tmp" "$json_file" + # Create temp file in the same directory to avoid permission issues + local tmp_file="${json_file}.tmp" + + if ! jq --arg addr "$contract_address" '.contract_address = $addr' "$json_file" > "$tmp_file"; then + echo -e "${RED}Failed to update JSON file${NC}" + rm -f "$tmp_file" + return 1 + fi + + if ! 
mv "$tmp_file" "$json_file"; then + echo -e "${RED}Failed to replace JSON file${NC}" + rm -f "$tmp_file" + return 1 + fi + echo -e "${BLUE}Updated contract address in $json_file${NC}" } From b1a42d0763b2fda32b8a0bfffa86171a6caff4bb Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 10:30:36 +0800 Subject: [PATCH 41/54] feat/ Added chain_id to proving logic to handle sepolia test net --- crates/client/src/client.rs | 7 ++++++- crates/ethereum/src/lib.rs | 10 ++++++---- crates/guest-types/src/lib.rs | 20 +++++++++++++++++++- crates/methods/blocks-validity/src/main.rs | 2 +- crates/methods/mmr-append/src/main.rs | 2 +- crates/publisher/bin/build_mmr.rs | 2 ++ crates/publisher/bin/update_mmr.rs | 2 ++ crates/publisher/bin/verify_blocks.rs | 2 ++ crates/publisher/src/api/operations.rs | 5 ++++- crates/publisher/src/core/accumulator.rs | 9 ++++++--- crates/publisher/src/core/batch_processor.rs | 9 +++++++-- crates/publisher/src/validator/validator.rs | 5 ++++- 12 files changed, 60 insertions(+), 15 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 09f55c8..32eab62 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -29,12 +29,15 @@ pub enum LightClientError { ConfigError(String), #[error("Polling interval must be greater than zero")] PollingIntervalError, + #[error("Chain ID is not a valid number")] + ChainIdError(#[from] std::num::ParseIntError), } pub struct LightClient { starknet_provider: StarknetProvider, l2_store_addr: Felt, verifier_addr: String, + chain_id: u64, latest_processed_block: u64, starknet_private_key: String, starknet_account_address: String, @@ -54,7 +57,7 @@ impl LightClient { let verifier_addr = get_env_var("FOSSIL_VERIFIER")?; let starknet_private_key = get_env_var("STARKNET_PRIVATE_KEY")?; let starknet_account_address = get_env_var("STARKNET_ACCOUNT_ADDRESS")?; - + let chain_id = get_env_var("CHAIN_ID")?.parse::()?; // Initialize providers let starknet_provider = 
StarknetProvider::new(&starknet_rpc_url)?; @@ -71,6 +74,7 @@ impl LightClient { starknet_provider, l2_store_addr, verifier_addr, + chain_id, latest_processed_block: 0, starknet_private_key, starknet_account_address, @@ -207,6 +211,7 @@ impl LightClient { publisher::prove_mmr_update( &self.starknet_provider.rpc_url().to_string(), + self.chain_id, &self.verifier_addr, &self.starknet_private_key, &self.starknet_account_address, diff --git a/crates/ethereum/src/lib.rs b/crates/ethereum/src/lib.rs index c6ff83c..46c8057 100644 --- a/crates/ethereum/src/lib.rs +++ b/crates/ethereum/src/lib.rs @@ -32,10 +32,12 @@ pub async fn get_finalized_block_hash() -> Result<(u64, String), UtilsError> { let provider = ProviderBuilder::new() .with_recommended_fillers() .try_on_anvil_with_wallet_and_config(|anvil| anvil.fork(rpc_url.clone())) - .map_err(|e| UtilsError::RetryExhausted( - attempts, - format!("Failed to setup Anvil provider: {}", e), - ))?; + .map_err(|e| { + UtilsError::RetryExhausted( + attempts, + format!("Failed to setup Anvil provider: {}", e), + ) + })?; let contract = BlockHashFetcher::deploy(&provider).await?; let result = contract.getBlockHash().call().await?; diff --git a/crates/guest-types/src/lib.rs b/crates/guest-types/src/lib.rs index d57bc63..83107b7 100644 --- a/crates/guest-types/src/lib.rs +++ b/crates/guest-types/src/lib.rs @@ -102,6 +102,7 @@ impl GuestOutput { // CombinedInput #[derive(Debug, Serialize, Deserialize, Clone)] pub struct CombinedInput { + chain_id: u64, headers: Vec, mmr_input: MMRInput, skip_proof_verification: bool, @@ -109,17 +110,23 @@ pub struct CombinedInput { impl CombinedInput { pub fn new( + chain_id: u64, headers: Vec, mmr_input: MMRInput, skip_proof_verification: bool, ) -> Self { Self { + chain_id, headers, mmr_input, skip_proof_verification, } } + pub fn chain_id(&self) -> u64 { + self.chain_id + } + pub fn headers(&self) -> &Vec { &self.headers } @@ -225,19 +232,30 @@ pub struct GuestProof { #[derive(Debug, Clone, Serialize, 
Deserialize)] pub struct BlocksValidityInput { + chain_id: u64, headers: Vec, mmr_input: MMRInput, proofs: Vec, } impl BlocksValidityInput { - pub fn new(headers: Vec, mmr_input: MMRInput, proofs: Vec) -> Self { + pub fn new( + chain_id: u64, + headers: Vec, + mmr_input: MMRInput, + proofs: Vec, + ) -> Self { Self { + chain_id, headers, mmr_input, proofs, } } + pub fn chain_id(&self) -> u64 { + self.chain_id + } + pub fn headers(&self) -> &Vec { &self.headers } diff --git a/crates/methods/blocks-validity/src/main.rs b/crates/methods/blocks-validity/src/main.rs index b9b8bd0..f9431a9 100644 --- a/crates/methods/blocks-validity/src/main.rs +++ b/crates/methods/blocks-validity/src/main.rs @@ -9,7 +9,7 @@ fn main() { let input: BlocksValidityInput = env::read(); // Verify block headers - if !are_blocks_and_chain_valid(&input.headers()) { + if !are_blocks_and_chain_valid(&input.headers(), input.chain_id()) { env::commit(&false); } // Initialize MMR with previous state diff --git a/crates/methods/mmr-append/src/main.rs b/crates/methods/mmr-append/src/main.rs index 7e31401..8cdae23 100644 --- a/crates/methods/mmr-append/src/main.rs +++ b/crates/methods/mmr-append/src/main.rs @@ -11,7 +11,7 @@ fn main() { let input: CombinedInput = env::read(); // Verify block headers assert!( - are_blocks_and_chain_valid(&input.headers()), + are_blocks_and_chain_valid(&input.headers(), input.chain_id()), "Invalid block headers" ); // Initialize MMR with previous state diff --git a/crates/publisher/bin/build_mmr.rs b/crates/publisher/bin/build_mmr.rs index c4b94f2..a8f66d6 100644 --- a/crates/publisher/bin/build_mmr.rs +++ b/crates/publisher/bin/build_mmr.rs @@ -23,6 +23,7 @@ struct Args { async fn main() -> Result<(), Box> { initialize_logger_and_env()?; + let chain_id = get_env_var("CHAIN_ID")?.parse::()?; let rpc_url = get_env_var("STARKNET_RPC_URL")?; let verifier_address = get_env_var("FOSSIL_VERIFIER")?; let private_key = get_env_var("STARKNET_PRIVATE_KEY")?; @@ -37,6 +38,7 @@ async 
fn main() -> Result<(), Box> { // Initialize accumulator builder with the batch size let mut builder = AccumulatorBuilder::new( &rpc_url, + chain_id, &verifier_address, &private_key, &account_address, diff --git a/crates/publisher/bin/update_mmr.rs b/crates/publisher/bin/update_mmr.rs index a67f3c5..7cf86c5 100644 --- a/crates/publisher/bin/update_mmr.rs +++ b/crates/publisher/bin/update_mmr.rs @@ -24,6 +24,7 @@ struct Args { async fn main() -> Result<(), Box> { initialize_logger_and_env()?; + let chain_id = get_env_var("CHAIN_ID")?.parse::()?; let rpc_url = get_env_var("STARKNET_RPC_URL")?; let verifier_address = get_env_var("FOSSIL_VERIFIER")?; let private_key = get_env_var("STARKNET_PRIVATE_KEY")?; @@ -36,6 +37,7 @@ async fn main() -> Result<(), Box> { publisher::prove_mmr_update( &rpc_url, + chain_id, &verifier_address, &private_key, &account_address, diff --git a/crates/publisher/bin/verify_blocks.rs b/crates/publisher/bin/verify_blocks.rs index 2f023f9..8529e64 100644 --- a/crates/publisher/bin/verify_blocks.rs +++ b/crates/publisher/bin/verify_blocks.rs @@ -24,6 +24,7 @@ async fn main() -> Result<(), Box> { initialize_logger_and_env()?; let rpc_url = get_env_var("STARKNET_RPC_URL")?; let l2_store_address = get_env_var("FOSSIL_STORE")?; + let chain_id = get_env_var("CHAIN_ID")?.parse::()?; let args = Args::parse(); @@ -37,6 +38,7 @@ async fn main() -> Result<(), Box> { match prove_headers_integrity_and_inclusion( &rpc_url, &l2_store_address, + chain_id, &headers, Some(args.skip_proof), ) diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index 26b1b12..d43e6ab 100644 --- a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -6,6 +6,7 @@ const DEFAULT_BATCH_SIZE: u64 = 1024; pub async fn prove_mmr_update( rpc_url: &String, + chain_id: u64, verifier_address: &String, account_private_key: &String, account_address: &String, @@ -16,6 +17,7 @@ pub async fn prove_mmr_update( ) -> Result<(), 
PublisherError> { let mut builder = AccumulatorBuilder::new( rpc_url, + chain_id, verifier_address, account_private_key, account_address, @@ -46,13 +48,14 @@ pub async fn prove_mmr_update( pub async fn prove_headers_integrity_and_inclusion( rpc_url: &String, l2_store_address: &String, + chain_id: u64, headers: &Vec, skip_proof_verification: Option, ) -> Result, PublisherError> { let skip_proof = skip_proof_verification.unwrap_or(false); let validator = - ValidatorBuilder::new(rpc_url, l2_store_address, DEFAULT_BATCH_SIZE, skip_proof) + ValidatorBuilder::new(rpc_url, l2_store_address, chain_id, DEFAULT_BATCH_SIZE, skip_proof) .await .map_err(|e| { tracing::error!(error = %e, "Failed to create ValidatorBuilder"); diff --git a/crates/publisher/src/core/accumulator.rs b/crates/publisher/src/core/accumulator.rs index cbbe5ab..ed719c3 100644 --- a/crates/publisher/src/core/accumulator.rs +++ b/crates/publisher/src/core/accumulator.rs @@ -9,6 +9,7 @@ use tracing::{debug, error, info, warn}; pub struct AccumulatorBuilder<'a> { rpc_url: &'a String, + chain_id: u64, verifier_address: &'a String, account_private_key: &'a String, account_address: &'a String, @@ -20,6 +21,7 @@ pub struct AccumulatorBuilder<'a> { impl<'a> AccumulatorBuilder<'a> { pub async fn new( rpc_url: &'a String, + chain_id: u64, verifier_address: &'a String, account_private_key: &'a String, account_address: &'a String, @@ -56,6 +58,7 @@ impl<'a> AccumulatorBuilder<'a> { Ok(Self { rpc_url, + chain_id, verifier_address, account_private_key, account_address, @@ -105,7 +108,7 @@ impl<'a> AccumulatorBuilder<'a> { let result = self .batch_processor - .process_batch(start_block, current_end) + .process_batch(self.chain_id, start_block, current_end) .await .map_err(|e| { error!( @@ -148,7 +151,7 @@ impl<'a> AccumulatorBuilder<'a> { let start_block = self.batch_processor.calculate_start_block(current_end)?; let batch_result = self .batch_processor - .process_batch(start_block, current_end) + 
.process_batch(self.chain_id, start_block, current_end) .await?; if let Some(result) = batch_result { @@ -193,7 +196,7 @@ impl<'a> AccumulatorBuilder<'a> { if let Some(result) = self .batch_processor - .process_batch(batch_range.start, batch_range.end) + .process_batch(self.chain_id, batch_range.start, batch_range.end) .await .map_err(|e| { error!( diff --git a/crates/publisher/src/core/batch_processor.rs b/crates/publisher/src/core/batch_processor.rs index d42a43b..36fe50d 100644 --- a/crates/publisher/src/core/batch_processor.rs +++ b/crates/publisher/src/core/batch_processor.rs @@ -43,6 +43,7 @@ impl BatchProcessor { pub async fn process_batch( &self, + chain_id: u64, start_block: u64, end_block: u64, ) -> Result, AccumulatorError> { @@ -136,8 +137,12 @@ impl BatchProcessor { new_headers.clone(), ); - let combined_input = - CombinedInput::new(headers.clone(), mmr_input, self.skip_proof_verification); + let combined_input = CombinedInput::new( + chain_id, + headers.clone(), + mmr_input, + self.skip_proof_verification, + ); let proof = self .proof_generator diff --git a/crates/publisher/src/validator/validator.rs b/crates/publisher/src/validator/validator.rs index dbd2c6d..c50bf80 100644 --- a/crates/publisher/src/validator/validator.rs +++ b/crates/publisher/src/validator/validator.rs @@ -16,6 +16,7 @@ use tracing::error; pub struct ValidatorBuilder { rpc_url: String, l2_store_address: Felt, + chain_id: u64, proof_generator: ProofGenerator, batch_size: u64, skip_proof: bool, @@ -25,6 +26,7 @@ impl ValidatorBuilder { pub async fn new( rpc_url: &String, l2_store_address: &String, + chain_id: u64, batch_size: u64, skip_proof: bool, ) -> Result { @@ -40,6 +42,7 @@ impl ValidatorBuilder { Ok(Self { rpc_url: rpc_url.clone(), l2_store_address: Felt::from_hex(l2_store_address)?, + chain_id, proof_generator, batch_size, skip_proof, @@ -160,7 +163,7 @@ impl ValidatorBuilder { let mmr_input = self.prepare_mmr_input(mmr).await?; let blocks_validity_input = - 
BlocksValidityInput::new(batch_headers, mmr_input, guest_proofs); + BlocksValidityInput::new(self.chain_id, batch_headers, mmr_input, guest_proofs); Ok(self .proof_generator From 146351446ce261873fc75c7c9a8886b8057e22e5 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 10:32:46 +0800 Subject: [PATCH 42/54] removed hardcoded anvil account PK --- contracts/ethereum/script/LocalTesting.s.sol | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/contracts/ethereum/script/LocalTesting.s.sol b/contracts/ethereum/script/LocalTesting.s.sol index 8b5e950..5362952 100644 --- a/contracts/ethereum/script/LocalTesting.s.sol +++ b/contracts/ethereum/script/LocalTesting.s.sol @@ -7,14 +7,11 @@ import {L1MessageSender} from "../src/L1MessageSender.sol"; import {StarknetMessagingLocal} from "../src/StarknetMessagingLocal.sol"; -// address constant SN_CORE = 0xc662c410C0ECf747543f5bA90660f6ABeBD9C8c4; - contract LocalSetup is Script { function setUp() public {} function run() public{ - // uint256 deployerPrivateKey = vm.envUint("ACCOUNT_PRIVATE_KEY"); - uint256 deployerPrivateKey = 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80; + uint256 deployerPrivateKey = vm.envUint("ACCOUNT_PRIVATE_KEY"); string memory json = "local_testing"; From 75edbdb5738cf612ba8ae38816b4896858b41be1 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 10:33:40 +0800 Subject: [PATCH 43/54] added submodule to ci to handle garaga dep --- .github/workflows/cairo.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cairo.yml b/.github/workflows/cairo.yml index 22bda50..f4e6212 100644 --- a/.github/workflows/cairo.yml +++ b/.github/workflows/cairo.yml @@ -6,7 +6,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - # - uses: foundry-rs/setup-snfoundry@v3 + with: + submodules: true - uses: software-mansion/setup-scarb@v1 with: scarb-version: "2.8.5" From b621afeaf13093530c987052bbbb90b0814a5dd7 
Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 10:47:46 +0800 Subject: [PATCH 44/54] added jq to deps --- docker/Dockerfile.deploy | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/Dockerfile.deploy b/docker/Dockerfile.deploy index b8ccf1e..5a03452 100644 --- a/docker/Dockerfile.deploy +++ b/docker/Dockerfile.deploy @@ -10,6 +10,7 @@ RUN apt-get update && apt-get install -y \ ca-certificates \ pkg-config \ libssl-dev \ + jq \ && rm -rf /var/lib/apt/lists/* # Install Foundry From b1385891df17db755581d1636c817ece113dbe7a Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 10:48:06 +0800 Subject: [PATCH 45/54] chore updated --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 0ae1767..9fa16f1 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,5 @@ vendor/ .env.sepolia .env.mainnet docker/rebuild-all.sh + +scripts/deploy-contracts.bash.sh From 499152afaf05b240351d6bb456ac053aa74507e7 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 12:09:27 +0800 Subject: [PATCH 46/54] fmt --- crates/publisher/src/api/operations.rs | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/publisher/src/api/operations.rs b/crates/publisher/src/api/operations.rs index d43e6ab..3ab6998 100644 --- a/crates/publisher/src/api/operations.rs +++ b/crates/publisher/src/api/operations.rs @@ -54,13 +54,18 @@ pub async fn prove_headers_integrity_and_inclusion( ) -> Result, PublisherError> { let skip_proof = skip_proof_verification.unwrap_or(false); - let validator = - ValidatorBuilder::new(rpc_url, l2_store_address, chain_id, DEFAULT_BATCH_SIZE, skip_proof) - .await - .map_err(|e| { - tracing::error!(error = %e, "Failed to create ValidatorBuilder"); - e - })?; + let validator = ValidatorBuilder::new( + rpc_url, + l2_store_address, + chain_id, + DEFAULT_BATCH_SIZE, + skip_proof, + ) + .await + .map_err(|e| { + tracing::error!(error = %e, "Failed to create 
ValidatorBuilder"); + e + })?; let result = validator .verify_blocks_integrity_and_inclusion(headers) From c02a48a869540e9cabc20c98ebe9b7c12aeff14f Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 12:09:38 +0800 Subject: [PATCH 47/54] new deployments --- config/anvil.messaging.json | 16 ++-- scripts/deploy-contracts.sh | 166 ------------------------------------ 2 files changed, 8 insertions(+), 174 deletions(-) delete mode 100755 scripts/deploy-contracts.sh diff --git a/config/anvil.messaging.json b/config/anvil.messaging.json index 6957685..ebe3f85 100644 --- a/config/anvil.messaging.json +++ b/config/anvil.messaging.json @@ -1,9 +1,9 @@ { - "chain": "ethereum", - "rpc_url": "http://anvil:8545", - "contract_address": "0x364C7188028348566E38D762f6095741c49f492B", - "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", - "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", - "interval": 2, - "from_block": 21340770 -} \ No newline at end of file + "chain": "ethereum", + "rpc_url": "http://anvil:8545", + "contract_address": "0x50d1bf1Cb2873C8f32AFe3b3AA6c075b341209FE", + "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + "interval": 2, + "from_block": 21340770 +} diff --git a/scripts/deploy-contracts.sh b/scripts/deploy-contracts.sh deleted file mode 100755 index ba21e70..0000000 --- a/scripts/deploy-contracts.sh +++ /dev/null @@ -1,166 +0,0 @@ -#!/bin/bash - -# Ensure the script stops on the first error -set -e - -# Store the original directory (now inside container at /app) -ORIGINAL_DIR="/app" - -# Check if environment argument is provided -if [ -z "$1" ]; then - echo "Usage: $0 " - echo "Available environments: local, sepolia, mainnet" - exit 1 -fi - -# Validate environment argument -ENV_TYPE="$1" -case "$ENV_TYPE" in -"local" | "sepolia" | "mainnet") - ENV_FILE="$ORIGINAL_DIR/.env.$ENV_TYPE" - echo "Using 
environment: $ENV_TYPE ($ENV_FILE)" - ;; -*) - echo "Invalid environment. Must be one of: local, sepolia, mainnet" - exit 1 - ;; -esac -# Check if environment file exists -if [ ! -f "$ENV_FILE" ]; then - echo "Error: Environment file $ENV_FILE not found" - exit 1 -fi - -# Source the appropriate environment file -source "$ENV_FILE" - -ETHEREUM_DIR="/app/contracts/ethereum" -STARKNET_DIR="/app/contracts/starknet" - -# Define colors -GREEN='\033[0;32m' -BLUE='\033[0;34m' -YELLOW='\033[1;33m' -NC='\033[0m' # No Color -BOLD='\033[1m' -RED='\033[0;31m' - -# Function to retry commands -retry_command() { - local retries=5 - local wait_time=5 - local command="$@" - local retry_count=0 - - until [ $retry_count -ge $retries ] - do - echo -e "${YELLOW}Attempting deployment (attempt $((retry_count + 1)) of $retries)...${NC}" - if eval "$command"; then - return 0 - fi - retry_count=$((retry_count + 1)) - if [ $retry_count -lt $retries ]; then - echo -e "${YELLOW}Deployment failed. Waiting ${wait_time} seconds before retrying...${NC}" - sleep $wait_time - # Increase wait time for next attempt - wait_time=$((wait_time * 2)) - fi - done - echo -e "${RED}Failed to deploy after $retries attempts${NC}" - return 1 -} - -# Deploy Ethereum contracts -cd "$ETHEREUM_DIR" -echo -e "${BLUE}${BOLD}Deploying Ethereum contracts...${NC}" -retry_command "forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL" - -L1_MESSAGE_SENDER=0x5147c5C1Cb5b5D3f56186C37a4bcFBb3Cd0bD5A7 - -# Now deploy Starknet contracts -echo -e "\n${BLUE}${BOLD}Building Starknet contracts...${NC}" -cd "$STARKNET_DIR" - -scarb build - -echo -e "\n${BLUE}${BOLD}Deploying Starknet contracts...${NC}" -# Declare and deploy Fossil Store contract -echo -e "\n${YELLOW}Declaring Fossil Store contract...${NC}" -FOSSILSTORE_HASH=$(starkli declare ./target/dev/fossil_store_Store.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Class hash declared: 
${BOLD}$FOSSILSTORE_HASH${NC}" -echo - -echo -e "${YELLOW}Deploying Fossil Store contract...${NC}" -FOSSILSTORE_ADDRESS=$(starkli deploy $FOSSILSTORE_HASH --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Contract address: ${BOLD}$FOSSILSTORE_ADDRESS${NC}" -echo - -# Declare and deploy Fossil L1MessageProxy contract -echo -e "${YELLOW}Declaring Fossil L1MessageProxy contract...${NC}" -L1MESSAGEPROXY_HASH=$(starkli declare ./target/dev/l1_message_proxy_L1MessageProxy.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Class hash declared: ${BOLD}$L1MESSAGEPROXY_HASH${NC}" -echo - -echo -e "${YELLOW}Deploying Fossil L1MessageProxy contract...${NC}" -L1MESSAGEPROXY_ADDRESS=$(starkli deploy $L1MESSAGEPROXY_HASH $L1_MESSAGE_SENDER $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Contract address: ${BOLD}$L1MESSAGEPROXY_ADDRESS${NC}" -echo - -# Declare and deploy Universal ECIP contract -echo -e "${YELLOW}Declaring Universal ECIP contract...${NC}" -ECIP_HASH=$(starkli declare ./target/dev/verifier_UniversalECIP.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Class hash declared: ${BOLD}$ECIP_HASH${NC}" -echo - -# Declare and deploy Groth16 Verifier contract -echo -e "${YELLOW}Declaring Groth16 Verifier contract...${NC}" -VERIFIER_HASH=$(starkli declare ./target/dev/verifier_Risc0Groth16VerifierBN254.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Class hash declared: ${BOLD}$VERIFIER_HASH${NC}" -echo - -echo -e "${YELLOW}Deploying Groth16 Verifier contract...${NC}" -VERIFIER_ADDRESS=$(starkli deploy $VERIFIER_HASH $ECIP_HASH --salt 1 | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Contract deployed at: ${BOLD}$VERIFIER_ADDRESS${NC}" -echo - -echo -e "${YELLOW}Declaring Fossil Verifier contract...${NC}" 
-FOSSIL_VERIFIER_HASH=$(starkli declare ./target/dev/verifier_FossilVerifier.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Class hash declared: ${BOLD}$FOSSIL_VERIFIER_HASH${NC}" -echo - -echo -e "${YELLOW}Deploying Fossil Verifier contract...${NC}" -FOSSIL_VERIFIER_ADDRESS=$(starkli deploy $FOSSIL_VERIFIER_HASH $VERIFIER_ADDRESS $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) -echo -e "${GREEN}Contract deployed at: ${BOLD}$FOSSIL_VERIFIER_ADDRESS${NC}" -echo - -echo -e "\n${GREEN}${BOLD}All contracts deployed!${NC}" - -# Update the environment file with new addresses -update_env_var() { - local var_name=$1 - local var_value=$2 - - if grep -q "^$var_name=" "$ENV_FILE"; then - echo -e "${BLUE}$var_name already exists, replacing in $ENV_FILE...${NC}" - sed -i "s|^$var_name=.*|$var_name=$var_value|" "$ENV_FILE" - else - echo -e "${BLUE}Appending $var_name to $ENV_FILE...${NC}" - echo "$var_name=$var_value" >>"$ENV_FILE" - fi -} - -# Update the environment file with the new addresses -update_env_var "L2_MSG_PROXY" "$L1MESSAGEPROXY_ADDRESS" -update_env_var "FOSSIL_STORE" "$FOSSILSTORE_ADDRESS" -update_env_var "STARKNET_VERIFIER" "$VERIFIER_ADDRESS" -update_env_var "FOSSIL_VERIFIER" "$FOSSIL_VERIFIER_ADDRESS" - -# Return to original directory -cd "$ORIGINAL_DIR" - -# Source the updated environment file -source "$ENV_FILE" - -sleep 2 - -echo -e "${GREEN}${BOLD}Environment variables successfully updated in $ENV_FILE${NC}" From 0b0d7b545823d9c61d0e8808119128adc772e4f0 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 13:19:33 +0800 Subject: [PATCH 48/54] feat: add retry mechanism to ethereum deployment script - Add deploy_contracts function with 3 retry attempts - Add 10 second timeout between retries - Improve error handling and feedback messages --- scripts/deploy-ethereum.sh | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git 
a/scripts/deploy-ethereum.sh b/scripts/deploy-ethereum.sh index b08e6dc..b959a67 100755 --- a/scripts/deploy-ethereum.sh +++ b/scripts/deploy-ethereum.sh @@ -75,10 +75,34 @@ update_json_config() { echo -e "${BLUE}Updated contract address in $json_file${NC}" } +# Function to deploy with retries +deploy_contracts() { + local max_attempts=3 + local attempt=1 + local wait_time=10 + + while [ $attempt -le $max_attempts ]; do + echo -e "${BLUE}${BOLD}Deploying Ethereum contracts (Attempt $attempt/$max_attempts)...${NC}" + + if forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL; then + return 0 + fi + + if [ $attempt -lt $max_attempts ]; then + echo -e "${YELLOW}Deployment failed, retrying in ${wait_time}s...${NC}" + sleep $wait_time + fi + + attempt=$((attempt + 1)) + done + + echo -e "${RED}Failed to deploy contracts after $max_attempts attempts${NC}" + return 1 +} + # Deploy Ethereum contracts cd "$ETHEREUM_DIR" -echo -e "${BLUE}${BOLD}Deploying Ethereum contracts...${NC}" -forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL +deploy_contracts || exit 1 # Read values from the JSON file and update env vars SN_MESSAGING=$(jq -r '.snMessaging_address' logs/local_setup.json) From ea1597c68a2b05d12ec932bbab22efda92e86149 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 13:19:33 +0800 Subject: [PATCH 49/54] fix: adjust mmr-builder restart policy - Change restart policy from 'always' to 'on-failure:3' - Prevent continuous restart loop while maintaining resilience --- docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yml b/docker-compose.yml index 7a661d3..5c12039 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -71,6 +71,7 @@ services: depends_on: deploy-starknet: condition: service_completed_successfully + restart: on-failure:3 client: image: fossil-client:latest From dcdc0bf15bed5a5866fa44fa1acf0eeede3e805a Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: 
Mon, 9 Dec 2024 13:20:34 +0800 Subject: [PATCH 50/54] added source check for mmr update and min update time on store --- contracts/starknet/store/src/lib.cairo | 33 ++++++++++++++++++++++++++ scripts/deploy-starknet.sh | 6 +++++ 2 files changed, 39 insertions(+) diff --git a/contracts/starknet/store/src/lib.cairo b/contracts/starknet/store/src/lib.cairo index 8c0c61d..3176bb8 100644 --- a/contracts/starknet/store/src/lib.cairo +++ b/contracts/starknet/store/src/lib.cairo @@ -1,5 +1,10 @@ #[starknet::interface] pub trait IFossilStore { + fn initialize( + ref self: TContractState, + verifier_address: starknet::ContractAddress, + min_update_interval: u64 + ); fn store_latest_blockhash_from_l1(ref self: TContractState, block_number: u64, blockhash: u256); fn update_mmr_state( ref self: TContractState, @@ -34,6 +39,9 @@ mod Store { #[storage] struct Storage { + initialized: bool, + verifier_address: starknet::ContractAddress, + min_update_interval: u64, latest_blockhash_from_l1: (u64, u256), latest_mmr_block: u64, mmr_batches: Map, @@ -61,6 +69,17 @@ mod Store { #[abi(embed_v0)] impl FossilStoreImpl of super::IFossilStore { + fn initialize( + ref self: ContractState, + verifier_address: starknet::ContractAddress, + min_update_interval: u64 + ) { + assert!(!self.initialized.read(), "Contract already initialized"); + self.initialized.write(true); + self.verifier_address.write(verifier_address); + self.min_update_interval.write(min_update_interval); + } + fn store_latest_blockhash_from_l1( ref self: ContractState, block_number: u64, blockhash: u256 ) { @@ -79,6 +98,20 @@ mod Store { leaves_count: u64, mmr_root: u256, ) { + assert!( + starknet::get_caller_address() == self.verifier_address.read(), + "Only Fossil Verifier can update MMR state" + ); + + let min_update_interval = self.min_update_interval.read(); + let actual_update_interval = latest_mmr_block - self.latest_mmr_block.read(); + assert!( + actual_update_interval >= min_update_interval, + "Update interval: {} must 
be greater than or equal to the minimum update interval: {}", + actual_update_interval, + min_update_interval + ); + let mut curr_state = self.mmr_batches.entry(batch_index); curr_state.leaves_count.write(leaves_count); diff --git a/scripts/deploy-starknet.sh b/scripts/deploy-starknet.sh index d756726..976e23d 100755 --- a/scripts/deploy-starknet.sh +++ b/scripts/deploy-starknet.sh @@ -5,6 +5,7 @@ set -e # Store the original directory (now inside container at /app) ORIGINAL_DIR="/app" +UPDATE_INTERVAL=40 # Update the environment file with new addresses update_env_var() { @@ -115,6 +116,11 @@ FOSSIL_VERIFIER_ADDRESS=$(starkli deploy $FOSSIL_VERIFIER_HASH $VERIFIER_ADDRESS echo -e "${GREEN}Contract deployed at: ${BOLD}$FOSSIL_VERIFIER_ADDRESS${NC}" echo +echo -e "${YELLOW}Initializing Fossil Store contract...${NC}" +starkli invoke $FOSSILSTORE_ADDRESS initialize $FOSSIL_VERIFIER_ADDRESS $UPDATE_INTERVAL -w +echo -e "${GREEN}Fossil Store contract initialized${NC}" +echo + echo -e "\n${GREEN}${BOLD}All contracts deployed!${NC}" # Update the environment file with the new addresses From 3944d324b57ee83cb8256b657d5ac2dbc11ca879 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 13:38:35 +0800 Subject: [PATCH 51/54] fix: remove entrypoint from relayer Dockerfile - Remove ENTRYPOINT and CMD directives - Allow docker-compose to control the container entry point - Enable proper logging from run_relayer.sh script --- docker/Dockerfile.relayer | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer index cb64144..9abb0ff 100644 --- a/docker/Dockerfile.relayer +++ b/docker/Dockerfile.relayer @@ -56,7 +56,5 @@ COPY --from=builder /app/target/release/relayer /usr/local/bin/relayer # Make the binary executable RUN chmod +x /usr/local/bin/relayer -# Define the entrypoint using SHELL instruction to use sh -SHELL ["/bin/sh", "-c"] -ENTRYPOINT ["/usr/local/bin/relayer"] -CMD [] +# Remove the ENTRYPOINT and 
CMD directives +# Let docker-compose handle the command From c9af681c5287cf63d303e4ebeb62c26face8739d Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 13:38:35 +0800 Subject: [PATCH 52/54] feat: improve relayer service reliability - Add retry mechanism to run_relayer.sh - Configure different retry settings for local/prod environments - Remove restart policy from docker-compose - Update healthcheck to monitor script process - Improve logging and error handling --- docker-compose.yml | 3 +-- scripts/run_relayer.sh | 46 ++++++++++++++++++++++++++++++++++-------- 2 files changed, 39 insertions(+), 10 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 5c12039..7c20599 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -97,14 +97,13 @@ services: volumes: - ./scripts:/app/scripts command: ["/bin/sh", "/app/scripts/run_relayer.sh"] - restart: unless-stopped depends_on: deploy-starknet: condition: service_completed_successfully mmr-builder: condition: service_completed_successfully healthcheck: - test: ["CMD", "pgrep", "-f", "relayer"] + test: ["CMD-SHELL", "pgrep -f run_relayer.sh"] interval: 30s timeout: 10s retries: 3 diff --git a/scripts/run_relayer.sh b/scripts/run_relayer.sh index 9f846ec..1d023e5 100755 --- a/scripts/run_relayer.sh +++ b/scripts/run_relayer.sh @@ -5,17 +5,47 @@ set -e # Check environment and set interval if [ "$ENV_FILE" = ".env.local" ]; then INTERVAL_MINUTES=10 + MAX_RETRIES=3 + RETRY_DELAY=10 # seconds else INTERVAL_MINUTES=720 # 12 hours = 720 minutes + MAX_RETRIES=5 + RETRY_DELAY=60 # seconds fi -while true; do - /usr/local/bin/relayer - echo "Waiting $INTERVAL_MINUTES minutes before next run..." - i=$INTERVAL_MINUTES - while [ $i -gt 0 ]; do - echo "Next run in $i minutes..." - sleep 60 - i=$((i - 1)) +run_with_retry() { + attempt=1 + while [ $attempt -le $MAX_RETRIES ]; do + echo "Starting relayer (attempt $attempt/$MAX_RETRIES)..." + if /usr/local/bin/relayer; then + return 0 + fi + + exit_code=$? 
+ echo "Relayer exited with code $exit_code" + + if [ $attempt -lt $MAX_RETRIES ]; then + echo "Retrying in $RETRY_DELAY seconds..." + sleep $RETRY_DELAY + fi + attempt=$((attempt + 1)) done + + echo "Failed to run relayer after $MAX_RETRIES attempts" + return 1 +} + +while true; do + if run_with_retry; then + echo "Relayer completed successfully. Waiting $INTERVAL_MINUTES minutes before next run..." + i=$INTERVAL_MINUTES + while [ $i -gt 0 ]; do + echo "Next run in $i minutes..." + sleep 60 + i=$((i - 1)) + done + else + echo "Relayer failed all retry attempts. Exiting..." + exit 1 + fi done From 2485f5195898143eaa4b9acfc3c2b4f6a26303c9 Mon Sep 17 00:00:00 2001 From: Alex Metelli Date: Mon, 9 Dec 2024 13:44:58 +0800 Subject: [PATCH 53/54] chore removed redundant logging --- crates/client/src/client.rs | 7 ------- crates/publisher/src/core/batch_processor.rs | 4 ++-- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 32eab62..4e7a9f1 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -175,13 +175,6 @@ impl LightClient { .get_latest_mmr_block(&self.l2_store_addr) .await?; - info!( - latest_relayed_block, - latest_mmr_block, - num_blocks = latest_relayed_block - latest_mmr_block, - "State fetched from Starknet" - ); - // Update MMR and verify proofs self.update_mmr(latest_mmr_block, latest_relayed_block) .await?; diff --git a/crates/publisher/src/core/batch_processor.rs b/crates/publisher/src/core/batch_processor.rs index 36fe50d..3ef7370 100644 --- a/crates/publisher/src/core/batch_processor.rs +++ b/crates/publisher/src/core/batch_processor.rs @@ -6,7 +6,7 @@ use common::get_or_create_db_path; use guest_types::{CombinedInput, GuestOutput, MMRInput}; use mmr::PeaksOptions; use mmr_utils::initialize_mmr; -use tracing::{debug, error, info}; +use tracing::{debug, error, info, warn}; pub struct BatchProcessor { batch_size: u64, @@ -105,7 +105,7 @@ impl BatchProcessor { 
})?; if headers.is_empty() { - error!( + warn!( "No headers found for block range {} to {}", start_block, adjusted_end_block ); From 6258f75af500aa7bb8992dffe80d64f30ab107b8 Mon Sep 17 00:00:00 2001 From: ametel01 Date: Mon, 9 Dec 2024 14:11:34 +0800 Subject: [PATCH 54/54] refactor: use RELAYER_INTERVAL environment variable - Remove ENV_FILE check from run_relayer.sh - Add RELAYER_INTERVAL environment variable with default 720 minutes - Set 10-minute interval for local development in docker-compose - Simplify configuration management --- scripts/run_relayer.sh | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/scripts/run_relayer.sh b/scripts/run_relayer.sh index 1d023e5..6be3f48 100755 --- a/scripts/run_relayer.sh +++ b/scripts/run_relayer.sh @@ -2,16 +2,10 @@ set -e -# Check environment and set interval -if [ "$ENV_FILE" = ".env.local" ]; then - INTERVAL_MINUTES=10 - MAX_RETRIES=3 - RETRY_DELAY=10 # seconds -else - INTERVAL_MINUTES=720 # 12 hours = 720 minutes - MAX_RETRIES=5 - RETRY_DELAY=60 # seconds -fi +# Use RELAYER_INTERVAL from environment, default to 720 if not set +INTERVAL_MINUTES=${RELAYER_INTERVAL:-720} +MAX_RETRIES=3 +RETRY_DELAY=10 # seconds run_with_retry() { attempt=1