diff --git a/Cargo.lock b/Cargo.lock
index ea6922bcba..33be2839c6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10582,6 +10582,7 @@ version = "0.0.0"
 dependencies = [
  "async-trait",
  "mockall",
+ "papyrus_base_layer",
  "papyrus_proc_macros",
  "serde",
  "starknet_api",
diff --git a/crates/papyrus_base_layer/src/ethereum_base_layer_contract.rs b/crates/papyrus_base_layer/src/ethereum_base_layer_contract.rs
index 20611f3306..bdbb490416 100644
--- a/crates/papyrus_base_layer/src/ethereum_base_layer_contract.rs
+++ b/crates/papyrus_base_layer/src/ethereum_base_layer_contract.rs
@@ -1,12 +1,13 @@
 use std::collections::BTreeMap;
 use std::future::IntoFuture;
+use std::ops::RangeInclusive;
 
 use alloy_dyn_abi::SolType;
 use alloy_json_rpc::RpcError;
 use alloy_primitives::Address as EthereumContractAddress;
 use alloy_provider::network::Ethereum;
 use alloy_provider::{Provider, ProviderBuilder, RootProvider};
-use alloy_rpc_types_eth::{BlockNumberOrTag, Filter as EthEventFilter};
+use alloy_rpc_types_eth::Filter as EthEventFilter;
 use alloy_sol_types::{sol, sol_data};
 use alloy_transport::TransportErrorKind;
 use alloy_transport_http::{Client, Http};
@@ -83,14 +84,10 @@ impl BaseLayerContract for EthereumBaseLayerContract {
 
     async fn events(
         &self,
-        from_block: u64,
-        until_block: u64,
+        block_range: RangeInclusive<u64>,
         events: &[&str],
     ) -> EthereumBaseLayerResult<Vec<L1Event>> {
-        let filter = EthEventFilter::new()
-            .from_block(BlockNumberOrTag::Number(from_block))
-            .events(events)
-            .to_block(until_block);
+        let filter = EthEventFilter::new().select(block_range).events(events);
 
         let matching_logs = self.contract.provider().get_logs(&filter).await?;
         matching_logs.into_iter().map(TryInto::try_into).collect()
diff --git a/crates/papyrus_base_layer/src/lib.rs b/crates/papyrus_base_layer/src/lib.rs
index 5b562a6dc9..69c67cb749 100644
--- a/crates/papyrus_base_layer/src/lib.rs
+++ b/crates/papyrus_base_layer/src/lib.rs
@@ -1,5 +1,6 @@
 use std::error::Error;
 use std::fmt::{Debug, Display};
+use std::ops::RangeInclusive;
 
 use async_trait::async_trait;
 use serde::{Deserialize, Serialize};
@@ -34,8 +35,7 @@ pub trait BaseLayerContract {
     /// Get specific events from the Starknet base contract between two L1 block numbers.
     async fn events(
         &self,
-        from_block: u64,
-        until_block: u64,
+        block_range: RangeInclusive<u64>,
         event_identifiers: &[&str],
     ) -> Result<Vec<L1Event>, Self::Error>;
 
diff --git a/crates/starknet_api/src/executable_transaction.rs b/crates/starknet_api/src/executable_transaction.rs
index 9d862413a7..d851a9ef83 100644
--- a/crates/starknet_api/src/executable_transaction.rs
+++ b/crates/starknet_api/src/executable_transaction.rs
@@ -326,6 +326,15 @@ pub struct L1HandlerTransaction {
 }
 
 impl L1HandlerTransaction {
+    pub fn create(
+        raw_tx: crate::transaction::L1HandlerTransaction,
+        chain_id: &ChainId,
+        paid_fee_on_l1: Fee,
+    ) -> Result<Self, StarknetApiError> {
+        let tx_hash = raw_tx.calculate_transaction_hash(chain_id, &raw_tx.version)?;
+        Ok(Self { tx: raw_tx, tx_hash, paid_fee_on_l1 })
+    }
+
     pub fn payload_size(&self) -> usize {
         // The calldata includes the "from" field, which is not a part of the payload.
         self.tx.calldata.0.len() - 1
diff --git a/crates/starknet_l1_provider/src/l1_scraper.rs b/crates/starknet_l1_provider/src/l1_scraper.rs
index 6e406cd472..228fa4c004 100644
--- a/crates/starknet_l1_provider/src/l1_scraper.rs
+++ b/crates/starknet_l1_provider/src/l1_scraper.rs
@@ -1,12 +1,14 @@
 use std::time::Duration;
 
 use papyrus_base_layer::constants::EventIdentifier;
-use papyrus_base_layer::BaseLayerContract;
+use papyrus_base_layer::{BaseLayerContract, L1Event};
 use papyrus_config::converters::deserialize_seconds_to_duration;
 use papyrus_config::validators::validate_ascii;
 use serde::{Deserialize, Serialize};
 use starknet_api::core::ChainId;
-use starknet_l1_provider_types::SharedL1ProviderClient;
+use starknet_api::executable_transaction::L1HandlerTransaction as ExecutableL1HandlerTransaction;
+use starknet_api::StarknetApiError;
+use starknet_l1_provider_types::{Event, SharedL1ProviderClient};
 use thiserror::Error;
 use tokio::time::sleep;
 use tracing::error;
@@ -19,7 +21,7 @@ pub struct L1Scraper<B: BaseLayerContract> {
     pub base_layer: B,
     pub last_block_number_processed: u64,
     pub l1_provider_client: SharedL1ProviderClient,
-    _tracked_event_identifiers: Vec<EventIdentifier>,
+    tracked_event_identifiers: Vec<EventIdentifier>,
 }
 
 impl<B: BaseLayerContract> L1Scraper<B> {
@@ -34,12 +36,38 @@ impl<B: BaseLayerContract> L1Scraper<B> {
             base_layer,
             last_block_number_processed: config.l1_block_to_start_scraping_from,
             config,
-            _tracked_event_identifiers: events_identifiers_to_track.to_vec(),
+            tracked_event_identifiers: events_identifiers_to_track.to_vec(),
         }
     }
 
     pub async fn fetch_events(&mut self) -> L1ScraperResult<(), B> {
-        todo!()
+        let latest_l1_block_number = self
+            .base_layer
+            .latest_l1_block_number(self.config.finality)
+            .await
+            .map_err(L1ScraperError::BaseLayer)?;
+
+        let Some(latest_l1_block_number) = latest_l1_block_number else {
+            error!("Failed to get latest L1 block number, finality too high.");
+            return Ok(());
+        };
+
+        let scraping_result = self
+            .base_layer
+            .events(
+                self.last_block_number_processed..=latest_l1_block_number,
+                &self.tracked_event_identifiers,
+            )
+            .await;
+
+        let events = scraping_result.map_err(L1ScraperError::BaseLayer)?;
+        let events = events
+            .into_iter()
+            .map(|event| self.event_from_raw_l1_event(event))
+            .collect::<Result<Vec<_>, _>>()?;
+
+        self.last_block_number_processed = latest_l1_block_number + 1;
+        todo!("send {events:?} to provider");
     }
 
     async fn _run(&mut self) -> L1ScraperResult<(), B> {
@@ -49,6 +77,21 @@ impl<B: BaseLayerContract> L1Scraper<B> {
             self.fetch_events().await?;
         }
     }
+
+    fn event_from_raw_l1_event(&self, l1_event: L1Event) -> L1ScraperResult<Event, B> {
+        match l1_event {
+            L1Event::LogMessageToL2 { tx, fee } => {
+                let chain_id = &self.config.chain_id;
+                match ExecutableL1HandlerTransaction::create(tx, chain_id, fee) {
+                    Ok(tx) => Ok(Event::L1HandlerTransaction(tx)),
+                    Err(hash_calc_err) => Err(L1ScraperError::HashCalculationError(hash_calc_err)),
+                }
+            }
+            L1Event::MessageToL2CancellationStarted(_message_data) => todo!(),
+            L1Event::MessageToL2Canceled(_message_data) => todo!(),
+            L1Event::ConsumedMessageToL2(_message_data) => todo!(),
+        }
+    }
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, Validate, PartialEq)]
@@ -76,4 +119,6 @@ impl Default for L1ScraperConfig {
 pub enum L1ScraperError<T: BaseLayerContract> {
     #[error("Base layer error: {0}")]
     BaseLayer(T::Error),
+    #[error("Failed to calculate hash: {0}")]
+    HashCalculationError(StarknetApiError),
 }
diff --git a/crates/starknet_l1_provider_types/Cargo.toml b/crates/starknet_l1_provider_types/Cargo.toml
index 726fd9eb20..9412b31cce 100644
--- a/crates/starknet_l1_provider_types/Cargo.toml
+++ b/crates/starknet_l1_provider_types/Cargo.toml
@@ -11,6 +11,7 @@ testing = ["mockall"]
 [dependencies]
 async-trait.workspace = true
 mockall = { workspace = true, optional = true }
+papyrus_base_layer.workspace = true
 papyrus_proc_macros.workspace = true
 serde.workspace = true
 starknet_api.workspace = true
diff --git a/crates/starknet_l1_provider_types/src/lib.rs b/crates/starknet_l1_provider_types/src/lib.rs
index 3e8ebfcb92..541fab1c91 100644
--- a/crates/starknet_l1_provider_types/src/lib.rs
+++ b/crates/starknet_l1_provider_types/src/lib.rs
@@ -5,6 +5,7 @@ use std::sync::Arc;
 use async_trait::async_trait;
 #[cfg(any(feature = "testing", test))]
 use mockall::automock;
+use papyrus_base_layer::L1Event;
 use papyrus_proc_macros::handle_response_variants;
 use serde::{Deserialize, Serialize};
 use starknet_api::executable_transaction::L1HandlerTransaction;
@@ -69,3 +70,11 @@ where
         todo!();
     }
 }
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum Event {
+    L1HandlerTransaction(L1HandlerTransaction),
+    TransactionConsumed(L1Event),
+    TransactionCancellationStarted(L1Event),
+    TransactionCanceled(L1Event),
+}
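
Reviewer sketch (not part of the diff): a minimal example of how a call site adapts to the new inclusive-range `events` signature on `BaseLayerContract`. The `scrape_range` helper, the block-number arguments, and the "LogMessageToL2" identifier string are illustrative placeholders, not names taken from this change; real callers would pass identifiers from `papyrus_base_layer::constants`.

use papyrus_base_layer::{BaseLayerContract, L1Event};

// Hypothetical caller, generic over any base-layer implementation.
async fn scrape_range<B: BaseLayerContract>(
    base_layer: &B,
    last_processed: u64,
    latest: u64,
) -> Result<Vec<L1Event>, B::Error> {
    // Before this change: base_layer.events(last_processed, latest, &["LogMessageToL2"]).await
    // After: a single RangeInclusive<u64> expresses the same inclusive bounds.
    base_layer.events(last_processed..=latest, &["LogMessageToL2"]).await
}

Folding both bounds into one `RangeInclusive<u64>` also lets the Ethereum implementation build its log filter with `EthEventFilter::new().select(block_range)` instead of separate `from_block`/`to_block` calls, as the `ethereum_base_layer_contract.rs` hunk shows.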