Merge branch 'main' into full-sync
mpaulucci authored Nov 28, 2024
2 parents 576ee33 + 3741772 commit a0cc0a1
Showing 44 changed files with 1,477 additions and 261 deletions.
2 changes: 1 addition & 1 deletion .github/scripts/publish.sh
@@ -7,7 +7,7 @@ $(jq -n --arg text "$(cat results.md)" '{
"type": "header",
"text": {
"type": "plain_text",
"text": "Daily report"
"text": "Daily Hive Coverage report"
}
},
{
40 changes: 0 additions & 40 deletions .github/workflows/asertoor.yaml

This file was deleted.

39 changes: 0 additions & 39 deletions .github/workflows/docker_build.yaml

This file was deleted.

2 changes: 1 addition & 1 deletion .github/workflows/docker_publish.yaml
@@ -51,6 +51,6 @@ jobs:
- name: Generate artifact attestation
uses: actions/attest-build-provenance@v1
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
54 changes: 43 additions & 11 deletions .github/workflows/hive_coverage.yaml
@@ -10,9 +10,22 @@ env:
RUST_VERSION: 1.80.1

jobs:
hive-coverage:
name: Daily Hive Coverage
run-hive:
name: Run engine hive simulator to gather coverage information.
runs-on: ubuntu-latest
strategy:
matrix:
include:
- simulation: rpc-compat
name: "Rpc Compat tests"
run_command: make run-hive-on-latest SIMULATION=ethereum/rpc-compat HIVE_EXTRA_ARGS="--sim.parallelism 4"
- simulation: devp2p
name: "Devp2p eth tests"
run_command: make run-hive-on-latest SIMULATION=devp2p HIVE_EXTRA_ARGS="--sim.parallelism 4"
- simulation: engine
name: "Cancun Engine tests"
run_command: make run-hive-on-latest SIMULATION=ethereum/engine HIVE_EXTRA_ARGS="--sim.parallelism 4"

steps:
- name: Pull image
run: |
@@ -22,25 +35,44 @@ jobs:
- name: Checkout sources
uses: actions/checkout@v3

- name: Rustup toolchain install
uses: dtolnay/rust-toolchain@stable
with:
toolchain: ${{ env.RUST_VERSION }}

- name: Setup Go
uses: actions/setup-go@v3

- name: Run Hive Simulation
run: make run-hive-on-latest SIMULATION=ethereum/engine
run: ${{ matrix.run_command }}
continue-on-error: true

- name: Upload results
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.simulation }}_logs
path: hive/workspace/logs/*-*.json

hive-report:
name: Generate report and upload to summary and slack
needs: run-hive
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3

- name: Rustup toolchain install
uses: dtolnay/rust-toolchain@stable
with:
toolchain: ${{ env.RUST_VERSION }}

- name: Download all results
uses: actions/download-artifact@v4
with:
path: hive/workspace/logs
pattern: "*_logs"
merge-multiple: true

- name: Caching
uses: Swatinem/rust-cache@v2

- name: Generate the hive report
id: report
run: |
cargo run -p hive_report > results.md
run: cargo run -p hive_report > results.md

- name: Post results in summary
run: |
65 changes: 57 additions & 8 deletions .github/workflows/hive.yaml → .github/workflows/integration.yaml
@@ -1,8 +1,10 @@
name: Hive
name: Integration Test
on:
workflow_run:
workflows: [Docker build]
types: [completed]
push:
branches: ["main"]
merge_group:
pull_request:
branches: ["**"]

concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@@ -12,10 +14,59 @@ env:
RUST_VERSION: 1.80.1

jobs:
docker_build:
name: Docker Build image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Build Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
load: true
tags: ethrex
outputs: type=docker,dest=/tmp/ethrex_image.tar

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: ethrex_image
path: /tmp/ethrex_image.tar

run-assertoor:
name: Assertoor - Stability Check
runs-on: ubuntu-latest
needs: [docker_build]
steps:
- uses: actions/checkout@v4

- name: Download artifacts
uses: actions/download-artifact@v4
with:
name: ethrex_image
path: /tmp

- name: Load image
run: |
docker load --input /tmp/ethrex_image.tar
- name: Setup kurtosis testnet and run assertoor tests
uses: ethpandaops/kurtosis-assertoor-github-action@v1
with:
kurtosis_version: "1.4.2"
ethereum_package_url: "github.com/lambdaclass/ethereum-package"
ethereum_package_branch: "ethrex-integration"
ethereum_package_args: "./test_data/network_params.yaml"

run-hive:
name: ${{ matrix.name }}
name: Hive - ${{ matrix.name }}
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'success' }}
needs: [docker_build]
strategy:
matrix:
include:
@@ -46,8 +97,6 @@ jobs:
with:
name: ethrex_image
path: /tmp
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}

- name: Load image
run: |
62 changes: 60 additions & 2 deletions cmd/ef_tests/levm/deserialize.rs
@@ -1,6 +1,6 @@
use crate::types::{EFTest, EFTests};
use crate::types::{EFTest, EFTestAccessListItem, EFTests};
use bytes::Bytes;
use ethrex_core::U256;
use ethrex_core::{H256, U256};
use serde::Deserialize;
use std::{collections::HashMap, str::FromStr};

@@ -65,6 +65,50 @@
)
}

pub fn deserialize_h256_vec_optional_safe<'de, D>(
deserializer: D,
) -> Result<Option<Vec<H256>>, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = Option::<Vec<String>>::deserialize(deserializer)?;
match s {
Some(s) => {
let mut ret = Vec::new();
for s in s {
ret.push(H256::from_str(s.trim_start_matches("0x")).map_err(|err| {
serde::de::Error::custom(format!(
"error parsing H256 when deserializing H256 vec optional: {err}"
))
})?);
}
Ok(Some(ret))
}
None => Ok(None),
}
}

pub fn deserialize_access_lists<'de, D>(
deserializer: D,
) -> Result<Option<Vec<Vec<EFTestAccessListItem>>>, D::Error>
where
D: serde::Deserializer<'de>,
{
let access_lists: Option<Vec<Option<Vec<EFTestAccessListItem>>>> =
Option::<Vec<Option<Vec<EFTestAccessListItem>>>>::deserialize(deserializer)?;

let mut final_access_lists: Vec<Vec<EFTestAccessListItem>> = Vec::new();

if let Some(access_lists) = access_lists {
for access_list in access_lists {
// Treat `null` as an empty vector
final_access_lists.push(access_list.unwrap_or_default());
}
}

Ok(Some(final_access_lists))
}

pub fn deserialize_u256_optional_safe<'de, D>(deserializer: D) -> Result<Option<U256>, D::Error>
where
D: serde::Deserializer<'de>,
@@ -164,6 +208,20 @@ impl<'de> Deserialize<'de> for EFTests {
sender: raw_tx.sender,
to: raw_tx.to.clone(),
value: *value,
blob_versioned_hashes: raw_tx
.blob_versioned_hashes
.clone()
.unwrap_or_default(),
max_fee_per_blob_gas: raw_tx.max_fee_per_blob_gas,
max_priority_fee_per_gas: raw_tx.max_priority_fee_per_gas,
max_fee_per_gas: raw_tx.max_fee_per_gas,
access_list: raw_tx
.access_lists
.clone()
.unwrap_or_default()
.get(data_id)
.cloned()
.unwrap_or_default(),
};
transactions.insert((data_id, gas_limit_id, value_id), tx);
}
2 changes: 1 addition & 1 deletion cmd/ef_tests/levm/runner/levm_runner.rs
@@ -78,7 +78,7 @@ pub fn prepare_vm_for_tx(vector: &TestVector, test: &EFTest) -> Result<VM, EFTes
origin: test.transactions.get(vector).unwrap().sender,
consumed_gas: U256::default(),
refunded_gas: U256::default(),
gas_limit: test.env.current_gas_limit,
gas_limit: test.env.current_gas_limit, //this should be tx gas limit
block_number: test.env.current_number,
coinbase: test.env.current_coinbase,
timestamp: test.env.current_timestamp,
39 changes: 39 additions & 0 deletions cmd/ef_tests/levm/runner/mod.rs
@@ -88,6 +88,45 @@ fn run_with_levm(
Ok(())
}

/// ### Runs all tests with REVM
/// **Note:** This is not used in the current implementation because we only run with REVM the tests that failed with LEVM so that execution time is minimized.
fn _run_with_revm(
reports: &mut Vec<EFTestReport>,
ef_tests: &[EFTest],
) -> Result<(), EFTestRunnerError> {
let revm_run_time = std::time::Instant::now();
let mut revm_run_spinner = Spinner::new(
Dots,
"Running all tests with REVM...".to_owned(),
Color::Cyan,
);
for (idx, test) in ef_tests.iter().enumerate() {
let total_tests = ef_tests.len();
revm_run_spinner.update_text(format!(
"{} {}/{total_tests} - {}",
"Running all tests with REVM".bold(),
idx + 1,
format_duration_as_mm_ss(revm_run_time.elapsed())
));
let ef_test_report = match revm_runner::_run_ef_test_revm(test) {
Ok(ef_test_report) => ef_test_report,
Err(EFTestRunnerError::Internal(err)) => return Err(EFTestRunnerError::Internal(err)),
non_internal_errors => {
return Err(EFTestRunnerError::Internal(InternalError::FirstRunInternal(format!(
"Non-internal error raised when executing revm. This should not happen: {non_internal_errors:?}",
))))
}
};
reports.push(ef_test_report);
revm_run_spinner.update_text(report::progress(reports, revm_run_time.elapsed()));
}
revm_run_spinner.success(&format!(
"Ran all tests with REVM in {}",
format_duration_as_mm_ss(revm_run_time.elapsed())
));
Ok(())
}

fn re_run_with_revm(
reports: &mut [EFTestReport],
ef_tests: &[EFTest],
