Skip to content

Commit

Permalink
change implementation and add CI step to execute arbitrator prover us…
Browse files Browse the repository at this point in the history
…ing block input json
  • Loading branch information
ganeshvanahalli committed Oct 10, 2024
1 parent c4bb3e0 commit be58669
Show file tree
Hide file tree
Showing 7 changed files with 26 additions and 200 deletions.
12 changes: 12 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,18 @@ jobs:
run: |
echo "Running redis tests" >> full.log
TEST_REDIS=redis://localhost:6379/0 gotestsum --format short-verbose -- -p 1 -run TestRedis ./arbnode/... ./system_tests/... -coverprofile=coverage-redis.txt -covermode=atomic -coverpkg=./...
- name: create block input json file
if: matrix.test-mode == 'defaults'
run: |
echo "BLOCK_INPUT_JSON_PATH=$(pwd)/target/block_input.json" >> "$GITHUB_ENV"
${{ github.workspace }}/.github/workflows/gotestsum.sh --run TestProgramStorage$ --count 1
- name: run arbitrator prover on block input json
if: matrix.test-mode == 'defaults'
run: |
make build-prover-bin
target/bin/prover target/machines/latest/machine.wavm.br -b --json-inputs=$BLOCK_INPUT_JSON_PATH
- name: run challenge tests
if: matrix.test-mode == 'challenge'
Expand Down
36 changes: 0 additions & 36 deletions arbitrator/prover/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,12 +33,9 @@ use machine::{
PreimageResolver,
};
use once_cell::sync::OnceCell;
use parse_input::FileData;
use static_assertions::const_assert_eq;
use std::{
ffi::CStr,
fs::File,
io::BufReader,
num::NonZeroUsize,
os::raw::{c_char, c_int},
path::Path,
Expand Down Expand Up @@ -87,39 +84,6 @@ pub unsafe extern "C" fn arbitrator_load_machine(
}
}

/// FFI entry point that round-trips a `FileData` JSON file: reads and
/// deserializes the file at `read_path`, then re-serializes it to
/// `write_path`.
///
/// Status codes: 0 on success, 1 if the input file cannot be opened,
/// 2 if deserialization fails, 3 if serialization/writing fails.
#[no_mangle]
pub unsafe extern "C" fn arbitrator_deserialize_and_serialize_file_data(
    read_path: *const c_char,
    write_path: *const c_char,
) -> c_int {
    let read_path = cstr_to_string(read_path);
    let write_path = cstr_to_string(write_path);

    // Open the input file, reporting failure via stderr + status code 1.
    let reader = match File::open(read_path) {
        Ok(file) => BufReader::new(file),
        Err(err) => {
            eprintln!("Failed to open read_path of FileData: {}", err);
            return 1;
        }
    };

    match FileData::from_reader(reader) {
        // Deserialization succeeded: attempt to write it back out.
        Ok(data) => match data.write_to_file(&write_path) {
            Ok(()) => 0,
            Err(err) => {
                eprintln!("Failed to serialize FileData: {}", err);
                3
            }
        },
        Err(err) => {
            eprintln!("Failed to deserialize FileData: {}", err);
            2
        }
    }
}

unsafe fn arbitrator_load_machine_impl(
binary_path: *const c_char,
library_paths: *const *const c_char,
Expand Down
27 changes: 5 additions & 22 deletions arbitrator/prover/src/parse_input.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
use arbutil::Bytes32;
use serde::Deserialize;
use serde::Serialize;
use serde_json;
use serde_with::base64::Base64;
use serde_with::As;
use serde_with::DisplayFromStr;
use std::{
collections::HashMap,
fs::File,
io::{self, BufRead, BufWriter},
io::{self, BufRead},
};

/// prefixed_hex deserializes hex strings which are prefixed with `0x`
Expand All @@ -18,7 +16,7 @@ use std::{
/// It is an error to use this deserializer on a string that does not
/// begin with `0x`.
mod prefixed_hex {
use serde::{self, Deserialize, Deserializer, Serializer};
use serde::{self, Deserialize, Deserializer};

pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
where
Expand All @@ -31,14 +29,6 @@ mod prefixed_hex {
Err(serde::de::Error::custom("missing 0x prefix"))
}
}

/// Serializes `bytes` as a `0x`-prefixed hex string, the inverse of the
/// `deserialize` function in this module.
///
/// Takes `&[u8]` rather than `&Vec<u8>` (clippy: `ptr_arg`); serde's
/// generated call site passes `&Vec<u8>`, which deref-coerces to `&[u8]`,
/// so existing `#[serde(with = "prefixed_hex")]` fields keep working.
pub fn serialize<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let hex_string = format!("0x{}", hex::encode(bytes));
    serializer.serialize_str(&hex_string)
}
}

#[derive(Debug)]
Expand Down Expand Up @@ -71,15 +61,15 @@ impl From<Vec<u8>> for UserWasm {
}
}

#[derive(Debug, Clone, Deserialize, Serialize)]
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct BatchInfo {
pub number: u64,
#[serde(with = "As::<Base64>")]
pub data_b64: Vec<u8>,
}

#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct StartState {
#[serde(with = "prefixed_hex")]
Expand All @@ -98,7 +88,7 @@ pub struct StartState {
///
/// Note: It is important to change this file whenever the go JSON
/// serialization changes.
#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct FileData {
pub id: u64,
Expand All @@ -119,11 +109,4 @@ impl FileData {
let data = serde_json::from_reader(&mut reader)?;
Ok(data)
}

/// Writes this `FileData` to `file_path` as pretty-printed JSON.
///
/// Flushes the buffered writer explicitly: if the writer were dropped
/// unflushed, `BufWriter`'s `Drop` would silently swallow any write
/// error, and the caller would see `Ok(())` for a truncated file.
pub fn write_to_file(&self, file_path: &str) -> io::Result<()> {
    use std::io::Write;

    let file = File::create(file_path)?;
    let mut writer = BufWriter::new(file);
    // Borrow the writer so we retain ownership and can flush afterwards.
    serde_json::to_writer_pretty(&mut writer, &self)?;
    writer.flush()?;
    Ok(())
}
}
12 changes: 5 additions & 7 deletions system_tests/common_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ import (
"github.com/offchainlabs/nitro/util/headerreader"
"github.com/offchainlabs/nitro/util/redisutil"
"github.com/offchainlabs/nitro/util/signature"
"github.com/offchainlabs/nitro/validator/inputs"
"github.com/offchainlabs/nitro/validator/server_api"
"github.com/offchainlabs/nitro/validator/server_common"
"github.com/offchainlabs/nitro/validator/valnode"
Expand Down Expand Up @@ -1719,7 +1718,7 @@ func logParser[T any](t *testing.T, source string, name string) func(*types.Log)
// recordBlock writes a json file with all of the data needed to validate a block.
//
// This can be used as an input to the arbitrator prover to validate a block.
func recordBlock(t *testing.T, block uint64, builder *NodeBuilder) {
func recordBlock(t *testing.T, block uint64, builder *NodeBuilder, blockInputJSONPath string) {
t.Helper()
ctx := builder.ctx
inboxPos := arbutil.MessageIndex(block)
Expand All @@ -1733,15 +1732,14 @@ func recordBlock(t *testing.T, block uint64, builder *NodeBuilder) {
break
}
}
validationInputsWriter, err := inputs.NewWriter(inputs.WithSlug(t.Name()))
Require(t, err)
inputJson, err := builder.L2.ConsensusNode.StatelessBlockValidator.ValidationInputsAt(ctx, inboxPos, rawdb.TargetWavm)
if err != nil {
Fatal(t, "failed to get validation inputs", block, err)
}
if err := validationInputsWriter.Write(&inputJson); err != nil {
Fatal(t, "failed to write validation inputs", block, err)
}
contents, err := json.Marshal(inputJson)
Require(t, err)
err = os.WriteFile(blockInputJSONPath, contents, 0600)
Require(t, err)
}

func populateMachineDir(t *testing.T, cr *github.ConsensusRelease) string {
Expand Down
5 changes: 4 additions & 1 deletion system_tests/program_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,10 @@ func storageTest(t *testing.T, jit bool) {

// Captures a block_input_<id>.json file for the block that included the
// storage write transaction.
recordBlock(t, receipt.BlockNumber.Uint64(), builder)
blockInputJSONPath := os.Getenv("BLOCK_INPUT_JSON_PATH")
if blockInputJSONPath != "" {
recordBlock(t, receipt.BlockNumber.Uint64(), builder, blockInputJSONPath)
}
}

func TestProgramTransientStorage(t *testing.T) {
Expand Down
120 changes: 0 additions & 120 deletions system_tests/validationinputjson_rustfiledata_test.go

This file was deleted.

14 changes: 0 additions & 14 deletions validator/server_arb/machine.go
Original file line number Diff line number Diff line change
Expand Up @@ -312,20 +312,6 @@ func (m *ArbitratorMachine) DeserializeAndReplaceState(path string) error {
}
}

// DeserializeAndSerializeFileData round-trips a FileData JSON file through
// the arbitrator's Rust implementation: it deserializes the file at readPath
// and re-serializes it to writePath. A non-zero status from the FFI call is
// surfaced as an error.
func DeserializeAndSerializeFileData(readPath, writePath string) error {
	// Free each C string via defer immediately after allocation, so the
	// memory is released on every return path.
	cReadPath := C.CString(readPath)
	defer C.free(unsafe.Pointer(cReadPath))
	cWritePath := C.CString(writePath)
	defer C.free(unsafe.Pointer(cWritePath))

	status := C.arbitrator_deserialize_and_serialize_file_data(cReadPath, cWritePath)
	if status != 0 {
		return fmt.Errorf("failed to call arbitrator_deserialize_and_serialize_file_data. Error code: %d", status)
	}
	return nil
}

func (m *ArbitratorMachine) AddSequencerInboxMessage(index uint64, data []byte) error {
defer runtime.KeepAlive(m)
m.mutex.Lock()
Expand Down

0 comments on commit be58669

Please sign in to comment.