From 7bbbcd6bf15bfb94a101d7d7c539d37d9d8b5a04 Mon Sep 17 00:00:00 2001 From: "[eureka@nixos]" <57543709+eureka-cpu@users.noreply.github.com> Date: Thu, 12 Dec 2024 07:07:17 -0800 Subject: [PATCH 1/4] feat: Add `estimate` subcommand (#93) Closes #78 Uses the risc0 emulation tools to step through the ELF and return the user cycles, total cycles and segment count. --- .github/docker/Dockerfile.build | 2 +- .github/workflows/integration-tests.yml | 34 +++++++++++ CHANGELOG.md | 3 + Cargo.lock | 47 ++++++++++++++- cli/Cargo.toml | 11 +++- cli/README.md | 17 ++++++ cli/src/command.rs | 34 +++++++++++ cli/src/estimate.rs | 74 ++++++++++++++++++++++++ cli/src/main.rs | 47 ++++++++++++++- cli/src/tests.rs | 32 ++++++++++ cli/src/tests/estimate.rs | 65 +++++++++++++++++++++ cli/src/tests/test_data/test_id.json | 1 + testing-examples/example-input-file.json | 12 ++++ 13 files changed, 374 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/integration-tests.yml create mode 100644 cli/src/estimate.rs create mode 100644 cli/src/tests.rs create mode 100644 cli/src/tests/estimate.rs create mode 100644 cli/src/tests/test_data/test_id.json create mode 100644 testing-examples/example-input-file.json diff --git a/.github/docker/Dockerfile.build b/.github/docker/Dockerfile.build index cc9f3d4..71f07fd 100644 --- a/.github/docker/Dockerfile.build +++ b/.github/docker/Dockerfile.build @@ -57,4 +57,4 @@ RUN /go/bin/yamlfmt -lint .github/workflows/*.yaml .github/workflows/*.yml .gith RUN cargo check RUN cargo +nightly fmt --all -- --check -RUN cargo test +RUN cargo test diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 0000000..b4416f9 --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,34 @@ +name: Integration Tests +on: + pull_request: + branches: + - '**' + +concurrency: + group: "integration-tests" + cancel-in-progress: true + +permissions: read-all + +jobs: + integration-tests: + 
name: Setup Toolchain and Test + runs-on: ubuntu-latest-m + permissions: + id-token: "write" + contents: "read" + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Nix With Bonsol Binary Cache + uses: DeterminateSystems/nix-installer-action@main + with: + extra-conf: | + extra-substituters = https://bonsol.cachix.org + extra-trusted-public-keys = bonsol.cachix.org-1:yz7vi1rCPW1BpqoszdJvf08HZxQ/5gPTPxft4NnT74A= + - name: Setup Toolchain, Build and Test + run: | + nix develop --command bash -c " + cargo build && + cargo test --features integration -- --nocapture + " diff --git a/CHANGELOG.md b/CHANGELOG.md index bc54d5c..b6848b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed * `bonsol` cli option requirements and error messages updated for added clarity +### Added +* `bonsol estimate` for estimating execution cost of bonsol programs. + ### Fixed * **Breaking**: `execute_v1` interface instruction now uses the new `InputRef` to improve CU usage. * Adds a callback struct to use the input_hash and committed_outputs from the callback program ergonomically. 
diff --git a/Cargo.lock b/Cargo.lock index 311008b..d8396db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -855,6 +855,22 @@ dependencies = [ "serde_json", ] +[[package]] +name = "assert_cmd" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d" +dependencies = [ + "anstyle", + "bstr", + "doc-comment", + "libc", + "predicates 3.1.2", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + [[package]] name = "assert_matches" version = "1.5.0" @@ -1189,12 +1205,14 @@ name = "bonsol-cli" version = "0.2.1" dependencies = [ "anyhow", + "assert_cmd", "atty", "bincode", "bonsol-interface", "bonsol-prover", "bonsol-sdk", "byte-unit", + "bytemuck", "bytes", "cargo_toml 0.20.5", "clap 4.5.20", @@ -1202,10 +1220,13 @@ dependencies = [ "indicatif", "num-traits", "object_store", + "predicates 3.1.2", "rand 0.8.5", "reqwest", "risc0-binfmt", + "risc0-circuit-rv32im", "risc0-zkvm", + "risc0-zkvm-platform", "serde", "serde_json", "sha2 0.10.8", @@ -1534,6 +1555,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" dependencies = [ "memchr", + "regex-automata 0.4.8", "serde", ] @@ -4034,7 +4056,7 @@ dependencies = [ "fragile", "lazy_static", "mockall_derive", - "predicates", + "predicates 2.1.5", "predicates-tree", ] @@ -4888,6 +4910,20 @@ dependencies = [ "regex", ] +[[package]] +name = "predicates" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" +dependencies = [ + "anstyle", + "difflib", + "float-cmp", + "normalize-line-endings", + "predicates-core", + "regex", +] + [[package]] name = "predicates-core" version = "1.0.8" @@ -8764,6 +8800,15 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 76f6823..c1bba3a 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -11,13 +11,16 @@ path = "src/main.rs" [features] mac = ["risc0-zkvm/metal"] linux = ["risc0-zkvm/cuda"] +integration = [] [dependencies] anyhow = "1.0.86" atty = "0.2.14" bincode = "1.3.3" +bonsol-interface.workspace = true bonsol-prover = { path = "../prover" } bonsol-sdk = { path = "../sdk" } +bytemuck = "1.15.0" hex = "0.4.3" byte-unit = "4.0.19" bytes = "1.4.0" @@ -34,7 +37,9 @@ reqwest = { version = "0.11.26", features = [ "native-tls-vendored", ] } risc0-binfmt = { workspace = true } -risc0-zkvm = { workspace = true, features = ["prove"] } +risc0-zkvm = { workspace = true, default-features = false, features = ["prove", "std"] } +risc0-zkvm-platform = { git = "https://github.com/anagrambuild/risc0", branch = "v1.0.1-bonsai-fix" } +risc0-circuit-rv32im = { git = "https://github.com/anagrambuild/risc0", branch = "v1.0.1-bonsai-fix" } serde = { version = "1.0.197", features = ["derive"] } serde_json = "1.0.104" sha2 = "0.10.6" @@ -46,4 +51,6 @@ tera = "1.17.1" thiserror = "1.0.65" tokio = { version = "1.38.0", features = ["full"] } -bonsol-interface.workspace = true +[dev-dependencies] +assert_cmd = "2.0.16" +predicates = "3.1.2" diff --git a/cli/README.md b/cli/README.md index 56b2206..36c93f1 100644 --- a/cli/README.md +++ b/cli/README.md @@ -58,3 +58,20 @@ todo ### Prove todo + +### Estimate + +You can estimate the number of cycles and segments using risc0 emulation to step through an ELF by passing the `estimate` command the path to a manifest.json and an inputs file (if required). 
+ +``` +bonsol -k ./keypair.json -u http://localhost:8899 estimate \ + --manifest-path program/manifest.json \ + --input-file program/inputs.json \ + --max-cycles 16777216 # this is the default + +# Example Output: +# +# User cycles: 3380 +# Total cycles: 65536 +# Segments: 1 +``` diff --git a/cli/src/command.rs b/cli/src/command.rs index 45ec694..b0efffc 100644 --- a/cli/src/command.rs +++ b/cli/src/command.rs @@ -227,6 +227,7 @@ pub enum Command { )] auto_confirm: bool, }, + #[command(about = "Build a ZK program")] Build { #[arg( help = "The path to a ZK program folder containing a Cargo.toml", @@ -235,6 +236,25 @@ pub enum Command { )] zk_program_path: String, }, + #[command(about = "Estimate the execution cost of a ZK RISC0 program")] + Estimate { + #[arg( + help = "The path to the program's manifest file (manifest.json)", + short = 'm', + long + )] + manifest_path: String, + + #[arg(help = "The path to the program input file", short = 'i', long)] + input_file: Option, + + #[arg( + help = "Set the maximum number of cycles [default: 16777216u64]", + short = 'c', + long + )] + max_cycles: Option, + }, Execute { #[arg(short = 'f', long)] execution_request_file: Option, @@ -296,6 +316,11 @@ pub enum ParsedCommand { Build { zk_program_path: String, }, + Estimate { + manifest_path: String, + input_file: Option, + max_cycles: Option, + }, Execute { execution_request_file: Option, @@ -351,6 +376,15 @@ impl TryFrom for ParsedCommand { ), }), Command::Build { zk_program_path } => Ok(ParsedCommand::Build { zk_program_path }), + Command::Estimate { + manifest_path, + input_file, + max_cycles, + } => Ok(ParsedCommand::Estimate { + manifest_path, + input_file, + max_cycles, + }), Command::Execute { execution_request_file, program_id, diff --git a/cli/src/estimate.rs b/cli/src/estimate.rs new file mode 100644 index 0000000..1c12595 --- /dev/null +++ b/cli/src/estimate.rs @@ -0,0 +1,74 @@ +//! Bare bones upper bound estimator that uses the rv32im +//! 
emulation utils for fast lookups in the opcode list +//! to extract the cycle count from an elf. + +use anyhow::Result; +use risc0_binfmt::{MemoryImage, Program}; +use risc0_zkvm::{ExecutorEnv, ExecutorImpl, Session, GUEST_MAX_MEM}; +use risc0_zkvm_platform::PAGE_SIZE; + +pub fn estimate(elf: E, env: ExecutorEnv) -> Result<()> { + let session = get_session(elf, env)?; + println!( + "User cycles: {}\nTotal cycles: {}\nSegments: {}", + session.user_cycles, + session.total_cycles, + session.segments.len() + ); + + Ok(()) +} + +/// Get the total number of cycles by stepping through the ELF using emulation +/// tools from the risc0_circuit_rv32im module. +pub fn get_session(elf: E, env: ExecutorEnv) -> Result { + Ok(ExecutorImpl::new(env, elf.mk_image()?)?.run()?) +} + +/// Helper trait for loading an image from an elf. +pub trait MkImage { + fn mk_image(self) -> Result; +} +impl<'a> MkImage for &'a [u8] { + fn mk_image(self) -> Result { + let program = Program::load_elf(self, GUEST_MAX_MEM as u32)?; + MemoryImage::new(&program, PAGE_SIZE as u32) + } +} + +#[cfg(test)] +mod estimate_tests { + use anyhow::Result; + use risc0_binfmt::MemoryImage; + use risc0_circuit_rv32im::prove::emu::{ + exec::DEFAULT_SEGMENT_LIMIT_PO2, + testutil::{basic as basic_test_program, DEFAULT_SESSION_LIMIT}, + }; + use risc0_zkvm::{ExecutorEnv, PAGE_SIZE}; + + use super::MkImage; + use crate::estimate; + + impl MkImage for MemoryImage { + fn mk_image(self) -> Result { + Ok(self) + } + } + + #[test] + fn estimate_basic() { + let program = basic_test_program(); + let mut env = &mut ExecutorEnv::builder(); + env = env + .segment_limit_po2(DEFAULT_SEGMENT_LIMIT_PO2 as u32) + .session_limit(DEFAULT_SESSION_LIMIT); + let image = MemoryImage::new(&program, PAGE_SIZE as u32) + .expect("failed to create image from basic program"); + let res = estimate::get_session(image, env.build().unwrap()); + + assert_eq!( + res.ok().and_then(|session| Some(session.total_cycles)), + Some(16384) + ); + } +} diff 
--git a/cli/src/main.rs b/cli/src/main.rs index f5819d3..11cdea1 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,22 +1,31 @@ +use std::fs; use std::io::{self, Read}; use std::path::Path; use atty::Stream; use bonsol_sdk::BonsolClient; use clap::Parser; +use common::{execute_get_inputs, ZkProgramManifest}; +use risc0_circuit_rv32im::prove::emu::exec::DEFAULT_SEGMENT_LIMIT_PO2; +use risc0_circuit_rv32im::prove::emu::testutil::DEFAULT_SESSION_LIMIT; +use risc0_zkvm::ExecutorEnv; use solana_sdk::signature::read_keypair_file; use solana_sdk::signer::Signer; use crate::command::{BonsolCli, ParsedBonsolCli, ParsedCommand}; use crate::common::{sol_check, try_load_from_config}; -use crate::error::BonsolCliError; +use crate::error::{BonsolCliError, ZkManifestError}; mod build; mod deploy; +mod estimate; mod execute; mod init; mod prove; +#[cfg(all(test, feature = "integration"))] +mod tests; + pub mod command; pub mod common; pub(crate) mod error; @@ -59,6 +68,42 @@ async fn main() -> anyhow::Result<()> { } deploy::deploy(rpc, keypair, deploy_args).await } + ParsedCommand::Estimate { + manifest_path, + input_file, + max_cycles, + } => { + let manifest_file = fs::File::open(Path::new(&manifest_path)).map_err(|err| { + BonsolCliError::ZkManifestError(ZkManifestError::FailedToOpen { + manifest_path: manifest_path.clone(), + err, + }) + })?; + let manifest: ZkProgramManifest = + serde_json::from_reader(manifest_file).map_err(|err| { + BonsolCliError::ZkManifestError(ZkManifestError::FailedDeserialization { + manifest_path, + err, + }) + })?; + let elf = fs::read(&manifest.binary_path).map_err(|err| { + BonsolCliError::ZkManifestError(ZkManifestError::FailedToLoadBinary { + binary_path: manifest.binary_path.clone(), + err, + }) + })?; + let mut env = &mut ExecutorEnv::builder(); + env = env + .segment_limit_po2(DEFAULT_SEGMENT_LIMIT_PO2 as u32) + .session_limit(max_cycles.or(DEFAULT_SESSION_LIMIT)); + + if input_file.is_some() { + let inputs = 
execute_get_inputs(input_file, None)?; + let inputs: Vec<&str> = inputs.iter().map(|i| i.data.as_str()).collect(); + env = env.write(&inputs.as_slice())?; + } + estimate::estimate(elf.as_slice(), env.build()?) + } ParsedCommand::Execute { execution_request_file, program_id, diff --git a/cli/src/tests.rs b/cli/src/tests.rs new file mode 100644 index 0000000..597d93a --- /dev/null +++ b/cli/src/tests.rs @@ -0,0 +1,32 @@ +use std::path::Path; + +use assert_cmd::Command; + +mod estimate; + +pub(crate) fn bonsol_cmd() -> Command { + let mut cmd = Command::cargo_bin("bonsol").unwrap(); + // the test directory must be the project root + cmd.current_dir(Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap()); + cmd +} + +pub(crate) fn bonsol_build() -> Command { + let mut cmd = bonsol_cmd(); + let keypair = cmd + .get_current_dir() + .unwrap() + .join("cli") + .join("src") + .join("tests") + .join("test_data") + .join("test_id.json"); + cmd.args(&[ + "--keypair", + keypair.to_str().unwrap(), + "--rpc-url", + "http://localhost:8899", + ]) + .arg("build"); + cmd +} diff --git a/cli/src/tests/estimate.rs b/cli/src/tests/estimate.rs new file mode 100644 index 0000000..9a782ae --- /dev/null +++ b/cli/src/tests/estimate.rs @@ -0,0 +1,65 @@ +use std::path::PathBuf; + +use assert_cmd::Command; + +use super::bonsol_build; +use crate::tests::bonsol_cmd; + +fn bonsol_estimate() -> Command { + let mut cmd = bonsol_cmd(); + let keypair = cmd + .get_current_dir() + .unwrap() + .join("cli") + .join("src") + .join("tests") + .join("test_data") + .join("test_id.json"); + cmd.args(&[ + "--keypair", + keypair.to_str().unwrap(), + "--rpc-url", + "http://localhost:8899", + ]) + .arg("estimate"); + cmd +} + +fn build_test_image(image_path: &PathBuf) { + let mut cmd = bonsol_build(); + cmd.args(&[ + "-z", + image_path + .to_str() + .expect("failed to convert image path to str"), + ]); + cmd.assert().success(); +} + +#[test] +fn estimate_simple() { + let mut bonsol_estimate = bonsol_estimate(); 
+ let image_path = bonsol_estimate + .get_current_dir() + .unwrap() + .join("images") + .join("simple"); + + build_test_image(&image_path); + let input_file = bonsol_estimate + .get_current_dir() + .unwrap() + .join("testing-examples") + .join("example-input-file.json"); + + bonsol_estimate.args(&[ + "--manifest-path", + image_path.join("manifest.json").to_str().unwrap(), + "--input-file", + input_file.to_str().unwrap(), + ]); + bonsol_estimate.assert().success().stdout( + predicates::str::is_match(r##"User cycles: 3380\nTotal cycles: 65536\nSegments: 1"##) + .unwrap(), + ); +} diff --git a/cli/src/tests/test_data/test_id.json b/cli/src/tests/test_data/test_id.json new file mode 100644 index 0000000..299600c --- /dev/null +++ b/cli/src/tests/test_data/test_id.json @@ -0,0 +1 @@ +[23,63,45,154,238,247,208,106,253,84,40,156,83,148,184,176,156,65,81,38,111,102,129,239,63,225,80,151,2,247,225,80,177,81,166,141,115,49,177,27,143,115,92,164,45,31,137,203,27,112,123,75,141,151,218,1,117,144,233,58,129,29,112,119] \ No newline at end of file diff --git a/testing-examples/example-input-file.json b/testing-examples/example-input-file.json new file mode 100644 index 0000000..16e2397 --- /dev/null +++ b/testing-examples/example-input-file.json @@ -0,0 +1,12 @@ +{ + "inputs": [ + { + "inputType": "PublicData", + "data": "{\"attestation\":\"test\"}" + }, + { + "inputType": "Private", + "data": "https://echoserver.dev/server?response=N4IgFgpghgJhBOBnEAuA2mkBjA9gOwBcJCBaAgTwAcIQAaEIgDwIHpKAbKASzxAF0+9AEY4Y5VKArVUDCMzogYUAlBlFEBEAF96G5QFdkKAEwAGU1qA" + } + ] +} From 4ade6c8bade915c5c98336e495dc058d5fdadff3 Mon Sep 17 00:00:00 2001 From: "[eureka@nixos]" <57543709+eureka-cpu@users.noreply.github.com> Date: Thu, 12 Dec 2024 07:07:43 -0800 Subject: [PATCH 2/4] fix: update-cachix.yml (#104) Closes #92 Fixes a syntax error in the cachix update action. 
--- .github/workflows/update-cachix.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/update-cachix.yml b/.github/workflows/update-cachix.yml index da522a7..d90049c 100644 --- a/.github/workflows/update-cachix.yml +++ b/.github/workflows/update-cachix.yml @@ -22,16 +22,22 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + - name: Install Nix uses: DeterminateSystems/nix-installer-action@main with: extra-conf: | extra-substituters = https://bonsol.cachix.org extra-trusted-public-keys = bonsol.cachix.org-1:yz7vi1rCPW1BpqoszdJvf08HZxQ/5gPTPxft4NnT74A= + - name: Install and configure Cachix uses: cachix/cachix-action@v15 with: name: bonsol authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + + - name: Run nix flake check run: nix flake check + + - name: Enter development shell run: nix develop From 5b3873b608b2c2292c865501e20b9bb6dd1bb16a Mon Sep 17 00:00:00 2001 From: kidneyweakx <35759909+kidneyweakx@users.noreply.github.com> Date: Tue, 17 Dec 2024 07:00:02 +0800 Subject: [PATCH 3/4] fix: Add optional S3 endpoint to upload arguments (#110) ### Description Add an optional `endpoint` field in the `S3UploadArgs` struct. Users can now specify a custom AWS S3-compatible endpoint, enhancing the deployment function's flexibility for working with different S3-compatible services. The changes include: 1. Modifying the `S3UploadArgs` struct to include an `endpoint` field. 2. Updating the deployment logic to use the provided `endpoint` if specified. 3. Defaulting to the standard S3 URL format when the `endpoint` field is not provided. 4. Updating the `AmazonS3Builder` configuration to incorporate the custom endpoint. 
### Related Issues - Resolves #108 ### References - https://docs.rs/object_store/latest/object_store/aws/struct.AmazonS3Builder.html#method.with_endpoint --- cli/src/command.rs | 8 ++++++++ cli/src/deploy.rs | 12 +++++++++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/cli/src/command.rs b/cli/src/command.rs index b0efffc..7ff85a2 100644 --- a/cli/src/command.rs +++ b/cli/src/command.rs @@ -100,6 +100,14 @@ pub struct S3UploadArgs { env = "AWS_REGION" )] pub region: String, + + #[arg( + help = "Specify the AWS S3 compatibility endpoint", + long, + required = false, + env = "AWS_S3_ENDPOINT" + )] + pub endpoint: Option, } #[derive(Debug, Clone, Args)] diff --git a/cli/src/deploy.rs b/cli/src/deploy.rs index e2dc7db..7d0981b 100644 --- a/cli/src/deploy.rs +++ b/cli/src/deploy.rs @@ -53,14 +53,22 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - access_key, secret_key, region, + endpoint, .. } = s3_upload; + let dest = + object_store::path::Path::from(format!("{}-{}", manifest.name, manifest.image_id)); + + let url = endpoint.unwrap_or( + format!("https://{}.s3.{}.amazonaws.com/{}", bucket, region, dest)); + let s3_client = AmazonS3Builder::new() .with_bucket_name(&bucket) .with_region(®ion) .with_access_key_id(&access_key) .with_secret_access_key(&secret_key) + .with_endpoint(&url) .build() .map_err(|err| { BonsolCliError::S3ClientError(S3ClientError::FailedToBuildClient { @@ -78,9 +86,6 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - }) })?; - let dest = - object_store::path::Path::from(format!("{}-{}", manifest.name, manifest.image_id)); - let url = format!("https://{}.s3.{}.amazonaws.com/{}", bucket, region, dest); // get the file to see if it exists if s3_client.head(&dest).await.is_ok() { bar.set_message("File already exists, skipping upload"); @@ -94,6 +99,7 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - } bar.finish_and_clear(); + 
println!("Uploaded to S3 url {}", url); url } DeployDestination::ShadowDrive(shadow_drive_upload) => { From 6a4cfef585d53ad54b92848ace379354b6f7bf83 Mon Sep 17 00:00:00 2001 From: "[eureka@nixos]" <57543709+eureka-cpu@users.noreply.github.com> Date: Wed, 18 Dec 2024 06:31:55 -0800 Subject: [PATCH 4/4] refactor: deploy subcommands (#111) Ref #35 #73 Refactors the `bonsol deploy` command, adding better help messages and clearer usage. ``` $ bonsol deploy --help Deploy a program with various storage options, such as S3, ShadowDrive, or manually with a URL Usage: bonsol deploy Commands: s3 Deploy a program using an AWS S3 bucket shadow-drive Deploy a program using ShadowDrive url Deploy a program manually with a URL help Print this message or the help of the given subcommand(s) Options: -h, --help Print help -- $ bonsol deploy s3 --help Deploy a program using an AWS S3 bucket Usage: bonsol deploy s3 [OPTIONS] --bucket --access-key --secret-key --region --manifest-path Options: --bucket Specify the S3 bucket name --access-key Specify the AWS access key ID [env: AWS_ACCESS_KEY_ID=] --secret-key Specify the AWS secret access key [env: AWS_SECRET_ACCESS_KEY=] --region Specify the AWS region [env: AWS_REGION=] --endpoint Specify the AWS S3 compatibility endpoint [env: AWS_S3_ENDPOINT=] -m, --manifest-path The path to the program's manifest file (manifest.json) -y, --auto-confirm Whether to automatically confirm deployment -h, --help Print help ``` --- .github/workflows/integration-tests.yml | 3 - .github/workflows/update-cachix.yml | 5 - CHANGELOG.md | 1 + charts/templates/deployment-tester.yaml | 2 +- cli/README.md | 4 +- cli/src/command.rs | 258 +++++------------------- cli/src/common.rs | 20 ++ cli/src/deploy.rs | 23 ++- cli/src/error.rs | 2 +- cli/src/main.rs | 86 ++++---- cli/src/tests/estimate.rs | 16 +- docs/docs/contributing/contributing.mdx | 2 +- docs/docs/shared/deploy.mdx | 9 +- 13 files changed, 134 insertions(+), 297 deletions(-) diff --git 
a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index b4416f9..55b7d77 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -3,13 +3,10 @@ on: pull_request: branches: - '**' - concurrency: group: "integration-tests" cancel-in-progress: true - permissions: read-all - jobs: integration-tests: name: Setup Toolchain and Test diff --git a/.github/workflows/update-cachix.yml b/.github/workflows/update-cachix.yml index d90049c..47f5111 100644 --- a/.github/workflows/update-cachix.yml +++ b/.github/workflows/update-cachix.yml @@ -2,7 +2,6 @@ # # substituter: https://bonsol.cachix.org # public-key: bonsol.cachix.org-1:yz7vi1rCPW1BpqoszdJvf08HZxQ/5gPTPxft4NnT74A= - name: Update Cachix on: push: @@ -22,22 +21,18 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 - - name: Install Nix uses: DeterminateSystems/nix-installer-action@main with: extra-conf: | extra-substituters = https://bonsol.cachix.org extra-trusted-public-keys = bonsol.cachix.org-1:yz7vi1rCPW1BpqoszdJvf08HZxQ/5gPTPxft4NnT74A= - - name: Install and configure Cachix uses: cachix/cachix-action@v15 with: name: bonsol authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - - name: Run nix flake check run: nix flake check - - name: Enter development shell run: nix develop diff --git a/CHANGELOG.md b/CHANGELOG.md index b6848b1..18995ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Changed * `bonsol` cli option requirements and error messages updated for added clarity +* **Breaking**: `bonsol deploy` cli subcommand requirements updated. Please refer to the docs, or use `bonsol deploy --help` for more info. ### Added * `bonsol estimate` for estimating execution cost of bonsol programs. 
diff --git a/charts/templates/deployment-tester.yaml b/charts/templates/deployment-tester.yaml index a344623..7aa9b22 100644 --- a/charts/templates/deployment-tester.yaml +++ b/charts/templates/deployment-tester.yaml @@ -59,7 +59,7 @@ spec: - -c - | solana config set --keypair {{ .Values.signer.path }} -u $RPC_ENDPOINT - bonsol deploy -y -m {{ include "bonsol-tester.manifestPath" . }} -t url --url $SIMPLE_IMAGE + bonsol deploy url -y -m {{ include "bonsol-tester.manifestPath" . }} --url $SIMPLE_IMAGE while : do date '+%Y/%m/%d %H:%M:%S' diff --git a/cli/README.md b/cli/README.md index 36c93f1..2e1e4df 100644 --- a/cli/README.md +++ b/cli/README.md @@ -43,12 +43,12 @@ The output of the build command is a manifest.json file which is placed in the r You can deploy a bonsol program with the following command ``` -bonsol -k ./keypair.json -u http://localhost:8899 deploy -m {path to manifest.json} -y {auto confirm} -t {s3|shadow-drive|url} ... {upload type specific options} +bonsol -k ./keypair.json -u http://localhost:8899 deploy {s3|shadow-drive|url} -m {path to manifest.json} -y {auto confirm} ... {upload type specific options} ``` There will be many options for how to upload the program, the default is s3. Here is an example of how to deploy a program to s3 ``` -bonsol -k ./keypair.json -u http://localhost:8899 deploy -m program/manifest.json -t s3 --bucket bonsol-public-images --region us-east-1 --access-key {your key} --secret-key {your secret key} +bonsol -k ./keypair.json -u http://localhost:8899 deploy s3 -m program/manifest.json --bucket bonsol-public-images --region us-east-1 --access-key {your key} --secret-key {your secret key} ``` In the above example the manifest.json file is the file that was created by the build command. This will try to upload the binary to the s3 bucket and create a deployment account for the program. Programs are indexed by the image id, which is a kind of checksum of the program elf file. 
This means that if you change the elf file, the image id will change and the program will be deployed again under a new deployment account. Programs are immutable and can only be changed by redeploying the program. When a node downloads a program it will check the image id and if it doesnt match the deployment account it will reject the program. Furthermore when bonsol checks the proof, it will check the image id and if it doesnt match the deployment account and desired image id from execution request it will reject the proof. diff --git a/cli/src/command.rs b/cli/src/command.rs index 7ff85a2..3cc33af 100644 --- a/cli/src/command.rs +++ b/cli/src/command.rs @@ -1,4 +1,4 @@ -use clap::{command, ArgGroup, Args, Parser, Subcommand, ValueEnum}; +use clap::{command, ArgGroup, Args, Parser, Subcommand}; #[derive(Parser, Debug)] #[command(version)] @@ -39,29 +39,6 @@ pub struct BonsolCli { pub command: Command, } -pub struct ParsedBonsolCli { - pub config: Option, - - pub keypair: Option, - - pub rpc_url: Option, - - pub command: ParsedCommand, -} - -impl TryFrom for ParsedBonsolCli { - type Error = anyhow::Error; - - fn try_from(value: BonsolCli) -> Result { - Ok(Self { - config: value.config, - keypair: value.keypair, - rpc_url: value.rpc_url, - command: value.command.try_into()?, - }) - } -} - #[derive(Debug, Clone, Args)] pub struct S3UploadArgs { #[arg( @@ -108,6 +85,9 @@ pub struct S3UploadArgs { env = "AWS_S3_ENDPOINT" )] pub endpoint: Option, + + #[command(flatten)] + pub shared_args: SharedDeployArgs, } #[derive(Debug, Clone, Args)] @@ -139,63 +119,58 @@ pub struct ShadowDriveUploadArgs { #[arg(help = "Create a new Shadow Drive storage account", long)] pub create: bool, + + #[command(flatten)] + pub shared_args: SharedDeployArgs, } #[derive(Debug, Clone, Args)] pub struct UrlUploadArgs { #[arg(help = "Specify a URL endpoint to deploy to", long, required = true)] pub url: String, -} -#[derive(Debug, Clone, ValueEnum)] -pub enum DeployType { - S3, - ShadowDrive, - 
Url, + #[command(flatten)] + pub shared_args: SharedDeployArgs, } -#[derive(Debug, Clone)] -pub enum DeployDestination { +#[derive(Debug, Clone, Subcommand)] +pub enum DeployArgs { + #[command(about = "Deploy a program using an AWS S3 bucket")] S3(S3UploadArgs), + + #[command(about = "Deploy a program using ShadowDrive")] ShadowDrive(ShadowDriveUploadArgs), + + #[command(about = "Deploy a program manually with a URL")] Url(UrlUploadArgs), } -impl DeployDestination { - pub fn try_parse( - deploy_type: DeployType, - s3: Option, - sd: Option, - url: Option, - ) -> anyhow::Result { - match deploy_type { - // Because we are not supporting a direct mapping (eg, subcommand), - // it's possible for a user to specify a deployment type and provide the wrong - // arguments. If we support subcommands in the future this will be - // much clearer, otherwise we would need to do more validation here - // to provide better error messages when the wrong args are present. - DeployType::S3 if s3.is_some() => Ok(Self::S3(s3.unwrap())), - DeployType::ShadowDrive if sd.is_some() => Ok(Self::ShadowDrive(sd.unwrap())), - DeployType::Url if url.is_some() => Ok(Self::Url(url.unwrap())), - _ => anyhow::bail!("The deployment type and its corresponding args do not match, expected args for deployment type '{:?}'", deploy_type), + +impl DeployArgs { + pub fn shared_args(&self) -> SharedDeployArgs { + match self { + Self::S3(s3) => s3.shared_args.clone(), + Self::ShadowDrive(sd) => sd.shared_args.clone(), + Self::Url(url) => url.shared_args.clone(), } } } -#[derive(Debug, Clone)] -pub struct DeployArgs { - pub dest: DeployDestination, +#[derive(Debug, Clone, Args)] +pub struct SharedDeployArgs { + #[arg( + help = "The path to the program's manifest file (manifest.json)", + short = 'm', + long + )] pub manifest_path: String, + + #[arg( + help = "Whether to automatically confirm deployment", + short = 'y', + long + )] pub auto_confirm: bool, } -impl DeployArgs { - pub fn parse(dest: 
DeployDestination, manifest_path: String, auto_confirm: bool) -> Self { - Self { - dest, - manifest_path, - auto_confirm, - } - } -} #[derive(Subcommand, Debug)] pub enum Command { @@ -203,38 +178,10 @@ pub enum Command { about = "Deploy a program with various storage options, such as S3, ShadowDrive, or manually with a URL" )] Deploy { - #[arg( - help = "Specify the deployment type", - short = 't', - long, - value_enum, - required = true - )] - deploy_type: DeployType, - - #[command(flatten)] - s3: Option, - - #[command(flatten)] - shadow_drive: Option, - - #[command(flatten)] - url: Option, - - #[arg( - help = "The path to the program's manifest file (manifest.json)", - short = 'm', - long - )] - manifest_path: String, - - #[arg( - help = "Whether to automatically confirm deployment", - short = 'y', - long - )] - auto_confirm: bool, + #[clap(subcommand)] + deploy_args: DeployArgs, }, + #[command(about = "Build a ZK program")] Build { #[arg( @@ -244,6 +191,7 @@ pub enum Command { )] zk_program_path: String, }, + #[command(about = "Estimate the execution cost of a ZK RISC0 program")] Estimate { #[arg( @@ -263,6 +211,7 @@ pub enum Command { )] max_cycles: Option, }, + Execute { #[arg(short = 'f', long)] execution_request_file: Option, @@ -280,17 +229,18 @@ pub enum Command { #[arg(short = 'm', long)] tip: Option, - #[arg(short = 'i')] - input_file: Option, // overrides inputs in execution request file + #[arg(short = 'i', long, help = "override inputs in execution request file")] + input_file: Option, /// wait for execution to be proven - #[arg(short = 'w', long)] + #[arg(short = 'w', long, help = "wait for execution to be proven")] wait: bool, /// timeout in seconds - #[arg(short = 't', long)] + #[arg(short = 't', long, help = "timeout in seconds")] timeout: Option, }, + Prove { #[arg(short = 'm', long)] manifest_path: Option, @@ -307,6 +257,8 @@ pub enum Command { #[arg(short = 'o')] output_location: Option, }, + + #[command(about = "Initialize a new project")] 
Init { #[arg(short = 'd', long)] dir: Option, @@ -315,117 +267,3 @@ pub enum Command { project_name: String, }, } - -#[derive(Debug)] -pub enum ParsedCommand { - Deploy { - deploy_args: DeployArgs, - }, - Build { - zk_program_path: String, - }, - Estimate { - manifest_path: String, - input_file: Option, - max_cycles: Option, - }, - Execute { - execution_request_file: Option, - - program_id: Option, - - execution_id: Option, - - expiry: Option, - - tip: Option, - - input_file: Option, - - wait: bool, - - timeout: Option, - }, - Prove { - manifest_path: Option, - - program_id: Option, - - input_file: Option, - - execution_id: String, - - output_location: Option, - }, - Init { - dir: Option, - - project_name: String, - }, -} - -impl TryFrom for ParsedCommand { - type Error = anyhow::Error; - - fn try_from(value: Command) -> Result { - match value { - Command::Deploy { - deploy_type, - s3, - shadow_drive, - url, - manifest_path, - auto_confirm, - } => Ok(ParsedCommand::Deploy { - deploy_args: DeployArgs::parse( - DeployDestination::try_parse(deploy_type, s3, shadow_drive, url)?, - manifest_path, - auto_confirm, - ), - }), - Command::Build { zk_program_path } => Ok(ParsedCommand::Build { zk_program_path }), - Command::Estimate { - manifest_path, - input_file, - max_cycles, - } => Ok(ParsedCommand::Estimate { - manifest_path, - input_file, - max_cycles, - }), - Command::Execute { - execution_request_file, - program_id, - execution_id, - expiry, - tip, - input_file, - wait, - timeout, - } => Ok(ParsedCommand::Execute { - execution_request_file, - program_id, - execution_id, - expiry, - tip, - input_file, - wait, - timeout, - }), - Command::Prove { - manifest_path, - program_id, - input_file, - execution_id, - output_location, - } => Ok(ParsedCommand::Prove { - manifest_path, - program_id, - input_file, - execution_id, - output_location, - }), - Command::Init { dir, project_name } => Ok(ParsedCommand::Init { dir, project_name }), - } - } -} diff --git a/cli/src/common.rs 
b/cli/src/common.rs index 625726e..67dc2e1 100644 --- a/cli/src/common.rs +++ b/cli/src/common.rs @@ -215,6 +215,26 @@ pub(crate) fn try_load_from_config(config: Option) -> anyhow::Result<(St Ok((config.json_rpc_url, config.keypair_path)) } +pub(crate) fn load_solana_config( + config: Option, + rpc_url: Option, + keypair: Option, +) -> anyhow::Result<(String, solana_sdk::signer::keypair::Keypair)> { + let (rpc_url, keypair_file) = match rpc_url.zip(keypair) { + Some(config) => config, + None => try_load_from_config(config)?, + }; + Ok(( + rpc_url, + solana_sdk::signature::read_keypair_file(std::path::Path::new(&keypair_file)).map_err( + |err| BonsolCliError::FailedToReadKeypair { + file: keypair_file, + err: format!("{err:?}"), + }, + )?, + )) +} + pub async fn sol_check(rpc_client: String, pubkey: Pubkey) -> bool { let rpc_client = rpc_client::RpcClient::new(rpc_client); if let Ok(account) = rpc_client.get_account(&pubkey).await { diff --git a/cli/src/deploy.rs b/cli/src/deploy.rs index 7d0981b..54b471c 100644 --- a/cli/src/deploy.rs +++ b/cli/src/deploy.rs @@ -15,18 +15,17 @@ use solana_sdk::commitment_config::CommitmentConfig; use solana_sdk::pubkey::Pubkey; use solana_sdk::signature::read_keypair_file; -use crate::command::{DeployArgs, DeployDestination, S3UploadArgs, ShadowDriveUploadArgs}; +use crate::command::{DeployArgs, S3UploadArgs, ShadowDriveUploadArgs, SharedDeployArgs}; use crate::common::ZkProgramManifest; use crate::error::{BonsolCliError, S3ClientError, ShadowDriveClientError, ZkManifestError}; pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) -> Result<()> { let bar = ProgressBar::new_spinner(); let rpc_client = RpcClient::new_with_commitment(rpc_url.clone(), CommitmentConfig::confirmed()); - let DeployArgs { - dest, + let SharedDeployArgs { manifest_path, auto_confirm, - } = deploy_args; + } = deploy_args.shared_args(); let manifest_file = File::open(Path::new(&manifest_path)).map_err(|err| { 
BonsolCliError::ZkManifestError(ZkManifestError::FailedToOpen { @@ -46,8 +45,8 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - err, }) })?; - let url: String = match dest { - DeployDestination::S3(s3_upload) => { + let url: String = match deploy_args { + DeployArgs::S3(s3_upload) => { let S3UploadArgs { bucket, access_key, @@ -58,10 +57,12 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - } = s3_upload; let dest = - object_store::path::Path::from(format!("{}-{}", manifest.name, manifest.image_id)); + object_store::path::Path::from(format!("{}-{}", manifest.name, manifest.image_id)); - let url = endpoint.unwrap_or( - format!("https://{}.s3.{}.amazonaws.com/{}", bucket, region, dest)); + let url = endpoint.unwrap_or(format!( + "https://{}.s3.{}.amazonaws.com/{}", + bucket, region, dest + )); let s3_client = AmazonS3Builder::new() .with_bucket_name(&bucket) @@ -102,7 +103,7 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - println!("Uploaded to S3 url {}", url); url } - DeployDestination::ShadowDrive(shadow_drive_upload) => { + DeployArgs::ShadowDrive(shadow_drive_upload) => { let ShadowDriveUploadArgs { storage_account, storage_account_size_mb, @@ -198,7 +199,7 @@ pub async fn deploy(rpc_url: String, signer: Keypair, deploy_args: DeployArgs) - println!("Uploaded to shadow drive"); resp.message } - DeployDestination::Url(url_upload) => { + DeployArgs::Url(url_upload) => { let req = reqwest::get(&url_upload.url).await?; let bytes = req.bytes().await?; if bytes != loaded_binary { diff --git a/cli/src/error.rs b/cli/src/error.rs index 345d709..053a0c0 100644 --- a/cli/src/error.rs +++ b/cli/src/error.rs @@ -21,7 +21,7 @@ pub enum BonsolCliError { FailedToReadKeypair { file: String, err: String }, #[error("Account '{0}' does not have any SOL to pay for the transaction(s)")] - InsufficientFundsForTransactions(String), + InsufficientFunds(String), #[error(transparent)] 
ZkManifestError(#[from] ZkManifestError), diff --git a/cli/src/main.rs b/cli/src/main.rs index 11cdea1..2108c5e 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -5,15 +5,13 @@ use std::path::Path; use atty::Stream; use bonsol_sdk::BonsolClient; use clap::Parser; -use common::{execute_get_inputs, ZkProgramManifest}; use risc0_circuit_rv32im::prove::emu::exec::DEFAULT_SEGMENT_LIMIT_PO2; use risc0_circuit_rv32im::prove::emu::testutil::DEFAULT_SESSION_LIMIT; use risc0_zkvm::ExecutorEnv; -use solana_sdk::signature::read_keypair_file; use solana_sdk::signer::Signer; -use crate::command::{BonsolCli, ParsedBonsolCli, ParsedCommand}; -use crate::common::{sol_check, try_load_from_config}; +use crate::command::{BonsolCli, Command}; +use crate::common::{execute_get_inputs, load_solana_config, sol_check, ZkProgramManifest}; use crate::error::{BonsolCliError, ZkManifestError}; mod build; @@ -32,43 +30,27 @@ pub(crate) mod error; #[tokio::main] async fn main() -> anyhow::Result<()> { - let ParsedBonsolCli { + let BonsolCli { config, keypair, rpc_url, command, - } = BonsolCli::parse().try_into()?; - - let (rpc, kpp) = match rpc_url.zip(keypair) { - Some(conf) => conf, - None => try_load_from_config(config)?, - }; - let keypair = - read_keypair_file(Path::new(&kpp)).map_err(|err| BonsolCliError::FailedToReadKeypair { - file: kpp, - err: format!("{err:?}"), - })?; - let stdin = atty::isnt(Stream::Stdin) - .then(|| { - let mut buffer = String::new(); - io::stdin().read_to_string(&mut buffer).ok()?; - (!buffer.trim().is_empty()).then_some(buffer) - }) - .flatten(); - let sdk = BonsolClient::new(rpc.clone()); + } = BonsolCli::parse(); match command { - ParsedCommand::Build { zk_program_path } => build::build(&keypair, zk_program_path), - ParsedCommand::Deploy { deploy_args } => { - if !sol_check(rpc.clone(), keypair.pubkey()).await { - return Err(BonsolCliError::InsufficientFundsForTransactions( - keypair.pubkey().to_string(), - ) - .into()); + Command::Build { zk_program_path } 
=> build::build( + &load_solana_config(config, rpc_url, keypair)?.1, + zk_program_path, + ), + Command::Deploy { deploy_args } => { + let (rpc_url, keypair) = load_solana_config(config, rpc_url, keypair)?; + if !sol_check(rpc_url.clone(), keypair.pubkey()).await { + return Err(BonsolCliError::InsufficientFunds(keypair.pubkey().to_string()).into()); } - deploy::deploy(rpc, keypair, deploy_args).await + + deploy::deploy(rpc_url, keypair, deploy_args).await } - ParsedCommand::Estimate { + Command::Estimate { manifest_path, input_file, max_cycles, @@ -92,19 +74,20 @@ async fn main() -> anyhow::Result<()> { err, }) })?; + let mut env = &mut ExecutorEnv::builder(); env = env .segment_limit_po2(DEFAULT_SEGMENT_LIMIT_PO2 as u32) .session_limit(max_cycles.or(DEFAULT_SESSION_LIMIT)); - if input_file.is_some() { let inputs = execute_get_inputs(input_file, None)?; let inputs: Vec<&str> = inputs.iter().map(|i| i.data.as_str()).collect(); env = env.write(&inputs.as_slice())?; } + estimate::estimate(elf.as_slice(), env.build()?) 
} - ParsedCommand::Execute { + Command::Execute { execution_request_file, program_id, execution_id, @@ -114,15 +97,22 @@ async fn main() -> anyhow::Result<()> { tip, timeout, } => { - if !sol_check(rpc.clone(), keypair.pubkey()).await { - return Err(BonsolCliError::InsufficientFundsForTransactions( - keypair.pubkey().to_string(), - ) - .into()); + let (rpc_url, keypair) = load_solana_config(config, rpc_url, keypair)?; + if !sol_check(rpc_url.clone(), keypair.pubkey()).await { + return Err(BonsolCliError::InsufficientFunds(keypair.pubkey().to_string()).into()); } + let stdin = atty::isnt(Stream::Stdin) + .then(|| { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer).ok()?; + (!buffer.trim().is_empty()).then_some(buffer) + }) + .flatten(); + let sdk = BonsolClient::new(rpc_url.clone()); + execute::execute( &sdk, - rpc, + rpc_url, &keypair, execution_request_file, program_id, @@ -136,13 +126,23 @@ async fn main() -> anyhow::Result<()> { ) .await } - ParsedCommand::Prove { + Command::Prove { manifest_path, program_id, input_file, execution_id, output_location, } => { + let rpc_url = load_solana_config(config, rpc_url, keypair)?.0; + let stdin = atty::isnt(Stream::Stdin) + .then(|| { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer).ok()?; + (!buffer.trim().is_empty()).then_some(buffer) + }) + .flatten(); + let sdk = BonsolClient::new(rpc_url.clone()); + prove::prove( &sdk, execution_id, @@ -154,6 +154,6 @@ async fn main() -> anyhow::Result<()> { ) .await } - ParsedCommand::Init { project_name, dir } => init::init_project(&project_name, dir), + Command::Init { project_name, dir } => init::init_project(&project_name, dir), } } diff --git a/cli/src/tests/estimate.rs b/cli/src/tests/estimate.rs index 9a782ae..108f5fc 100644 --- a/cli/src/tests/estimate.rs +++ b/cli/src/tests/estimate.rs @@ -7,21 +7,7 @@ use crate::tests::bonsol_cmd; fn bonsol_estimate() -> Command { let mut cmd = bonsol_cmd(); - let keypair = cmd - 
.get_current_dir() - .unwrap() - .join("cli") - .join("src") - .join("tests") - .join("test_data") - .join("test_id.json"); - cmd.args(&[ - "--keypair", - keypair.to_str().unwrap(), - "--rpc-url", - "http://localhost:8899", - ]) - .arg("estimate"); + cmd.arg("estimate"); cmd } diff --git a/docs/docs/contributing/contributing.mdx b/docs/docs/contributing/contributing.mdx index 4399ead..9a4efee 100644 --- a/docs/docs/contributing/contributing.mdx +++ b/docs/docs/contributing/contributing.mdx @@ -70,7 +70,7 @@ cargo run -p bonsol-cli build -z images/simple ``` 5. Use the bonsol cli to deploy a zkprogram (here is a example already uploaded for you) ```bash -cargo run -p bonsol-cli deploy -m images/simple/manifest.json -t url --url https://bonsol-public-images.s3.us-east-1.amazonaws.com/simple-68f4b0c5f9ce034aa60ceb264a18d6c410a3af68fafd931bcfd9ebe7c1e42960 +cargo run -p bonsol-cli deploy url -m images/simple/manifest.json --url https://bonsol-public-images.s3.us-east-1.amazonaws.com/simple-68f4b0c5f9ce034aa60ceb264a18d6c410a3af68fafd931bcfd9ebe7c1e42960 ``` 6. Use the bonsol cli to execute a zkprogram ```bash diff --git a/docs/docs/shared/deploy.mdx b/docs/docs/shared/deploy.mdx index 4e14ded..b848b33 100644 --- a/docs/docs/shared/deploy.mdx +++ b/docs/docs/shared/deploy.mdx @@ -16,7 +16,7 @@ Manual deployment can be a cause of bugs and mismatches in this regard so we don To deploy manually you can use the following command. ```bash -bonsol deploy -m ./path-to-your-manifest.json -t {s3|shadow-drive|url} +bonsol deploy {s3|shadow-drive|url} -m ./path-to-your-manifest.json ``` #### S3 @@ -31,8 +31,7 @@ aws s3api create-bucket \ ``` ```bash -bonsol deploy \ - --deploy-type s3 \ +bonsol deploy s3 \ --bucket {bucket_name} \ --access-key {access-key} \ --secret-key {secret-key} \ @@ -46,9 +45,9 @@ ShadowDrive is a decentralized storage network that allows you to upload your pr If you have not already created a storage account you can create and upload in one command. 
```bash -bonsol deploy -m ./path-to-your-manifest.json -t shadow-drive --storage-account-name {your storage account} --storage-account-size-mb {your storage account size in mb} --storage-account-name {your storage account name} --alternate-keypair {path to your alternate keypair} +bonsol deploy shadow-drive -m ./path-to-your-manifest.json --storage-account-name {your storage account name} --storage-account-size-mb {your storage account size in mb} --alternate-keypair {path to your alternate keypair} ``` Once you have created your storage account you can upload your program to it for the future versions of your program. ```bash -bonsol deploy -m ./path-to-your-manifest.json -t shadow-drive --storage-account {your storage account} +bonsol deploy shadow-drive -m ./path-to-your-manifest.json --storage-account {your storage account} ```