diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 47dda7d..e0e6ff5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -60,3 +60,28 @@ jobs: run: echo "PKG_CONFIG_PATH=/usr/lib/pkgconfig" >> $GITHUB_ENV - name: Run tests run: cargo tarpaulin --all-features --verbose --tests --skip-clean + + build-and-push: + name: Build and push Docker images to Docker Hub + env: + PKG_NAME: irelia-public-server + PKG_NAME_WORKER: irelia-worker + runs-on: ubuntu-latest + needs: [cargo-check, fmt-check, test-and-coverage] + steps: + - uses: actions/checkout@v4 + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Build and push Docker images + uses: docker/build-push-action@v4 + with: + context: . + file: ./Dockerfile + push: true + tags: | + ${{ secrets.DOCKERHUB_USERNAME }}/${{ env.PKG_NAME }}:latest + ${{ secrets.DOCKERHUB_USERNAME }}/${{ env.PKG_NAME_WORKER }}:latest diff --git a/Cargo.lock b/Cargo.lock index aa35c11..c148840 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1059,7 +1059,7 @@ dependencies = [ "serde-big-array", "serde_bytes", "serde_json", - "serde_with", + "serde_with 3.11.0", "serde_yaml 0.8.26", "strum 0.24.1", "strum_macros 0.24.3", @@ -2076,7 +2076,7 @@ checksum = "6d7c5415e3a6bc6d3e99eff6268e488fd4ee25e7b28c10f08fa6760bd9de16e4" dependencies = [ "serde", "serde_repr", - "serde_with", + "serde_with 3.11.0", ] [[package]] @@ -2188,7 +2188,7 @@ dependencies = [ "num-traits", "serde", "serde_json", - "starknet-crypto", + "starknet-crypto 0.6.2", "starknet-types-core 0.1.7", "thiserror", "thiserror-no-std", @@ -2207,21 +2207,6 @@ dependencies = [ "serde", ] -[[package]] -name = "cairo-proof-parser" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e453b4f5548816666ad56a05e0e0dc57a55116570a74093fc6b5856fea3069b" -dependencies = [ - "anyhow", - "itertools 0.12.1", - "num-bigint 0.4.6", - "regex", - "serde", - "serde_json", - "starknet-crypto", -] - [[package]] name = "cairo-vm" version = "1.0.0-rc0" @@ -2246,7 +2231,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "sha3 0.10.8", - "starknet-crypto", + "starknet-crypto 0.6.2", "starknet-types-core 0.0.6", "thiserror-no-std", "zip", @@ -2276,7 +2261,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "sha3 0.10.8", - "starknet-crypto", + "starknet-crypto 0.6.2", "starknet-types-core 0.1.7", "thiserror-no-std", "wasm-bindgen", @@ -2671,7 +2656,7 @@ dependencies = [ "rust-ini", "serde", "serde_json", - "toml 0.8.13", + "toml 0.8.19", "yaml-rust", ] @@ -3826,7 +3811,7 @@ dependencies = [ "serde", "serde_json", "syn 2.0.64", - "toml 0.8.13", + "toml 0.8.19", "walkdir", ] @@ -4041,6 +4026,12 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + [[package]] name = "fastrand" version = "2.1.1" @@ -5458,27 +5449,28 @@ version = "0.0.1" dependencies = [ "anyhow", "axum 0.8.0-alpha.1", + "base64 0.22.1", "clap 4.5.17", "deadpool-diesel", "diesel", "diesel_migrations", - "graphile_worker", "irelia_adapter", "irelia_common", "irelia_core", - "openssl", "opentelemetry", - "rand 0.9.0-alpha.2", "readonly", + "reqwest 0.12.9", "serde", "serde_json", - "testcontainers-modules", + "stone-cli", "thiserror", "tokio", + "tokio-postgres", + "toml 0.8.19", "tower-http", "tracing", -
"tracing-opentelemetry", "uuid 1.10.0", + "zip", ] [[package]] @@ -5511,7 +5503,6 @@ dependencies = [ "stone-cli", "tempfile", "test-log", - "testcontainers-modules", "tokio", "tracing", "tracing-opentelemetry", @@ -5551,7 +5542,6 @@ dependencies = [ "regex", "serde", "serde_json", - "stone-cli", "thiserror", "uuid 1.10.0", ] @@ -5563,7 +5553,10 @@ dependencies = [ "anyhow", "axum 0.8.0-alpha.1", "clap 4.5.17", + "deadpool-diesel", + "diesel", "graphile_worker", + "irelia_adapter", "irelia_common", "irelia_core", "opentelemetry", @@ -6055,7 +6048,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd01039851e82f8799046eabbb354056283fb265c8ec0996af940f4e85a380ff" dependencies = [ "serde", - "toml 0.8.13", + "toml 0.8.19", ] [[package]] @@ -7920,6 +7913,35 @@ dependencies = [ "ark-std", ] +[[package]] +name = "postgres-protocol" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acda0ebdebc28befa84bee35e651e4c5f09073d668c7aed4cf7e23c3cda84b23" +dependencies = [ + "base64 0.22.1", + "byteorder", + "bytes", + "fallible-iterator", + "hmac 0.12.1", + "md-5", + "memchr", + "rand 0.8.5", + "sha2 0.10.8", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f66ea23a2d0e5734297357705193335e0a957696f34bed2f2faefacb2fec336f" +dependencies = [ + "bytes", + "fallible-iterator", + "postgres-protocol", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -8589,9 +8611,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.8" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64 0.22.1", "bytes", @@ -9195,6 +9217,17 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_json_pythonic" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62212da9872ca2a0cad0093191ee33753eddff9266cbbc1b4a602d13a3a768db" +dependencies = [ + "itoa", + "ryu", + "serde", +] + [[package]] name = "serde_merge" version = "0.1.3" @@ -9261,9 +9294,24 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.8.1" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad483d2ab0149d5a5ebcd9972a3852711e0153d863bf5a5d0391d28883c4a20" +checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe" +dependencies = [ + "base64 0.13.1", + "chrono", + "hex", + "serde", + "serde_json", + "serde_with_macros 2.3.3", + "time", +] + +[[package]] +name = "serde_with" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" dependencies = [ "base64 0.22.1", "chrono", @@ -9273,15 +9321,27 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "serde_with_macros", + "serde_with_macros 3.11.0", "time", ] [[package]] name = "serde_with_macros" -version = "3.8.1" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65569b702f41443e8bc8bbb1c5779bd0450bbe723b56198980e80ec45780bce2" +checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.64", +] + +[[package]] +name = "serde_with_macros" 
+version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" dependencies = [ "darling", "proc-macro2", @@ -9822,7 +9882,7 @@ dependencies = [ "regex", "serde", "serde_json", - "serde_with", + "serde_with 3.11.0", "thiserror", "tokio", ] @@ -9839,11 +9899,30 @@ dependencies = [ "regex", "serde", "serde_json", - "serde_with", + "serde_with 3.11.0", "thiserror", "tokio", ] +[[package]] +name = "starknet-core" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d506e02a4083290d13b427dfe437fd95aa8b56315c455bb2f9cdeca76620d457" +dependencies = [ + "base64 0.21.7", + "crypto-bigint", + "flate2", + "hex", + "serde", + "serde_json", + "serde_json_pythonic", + "serde_with 2.3.3", + "sha3 0.10.8", + "starknet-crypto 0.7.3", + "starknet-types-core 0.1.7", +] + [[package]] name = "starknet-crypto" version = "0.6.2" @@ -9859,18 +9938,37 @@ dependencies = [ "rfc6979", "sha2 0.10.8", "starknet-crypto-codegen", - "starknet-curve", + "starknet-curve 0.4.2", "starknet-ff", "zeroize", ] +[[package]] +name = "starknet-crypto" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded22ccf4cb9e572ce3f77de6066af53560cd2520d508876c83bb1e6b29d5cbc" +dependencies = [ + "crypto-bigint", + "hex", + "hmac 0.12.1", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "rfc6979", + "sha2 0.10.8", + "starknet-curve 0.5.1", + "starknet-types-core 0.1.7", + "zeroize", +] + [[package]] name = "starknet-crypto-codegen" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbc159a1934c7be9761c237333a57febe060ace2bc9e3b337a59a37af206d19f" dependencies = [ - "starknet-curve", + "starknet-curve 0.4.2", "starknet-ff", "syn 2.0.64", ] @@ -9884,6 +9982,15 @@ dependencies = [ "starknet-ff", ] +[[package]] +name = "starknet-curve" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcde6bd74269b8161948190ace6cf069ef20ac6e79cd2ba09b320efa7500b6de" +dependencies = [ + "starknet-types-core 0.1.7", +] + [[package]] name = "starknet-ff" version = "0.3.7" @@ -9933,26 +10040,33 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "stone-cli" -version = "0.1.0-alpha" -source = "git+https://github.com/sota-zk-labs/stone-cli#f98b6944f7227a7408cbb9ab2ae500d626b4f278" +version = "0.1.0" +source = "git+https://github.com/zksecurity/stone-cli.git?branch=main#e12447e22cca70ec43a9154ce7c506bd3aa69f42" dependencies = [ "anyhow", "bincode 2.0.0-rc.3", "cairo-bootloader", "cairo-felt", - "cairo-proof-parser", "cairo-vm 1.0.1", "clap 4.5.17", "flate2", "itertools 0.13.0", "num-bigint 0.4.6", - "reqwest 0.12.8", + "num-traits", + "once_cell", + "reqwest 0.12.9", "rstest", "serde", "serde_json", "sha256", "stark_evm_adapter 0.1.5 (git+https://github.com/zksecurity/stark-evm-adapter.git?branch=add-build-configs)", + "starknet-crypto 0.7.3", "stone-prover-sdk", + "swiftness", + "swiftness_air", + "swiftness_fri", + "swiftness_proof_parser", + "swiftness_stark", "tar", "tempfile", "thiserror", @@ -10158,6 +10272,140 @@ dependencies = [ "zip", ] +[[package]] +name = "swiftness" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "anyhow", + "cairo-felt", + "clap 4.5.17", + "itertools 0.13.0", + "num-bigint 0.4.6", + "num-traits", + 
"serde", + "serde_json", + "starknet-crypto 0.7.3", + "swiftness_air", + "swiftness_commitment", + "swiftness_fri", + "swiftness_pow", + "swiftness_proof_parser", + "swiftness_stark", + "thiserror", + "thiserror-no-std", +] + +[[package]] +name = "swiftness_air" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "num-bigint 0.4.6", + "serde", + "serde_with 3.11.0", + "starknet-core", + "starknet-crypto 0.7.3", + "starknet-types-core 0.1.7", + "swiftness_commitment", + "swiftness_transcript", + "thiserror", + "thiserror-no-std", +] + +[[package]] +name = "swiftness_commitment" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "blake2", + "num-bigint 0.4.6", + "serde", + "serde_with 3.11.0", + "sha3 0.10.8", + "starknet-core", + "starknet-crypto 0.7.3", + "starknet-types-core 0.1.7", + "swiftness_transcript", + "thiserror", + "thiserror-no-std", +] + +[[package]] +name = "swiftness_fri" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "num-bigint 0.4.6", + "serde", + "serde_with 3.11.0", + "sha3 0.10.8", + "starknet-core", + "starknet-crypto 0.7.3", + "swiftness_commitment", + "swiftness_transcript", + "thiserror", + "thiserror-no-std", +] + +[[package]] +name = "swiftness_pow" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "blake2", + "serde", + "sha3 0.10.8", + "starknet-crypto 0.7.3", + "starknet-types-core 0.1.7", + "swiftness_commitment", + "swiftness_transcript", + "thiserror", + "thiserror-no-std", +] + +[[package]] +name = "swiftness_proof_parser" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "anyhow", + "clap 4.5.17", + "num-bigint 0.4.6", + "regex", + "serde", + "serde_json", + "starknet-crypto 0.7.3", + "starknet-types-core 0.1.7", + "thiserror", +] + +[[package]] +name = "swiftness_stark" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "serde", + "serde_with 3.11.0", + "starknet-core", + "starknet-crypto 0.7.3", + "swiftness_air", + "swiftness_commitment", + "swiftness_fri", + "swiftness_pow", + "swiftness_transcript", + "thiserror", + "thiserror-no-std", +] + +[[package]] +name = "swiftness_transcript" +version = "0.1.2" +source = "git+https://github.com/zksecurity/integrity-calldata-generator#6f2dd268274e40e5ea75e2f17aff6b8e53f8f499" +dependencies = [ + "starknet-crypto 0.7.3", +] + [[package]] name = "syn" version = "1.0.109" @@ -10361,7 +10609,7 @@ dependencies = [ "pin-project-lite", "serde", "serde_json", - "serde_with", + "serde_with 3.11.0", "thiserror", "tokio", "tokio-stream", @@ -10389,7 +10637,7 @@ dependencies = [ "pin-project-lite", "serde", "serde_json", - "serde_with", + "serde_with 3.11.0", "thiserror", "tokio", "tokio-stream", @@ -10400,9 +10648,9 @@ dependencies = [ [[package]] name = "testcontainers-modules" -version = "0.11.2" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebd106a016d7b19e4c080c73c858d842fead9e420620ad873955569cc6bdac11" +checksum = 
"064a2677e164cad39ef3c1abddb044d5a25c49d27005804563d8c4227aac8bd0" dependencies = [ "testcontainers 0.23.1", ] @@ -10635,6 +10883,32 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-postgres" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b5d3742945bc7d7f210693b0c58ae542c6fd47b17adbbda0885f3dcb34a6bdb" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "rand 0.8.5", + "socket2", + "tokio", + "tokio-util", + "whoami", +] + [[package]] name = "tokio-rustls" version = "0.24.1" @@ -10735,9 +11009,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.13" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e43f8cc456c9704c851ae29c67e17ef65d2c30017c17a9765b89c382dc8bba" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", diff --git a/Cargo.toml b/Cargo.toml index 1480263..e0538c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,58 +13,53 @@ members = [ irelia_adapter = { path = "crates/adapter" } irelia_common = { path = "crates/common" } irelia_core = { path = "crates/core" } -verifier = { path = "crates/temp/verifier" } -irelia_worker = { path = "crates/worker" } + anyhow = { version = "1.0.87" } aptos-sdk = { git = "https://github.com/aptos-labs/aptos-core", branch = "mainnet" } aptos-testcontainer = { version = "0.1.2", features = ["testing"] } async-trait = { version = "0.1.81" } axum = { version = "0.8.0-alpha.1" } +base64 = { version = "0.22.1" } clap = { version = "4.5.17" } config = { version = "0.14.0" } deadpool-diesel = { version = "0.6.1" } -diesel = { version = "2.2.4" } +diesel = { version = "2.2.4", features = ["uuid", "postgres"] } diesel_migrations = { version = "2.2.0" } dotenv = { version = "0.15.0" } ethers = { version = "2.0.14" } glob = { version = "0.3.1" } -itertools = { version = "0.13.0" } -lazy_static = { version = "1.4.0" } +graphile_worker = { version = "0.8.0" } log = { version = "0.4.22" } num-bigint = { version = "0.4.6" } num-traits = { version = "0.2.19" } -once_cell = { version = "1.19.0" } -openssl = { version = "0.10.66" } opentelemetry = { version = "0.26.0" } opentelemetry-otlp = { version = "0.26.0" } opentelemetry-semantic-conventions = { version = "0.26.0" } opentelemetry_sdk = { version = "0.26.0" } -prost = { version = "0.13.2" } -rand = { version = "0.9.0-alpha.2" } rand_core = { version = "0.5.1" } -graphile_worker = { version = "0.8.0" } readonly = { version = "0.2.12" } redis-async = { version = "0.17.2" } regex = { version = "1.11.0" } +reqwest = { version = "0.12.9" } scopeguard = { version = "1.2.0" } serde = { version = "1.0.210", features = ["derive"] } -sqlx = { version = "*" } serde_json = { version = "1.0.128" } -stone-cli = { git = "https://github.com/sota-zk-labs/stone-cli" } +sqlx = { version = "*" } +stone-cli = { git = "https://github.com/zksecurity/stone-cli.git", branch = "main" } tempfile = { version = "3.13.0" } test-log = { version = "0.2.16" } -testcontainers-modules = { version = "0.11.2" } thiserror = { version = "1.0.64" } tokio = { version = "1.39.3", features = ["macros", "rt-multi-thread"] } -tonic = { version = "0.12.2" } +tokio-postgres = { version = "0.7.12" } +toml = { version = "0.8.19" } tower-http = { version = "0.6.1" } tracing = { 
version = "0.1.40" } tracing-bunyan-formatter = { version = "0.3.9" } tracing-opentelemetry = { version = "0.27.0" } tracing-subscriber = { version = "0.3.18" } -url = { version = "2.4.0", features = ["serde"] } -uuid = { version = "1.10.0" } +zip = { version = "0.6.6" } +uuid = { version = "1.10.0", features = ["v4"] } [patch.crates-io] merlin = { git = "https://github.com/aptos-labs/merlin" } diff --git a/Dockerfile b/Dockerfile index db54b85..6897a46 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,35 +1,44 @@ # Using the `rust-musl-builder` as base image, instead of # the official Rust toolchain -FROM clux/muslrust:stable AS chef -USER root -RUN cargo install cargo-chef - +FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef WORKDIR /app -FROM clux/muslrust:stable AS bunyan -RUN cargo install bunyan - FROM chef AS planner COPY . . RUN cargo chef prepare --recipe-path recipe.json FROM chef AS builder +RUN apt update +RUN apt install -y git build-essential libssl-dev pkg-config protobuf-compiler libclang1 clang \ + cmake \ + libpq-dev \ + libdw-dev \ + binutils \ + lld \ + libudev-dev +RUN rm -rf /var/lib/apt/lists/* + +RUN git clone https://github.com/Draply/source.git /root/.stone-cli/v0.1.0 COPY --from=planner /app/recipe.json recipe.json RUN cargo chef cook --release --recipe-path recipe.json COPY . . RUN cargo build --release --all RUN mv target/${CARGO_BUILD_TARGET}/release /out -FROM alpine AS public-dev -WORKDIR /user -COPY crates/public/config/00-default.toml 00-default.toml -COPY --from=builder /out/cli /usr/local/bin/rust-api-server -COPY --from=bunyan /root/.cargo/bin/bunyan /usr/local/bin/ -ENTRYPOINT ["/bin/sh"] -CMD ["-c", "/usr/local/bin/irelia --config-path=*.toml | bunyan"] - -FROM scratch AS public-prod +FROM debian:bookworm-slim AS irelia-public-server WORKDIR /user +RUN apt update +RUN apt install -y libssl-dev libpq-dev +RUN rm -rf /var/lib/apt/lists/* COPY crates/public/config/00-default.toml 00-default.toml COPY --from=builder /out/irelia /usr/local/bin/irelia ENTRYPOINT ["/usr/local/bin/irelia", "--config-path=*.toml"] + +FROM debian:bookworm-slim AS irelia-worker +WORKDIR /user +RUN apt update +RUN apt install -y libssl-dev libpq-dev +RUN rm -rf /var/lib/apt/lists/* +COPY crates/worker/config/00-default.toml 00-default.toml +COPY --from=builder /out/irelia_worker /usr/local/bin/irelia_worker +ENTRYPOINT ["/usr/local/bin/irelia_worker", "--config-path=*.toml"] diff --git a/Makefile b/Makefile index 2b734de..8489038 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,8 @@ POSTGRES_DIR="./src/adapter/src/repositories/postgres" DATABASE_URL="postgres://postgres:changeme@127.0.0.1:5432/postgres" -PKG_NAME=rust-api-server -PKG_NAME_GRPC=rust-grpc-server +PKG_NAME=irelia-public-server +PKG_NAME_WORKER=irelia-public-worker BUILD_VERSION=$(shell git describe --long) BUILD_RELEASE=$(shell git describe --tags --abbrev=0) @@ -30,17 +30,13 @@ migrate-redo: --config-file ${POSTGRES_DIR}/diesel.toml build: - PKG_NAME=rust-api-server - PKG_NAME_GRPC=rust-grpc-server - BUILD_VERSION=$(shell git describe --long) - BUILD_RELEASE=$(shell git describe --tags --abbrev=0) - BUILDKIT_PROGRESS=plain - DOCKER_BUILDKIT=1 - docker build -t $(PKG_NAME):$(BUILD_VERSION) --target=public-prod . - docker build -t $(PKG_NAME_GRPC):$(BUILD_VERSION) --target=gpt-prod . + export BUILDKIT_PROGRESS=plain + export DOCKER_BUILDKIT=1 + docker build -t $(PKG_NAME):$(BUILD_VERSION) --target=irelia-public-server . + docker build -t $(PKG_NAME_WORKER):$(BUILD_VERSION) --target=irelia-worker .
build-dev: - BUILDKIT_PROGRESS=plain DOCKER_BUILDKIT=1 docker build -t $(PKG_NAME):$(BUILD_VERSION) --target=public-dev . + BUILDKIT_PROGRESS=plain DOCKER_BUILDKIT=1 docker build -t $(PKG_NAME):$(BUILD_VERSION) --target=irelia-public-server . && \ BUILDKIT_PROGRESS=plain DOCKER_BUILDKIT=1 docker build -t $(PKG_NAME_WORKER):$(BUILD_VERSION) --target=irelia-worker . profiling-public: diff --git a/contracts/navori b/contracts/navori index cc5bbc2..e1f89e1 160000 --- a/contracts/navori +++ b/contracts/navori @@ -1 +1 @@ -Subproject commit cc5bbc28b488bf3f846c7dc27d452425d88f45d3 +Subproject commit e1f89e14820668ca0e888c030bddd97fb6f91554 diff --git a/crates/adapter/Cargo.toml b/crates/adapter/Cargo.toml index e6644d3..b39b401 100644 --- a/crates/adapter/Cargo.toml +++ b/crates/adapter/Cargo.toml @@ -20,22 +20,17 @@ diesel = { workspace = true, features = [ diesel_migrations = { workspace = true } dotenv = { workspace = true } ethers = { workspace = true } +graphile_worker = { workspace = true } log = { workspace = true } num-bigint = { workspace = true } num-traits = { workspace = true } +opentelemetry = { workspace = true } rand_core = { workspace = true } redis-async = { workspace = true } regex = { workspace = true } scopeguard = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } -stone-cli = { workspace = true } -tempfile = { workspace = true } -test-log = { workspace = true } -testcontainers-modules = { workspace = true, features = ["postgres", "redis"] } -tokio = { workspace = true, features = ["full"] } -uuid = { version = "1.10.0", features = ["v4", "serde"] } -graphile_worker = { workspace = true } sqlx = { workspace = true, features = [ "chrono", "postgres", @@ -43,6 +38,10 @@ sqlx = { workspace = true, features = [ "macros", "runtime-tokio", ] } +stone-cli = { workspace = true } +tempfile = { workspace = true } +test-log = { workspace = true } +tokio = { workspace = true, features = ["full"] } tracing = { workspace = true } -opentelemetry = { workspace = true } -tracing-opentelemetry = {workspace = true} +tracing-opentelemetry = { workspace = true } +uuid = { workspace = true } diff --git a/crates/adapter/src/aptos_writer/contracts_caller/gps/extract_gps_input.rs b/crates/adapter/src/aptos_writer/contracts_caller/gps/extract_gps_input.rs index 603751d..d2bb2e1 100644 --- a/crates/adapter/src/aptos_writer/contracts_caller/gps/extract_gps_input.rs +++ b/crates/adapter/src/aptos_writer/contracts_caller/gps/extract_gps_input.rs @@ -2,7 +2,7 @@ use crate::aptos_writer::contracts_caller::gps::types::verify_proof_and_register VerifyProofAndRegisterData, VerifyProofAndRegisterDataJson, }; -pub fn extract_gps_input(main_proof: &String) -> anyhow::Result<VerifyProofAndRegisterData> { +pub fn extract_gps_input(main_proof: &str) -> anyhow::Result<VerifyProofAndRegisterData> { let data = serde_json::from_str::<VerifyProofAndRegisterDataJson>(main_proof)?; Ok(VerifyProofAndRegisterData::from(data)) } diff --git a/crates/adapter/src/aptos_writer/contracts_caller/memory_page_fact_registry/register_continuous_page_batch.rs b/crates/adapter/src/aptos_writer/contracts_caller/memory_page_fact_registry/register_continuous_page_batch.rs index db9297c..68c207b 100644 --- a/crates/adapter/src/aptos_writer/contracts_caller/memory_page_fact_registry/register_continuous_page_batch.rs +++ b/crates/adapter/src/aptos_writer/contracts_caller/memory_page_fact_registry/register_continuous_page_batch.rs @@ -21,13 +21,7 @@ pub async fn register_continuous_page_batch( config: &AppConfig, data: MemoryPageEntries, ) -> anyhow::Result<()> { - let ContinuousMemoryPage { 
- z, - alpha, - prime, - values, - start_addr, - } = data.memory_page_entries.first().unwrap(); + let ContinuousMemoryPage { z, alpha, .. } = data.memory_page_entries.first().unwrap(); let z = MoveValue::U256(U256::from_str(z)?); let alpha = MoveValue::U256(U256::from_str(alpha)?); diff --git a/crates/adapter/src/aptos_writer/contracts_caller/verify_merkle/mod.rs b/crates/adapter/src/aptos_writer/contracts_caller/verify_merkle/mod.rs index d7c94b3..ba63bea 100644 --- a/crates/adapter/src/aptos_writer/contracts_caller/verify_merkle/mod.rs +++ b/crates/adapter/src/aptos_writer/contracts_caller/verify_merkle/mod.rs @@ -3,4 +3,5 @@ pub mod merkle_statement; pub mod register_fact_merkle; pub mod sample_verify_merkle_input; pub mod types; +#[allow(clippy::module_inception)] pub mod verify_merkle; diff --git a/crates/adapter/src/prover/mod.rs b/crates/adapter/src/prover/mod.rs index 388e4ab..5f8e51f 100644 --- a/crates/adapter/src/prover/mod.rs +++ b/crates/adapter/src/prover/mod.rs @@ -4,7 +4,7 @@ mod annotation_parser; mod builtin_info; mod oods_statement; mod sharp_prover; -mod stone_prover; +pub mod stone_prover; /// Default prime field for cairo. This prime will be used when modular operations are needed. pub fn default_prime() -> U256 { diff --git a/crates/adapter/src/prover/oods_statement.rs b/crates/adapter/src/prover/oods_statement.rs index 1eebe37..fb78740 100644 --- a/crates/adapter/src/prover/oods_statement.rs +++ b/crates/adapter/src/prover/oods_statement.rs @@ -16,7 +16,6 @@ use serde_json::json; use crate::prover::default_prime; /// Adapted from https://github.com/zksecurity/stark-evm-adapter/blob/main/src/oods_statement.rs - /// Proof for consistency check for out of domain sampling #[derive(Serialize, Deserialize, Debug)] pub struct FactTopology { diff --git a/crates/adapter/src/prover/stone_prover.rs b/crates/adapter/src/prover/stone_prover.rs index d3a4d30..b3d5af0 100644 --- a/crates/adapter/src/prover/stone_prover.rs +++ b/crates/adapter/src/prover/stone_prover.rs @@ -11,7 +11,7 @@ use irelia_core::entities::sharp_proof::SharpProof; use irelia_core::ports::prover::ProverPort; use scopeguard::defer; use stone_cli::args::Network::ethereum; -use stone_cli::args::{LayoutName, SerializeArgs, VerifyArgs}; +use stone_cli::args::{LayoutName, SerializeArgs, StoneVersion, VerifyArgs}; use stone_cli::bootloader::run_bootloader; use stone_cli::prover::run_stone_prover_bootloader; use stone_cli::serialize::serialize_proof; @@ -31,7 +31,6 @@ const SERIALIZED_PROOF_PATH: &str = "bootloader_serialized_proof.json"; /// This code is adapted from: https://github.com/zksecurity/stone-cli/blob/main/src/main.rs /// Generate proof from cairo pies or cairo programs - pub struct StoneProver { pub cairo_pie: Vec, pub layout: LayoutName, @@ -104,6 +103,7 @@ impl ProverPort for StoneProver { proof: proof_tmp_dir.path().join(BOOTLOADER_PROOF_NAME), annotation_file: Some(proof_tmp_dir.path().join(ANNOTATION_PATH)), extra_output_file: Some(proof_tmp_dir.path().join(EXTRA_OUTPUT_PATH)), + stone_version: StoneVersion::V5, }; run_stone_verifier(verify_args).map_err(|e| VerifierError(e.to_string()))?; @@ -111,9 +111,12 @@ impl ProverPort for StoneProver { let serialize_args = SerializeArgs { proof: proof_tmp_dir.path().join(BOOTLOADER_PROOF_NAME), network: ethereum, - output: proof_tmp_dir.path().join(SERIALIZED_PROOF_PATH), + output: Option::from(proof_tmp_dir.path().join(SERIALIZED_PROOF_PATH)), + output_dir: None, + layout: None, annotation_file: Some(proof_tmp_dir.path().join(ANNOTATION_PATH)), 
extra_output_file: Some(proof_tmp_dir.path().join(EXTRA_OUTPUT_PATH)), + serialization_type: None, }; serialize_proof(serialize_args).map_err(|e| SerializationError(e.to_string()))?; @@ -143,6 +146,5 @@ mod tests { let layout = LayoutName::starknet; let stone_prover = StoneProver { layout, cairo_pie }; assert!(stone_prover.generate_proof().await.is_ok()); - // println!("vjp"); } } diff --git a/crates/adapter/src/repositories/postgres/job_db.rs b/crates/adapter/src/repositories/postgres/job_db.rs index 1464670..ce3b9bb 100644 --- a/crates/adapter/src/repositories/postgres/job_db.rs +++ b/crates/adapter/src/repositories/postgres/job_db.rs @@ -1,3 +1,5 @@ +use std::fmt::{Debug, Formatter}; + use async_trait::async_trait; use deadpool_diesel::postgres::Pool; use diesel::{ @@ -10,13 +12,12 @@ use irelia_core::ports::job::JobPort; use crate::repositories::postgres::models::job::JobModel; use crate::repositories::postgres::schema::jobs::dsl::jobs; -use crate::repositories::postgres::schema::jobs::id; +use crate::repositories::postgres::schema::jobs::{cairo_job_key, customer_id, id}; // NOTE: path relative to Cargo.toml pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./src/repositories/postgres/migrations"); -#[derive(Clone)] pub struct JobDBRepository { pub db: Pool, } @@ -27,6 +28,12 @@ impl JobDBRepository { } } +impl Debug for JobDBRepository { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("JobDBRepository").finish() + } +} + #[async_trait] impl JobPort for JobDBRepository { async fn add(&self, job: JobEntity) -> Result { @@ -35,16 +42,15 @@ impl JobPort for JobDBRepository { .await .unwrap() .interact(move |conn| { - let job = + let job_model = JobModel::try_from(job).map_err(|err| CoreError::InternalError(err.into()))?; let response = insert_into(jobs) - .values(&job) + .values(&job_model) .get_result::(conn) .map_err(|err| match err { diesel::result::Error::NotFound => CoreError::NotFound, _ => CoreError::InternalError(err.into()), - }) - .unwrap(); + })?; Ok(response.into()) }) .await @@ -57,10 +63,10 @@ impl JobPort for JobDBRepository { .await .unwrap() .interact(move |conn| { - let job = + let job_model = JobModel::try_from(job).map_err(|err| CoreError::InternalError(err.into()))?; - let response = update(jobs.filter(id.eq(job.id))) - .set(&job) + let response = update(jobs.filter(id.eq(job_model.id))) + .set(&job_model) .get_result::(conn) .map_err(|err| match err { diesel::result::Error::NotFound => CoreError::NotFound, @@ -95,16 +101,20 @@ impl JobPort for JobDBRepository { .unwrap() } - async fn get(&self, job_id: &JobId) -> Result { - let job_id = job_id.0; + async fn get_job( + &self, + customer_id_value: String, + cairo_job_key_value: String, + ) -> Result { self.db .get() .await .unwrap() .interact(move |conn| { let response = jobs + .filter(customer_id.eq(customer_id_value)) + .filter(cairo_job_key.eq(cairo_job_key_value)) .select(JobModel::as_select()) - .find(job_id) .first(conn) .map_err(|err| match err { diesel::result::Error::NotFound => CoreError::NotFound, diff --git a/crates/adapter/src/repositories/postgres/migrations/2024-11-25-092759_create_status_job/down.sql b/crates/adapter/src/repositories/postgres/migrations/2024-11-25-092759_create_status_job/down.sql new file mode 100644 index 0000000..f09ee0e --- /dev/null +++ b/crates/adapter/src/repositories/postgres/migrations/2024-11-25-092759_create_status_job/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE jobs; diff --git 
a/crates/adapter/src/repositories/postgres/migrations/2024-11-25-092759_create_status_job/up.sql b/crates/adapter/src/repositories/postgres/migrations/2024-11-25-092759_create_status_job/up.sql new file mode 100644 index 0000000..0997d66 --- /dev/null +++ b/crates/adapter/src/repositories/postgres/migrations/2024-11-25-092759_create_status_job/up.sql @@ -0,0 +1,12 @@ +-- Your SQL goes here +CREATE TABLE jobs ( + id UUID PRIMARY KEY, + customer_id VARCHAR NOT NULL, + cairo_job_key VARCHAR NOT NULL, + status VARCHAR NOT NULL, + invalid_reason VARCHAR NOT NULL, + error_log VARCHAR NOT NULL, + validation_done BOOLEAN NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW() +); diff --git a/crates/adapter/src/repositories/postgres/models/job.rs b/crates/adapter/src/repositories/postgres/models/job.rs index 6b055db..4ef079a 100644 --- a/crates/adapter/src/repositories/postgres/models/job.rs +++ b/crates/adapter/src/repositories/postgres/models/job.rs @@ -1,8 +1,9 @@ -use std::io::{Error, ErrorKind}; +use std::io::Error; +use std::str::FromStr; use std::time::SystemTime; use diesel::{AsChangeset, Identifiable, Insertable, Queryable, Selectable}; -use irelia_core::entities::job::{JobEntity, JobId}; +use irelia_core::entities::job::{CairoJobStatus, JobEntity, JobId}; use uuid::Uuid; #[derive(Debug, Queryable, Insertable, Selectable, AsChangeset, Identifiable)] @@ -11,31 +12,32 @@ pub struct JobModel { pub id: Uuid, pub customer_id: String, pub cairo_job_key: String, - pub offchain_proof: bool, - pub proof_layout: String, - pub cairo_pie: String, + pub status: String, + pub invalid_reason: String, + pub error_log: String, + pub validation_done: bool, - pub created_on: SystemTime, + #[diesel(skip_insertion)] + pub updated_at: SystemTime, + #[diesel(skip_insertion)] + pub created_at: SystemTime, } impl TryFrom for JobModel { type Error = Error; - fn try_from(entity: JobEntity) -> Result { - let id = entity - .id - .0 - .try_into() - .map_err(|_| Error::new(ErrorKind::InvalidInput, "Invalid ID"))?; - + fn try_from(entity: JobEntity) -> Result { Ok(JobModel { - id, + id: entity.id.0, customer_id: entity.customer_id, cairo_job_key: entity.cairo_job_key, - offchain_proof: entity.offchain_proof, - proof_layout: entity.proof_layout, - cairo_pie: entity.cairo_pie, - created_on: SystemTime::now(), + status: entity.status.to_string(), + invalid_reason: entity.invalid_reason, + error_log: entity.error_log, + validation_done: entity.validation_done, + + updated_at: SystemTime::now(), + created_at: SystemTime::now(), }) } } @@ -43,12 +45,13 @@ impl TryFrom for JobModel { impl From for JobEntity { fn from(val: JobModel) -> Self { JobEntity { - id: JobId(val.id.try_into().unwrap()), + id: JobId(val.id), customer_id: val.customer_id, cairo_job_key: val.cairo_job_key, - offchain_proof: val.offchain_proof, - proof_layout: val.proof_layout, - cairo_pie: "".to_string(), + status: CairoJobStatus::from_str(val.status.as_str()).unwrap(), + invalid_reason: val.invalid_reason, + error_log: val.error_log, + validation_done: val.validation_done, } } } diff --git a/crates/adapter/src/repositories/postgres/schema.rs b/crates/adapter/src/repositories/postgres/schema.rs index cf6462a..989693a 100644 --- a/crates/adapter/src/repositories/postgres/schema.rs +++ b/crates/adapter/src/repositories/postgres/schema.rs @@ -1,14 +1,16 @@ // @generated automatically by Diesel CLI. - diesel::table! 
{ - jobs (id) { + jobs(id) { id -> Uuid, #[max_length = 255] customer_id -> Varchar, cairo_job_key -> Varchar, - offchain_proof -> Bool, - proof_layout -> Varchar, - cairo_pie -> Text, - created_on -> Timestamp, + status -> Varchar, + invalid_reason -> Varchar, + error_log -> Varchar, + validation_done -> Bool, + + updated_at -> Timestamp, + created_at -> Timestamp, } } diff --git a/crates/adapter/src/worker/mod.rs b/crates/adapter/src/worker/mod.rs index a1989b1..e77c9d8 100644 --- a/crates/adapter/src/worker/mod.rs +++ b/crates/adapter/src/worker/mod.rs @@ -5,7 +5,7 @@ use async_trait::async_trait; use graphile_worker::WorkerUtils; use irelia_common::workers::{Worker, ADD_JOB_WORKER_IDENTIFIER}; use irelia_core::common::core_error::CoreError; -use irelia_core::entities::job::JobEntity; +use irelia_core::entities::worker_job::WorkerJobEntity; use irelia_core::ports::worker::WorkerPort; use sqlx::postgres::PgConnectOptions; use tracing_opentelemetry::OpenTelemetrySpanExt; @@ -30,7 +30,7 @@ impl WorkerAdapter { #[async_trait] impl WorkerPort for WorkerAdapter { - async fn add(&self, job: JobEntity) -> Result { + async fn add(&self, job: WorkerJobEntity) -> Result { // retrieve the current span let span = tracing::Span::current(); // retrieve the current context diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 37a3863..f76497f 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -11,10 +11,8 @@ opentelemetry = { workspace = true } opentelemetry-otlp = { workspace = true, features = ["tonic"] } opentelemetry-semantic-conventions = { workspace = true } opentelemetry_sdk = { workspace = true, features = ["rt-tokio"] } -prost = { workspace = true } serde = { workspace = true, features = ["derive"] } tokio = { workspace = true } -tonic = { workspace = true } tracing = { workspace = true } tracing-bunyan-formatter = { workspace = true } tracing-opentelemetry = { workspace = true } diff --git a/crates/common/src/cli_args.rs b/crates/common/src/cli_args.rs index b44ef3a..563e4e4 100644 --- a/crates/common/src/cli_args.rs +++ b/crates/common/src/cli_args.rs @@ -1,7 +1,6 @@ use std::fmt::Debug; use std::process::exit; -use clap::builder::TypedValueParser; use clap::{Parser, Subcommand}; use serde::Deserialize; diff --git a/crates/common/src/workers/mod.rs b/crates/common/src/workers/mod.rs index 572e5a1..7caea56 100644 --- a/crates/common/src/workers/mod.rs +++ b/crates/common/src/workers/mod.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use serde::{Deserialize, Serialize}; -#[derive(Deserialize, Serialize)] +#[derive(Deserialize, Serialize, Debug)] pub struct Worker { pub data: T, pub tracing: HashMap, diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index e14f01a..17c0e99 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -12,6 +12,5 @@ num-bigint = { workspace = true } regex = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } -stone-cli = { workspace = true } thiserror = { workspace = true } uuid = { workspace = true, features = ["v4", "serde"] } diff --git a/crates/core/src/entities/job.rs b/crates/core/src/entities/job.rs index f3ed1e1..3eaf6f6 100644 --- a/crates/core/src/entities/job.rs +++ b/crates/core/src/entities/job.rs @@ -1,7 +1,52 @@ +use std::fmt; +use std::str::FromStr; + use serde::{Deserialize, Serialize}; use uuid::Uuid; -/// Identifier for a question. 
-#[derive(Debug, Eq, Hash, PartialEq, Serialize, Deserialize, Clone)] + +#[derive(Serialize, Deserialize, PartialEq, Debug)] +#[allow(non_camel_case_types)] +pub enum CairoJobStatus { + FAILED, // Stone failed + INVALID, // Wrong pie format + UNKNOWN, // + IN_PROGRESS, // init status + NOT_CREATED, // + PROCESSED, // stone completed => to submit on chain + ONCHAIN, // stone completed and submit on chain completed +} + +impl fmt::Display for CairoJobStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + CairoJobStatus::FAILED => write!(f, "FAILED"), + CairoJobStatus::INVALID => write!(f, "INVALID"), + CairoJobStatus::UNKNOWN => write!(f, "UNKNOWN"), + CairoJobStatus::IN_PROGRESS => write!(f, "IN_PROGRESS"), + CairoJobStatus::NOT_CREATED => write!(f, "NOT_CREATED"), + CairoJobStatus::PROCESSED => write!(f, "PROCESSED"), + CairoJobStatus::ONCHAIN => write!(f, "ONCHAIN"), + } + } +} + +impl FromStr for CairoJobStatus { + type Err = String; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s.to_uppercase().as_str() { + "FAILED" => Ok(CairoJobStatus::FAILED), + "INVALID" => Ok(CairoJobStatus::INVALID), + "UNKNOWN" => Ok(CairoJobStatus::UNKNOWN), + "IN_PROGRESS" => Ok(CairoJobStatus::IN_PROGRESS), + "NOT_CREATED" => Ok(CairoJobStatus::NOT_CREATED), + "PROCESSED" => Ok(CairoJobStatus::PROCESSED), + "ONCHAIN" => Ok(CairoJobStatus::ONCHAIN), + _ => Err(format!("'{}' is not a valid value of job status", s)), + } + } +} + +#[derive(Debug, Eq, Hash, PartialEq, Serialize, Deserialize)] pub struct JobId(pub Uuid); #[derive(Serialize, Deserialize, PartialEq, Debug)] @@ -9,13 +54,8 @@ pub struct JobEntity { pub id: JobId, pub customer_id: String, pub cairo_job_key: String, - pub offchain_proof: bool, - pub proof_layout: String, - pub cairo_pie: String, -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub struct JobResponse { - pub code: Option<String>, - pub message: Option<String>, + pub status: CairoJobStatus, + pub invalid_reason: String, + pub error_log: String, + pub validation_done: bool, } diff --git a/crates/core/src/entities/mod.rs b/crates/core/src/entities/mod.rs index 3aaf432..210378a 100644 --- a/crates/core/src/entities/mod.rs +++ b/crates/core/src/entities/mod.rs @@ -5,3 +5,4 @@ pub mod job; pub mod memory_statement; pub mod merkle_statement; pub mod sharp_proof; +pub mod worker_job; diff --git a/crates/core/src/entities/worker_job.rs b/crates/core/src/entities/worker_job.rs new file mode 100644 index 0000000..b79bf92 --- /dev/null +++ b/crates/core/src/entities/worker_job.rs @@ -0,0 +1,24 @@ +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, PartialEq)] +pub enum WorkerJobStatus { + IncorrectLayout, + NoCairoJobId, + IncorrectOffchainProof, + + Successfully, +} + +#[derive(Debug, Eq, Hash, PartialEq, Serialize, Deserialize, Clone)] +pub struct WorkerJobId(pub Uuid); + +#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] +pub struct WorkerJobEntity { + pub id: WorkerJobId, + pub customer_id: String, + pub cairo_job_key: String, + pub offchain_proof: bool, + pub proof_layout: String, + pub cairo_pie: String, +} diff --git a/crates/core/src/ports/job.rs b/crates/core/src/ports/job.rs index 6858593..02a059c 100644 --- a/crates/core/src/ports/job.rs +++ b/crates/core/src/ports/job.rs @@ -8,5 +8,9 @@ pub trait JobPort { async fn add(&self, job: JobEntity) -> Result<JobEntity, CoreError>; async fn update(&self, job: JobEntity) -> Result<JobEntity, CoreError>; async fn delete(&self, job_id: &JobId) -> Result<(), CoreError>; - async fn get(&self, job_id: &JobId) -> Result<JobEntity, CoreError>; + async fn get_job( + &self, + customer_id: String, + cairo_job_key: String, + ) -> Result<JobEntity, CoreError>; } diff --git a/crates/core/src/ports/worker.rs b/crates/core/src/ports/worker.rs index 036aea3..221d121 100644 --- a/crates/core/src/ports/worker.rs +++ b/crates/core/src/ports/worker.rs @@ -1,9 +1,9 @@ use async_trait::async_trait; use crate::common::core_error::CoreError; -use crate::entities::job::JobEntity; +use crate::entities::worker_job::WorkerJobEntity; #[async_trait] pub trait WorkerPort { - async fn add(&self, job: JobEntity) -> Result; + async fn add(&self, job: WorkerJobEntity) -> Result; } diff --git a/crates/public/Cargo.toml b/crates/public/Cargo.toml index 4761519..7dfc00e 100644 --- a/crates/public/Cargo.toml +++ b/crates/public/Cargo.toml @@ -11,6 +11,7 @@ irelia_core = { workspace = true } anyhow = { workspace = true } axum = { workspace = true, features = ["macros"] } +base64 = { workspace = true } clap = { workspace = true, features = ["derive"] } deadpool-diesel = { workspace = true, features = ["postgres", "serde"] } diesel = { workspace = true, features = [ @@ -19,17 +20,17 @@ diesel = { workspace = true, features = [ "uuid", ] } diesel_migrations = { workspace = true } -openssl = { workspace = true } opentelemetry = { workspace = true } -rand = { workspace = true } readonly = { workspace = true } +reqwest = { workspace = true, features = ["json"] } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } -testcontainers-modules = { workspace = true, features = ["postgres", "redis"] } +stone-cli = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } +tokio-postgres = { workspace = true } +toml = { workspace = true } tower-http = { workspace = true, features = ["timeout", "trace"] } tracing = { workspace = true } -tracing-opentelemetry = { workspace = true } -graphile_worker = { workspace = true } -uuid = { version = "1.10.0", features = ["v4"] } +uuid = { workspace = true } +zip = { workspace = true } diff --git a/crates/public/src/app_state.rs b/crates/public/src/app_state.rs index 2276f79..339c2c7 100644 --- a/crates/public/src/app_state.rs +++ b/crates/public/src/app_state.rs @@ -1,22 +1,19 @@ use std::sync::Arc; -use irelia_core::ports::job::JobPort; -use irelia_core::ports::worker::WorkerPort; +use crate::services::job::JobService; +use crate::services::worker_job::WorkerJobService; #[derive(Clone)] pub struct AppState { - pub job_port: Arc, - pub worker_port: Arc, + pub worker_service: Arc<WorkerJobService>, + pub job_service: Arc<JobService>, } impl AppState { - pub fn new( - job_port: Arc, - worker_port: Arc, - ) -> Self { + pub fn new(worker_service: Arc<WorkerJobService>, job_service: Arc<JobService>) -> Self { Self { - job_port, - worker_port, + worker_service, + job_service, } } } diff --git a/crates/public/src/assets/test_data/encoded_cairo_pie.txt b/crates/public/src/assets/test_data/encoded_cairo_pie.txt new file mode 100644 index 0000000..8ad2b3c --- /dev/null +++ b/crates/public/src/assets/test_data/encoded_cairo_pie.txt @@ -0,0 +1 @@
+UEsDBBQAAAAIAAAAIQB4ytupXQEAAKMDAAANAAAAbWV0YWRhdGEuanNvbq2S24rCQAyGX0V67UXOk+yrLCJd7UphraIVZMV339QDylLvhKGkyTf//JPJqdruNqtdva4+JqeM23WTERu6AhV3M0NGQrYoTJlhCIUCCGrECIVQmVHDLCIRCMoKsnohIBDH6aRa1n2dqp8UwIIiEhii4RzTSdbFkKwgOkUwpjz4dKLoqWdsooQu7kwX+HU+EIHIAJkLCRpkPv9JtAiJkxIhJmzXE9mj5BdRFE3KRcHTkxqXkupeNOFhcRLgxYyxMApIKqDYoK3mwgYcpqn6cCEUyq6STl8IjJJKkJ0NESJwZnaSF77e+EKv+vS+IwqPNmGWo/F1aH/6ttsP41FtDv320FdDfl23XebgnPFtQuf7ZrVuuv4yqW23bI4DkPV9+zsMLekAN8dmcejbTTeK4wM3HPBd08+/t6MsP1i4o9vFKCr/0Nut7uz+At9u97yPntyfr+77Xf287XN2/gNQSwMEFAAAAAgAAAAhAIbCeb3VAQAAwA0AAAoAAABtZW1vcnkuYmlujdXJVsJAEAXQJEAIYZ5xxgkV5xGnBUt/wa9J7934ux7FSMxLThZUXhUbFpeqV306xLL+y4RBGNimXLSEsuFsCaAcOMssAssU5pIrpC6a675JrqicW1L2c5X9ypl+lvHFfl7q7OWn2JZcBc4lc324MFgs93DmkqumcyM3EfPV4Bwytw73G0Q34WMsuQbcIr4vYtsmnHihUC24aOOfQD7ntjJfR9mvm7rvpZuKrpe6/HPuJ04o5PcGab/8+zKE+wpXF87NG8Gxc0mmxl/E2x8/F4mTn5L4PqX9xGtgCnB+brCkbOqK6Kf6P1A4V5mvDId9pDKerp+pKPP5cOz5rcJViKtlnZHz1eFG+e1MQ5mvCVcgrgXnEdeGw3wxXweO7dFV5uvBlYjrw5WJG8BhvphvCMf2GCnzrcGxc16HY++tjWw+I+fbzDq5zJYy3zZcnbgdOHYuYzjMF/PtwrE99pT59uH6xB3AsffqIRzmi/kmcGyPI2W+4+zcnDqBY8/bFA7zxXyncGyPM2W+c7gZcRdw7P/vEg7zxXxXcGyPa2W+G7h34m7h2PvoDg7zxXz3cGyPB2W+GdwncY9whFlPcJgv5nuGY3u8KPPF52tT94p+TuZXq+b+AVBLAwQUAAAACAAAACEArCBYhC0AAAAzAAAAFAAAAGFkZGl0aW9uYWxfZGF0YS5qc29uq1bKLy0pKC2JTyrNzCnJzFOyUqhWKkhMTy0GsWp1FJQSS0qKMpNKS6AitbUAUEsDBBQAAAAIAAAAIQBhYoo8TwAAAFcAAAAYAAAAZXhlY3V0aW9uX3Jlc291cmNlcy5qc29uq1bKiy8uSS0oVrJSMDfSUVBKKs3MKcnMi8/MKy5JzEtOjU/OL80rSS0Cylcr5ZeWFJSWxEPVAIWMaoFa8uJzU3PziyrjM/JzUkEGGdQCAFBLAwQUAAAACAAAACEA2YDFkhYAAAAUAAAADAAAAHZlcnNpb24uanNvbqtWSk7MLMqPL8hMVbJSUDLUM1SqBQBQSwECFAMUAAAACAAAACEAeMrbqV0BAACjAwAADQAAAAAAAAAAAAAAgAEAAAAAbWV0YWRhdGEuanNvblBLAQIUAxQAAAAIAAAAIQCGwnm91QEAAMANAAAKAAAAAAAAAAAAAACAAYgBAABtZW1vcnkuYmluUEsBAhQDFAAAAAgAAAAhAKwgWIQtAAAAMwAAABQAAAAAAAAAAAAAAIABhQMAAGFkZGl0aW9uYWxfZGF0YS5qc29uUEsBAhQDFAAAAAgAAAAhAGFiijxPAAAAVwAAABgAAAAAAAAAAAAAAIAB5AMAAGV4ZWN1dGlvbl9yZXNvdXJjZXMuanNvblBLAQIUAxQAAAAIAAAAIQDZgMWSFgAAABQAAAAMAAAAAAAAAAAAAACAAWkEAAB2ZXJzaW9uLmpzb25QSwUGAAAAAAUABQA1AQAAqQQAAAAA \ No newline at end of file diff --git a/crates/public/src/assets/test_data/fibonacci_with_output.zip b/crates/public/src/assets/test_data/fibonacci_with_output.zip new file mode 100644 index 0000000..e91f58e Binary files /dev/null and b/crates/public/src/assets/test_data/fibonacci_with_output.zip differ diff --git a/crates/public/src/controllers/job.rs b/crates/public/src/controllers/job.rs index c7afaa3..305ac7d 100644 --- a/crates/public/src/controllers/job.rs +++ b/crates/public/src/controllers/job.rs @@ -1,39 +1,38 @@ -use std::str; - -use axum::body::Bytes; -use axum::extract::State; -use irelia_core::entities::job::{JobEntity, JobId, JobResponse}; +use axum::extract::{Query, State}; +use serde::Deserialize; +use serde_json::{json, Value}; use tracing::instrument; use tracing::log::info; -use uuid::Uuid; use crate::app_state::AppState; use crate::errors::AppError; use crate::json_response::JsonResponse; +use crate::services::job::JobResponse; + +#[derive(Debug, Deserialize)] +pub struct GetStatusParams { + pub customer_id: String, + pub cairo_job_key: String, +} #[instrument(level = "info", skip(app_state))] -pub async fn add_job( +pub async fn get_status( State(app_state): State, - body: Bytes, -) -> Result>, AppError> { - // TODO: Process the data - let data = str::from_utf8(&body).unwrap(); - info!("{}", data); - - let job_entity = app_state - .worker_port - .add(JobEntity { - id: JobId(Uuid::new_v4()), - customer_id: "1".to_string(), - cairo_job_key: "1".to_string(), - offchain_proof: 
false, - proof_layout: "1".to_string(), - cairo_pie: "1".to_string(), - }) - .await?; + Query(params): Query<GetStatusParams>, +) -> Result<JsonResponse<JobResponse>, AppError> { + let res = app_state.job_service.get_job_status(params).await?; + Ok(JsonResponse(res)) +} - Ok(JsonResponse(vec![JobResponse { - code: Some("JOB_RECEIVED_SUCCESSFULLY".to_string()), - message: Some(job_entity.id.0.to_string()), - }])) +#[instrument(level = "info", skip(_app_state))] +pub async fn get_proof( + State(_app_state): State<AppState>, + Query(params): Query<GetStatusParams>, +) -> Result<JsonResponse<Value>, AppError> { + // TODO: process get proof + info!("params: {:?}", params); + let res = json!({ + "code" : "NO_OFFCHAIN_PROOF_FOR_JOB" + }); + Ok(JsonResponse(res)) } diff --git a/crates/public/src/controllers/mod.rs b/crates/public/src/controllers/mod.rs index 80daa3e..c1ad2e1 100644 --- a/crates/public/src/controllers/mod.rs +++ b/crates/public/src/controllers/mod.rs @@ -1 +1,2 @@ pub mod job; +pub mod worker_job; diff --git a/crates/public/src/controllers/worker_job.rs b/crates/public/src/controllers/worker_job.rs new file mode 100644 index 0000000..4fee247 --- /dev/null +++ b/crates/public/src/controllers/worker_job.rs @@ -0,0 +1,32 @@ +use std::str; + +use axum::extract::{Query, State}; +use serde::{Deserialize, Serialize}; +use tracing::instrument; + +use crate::app_state::AppState; +use crate::errors::AppError; +use crate::json_response::JsonResponse; +use crate::services::worker_job::WorkerJobResponse; + +#[derive(Serialize, Deserialize, PartialEq, Debug)] +pub struct WorkerJob { + pub customer_id: String, + pub cairo_job_key: Option<String>, + pub offchain_proof: bool, + pub proof_layout: String, +} + +#[instrument(level = "info", skip(app_state))] +pub async fn add_worker_job( + State(app_state): State<AppState>, + Query(params): Query<WorkerJob>, + cairo_pie_req: String, +) -> Result<JsonResponse<WorkerJobResponse>, AppError> { + let res = app_state + .worker_service + .add_worker_job(params, cairo_pie_req) + .await?; + + Ok(JsonResponse(res)) +} diff --git a/crates/public/src/errors.rs b/crates/public/src/errors.rs index 68a9b3f..28580cf 100644 --- a/crates/public/src/errors.rs +++ b/crates/public/src/errors.rs @@ -17,6 +17,8 @@ pub enum AppError { CoreError(#[from] CoreError), #[error("io error")] IOError(#[from] io::Error), + #[error("unknown error")] + Unknown(#[from] anyhow::Error), } // Tell axum how `AppError` should be converted into a response. 
diff --git a/crates/public/src/lib.rs b/crates/public/src/lib.rs index 09312fe..8fb8dd1 100644 --- a/crates/public/src/lib.rs +++ b/crates/public/src/lib.rs @@ -4,3 +4,6 @@ pub mod errors; pub mod json_response; pub mod options; pub mod router; +pub mod services; +pub mod tests; +pub mod utils; diff --git a/crates/public/src/main.rs b/crates/public/src/main.rs index 48f0853..3078712 100644 --- a/crates/public/src/main.rs +++ b/crates/public/src/main.rs @@ -1,13 +1,17 @@ +use std::env; use std::sync::Arc; use std::time::Duration; use clap::{Parser, Subcommand}; use deadpool_diesel::postgres::Pool; use deadpool_diesel::{Manager, Runtime}; +use diesel_migrations::MigrationHarness; use irelia::app_state::AppState; use irelia::options::Options; use irelia::router::routes; -use irelia_adapter::repositories::postgres::job_db::JobDBRepository; +use irelia::services::job::JobService; +use irelia::services::worker_job::WorkerJobService; +use irelia_adapter::repositories::postgres::job_db::{JobDBRepository, MIGRATIONS}; use irelia_adapter::worker::WorkerAdapter; use irelia_common::cli_args::CliArgs; use irelia_common::kill_signals; @@ -21,7 +25,6 @@ use tracing::info; #[tokio::main] async fn main() { let options: Options = CliArgs::default_run_or_get_options(env!("APP_VERSION")); - init_telemetry( options.service_name.as_str(), options.exporter_endpoint.as_str(), @@ -65,9 +68,21 @@ pub async fn serve(options: Options) { .build() .unwrap(); - // TODO: use the same DB pool for the worker_adapter + // Migration the database + let conn = pool.get().await.unwrap(); + let _ = conn + .interact(|connection| { + let result = MigrationHarness::run_pending_migrations(connection, MIGRATIONS); + match result { + Ok(_) => Ok(()), + Err(err) => Err(err), + } + }) + .await; - let job_repository = Arc::new(JobDBRepository::new(pool.clone())); + let job_repository = Arc::new(JobDBRepository::new(pool)); + let job_service = Arc::new(JobService::new(job_repository)); + // TODO: use the same DB pool for the worker_adapter let worker_adapter: Arc = Arc::new( WorkerAdapter::new( &options.pg.url, @@ -76,7 +91,9 @@ pub async fn serve(options: Options) { ) .await, ); - let routes = routes(AppState::new(job_repository, worker_adapter)).layer(( + let worker_job_service = Arc::new(WorkerJobService::new(worker_adapter, job_service.clone())); + + let routes = routes(AppState::new(worker_job_service, job_service)).layer(( TraceLayer::new_for_http(), // Graceful shutdown will wait for outstanding requests to complete. Add a timeout so // requests don't hang forever. 
diff --git a/crates/public/src/router.rs b/crates/public/src/router.rs index 848d08c..00b40c8 100644 --- a/crates/public/src/router.rs +++ b/crates/public/src/router.rs @@ -6,7 +6,8 @@ use axum::{ }; use crate::app_state::AppState; -use crate::controllers::job::add_job; +use crate::controllers::job::{get_proof, get_status}; +use crate::controllers::worker_job::add_worker_job; pub fn routes(app_state: AppState) -> Router { Router::new() @@ -14,7 +15,9 @@ pub fn routes(app_state: AppState) -> Router { .nest( "/v1/gateway", Router::new() - .route("/add_job", post(add_job)) + .route("/add_job", post(add_worker_job)) + .route("/get_status", get(get_status)) + .route("/get_proof", get(get_proof)) .with_state(app_state), ) .fallback(handler_404) diff --git a/crates/public/src/services/job.rs b/crates/public/src/services/job.rs new file mode 100644 index 0000000..c2a0e55 --- /dev/null +++ b/crates/public/src/services/job.rs @@ -0,0 +1,70 @@ +use std::sync::Arc; + +use irelia_core::entities::job::{CairoJobStatus, JobEntity, JobId}; +use irelia_core::ports::job::JobPort; +use serde::{Deserialize, Serialize}; +use tracing::log::debug; +use uuid::Uuid; + +use crate::controllers::job::GetStatusParams; +use crate::controllers::worker_job::WorkerJob; +use crate::errors::AppError; + +pub struct JobService { + job: Arc<dyn JobPort + Send + Sync>, +} + +impl JobService { + pub fn new(job: Arc<dyn JobPort + Send + Sync>) -> Self { + Self { job } + } + + pub async fn add_job( + &self, + params: WorkerJob, + job_status: CairoJobStatus, + validation_done_value: bool, + ) -> Result<(), AppError> { + let job = self + .job + .add(JobEntity { + id: JobId(Uuid::new_v4()), + customer_id: params.customer_id, + cairo_job_key: params.cairo_job_key.unwrap(), + status: job_status, + invalid_reason: Default::default(), + error_log: Default::default(), + validation_done: validation_done_value, + }) + .await?; + debug!("{:?}", job); + Ok(()) + } + + pub async fn get_job_status(&self, params: GetStatusParams) -> Result<JobResponse, AppError> { + let job = self + .job + .get_job(params.customer_id, params.cairo_job_key) + .await?; + Ok(JobResponse::get_job_response(job)) + } +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct JobResponse { + pub status: String, + pub invalid_reason: String, + pub error_log: String, + pub validation_done: bool, +} + +impl JobResponse { + pub fn get_job_response(job: JobEntity) -> Self { + JobResponse { + status: job.status.to_string(), + invalid_reason: job.invalid_reason, + error_log: job.error_log, + validation_done: job.validation_done, + } + } +} diff --git a/crates/public/src/services/mod.rs b/crates/public/src/services/mod.rs new file mode 100644 index 0000000..c1ad2e1 --- /dev/null +++ b/crates/public/src/services/mod.rs @@ -0,0 +1,2 @@ +pub mod job; +pub mod worker_job; diff --git a/crates/public/src/services/worker_job.rs b/crates/public/src/services/worker_job.rs new file mode 100644 index 0000000..b9e0bbf --- /dev/null +++ b/crates/public/src/services/worker_job.rs @@ -0,0 +1,126 @@ +use std::{str::FromStr, sync::Arc}; + +use irelia_core::entities::job::CairoJobStatus::IN_PROGRESS; +use irelia_core::entities::worker_job::WorkerJobStatus::{ + IncorrectLayout, IncorrectOffchainProof, NoCairoJobId, Successfully, +}; +use irelia_core::entities::worker_job::{WorkerJobEntity, WorkerJobId, WorkerJobStatus}; +use irelia_core::ports::worker::WorkerPort; +use serde::{Deserialize, Serialize}; +use stone_cli::args::LayoutName; +use uuid::Uuid; + +use crate::controllers::worker_job::WorkerJob; +use crate::errors::AppError; +use 
crate::errors::AppError::Unknown; +use crate::services::job::JobService; +use crate::utils::save_cairo_pie; + +const SUCCESSFULLY_CODE: &str = "JOB_RECEIVED_SUCCESSFULLY"; +const INTERNAL_SERVER_ERROR_CODE: &str = "500"; +const INTERNAL_SERVER_ERROR_MESSAGE: &str = "Internal server error"; + +pub struct WorkerJobService { + worker_job: Arc, + job_service: Arc, +} + +impl WorkerJobService { + pub fn new( + worker_job: Arc, + job_service: Arc, + ) -> Self { + Self { + worker_job, + job_service, + } + } + + pub async fn add_worker_job( + &self, + params: WorkerJob, + cairo_pie_req: String, + ) -> Result { + let response_code = Self::check_job(¶ms); + + if matches!( + response_code, + IncorrectLayout | NoCairoJobId | IncorrectOffchainProof + ) { + return Ok(WorkerJobResponse::get_worker_job_response(response_code)); + } + + let cairo_pie = save_cairo_pie(&cairo_pie_req, params.cairo_job_key.as_ref().unwrap()) + .map_err(|e| Unknown(e))? + .to_string_lossy() + .to_string(); + let _ = self + .worker_job + .add(WorkerJobEntity { + id: WorkerJobId(Uuid::new_v4()), + customer_id: params.customer_id.clone(), + cairo_job_key: params.cairo_job_key.clone().unwrap(), + offchain_proof: params.offchain_proof.clone(), + proof_layout: params.proof_layout.clone(), + cairo_pie, + }) + .await?; + let _ = self.job_service.add_job(params, IN_PROGRESS, false).await; + Ok(WorkerJobResponse::get_worker_job_response(response_code)) + } + + pub fn check_job(params: &WorkerJob) -> WorkerJobStatus { + // Check incorrect layout + match LayoutName::from_str(params.proof_layout.to_lowercase().as_str()) { + Ok(_) => (), + _ => { + return IncorrectLayout; + } + } + // Check no cairo job id + if params.cairo_job_key.is_none() { + return NoCairoJobId; + }; + + // check incorrect off chain proof + if !params.offchain_proof { + return IncorrectOffchainProof; + }; + + // Successfully + Successfully + } +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct WorkerJobResponse { + pub code: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub message: Option, +} + +impl WorkerJobResponse { + pub fn successfully() -> Self { + WorkerJobResponse { + code: SUCCESSFULLY_CODE.to_string(), + message: None, + } + } + + pub fn internal_server_error() -> Self { + WorkerJobResponse { + code: INTERNAL_SERVER_ERROR_CODE.to_string(), + message: Some(INTERNAL_SERVER_ERROR_MESSAGE.to_string()), + } + } + + pub fn get_worker_job_response(code: WorkerJobStatus) -> Self { + match code { + IncorrectLayout => Self::internal_server_error(), + NoCairoJobId => Self::internal_server_error(), + IncorrectOffchainProof => Self::internal_server_error(), + + Successfully => Self::successfully(), + } + } +} diff --git a/crates/public/src/tests/mod.rs b/crates/public/src/tests/mod.rs new file mode 100644 index 0000000..8c8905e --- /dev/null +++ b/crates/public/src/tests/mod.rs @@ -0,0 +1,4 @@ +#[cfg(test)] +pub mod test_add_job; +#[cfg(test)] +pub mod test_get_status; diff --git a/crates/public/src/tests/test_add_job.rs b/crates/public/src/tests/test_add_job.rs new file mode 100644 index 0000000..c1d9e3d --- /dev/null +++ b/crates/public/src/tests/test_add_job.rs @@ -0,0 +1,135 @@ +use std::fs; + +use reqwest::Client; +use serde_json::{json, Value}; +use tokio; +use uuid::Uuid; + +use crate::options::Options; + +#[tokio::test] +async fn test_add_job() { + let client = Client::new(); + + let config_content = + fs::read_to_string("./config/00-default.toml").expect("Failed to read config file"); + + let options: Options = 
toml::from_str(&config_content).expect("Failed to parse config file"); + + let base_url = format!( + "http://{}:{}", + options.server.url.as_str(), + options.server.port + ); + + let cairo_pie = fs::read_to_string("./src/assets/test_data/encoded_cairo_pie.txt").unwrap(); + + test_incorrect_layout(client.clone(), base_url.clone(), cairo_pie.clone()).await; + println!("✅ test_incorrect_layout completed"); + + test_additional_bad_flag(client.clone(), base_url.clone(), cairo_pie.clone()).await; + println!("✅ test_additional_bad_flag completed"); + + test_no_cairo_job_id(client.clone(), base_url.clone(), cairo_pie.clone()).await; + println!("✅ test_no_cairo_job_id completed"); + + test_incorrect_offchain_proof(client.clone(), base_url.clone(), cairo_pie.clone()).await; + println!("✅ test_incorrect_offchain_proof completed"); + + test_successfully(client.clone(), base_url.clone(), cairo_pie.clone()).await; + println!("✅ test_successfully completed"); +} + +async fn test_incorrect_layout(client: Client, base_url: String, cairo_pie: String) { + let url = + format!( + "{}/v1/gateway/add_job?customer_id={}&cairo_job_key={}&offchain_proof={}&proof_layout={}", + base_url, Uuid::new_v4(), Uuid::new_v4(), true, "smal" + ); + let correct_body = cairo_pie.to_string(); + let expected = json!( + { + "code": "500", + "message": "Internal server error" + } + ); + let res = post_request(client, url, correct_body).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_additional_bad_flag(client: Client, base_url: String, cairo_pie: String) { + let url = format!( + "{}/v1/gateway/add_job?customer_id={}&cairo_job_key={}&offchain_proof={}&proof_layout={}&bla={}", + base_url, Uuid::new_v4(), Uuid::new_v4(), true, "small", true + ); + let correct_body = cairo_pie.to_string(); + let expected = json!( + {"code" : "JOB_RECEIVED_SUCCESSFULLY"} + ); + let res = post_request(client, url, correct_body).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_no_cairo_job_id(client: Client, base_url: String, cairo_pie: String) { + let url = format!( + "{}/v1/gateway/add_job?customer_id={}&offchain_proof={}&proof_layout={}", + base_url, + Uuid::new_v4(), + true, + "small" + ); + let correct_body = cairo_pie.to_string(); + let expected = json!( + { + "code": "500", + "message": "Internal server error" + } + ); + let res = post_request(client, url, correct_body).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_incorrect_offchain_proof(client: Client, base_url: String, cairo_pie: String) { + let url = + format!( + "{}/v1/gateway/add_job?customer_id={}&cairo_job_key={}&offchain_proof={}&proof_layout={}", + base_url, Uuid::new_v4(), Uuid::new_v4(), false, "small" + ); + let correct_body = cairo_pie.to_string(); + let expected = json!( + { + "code": "500", + "message": "Internal server error" + } + ); + let res = post_request(client, url, correct_body).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_successfully(client: Client, base_url: String, cairo_pie: String) { + let url = + format!( + "{}/v1/gateway/add_job?customer_id={}&cairo_job_key={}&offchain_proof={}&proof_layout={}", + base_url, Uuid::new_v4(), Uuid::new_v4(), true, "small" + ); + + let correct_body = cairo_pie.to_string(); + + let expected = json!( + {"code" : "JOB_RECEIVED_SUCCESSFULLY"} + ); + let res = post_request(client, url, correct_body).await; + assert_eq!(res, expected, "Response 
did not match expected value"); +} + +async fn post_request(client: Client, url: String, body: String) -> Value { + client + .post(&url) + .body(body) + .send() + .await + .expect("Failed to send POST request") + .json::() + .await + .expect("Failed to parse response body as JSON") +} diff --git a/crates/public/src/tests/test_get_status.rs b/crates/public/src/tests/test_get_status.rs new file mode 100644 index 0000000..a0e232f --- /dev/null +++ b/crates/public/src/tests/test_get_status.rs @@ -0,0 +1,241 @@ +use std::fs; + +use reqwest::Client; +use serde_json::{json, Value}; +use tokio; +use tokio_postgres::NoTls; + +use crate::options::Options; + +#[tokio::test] +async fn test_get_status() { + let client = Client::new(); + + let config_content = + fs::read_to_string("./config/00-default.toml").expect("Failed to read config file"); + + let options: Options = toml::from_str(&config_content).expect("Failed to parse config file"); + + let base_url = format!( + "http://{}:{}", + options.server.url.as_str(), + options.server.port + ); + + // Set up the database + setup_database(&*options.pg.url).await; + println!("✅ Database setup completed"); + + test_failed(client.clone(), base_url.clone()).await; + println!("✅ test_failed completed"); + + test_invalid(client.clone(), base_url.clone()).await; + println!("✅ test_invalid completed"); + + test_unknown(client.clone(), base_url.clone()).await; + println!("✅ test_unknown completed"); + + test_in_progress(client.clone(), base_url.clone()).await; + println!("✅ test_in_progress completed"); + + test_additional_bad_flag(client.clone(), base_url.clone()).await; + println!("✅ test_additional_bad_flag completed"); + + test_not_created(client.clone(), base_url.clone()).await; + println!("✅ test_not_created completed"); + + test_processed(client.clone(), base_url.clone()).await; + println!("✅ test_processed completed"); + + test_onchain(client.clone(), base_url.clone()).await; + println!("✅ test_onchain completed"); +} + +async fn test_failed(client: Client, base_url: String) { + let customer_id = "93bc3373-5115-4f99-aecc-1bc57997bfd3".to_string(); + let cairo_job_key = "11395dd2-b874-4c11-8744-ba6482da997d".to_string(); + + let expected = json!( + { + "status" : "FAILED", + "invalid_reason" : "", + "error_log": "Sharp task failed", + "validation_done": false + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_invalid(client: Client, base_url: String) { + let customer_id = "18dc4b30-8b46-42d1-8b51-aba8c8abc7b0".to_string(); + let cairo_job_key = "09a10775-7294-4e5d-abbc-7659caa1a330".to_string(); + + let expected = json!( + { + "status" : "INVALID", + "invalid_reason": "INVALID_CAIRO_PIE_FILE_FORMAT", + "error_log": "The Cairo PIE file has a wrong format. 
\ + Deserialization ended with \ + exception: Invalid prefix for zip file..", + "validation_done": false + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_unknown(client: Client, base_url: String) { + let customer_id = "2dd71442-58ca-4c35-a6de-8e637ff3c24b".to_string(); + let cairo_job_key = "f946ec7d-c3bf-42df-8bf0-9bcc751a8b3e".to_string(); + + let expected = json!( + { + "status" : "UNKNOWN", + "invalid_reason" : "", + "error_log": "", + "validation_done": false + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_in_progress(client: Client, base_url: String) { + let customer_id = "e703be2c-9ffe-4992-b968-da75da75d0b8".to_string(); + let cairo_job_key = "37e9d193-8e94-4df3-893a-cafa62a418c0".to_string(); + + let expected = json!( + { + "status" : "IN_PROGRESS", + "invalid_reason" : "", + "error_log": "", + "validation_done": false + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_additional_bad_flag(client: Client, base_url: String) { + let customer_id = "0581368e-2a32-4e93-b211-3f0ac9bae790".to_string(); + let cairo_job_key = "b01d3ad5-10db-4fcd-8746-fdc886de50bc".to_string(); + + let expected = json!( + { + "status" : "IN_PROGRESS", + "invalid_reason" : "", + "error_log": "", + "validation_done": true + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_not_created(client: Client, base_url: String) { + let customer_id = "040832f8-245f-4f05-a165-e2810e30047f".to_string(); + let cairo_job_key = "803eac13-3dbb-4ad2-a1df-311cfc2829cf".to_string(); + + let expected = json!( + { + "status" : "NOT_CREATED", + "invalid_reason" : "", + "error_log": "", + "validation_done": false + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_processed(client: Client, base_url: String) { + let customer_id = "8758d917-bbdc-4573-97ae-817e94fa31fb".to_string(); + let cairo_job_key = "59732e57-5722-4eb7-98db-8b90b89276f8".to_string(); + + let expected = json!( + { + "status" : "PROCESSED", + "invalid_reason" : "", + "error_log": "", + "validation_done": false + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn test_onchain(client: Client, base_url: String) { + let customer_id = "e3133ecb-e6e9-493a-ad64-ab9a4495af57".to_string(); + let cairo_job_key = "39af2c49-0c81-450e-91a9-aeff8dba2318".to_string(); + + let expected = json!( + { + "status" : "ONCHAIN", + "invalid_reason" : "", + "error_log": "", + "validation_done": true + } + ); + let res = get_response(client, base_url, customer_id, cairo_job_key).await; + assert_eq!(res, expected, "Response did not match expected value"); +} + +async fn get_response( + client: Client, + base_url: String, + customer_id: String, + cairo_job_key: String, +) -> Value { + let url = format!( + "{}/v1/gateway/get_status?customer_id={}&cairo_job_key={}", + base_url, customer_id, cairo_job_key + ); + client + .get(&url) + .send() + .await + 
.expect("Failed to send GET request") + .json::() + .await + .expect("Failed to parse response body as JSON") +} + +async fn setup_database(url: &str) { + let (client, connection) = tokio_postgres::connect(url, NoTls) + .await + .expect("Failed to connect to database"); + + // Spawn the connection in the background + tokio::spawn(async move { + if let Err(e) = connection.await { + eprintln!("Connection error: {}", e); + } + }); + + // SQL to drop and recreate the table + let reset_queries = r#" + INSERT INTO jobs (id, customer_id, cairo_job_key, status, invalid_reason, error_log, validation_done) + VALUES + ('2a3ee88d-e19d-43ed-a79e-da9a28dc9525', '93bc3373-5115-4f99-aecc-1bc57997bfd3', '11395dd2-b874-4c11-8744-ba6482da997d','FAILED', '', 'Sharp task failed', false), + + ('58f667ea-67b3-4b32-b4f8-ef24ea1c8f12', '18dc4b30-8b46-42d1-8b51-aba8c8abc7b0', '09a10775-7294-4e5d-abbc-7659caa1a330', 'INVALID', 'INVALID_CAIRO_PIE_FILE_FORMAT', 'The Cairo PIE file has a wrong format. Deserialization ended with exception: Invalid prefix for zip file..', false), + + ('f2c604b7-52c5-4b69-9a67-de1276f9b8f8', '2dd71442-58ca-4c35-a6de-8e637ff3c24b', 'f946ec7d-c3bf-42df-8bf0-9bcc751a8b3e', 'UNKNOWN', '', '', false), + + ('d7045419-2b0f-4210-9e3d-7fb002839202', 'e703be2c-9ffe-4992-b968-da75da75d0b8', '37e9d193-8e94-4df3-893a-cafa62a418c0', 'IN_PROGRESS', '', '', false), + + ('18ef16cd-4511-4f29-a1d8-cd117d801f77', '0581368e-2a32-4e93-b211-3f0ac9bae790', 'b01d3ad5-10db-4fcd-8746-fdc886de50bc', 'IN_PROGRESS', '', '', true), + + ('549139a0-b288-401c-afb4-0f1018fd99f8', '040832f8-245f-4f05-a165-e2810e30047f', '803eac13-3dbb-4ad2-a1df-311cfc2829cf', 'NOT_CREATED', '', '', false), + + ('2283042d-f102-4ee6-a92f-73f3a86850e8', '8758d917-bbdc-4573-97ae-817e94fa31fb', '59732e57-5722-4eb7-98db-8b90b89276f8', 'PROCESSED', '', '', false), + + ('69f7ae7a-e981-44d2-9eb2-dfa551474870', 'e3133ecb-e6e9-493a-ad64-ab9a4495af57', '39af2c49-0c81-450e-91a9-aeff8dba2318', 'ONCHAIN', '', '', true); + "#; + + client + .batch_execute(reset_queries) + .await + .expect("Failed to reset database"); +} diff --git a/crates/public/src/utils.rs b/crates/public/src/utils.rs new file mode 100644 index 0000000..e6517ac --- /dev/null +++ b/crates/public/src/utils.rs @@ -0,0 +1,71 @@ +use std::fs; +use std::fs::File; +use std::io::Write; +use std::path::PathBuf; + +use base64::Engine; + +pub fn save_cairo_pie(encoded_pie: &String, file_name: &str) -> anyhow::Result { + let path = pie_storage_path(file_name); + + fs::create_dir_all(path.parent().unwrap())?; + let decoded_pie = base64::engine::general_purpose::STANDARD.decode(encoded_pie)?; + let reader = std::io::Cursor::new(decoded_pie); + File::create(&path)?.write_all(reader.get_ref())?; + + check_cairo_pie_zip(file_name)?; + Ok(path) +} + +fn pie_storage_path(file_name: &str) -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("cairo_pies") + .join(format!("{}.zip", file_name)) +} + +fn check_cairo_pie_zip(file_name: &str) -> anyhow::Result<()> { + let path = pie_storage_path(file_name); + let mut zip_archive = zip::ZipArchive::new(File::open(path)?)?; + for file in [ + "additional_data.json", + "execution_resources.json", + "memory.bin", + "metadata.json", + "version.json", + ] { + zip_archive.by_name(file)?; + } + Ok(()) +} + +#[cfg(test)] +mod test { + use std::fs; + use std::fs::File; + use std::io::Read; + + use base64::engine::general_purpose; + use base64::Engine; + + use crate::utils::save_cairo_pie; + + #[test] + fn test_save_cairo_pie() { + let encoded_origin = 
encode_zip("./src/assets/test_data/fibonacci_with_output.zip"); + + let written_path = save_cairo_pie(&encoded_origin, "test").unwrap(); + let encoded_written = encode_zip(written_path.to_str().unwrap()); + + assert_eq!(encoded_origin, encoded_written); + + let cairo_pie = fs::read_to_string("./src/assets/test_data/encoded_cairo_pie.txt").unwrap(); + save_cairo_pie(&cairo_pie, "test2").unwrap(); + } + + fn encode_zip(path: &str) -> String { + let mut file = File::open(path).unwrap(); + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer).unwrap(); + general_purpose::STANDARD.encode(&buffer) + } +} diff --git a/crates/worker/Cargo.toml b/crates/worker/Cargo.toml index 63751dd..5cf5f37 100644 --- a/crates/worker/Cargo.toml +++ b/crates/worker/Cargo.toml @@ -5,18 +5,17 @@ name = "irelia_worker" version = "0.0.1" [dependencies] +irelia_adapter = { workspace = true } irelia_common = { workspace = true } irelia_core = { workspace = true } -anyhow = { workspace = true } axum = { workspace = true, features = ["macros"] } clap = { workspace = true, features = ["derive"] } +deadpool-diesel = { workspace = true, features = ["postgres", "serde"] } graphile_worker = { workspace = true } opentelemetry = { workspace = true } -rand = { workspace = true } readonly = { workspace = true } serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } sqlx = { workspace = true, features = [ "chrono", "postgres", @@ -24,7 +23,6 @@ sqlx = { workspace = true, features = [ "macros", "runtime-tokio", ], default-features = false } -testcontainers-modules = { workspace = true, features = ["postgres", "redis"] } thiserror = { workspace = true } tokio = { workspace = true, features = ["full"] } tower-http = { workspace = true, features = ["timeout", "trace"] } diff --git a/crates/worker/config/00-default.toml b/crates/worker/config/00-default.toml index fdc2941..b030212 100644 --- a/crates/worker/config/00-default.toml +++ b/crates/worker/config/00-default.toml @@ -11,4 +11,4 @@ url = "0.0.0.0" [worker] schema = "worker_schema" -concurrent = 4 \ No newline at end of file +concurrent = 4 diff --git a/crates/worker/src/app_state.rs b/crates/worker/src/app_state.rs new file mode 100644 index 0000000..379d802 --- /dev/null +++ b/crates/worker/src/app_state.rs @@ -0,0 +1,21 @@ +use std::fmt::{Debug, Formatter}; +use std::sync::Arc; + +use irelia_core::ports::job::JobPort; + +#[derive(Clone)] +pub struct State { + pub job_port: Arc, +} + +impl Debug for State { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("State").finish() + } +} + +impl State { + pub fn new(job_port: Arc) -> Self { + State { job_port } + } +} diff --git a/crates/worker/src/job_worker.rs b/crates/worker/src/job_worker.rs index f24864f..dc4993b 100644 --- a/crates/worker/src/job_worker.rs +++ b/crates/worker/src/job_worker.rs @@ -2,14 +2,14 @@ use std::time::Duration; use graphile_worker::{IntoTaskHandlerResult, TaskHandler, WorkerContext}; use irelia_common::workers::{Worker, ADD_JOB_WORKER_IDENTIFIER}; -use irelia_core::entities::job::JobEntity; +use irelia_core::entities::worker_job::WorkerJobEntity; use serde::{Deserialize, Serialize}; use tokio::time::sleep; use tracing::{info, instrument, Span}; use tracing_opentelemetry::OpenTelemetrySpanExt; -#[derive(Deserialize, Serialize)] -pub struct JobWorker(Worker); +#[derive(Deserialize, Serialize, Debug)] +pub struct JobWorker(Worker); impl TaskHandler for JobWorker { const IDENTIFIER: &'static str = ADD_JOB_WORKER_IDENTIFIER; @@ -23,6 +23,8 @@ 
impl TaskHandler for JobWorker { span.set_parent(parent_cx); + //TODO: Processing Data + sleep(Duration::from_secs(5)).await; info!("data: {:?}", self.0.data); } diff --git a/crates/worker/src/lib.rs b/crates/worker/src/lib.rs index fa446b2..66f0885 100644 --- a/crates/worker/src/lib.rs +++ b/crates/worker/src/lib.rs @@ -1,5 +1,5 @@ -pub mod job_worker; +pub mod app_state; pub mod errors; +pub mod job_worker; pub mod options; pub mod router; -pub mod state; diff --git a/crates/worker/src/main.rs b/crates/worker/src/main.rs index c8a72a3..40378c3 100644 --- a/crates/worker/src/main.rs +++ b/crates/worker/src/main.rs @@ -1,13 +1,18 @@ mod options; use std::str::FromStr; +use std::sync::Arc; use std::time::Duration; use clap::{Parser, Subcommand}; +use deadpool_diesel::postgres::Pool; +use deadpool_diesel::{Manager, Runtime}; use graphile_worker::WorkerOptions; +use irelia_adapter::repositories::postgres::job_db::JobDBRepository; use irelia_common::cli_args::CliArgs; use irelia_common::kill_signals; use irelia_common::loggers::telemetry::init_telemetry; +use irelia_worker::app_state::State; use irelia_worker::job_worker::JobWorker; use irelia_worker::router::routes; use opentelemetry::global; @@ -21,7 +26,6 @@ use crate::options::Options; #[tokio::main] async fn main() { let options: Options = CliArgs::default_run_or_get_options(env!("APP_VERSION")); - init_telemetry( options.service_name.as_str(), options.exporter_endpoint.as_str(), @@ -79,8 +83,17 @@ pub async fn serve(options: Options) { } pub async fn run_workers(options: Options) { - let pg_options = PgConnectOptions::from_str(&options.pg.url).unwrap(); + info!("Using postgres database: {}", &options.pg.url); + let manager = Manager::new(&options.pg.url, Runtime::Tokio1); + let pool = Pool::builder(manager) + .max_size(options.pg.max_size.try_into().unwrap()) + .build() + .unwrap(); + let job_port = Arc::new(JobDBRepository::new(pool.clone())); + let state = State::new(job_port); + + let pg_options = PgConnectOptions::from_str(&options.pg.url).unwrap(); let pg_pool = sqlx::postgres::PgPoolOptions::new() .max_connections(options.pg.max_size) .connect_with(pg_options) @@ -90,6 +103,7 @@ pub async fn run_workers(options: Options) { let worker = WorkerOptions::default() .concurrency(options.worker.concurrent) .schema(options.worker.schema.as_str()) + .add_extension(state) .define_job::() .pg_pool(pg_pool) .init() diff --git a/crates/worker/src/state.rs b/crates/worker/src/state.rs deleted file mode 100644 index d8d6450..0000000 --- a/crates/worker/src/state.rs +++ /dev/null @@ -1,14 +0,0 @@ -#[derive(Clone, Debug)] -pub struct State {} - -impl Default for State { - fn default() -> Self { - Self::new() - } -} - -impl State { - pub fn new() -> Self { - State {} - } -} diff --git a/deploy/docker/01_public_custom.toml b/deploy/docker/01_public_server_custom.toml similarity index 66% rename from deploy/docker/01_public_custom.toml rename to deploy/docker/01_public_server_custom.toml index 17502c0..0bb3d95 100644 --- a/deploy/docker/01_public_custom.toml +++ b/deploy/docker/01_public_server_custom.toml @@ -1,13 +1,16 @@ exporter_endpoint = "http://quickwit:7281" -service_name = "irelia" +service_name = "irelia-public" [server] -port = 8888 +port = 8000 url = "0.0.0.0" [log] level = "info" -[db.pg] +[pg] max_size = 10 url = "postgres://postgres:changeme@postgres:5432/postgres" + +[worker] +schema = "worker_schema" diff --git a/deploy/docker/01_worker_custom.toml b/deploy/docker/01_worker_custom.toml new file mode 100644 index 0000000..30d2cea --- 
/dev/null +++ b/deploy/docker/01_worker_custom.toml @@ -0,0 +1,17 @@ +exporter_endpoint = "http://quickwit:7281" +service_name = "irelia-worker" + +[server] +port = 8000 +url = "0.0.0.0" + +[log] +level = "info" + +[pg] +max_size = 10 +url = "postgres://postgres:changeme@postgres:5432/postgres" + +[worker] +schema = "worker_schema" +concurrent = 4 diff --git a/deploy/docker/docker-compose.yaml b/deploy/docker/docker-compose.yaml index 62d3f56..41e1863 100644 --- a/deploy/docker/docker-compose.yaml +++ b/deploy/docker/docker-compose.yaml @@ -40,17 +40,28 @@ services: - redis:/data ports: - '6379:6379' - public_server: - image: rust-api-server + irelia_public: + image: irelia-public-server build: context: ../.. - target: public-dev + target: irelia-public-server ports: - - '8888:8888' + - '8000:8000' volumes: - - ./01_public_custom.toml:/user/01_custom.toml:ro + - ./01_public_server_custom.toml:/user/01_custom.toml:ro depends_on: - postgres + restart: on-failure:3 + irelia_worker: + image: irelia-worker + build: + context: ../.. + target: irelia-worker + volumes: + - ./01_worker_custom.toml:/user/01_custom.toml:ro + depends_on: + - postgres + restart: on-failure:3 volumes: postgres: redis: diff --git a/taplo/taplo.toml b/taplo/taplo.toml index eda6684..b5bc836 100644 --- a/taplo/taplo.toml +++ b/taplo/taplo.toml @@ -1,10 +1,15 @@ -include = ["**/*.toml"] +include = ["**/Cargo.toml"] [formatting] align_entries = false [[rule]] -keys = ["dependencies", "dev-dependencies", "patch.crates-io"] +keys = [ + "workspace.dependencies", + "dependencies", + "dev-dependencies", + "patch.crates-io", +] [rule.formatting] reorder_keys = true
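For reviewers who want to exercise the new gateway routes end to end, the sketch below mirrors the integration tests added above: it submits a base64-encoded Cairo PIE to /v1/gateway/add_job and then polls /v1/gateway/get_status. It assumes the docker-compose stack above is running, that the public server is reachable on the published port 8000 from 01_public_server_custom.toml, and that the PIE file path is illustrative.

use reqwest::Client;
use serde_json::Value;
use uuid::Uuid;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let client = Client::new();
    // Host and port are assumptions taken from the docker config above.
    let base_url = "http://localhost:8000";
    let customer_id = Uuid::new_v4();
    let cairo_job_key = Uuid::new_v4();

    // The request body is the base64-encoded Cairo PIE zip, as in the tests above.
    let cairo_pie = std::fs::read_to_string("encoded_cairo_pie.txt")?;

    let add_job: Value = client
        .post(format!(
            "{base_url}/v1/gateway/add_job?customer_id={customer_id}&cairo_job_key={cairo_job_key}&offchain_proof=true&proof_layout=small"
        ))
        .body(cairo_pie)
        .send()
        .await?
        .json()
        .await?;
    println!("add_job: {add_job}");

    let status: Value = client
        .get(format!(
            "{base_url}/v1/gateway/get_status?customer_id={customer_id}&cairo_job_key={cairo_job_key}"
        ))
        .send()
        .await?
        .json()
        .await?;
    println!("get_status: {status}");
    Ok(())
}

A freshly submitted job should come back with code JOB_RECEIVED_SUCCESSFULLY, and get_status should then report IN_PROGRESS, matching the status written by add_worker_job via JobService::add_job in the services added above.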