diff --git a/.gitignore b/.gitignore index 445c6f7..83dbf24 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,5 @@ scripts/katana/deploy.log scripts/katana/katana.log .cargo/ + +*.pb diff --git a/.tool-versions b/.tool-versions index e285a3d..782f2ed 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1,2 @@ -scarb 2.8.4 \ No newline at end of file +scarb 2.8.5 +starknet-foundry 0.33.0 diff --git a/Cargo.lock b/Cargo.lock index 67f8c05..7a356ac 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -188,9 +188,9 @@ dependencies = [ [[package]] name = "alloy-core" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8316d83e590f4163b221b8180008f302bda5cf5451202855cdd323e588849c" +checksum = "c3d14d531c99995de71558e8e2206c27d709559ee8e5a0452b965ea82405a013" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", @@ -201,9 +201,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2364c782a245cf8725ea6dbfca5f530162702b5d685992ea03ce64529136cc" +checksum = "80759b3f57b3b20fa7cd8fef6479930fc95461b58ff8adea6e87e618449c8a1d" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -240,9 +240,9 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6cee6a35793f3db8a5ffe60e86c695f321d081a567211245f503e8c498fce8" +checksum = "4c986539255fb839d1533c128e190e557e52ff652c9ef62939e233a81dd93f7e" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -266,7 +266,7 @@ dependencies = [ "derive_more", "once_cell", "serde", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -276,7 +276,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b6aa3961694b30ba53d41006131a2fca3bdab22e4c344e46db2c639e7c2dfdd" dependencies = [ "alloy-eip2930", - "alloy-eip7702 0.4.1", + "alloy-eip7702 0.4.2", "alloy-primitives", "alloy-rlp", "alloy-serde 0.6.4", @@ -284,7 +284,7 @@ dependencies = [ "derive_more", "once_cell", "serde", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -311,9 +311,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84c506bf264110fa7e90d9924f742f40ef53c6572ea56a0b0bd714a567ed389" +checksum = "ac4b22b3e51cac09fd2adfcc73b55f447b4df669f983c13f7894ec82b607c63f" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -438,9 +438,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fce5dbd6a4f118eecc4719eaa9c7ffc31c315e6c5ccde3642db927802312425" +checksum = "9db948902dfbae96a73c2fbf1f7abec62af034ab883e4c777c3fd29702bd6e2c" dependencies = [ "alloy-rlp", "bytes", @@ -448,7 +448,7 @@ dependencies = [ "const-hex", "derive_more", "foldhash", - "hashbrown 0.15.1", + "hashbrown 0.15.2", "hex-literal", "indexmap 2.6.0", "itoa", @@ -580,7 +580,7 @@ checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -778,23 +778,23 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.12" +version = "0.8.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9343289b4a7461ed8bab8618504c995c049c082b70c7332efd7b32125633dc05" +checksum = "3bfd7853b65a2b4f49629ec975fee274faf6dff15ab8894c620943398ef283c0" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4222d70bec485ceccc5d8fd4f2909edd65b5d5e43d4aca0b5dcee65d519ae98f" +checksum = "82ec42f342d9a9261699f8078e57a7a4fda8aaa73c1a212ed3987080e6a9cd13" dependencies = [ "alloy-json-abi", "alloy-sol-macro-input", @@ -804,16 +804,16 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e17f2677369571b976e51ea1430eb41c3690d344fef567b840bfc0b01b6f83a" +checksum = "ed2c50e6a62ee2b4f7ab3c6d0366e5770a21cad426e109c2f40335a1b3aff3df" dependencies = [ "alloy-json-abi", "const-hex", @@ -822,15 +822,15 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.87", + "syn 2.0.89", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa64d80ae58ffaafdff9d5d84f58d03775f66c84433916dc9a64ed16af5755da" +checksum = "ac17c6e89a50fb4a758012e4b409d9a0ba575228e69b539fe37d7a1bd507ca4a" dependencies = [ "serde", "winnow", @@ -838,9 +838,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6520d427d4a8eb7aa803d852d7a52ceb0c519e784c292f64bb339e636918cf27" +checksum = "c9dc0fffe397aa17628160e16b89f704098bf3c9d74d5d369ebc239575936de5" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -947,7 +947,7 @@ dependencies = [ "alloy-transport 0.6.4", "futures", "http 1.1.0", - "rustls 0.23.17", + "rustls 0.23.19", "serde_json", "tokio", "tokio-tungstenite", @@ -1063,7 +1063,7 @@ dependencies = [ "blake2", "derivative", "digest 0.10.7", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1296,7 +1296,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1307,7 +1307,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1338,7 +1338,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1512,7 +1512,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1544,7 +1544,7 @@ checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1555,9 +1555,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -1588,9 +1588,9 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] @@ -1684,7 +1684,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1700,8 +1700,8 @@ dependencies = [ "clap", "common", "eyre", - "host", "mmr-utils", + "publisher", "starknet", "starknet-handler", "thiserror 2.0.3", @@ -1721,10 +1721,10 @@ version = "0.1.0" dependencies = [ "alloy-contract 0.6.4", "dotenv", - "eyre", "ruint", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror 2.0.3", + "tracing", "tracing-subscriber 0.3.18", ] @@ -1739,9 +1739,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" +checksum = "4b0485bab839b018a8f1723fc5391819fea5f8f0f32288ef8a735fd096b6160c" dependencies = [ "cfg-if", "cpufeatures", @@ -1785,9 +1785,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca741a962e1b0bff6d724a1a0958b686406e853bb14061f218562e1896f95e6" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -1908,7 +1908,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1919,7 +1919,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -1991,7 +1991,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "unicode-xid", ] @@ -2045,7 +2045,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2159,12 +2159,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2194,7 +2194,7 @@ dependencies = [ "scrypt", "serde", "serde_json", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "sha3", "thiserror 1.0.69", "uuid 0.8.2", @@ -2397,7 +2397,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -2494,7 +2494,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] 
@@ -2536,7 +2536,7 @@ checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" [[package]] name = "garaga_rs" version = "0.1.0" -source = "git+https://github.com/ametel01/garaga.git#dbc064bd93b9bdd8f3f49f40557fdc57aa039834" +source = "git+https://github.com/ametel01/garaga.git#b768d2f71ceeb6c778eb67d87254ab4582a05468" dependencies = [ "ark-bls12-381", "ark-bn254", @@ -2548,7 +2548,7 @@ dependencies = [ "num-bigint", "num-traits", "pyo3", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "wasm-bindgen", ] @@ -2600,6 +2600,22 @@ dependencies = [ "subtle", ] +[[package]] +name = "guest-mmr" +version = "0.1.0" +dependencies = [ + "common", + "guest-types", + "hex", + "mmr-utils", + "num-bigint", + "num-traits", + "serde", + "sha2 0.10.8 (git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0)", + "thiserror 2.0.3", + "tokio", +] + [[package]] name = "guest-types" version = "0.1.0" @@ -2655,9 +2671,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", @@ -2668,11 +2684,12 @@ dependencies = [ [[package]] name = "hasher" version = "0.1.0" -source = "git+https://github.com/ametel01/rust-accumulators.git?branch=workspace#177002a6b04b22ecdf360a4335217d5c3922d638" +source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher#2e1c70db54fd319edd8ab87494cab574c3647b0f" dependencies = [ "hex", "num-bigint", "num-traits", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-core", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "strum", @@ -2750,34 +2767,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "host" -version = "0.1.0" -dependencies = [ - "clap", - "common", - "dotenv", - "eth-rlp-types", - "ethereum", - "eyre", - "garaga_rs", - "guest-types", - "methods", - "mmr", - "mmr-utils", - "risc0-ethereum-contracts", - "risc0-groth16", - "risc0-zkvm", - "serde", - "sqlx", - "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "starknet-handler", - "store", - "thiserror 2.0.3", - "tokio", - "tracing", -] - [[package]] name = "http" version = "0.2.12" @@ -2913,12 +2902,12 @@ dependencies = [ "http 1.1.0", "hyper 1.5.1", "hyper-util", - "rustls 0.23.17", + "rustls 0.23.19", "rustls-pki-types", "tokio", "tokio-rustls 0.26.0", "tower-service", - "webpki-roots 0.26.6", + "webpki-roots 0.26.7", ] [[package]] @@ -3094,7 +3083,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3180,13 +3169,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -3213,7 +3202,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown 0.15.1", + "hashbrown 0.15.2", "serde", ] @@ -3234,9 +3223,9 @@ dependencies = [ [[package]] name = "interprocess" -version = "2.2.1" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2f4e4a06d42fab3e85ab1b419ad32b09eab58b901d40c57935ff92db3287a13" +checksum = "894148491d817cb36b6f778017b8ac46b17408d522dd90f539d677ea938362eb" dependencies = [ "doctest-file", "futures-core", @@ -3279,9 +3268,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" @@ -3302,7 +3291,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "once_cell", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3332,7 +3321,7 @@ checksum = "bbc2a4da0d9e52ccfe6306801a112e81a8fc0c76aa3e4449fefeda7fef72bb34" dependencies = [ "lambdaworks-math 0.10.0", "serde", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "sha3", ] @@ -3343,7 +3332,7 @@ source = "git+https://github.com/lambdaclass/lambdaworks.git#d016a730e106fa2ff61 dependencies = [ "lambdaworks-math 0.11.0", "serde", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "sha3", ] @@ -3387,7 +3376,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3401,9 +3390,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.164" +version = "0.2.166" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" +checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36" [[package]] name = "libm" @@ -3440,9 +3429,9 @@ checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "litemap" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "lock_api" @@ -3466,7 +3455,7 @@ version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.1", + "hashbrown 0.15.2", ] [[package]] @@ -3478,6 +3467,15 @@ dependencies = [ "libc", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "maybe-async" version = "0.2.10" @@ -3486,7 +3484,7 @@ checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3572,7 +3570,7 @@ dependencies = [ [[package]] name = "mmr" version = "0.1.0" -source = "git+https://github.com/ametel01/rust-accumulators.git?branch=workspace#177002a6b04b22ecdf360a4335217d5c3922d638" +source = 
"git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher#2e1c70db54fd319edd8ab87494cab574c3647b0f" dependencies = [ "hasher", "store", @@ -3589,6 +3587,7 @@ dependencies = [ "sqlx", "store", "thiserror 2.0.3", + "tokio", "uuid 1.11.0", ] @@ -3719,7 +3718,7 @@ checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3769,7 +3768,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3826,7 +3825,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3926,7 +3925,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -3970,9 +3969,9 @@ checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "portable-atomic" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "powerfmt" @@ -4067,14 +4066,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -4093,7 +4092,7 @@ dependencies = [ "rand", "rand_chacha", "rand_xorshift", - "regex-syntax", + "regex-syntax 0.8.5", "rusty-fork", "tempfile", "unarray", @@ -4119,7 +4118,35 @@ dependencies = [ "itertools 0.13.0", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", +] + +[[package]] +name = "publisher" +version = "0.1.0" +dependencies = [ + "clap", + "common", + "dotenv", + "eth-rlp-types", + "ethereum", + "eyre", + "garaga_rs", + "guest-types", + "methods", + "mmr", + "mmr-utils", + "risc0-ethereum-contracts", + "risc0-groth16", + "risc0-zkvm", + "serde", + "sqlx", + "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "starknet-handler", + "store", + "thiserror 2.0.3", + "tokio", + "tracing", ] [[package]] @@ -4170,7 +4197,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4183,7 +4210,7 @@ dependencies = [ "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -4203,7 +4230,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls 0.23.17", + "rustls 0.23.19", "socket2", "thiserror 2.0.3", "tokio", @@ -4221,7 +4248,7 @@ dependencies = [ "rand", "ring", "rustc-hash", - "rustls 0.23.17", + "rustls 0.23.19", "rustls-pki-types", "slab", "thiserror 2.0.3", @@ -4353,8 +4380,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", ] [[package]] @@ -4365,9 +4401,15 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.5", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.8.5" @@ -4456,13 +4498,13 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.17", + "rustls 0.23.19", "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", "tokio-native-tls", "tokio-rustls 0.26.0", @@ -4473,7 +4515,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.26.6", + "webpki-roots 0.26.7", "windows-registry", ] @@ -4514,7 +4556,7 @@ dependencies = [ "risc0-zkp", "risc0-zkvm-platform", "serde", - "syn 2.0.87", + "syn 2.0.89", "tracing", ] @@ -4628,7 +4670,7 @@ dependencies = [ "risc0-core", "risc0-zkvm-platform", "serde", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "tracing", ] @@ -4659,7 +4701,7 @@ dependencies = [ "rrs-lib", "semver 1.0.23", "serde", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "stability", "tempfile", "tracing", @@ -4709,9 +4751,9 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" dependencies = [ "const-oid", "digest 0.10.7", @@ -4820,9 +4862,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.17" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ "once_cell", "ring", @@ -4948,7 +4990,7 @@ dependencies = [ "hmac", "pbkdf2", "salsa20", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -5048,7 +5090,7 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5113,7 +5155,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5138,6 +5180,16 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "sha2" +version = "0.10.8" +source = "git+https://github.com/risc0/RustCrypto-hashes.git?tag=sha2-v0.10.8-risczero.0#244dc3b08788f7a4ccce14c66896ae3b4f24c166" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + [[package]] name = "sha3" version = "0.10.8" @@ -5212,9 +5264,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5292,7 +5344,7 @@ dependencies = [ "percent-encoding", 
"serde", "serde_json", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec", "sqlformat", "thiserror 1.0.69", @@ -5312,7 +5364,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5330,12 +5382,12 @@ dependencies = [ "quote", "serde", "serde_json", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "sqlx-core", "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.87", + "syn 2.0.89", "tempfile", "tokio", "url", @@ -5374,7 +5426,7 @@ dependencies = [ "rsa", "serde", "sha1", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec", "sqlx-core", "stringprep", @@ -5412,7 +5464,7 @@ dependencies = [ "rand", "serde", "serde_json", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec", "sqlx-core", "stringprep", @@ -5451,7 +5503,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac" dependencies = [ "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5530,7 +5582,7 @@ source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c67 dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5546,7 +5598,7 @@ dependencies = [ "num-integer", "num-traits", "rfc6979", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-curve 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-types-core", "zeroize", @@ -5564,7 +5616,7 @@ dependencies = [ "num-integer", "num-traits", "rfc6979", - "sha2", + "sha2 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)", "starknet-curve 0.5.1 (git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master)", "starknet-types-core", "zeroize", @@ -5592,10 +5644,11 @@ name = "starknet-handler" version = "0.1.0" dependencies = [ "common", + "crypto-bigint", "starknet", "starknet-crypto 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror 2.0.3", - "tracing", + "url", ] [[package]] @@ -5604,7 +5657,7 @@ version = "0.2.1" source = "git+https://github.com/xJonathanLEI/starknet-rs.git?branch=master#5c676a64031901b5a203168fd8ef8d6b40a5862f" dependencies = [ "starknet-core", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5666,7 +5719,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "store" version = "0.1.0" -source = "git+https://github.com/ametel01/rust-accumulators.git?branch=workspace#177002a6b04b22ecdf360a4335217d5c3922d638" +source = "git+https://github.com/ametel01/rust-accumulators.git?branch=feat/sha2-hasher#2e1c70db54fd319edd8ab87494cab574c3647b0f" dependencies = [ "async-trait", "futures", @@ -5711,7 +5764,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5733,9 +5786,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", @@ -5744,14 +5797,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f76fe0a3e1476bdaa0775b9aec5b869ed9520c2b2fedfe9c6df3618f8ea6290b" +checksum = "da0523f59468a2696391f2a772edc089342aacd53c3caa2ac3264e598edf119b" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5762,9 +5815,9 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] @@ -5777,7 +5830,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5852,7 +5905,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5863,7 +5916,7 @@ checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -5960,6 +6013,7 @@ dependencies = [ "bytes", "libc", "mio", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", @@ -5975,7 +6029,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -6004,7 +6058,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.17", + "rustls 0.23.19", "rustls-pki-types", "tokio", ] @@ -6029,12 +6083,12 @@ checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9" dependencies = [ "futures-util", "log", - "rustls 0.23.17", + "rustls 0.23.19", "rustls-pki-types", "tokio", "tokio-rustls 0.26.0", "tungstenite", - "webpki-roots 0.26.6", + "webpki-roots 0.26.7", ] [[package]] @@ -6095,9 +6149,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -6107,20 +6161,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -6152,10 +6206,14 @@ version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ + "matchers", 
"nu-ansi-term", + "once_cell", + "regex", "sharded-slab", "smallvec", "thread_local", + "tracing", "tracing-core", "tracing-log", ] @@ -6179,7 +6237,7 @@ dependencies = [ "httparse", "log", "rand", - "rustls 0.23.17", + "rustls 0.23.19", "rustls-pki-types", "sha1", "thiserror 1.0.69", @@ -6236,9 +6294,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -6281,9 +6339,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.3" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -6403,7 +6461,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "wasm-bindgen-shared", ] @@ -6437,7 +6495,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6503,9 +6561,9 @@ checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "webpki-roots" -version = "0.26.6" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] @@ -6796,9 +6854,9 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", @@ -6808,13 +6866,13 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -6836,27 +6894,27 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] name = "zerofrom" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", "synstructure", ] @@ -6877,7 +6935,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] [[package]] @@ -6899,5 +6957,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.87", + "syn 2.0.89", ] diff --git a/Cargo.toml b/Cargo.toml index 633ee0b..8b3c4d0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,10 +6,11 @@ members = [ "crates/relayer", "crates/common", "crates/methods", - "crates/host", + "crates/publisher", "crates/guest-types", "crates/ethereum", - "crates/mmr-utils", + "crates/mmr-utils", + "crates/guest-mmr", ] # Always optimize; building and running the guest takes much longer without optimization. diff --git a/README.md b/README.md index f1cbae7..9d7cd13 100644 --- a/README.md +++ b/README.md @@ -90,14 +90,9 @@ Deploy the messaging infrastructure contracts: source .env ``` -2. Access deployment scripts: +2. Execute deployment pipeline: ```bash - cd scripts - ``` - -3. Execute deployment pipeline: - ```bash - ./deploy.sh + ./scripts/deploy.sh ``` > **Technical Note:** Verify `deploy.sh` configuration for correct contract deployment parameters on Katana network. @@ -113,19 +108,14 @@ Initialize the Light Client service: 2. Execute client binary: ```bash - cargo run + cargo run --release ``` ## Terminal 5: Block Hash Relayer Process Initialize the L1->L2 block hash relay service: -1. Access relayer scripts: +1. Execute relayer process: ```bash - cd scripts + ./scripts/run_relayer.sh ``` - -2. Execute relayer process: - ```bash - ./run_relayer.sh - ``` \ No newline at end of file diff --git a/config/anvil.messaging.json b/config/anvil.messaging.json index 7770708..cdb4c0e 100644 --- a/config/anvil.messaging.json +++ b/config/anvil.messaging.json @@ -1,9 +1,9 @@ { "chain": "ethereum", "rpc_url": "http://127.0.0.1:8545", - "contract_address": "0xD185B4846E5fd5419fD4D077dc636084BEfC51C0", + "contract_address": "0xF62eEc897fa5ef36a957702AA4a45B58fE8Fe312", "sender_address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", "interval": 2, - "from_block": 21246828 + "from_block": 21281491 } \ No newline at end of file diff --git a/contracts/starknet/Scarb.lock b/contracts/starknet/Scarb.lock index 60aeb78..65e1b6c 100644 --- a/contracts/starknet/Scarb.lock +++ b/contracts/starknet/Scarb.lock @@ -1,6 +1,13 @@ # Code generated by scarb DO NOT EDIT. 
version = 1 +[[package]] +name = "fossil_store" +version = "0.1.0" +dependencies = [ + "snforge_std", +] + [[package]] name = "garaga" version = "0.1.0" @@ -10,36 +17,30 @@ source = "git+https://github.com/keep-starknet-strange/garaga.git?rev=65a2fad#65 name = "l1_message_proxy" version = "0.1.0" dependencies = [ + "fossil_store", "snforge_std", - "store", ] [[package]] name = "snforge_scarb_plugin" -version = "0.32.0" +version = "0.33.0" source = "registry+https://scarbs.xyz/" -checksum = "sha256:e5a0e80294b1f5f00955c614ee3fc94c843ff0d27935693c3598d0ac8d79250a" +checksum = "sha256:b4dd6088372decd367652827091e0589bbf6bc550dfc3957baa3e9c61d6eb449" [[package]] name = "snforge_std" -version = "0.32.0" +version = "0.33.0" source = "registry+https://scarbs.xyz/" -checksum = "sha256:0e3cb45c6276334fd142a77212f0592d55744f1c022b7a63f20bcd79d0ce3927" +checksum = "sha256:f7dc3349f8a6ef4915c93df447a00bd5a53a31129fd0990a00afa0ad31d91b06" dependencies = [ "snforge_scarb_plugin", ] -[[package]] -name = "store" -version = "0.1.0" -dependencies = [ - "snforge_std", -] - [[package]] name = "verifier" version = "0.1.0" dependencies = [ + "fossil_store", "garaga", "snforge_std", ] diff --git a/contracts/starknet/Scarb.toml b/contracts/starknet/Scarb.toml index 66cc5a6..24ee6b0 100644 --- a/contracts/starknet/Scarb.toml +++ b/contracts/starknet/Scarb.toml @@ -2,9 +2,12 @@ members = ["l1_message_proxy", "store", "verifier"] [workspace.dependencies] -starknet = "2.8.4" -assert_macros = "2.8.4" -snforge_std = "0.32.0" +starknet = "2.8.5" +assert_macros = "2.8.5" +snforge_std = "0.33.0" + +[dev-dependencies] +snforge_std.workspace = true [[target.starknet-contract]] casm = true diff --git a/contracts/starknet/l1_message_proxy/Scarb.toml b/contracts/starknet/l1_message_proxy/Scarb.toml index 9d68f0f..6b124c6 100644 --- a/contracts/starknet/l1_message_proxy/Scarb.toml +++ b/contracts/starknet/l1_message_proxy/Scarb.toml @@ -7,7 +7,7 @@ edition = "2023_11" [dependencies] starknet = { workspace = true } -store = { path = "../store" } +fossil_store = { path = "../store" } [dev-dependencies] assert_macros.workspace = true diff --git a/contracts/starknet/l1_message_proxy/src/lib.cairo b/contracts/starknet/l1_message_proxy/src/lib.cairo index 4327bf6..790d59f 100644 --- a/contracts/starknet/l1_message_proxy/src/lib.cairo +++ b/contracts/starknet/l1_message_proxy/src/lib.cairo @@ -1,12 +1,12 @@ #[starknet::contract] pub mod L1MessageProxy { + use fossil_store::{IFossilStoreDispatcher, IFossilStoreDispatcherTrait}; use starknet::{ContractAddress, EthAddress}; - use store::{IStoreDispatcher, IStoreDispatcherTrait}; #[storage] struct Storage { l1_messages_sender: EthAddress, - store_dispatcher: IStoreDispatcher, + store_dispatcher: IFossilStoreDispatcher, } #[constructor] @@ -14,7 +14,7 @@ pub mod L1MessageProxy { ref self: ContractState, l1_messages_sender: EthAddress, store_address: ContractAddress ) { self.l1_messages_sender.write(l1_messages_sender); - self.store_dispatcher.write(IStoreDispatcher { contract_address: store_address }); + self.store_dispatcher.write(IFossilStoreDispatcher { contract_address: store_address }); } #[l1_handler] diff --git a/contracts/starknet/store/Scarb.toml b/contracts/starknet/store/Scarb.toml index b53f3dc..cedd305 100644 --- a/contracts/starknet/store/Scarb.toml +++ b/contracts/starknet/store/Scarb.toml @@ -1,16 +1,16 @@ [package] -name = "store" +name = "fossil_store" version = "0.1.0" edition = "2023_11" # See more keys and their definitions at 
https://docs.swmansion.com/scarb/docs/reference/manifest.html

 [dependencies]
-starknet = { workspace = true }
+starknet.workspace = true

 [dev-dependencies]
 assert_macros.workspace = true
-snforge_std.workspace = true#
+snforge_std.workspace = true

 [tool]
 fmt.workspace = true
diff --git a/contracts/starknet/store/src/lib.cairo b/contracts/starknet/store/src/lib.cairo
index 4157d26..e0a00dd 100644
--- a/contracts/starknet/store/src/lib.cairo
+++ b/contracts/starknet/store/src/lib.cairo
@@ -1,27 +1,33 @@
 #[starknet::interface]
-pub trait IStore<TContractState> {
+pub trait IFossilStore<TContractState> {
     fn store_latest_blockhash_from_l1(ref self: TContractState, block_number: u64, blockhash: u256);
     fn update_mmr_state(
         ref self: TContractState,
         latest_mmr_block: u64,
-        mmr_root: felt252,
+        mmr_root: u256,
         elements_count: u64,
         leaves_count: u64,
-        peaks: Array<felt252>
     );
     fn get_latest_blockhash_from_l1(self: @TContractState) -> (u64, u256);
-    fn get_mmr_state(self: @TContractState) -> (u64, felt252, u64, u64, Array<felt252>);
+    fn get_mmr_state(self: @TContractState) -> Store::MMRSnapshot;
 }

 #[starknet::contract]
 mod Store {
-    use core::starknet::storage::{
-        StoragePointerReadAccess, StoragePointerWriteAccess, Vec, VecTrait, MutableVecTrait
-    };
+    use core::starknet::storage::{StoragePointerReadAccess, StoragePointerWriteAccess,};

     #[starknet::storage_node]
     pub(crate) struct MMRState {
-        root_hash: felt252,
+        latest_block_number: u64,
+        root_hash: u256,
+        elements_count: u64,
+        leaves_count: u64,
+    }
+
+    #[derive(Copy, Drop, Serde, Debug)]
+    pub struct MMRSnapshot {
+        latest_block_number: u64,
+        root_hash: u256,
         elements_count: u64,
         leaves_count: u64,
     }
@@ -29,9 +35,7 @@ mod Store {
     #[storage]
     struct Storage {
         latest_blockhash_from_l1: (u64, u256),
-        latest_mmr_block: u64,
         mmr_state: MMRState,
-        peaks_store: Vec<felt252>,
     }

     #[event]
@@ -50,14 +54,13 @@ mod Store {
     #[derive(Drop, starknet::Event)]
     struct MmrStateUpdated {
         latest_mmr_block: u64,
-        root_hash: felt252,
+        root_hash: u256,
         elements_count: u64,
         leaves_count: u64,
-        peaks: Array<felt252>,
     }

     #[abi(embed_v0)]
-    impl StoreImpl of super::IStore<ContractState> {
+    impl FossilStoreImpl of super::IFossilStore<ContractState> {
         fn store_latest_blockhash_from_l1(
             ref self: ContractState, block_number: u64, blockhash: u256
         ) {
@@ -72,55 +75,32 @@ mod Store {
         fn update_mmr_state(
             ref self: ContractState,
             latest_mmr_block: u64,
-            mmr_root: felt252,
+            mmr_root: u256,
             elements_count: u64,
             leaves_count: u64,
-            peaks: Array<felt252>
         ) {
-            self.latest_mmr_block.write(latest_mmr_block);
-
             let mut curr_state = self.mmr_state;
+            curr_state.latest_block_number.write(latest_mmr_block);
             curr_state.root_hash.write(mmr_root);
             curr_state.elements_count.write(elements_count);
             curr_state.leaves_count.write(leaves_count);

-            let curr_peaks_len = self.peaks_store.len();
-            let mut i = 0;
-            for peak in peaks
-                .clone() {
-                    if i >= curr_peaks_len {
-                        self.peaks_store.append().write(peak);
-                    } else {
-                        let mut peak_ptr = self.peaks_store.at(i);
-                        peak_ptr.write(peak);
-                    }
-                    i += 1;
-                };
-
             self
                 .emit(
                     MmrStateUpdated {
-                        latest_mmr_block, root_hash: mmr_root, elements_count, leaves_count, peaks
+                        latest_mmr_block, root_hash: mmr_root, elements_count, leaves_count
                     }
                 );
         }

-        fn get_mmr_state(self: @ContractState) -> (u64, felt252, u64, u64, Array<felt252>) {
-            let latest_mmr_block = self.latest_mmr_block.read();
-
+        fn get_mmr_state(self: @ContractState) -> MMRSnapshot {
             let curr_state = self.mmr_state;
-            let (mmr_root, elements_count, leaves_count) = (
-                curr_state.root_hash.read(),
-                curr_state.elements_count.read(),
-                curr_state.leaves_count.read(),
-            );
-
-            let mut peaks = array![];
-            for i in 0..self.peaks_store.len() {
-                peaks.append(self.peaks_store.at(i).read());
-            };
-
-            (latest_mmr_block, mmr_root, elements_count, leaves_count, peaks)
+            MMRSnapshot {
+                latest_block_number: curr_state.latest_block_number.read(),
+                root_hash: curr_state.root_hash.read(),
+                elements_count: curr_state.elements_count.read(),
+                leaves_count: curr_state.leaves_count.read(),
+            }
         }
     }
 }
diff --git a/contracts/starknet/verifier/Scarb.toml b/contracts/starknet/verifier/Scarb.toml
index d3a239a..d14e697 100644
--- a/contracts/starknet/verifier/Scarb.toml
+++ b/contracts/starknet/verifier/Scarb.toml
@@ -7,11 +7,12 @@ edition = "2023_11"

 [dependencies]
 garaga = { git = "https://github.com/keep-starknet-strange/garaga.git", rev = "65a2fad" }
-starknet = { workspace = true }
+starknet.workspace = true
+fossil_store = { path = "../store" }

 [dev-dependencies]
-snforge_std = { workspace = true }
-assert_macros = {workspace = true }
+snforge_std.workspace = true
+assert_macros.workspace = true

 [tool]
 fmt.workspace = true
diff --git a/contracts/starknet/verifier/src/fossil_verifier.cairo b/contracts/starknet/verifier/src/fossil_verifier.cairo
new file mode 100644
index 0000000..0902e35
--- /dev/null
+++ b/contracts/starknet/verifier/src/fossil_verifier.cairo
@@ -0,0 +1,80 @@
+#[starknet::interface]
+pub trait IFossilVerifier<TContractState> {
+    fn verify_mmr_proof(
+        ref self: TContractState,
+        latest_mmr_block: u64,
+        new_mmr_root: u256,
+        new_elements_count: u64,
+        new_leaves_count: u64,
+        proof: Span<felt252>,
+    ) -> bool;
+    fn get_verifier_address(self: @TContractState) -> starknet::ContractAddress;
+    fn get_fossil_store_address(self: @TContractState) -> starknet::ContractAddress;
+}
+
+#[starknet::contract]
+mod FossilVerifier {
+    use fossil_store::{IFossilStoreDispatcher, IFossilStoreDispatcherTrait};
+    use verifier::groth16_verifier::{
+        IRisc0Groth16VerifierBN254Dispatcher, IRisc0Groth16VerifierBN254DispatcherTrait
+    };
+
+    #[storage]
+    struct Storage {
+        bn254_verifier: IRisc0Groth16VerifierBN254Dispatcher,
+        fossil_store: IFossilStoreDispatcher,
+    }
+
+    #[event]
+    #[derive(Drop, starknet::Event)]
+    enum Event {
+        MmrProofVerified: MmrProofVerified,
+    }
+
+    #[derive(Drop, starknet::Event)]
+    struct MmrProofVerified {
+        latest_mmr_block: u64,
+        new_mmr_root: u256,
+        new_elements_count: u64,
+        new_leaves_count: u64,
+    }
+
+    #[constructor]
+    fn constructor(
+        ref self: ContractState,
+        verifier_address: starknet::ContractAddress,
+        fossil_store_address: starknet::ContractAddress
+    ) {
+        self
+            .bn254_verifier
+            .write(IRisc0Groth16VerifierBN254Dispatcher { contract_address: verifier_address });
+        self.fossil_store.write(IFossilStoreDispatcher { contract_address: fossil_store_address });
+    }
+
+    #[external(v0)]
+    fn verify_mmr_proof(
+        ref self: ContractState,
+        latest_mmr_block: u64,
+        new_mmr_root: u256,
+        new_elements_count: u64,
+        new_leaves_count: u64,
+        proof: Span<felt252>,
+    ) -> bool {
+        let verified = self.bn254_verifier.read().verify_groth16_proof_bn254(proof);
+
+        if verified {
+            self.fossil_store.read().update_mmr_state(
+                latest_mmr_block, new_mmr_root, new_elements_count, new_leaves_count
+            );
+        }
+
+        self.emit(MmrProofVerified {
+            latest_mmr_block,
+            new_mmr_root,
+            new_elements_count,
+            new_leaves_count,
+        });
+
+        verified
+    }
+}
diff --git a/contracts/starknet/verifier/src/lib.cairo b/contracts/starknet/verifier/src/lib.cairo
index 3ed0da5..a449e94 100644
--- a/contracts/starknet/verifier/src/lib.cairo
+++ b/contracts/starknet/verifier/src/lib.cairo
@@ -1,3 +1,4 @@
 pub mod groth16_verifier;
 mod
groth16_verifier_constants; pub mod universal_ecip; +pub mod fossil_verifier; diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index c90a821..de767ff 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -host = { path = "../host" } +publisher = { path = "../publisher" } starknet-handler = { path = "../starknet-handler" } common = { path = "../common" } mmr-utils = { path = "../mmr-utils" } diff --git a/crates/client/db-store/0.db b/crates/client/db-store/0.db deleted file mode 100644 index d2d0f1a..0000000 Binary files a/crates/client/db-store/0.db and /dev/null differ diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index c7f6e4b..d95e1a6 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1,17 +1,15 @@ use common::{felt, get_env_var}; -use host::update_mmr_and_verify_onchain; use mmr_utils::{create_database_file, ensure_directory_exists}; use starknet::{ - core::types::{BlockId, BlockTag, EventFilter, Felt}, + core::types::{BlockId, BlockTag, EventFilter, Felt, U256}, macros::selector, providers::Provider as EventProvider, }; use starknet_handler::{account::StarknetAccount, provider::StarknetProvider, MmrState}; -use thiserror::Error; use tokio::time::{self, Duration}; use tracing::{error, info, instrument}; -#[derive(Error, Debug)] +#[derive(thiserror::Error, Debug)] pub enum LightClientError { #[error("Starknet handler error: {0}")] StarknetHandler(#[from] starknet_handler::StarknetHandlerError), @@ -19,14 +17,12 @@ pub enum LightClientError { UtilsError(#[from] common::UtilsError), #[error("MMR utils error: {0}")] MmrUtilsError(#[from] mmr_utils::MMRUtilsError), - #[error("Host error: {0}")] - HostError(#[from] host::HostError), + #[error("Publisher error: {0}")] + PublisherError(#[from] publisher::PublisherError), #[error("Starknet provider error: {0}")] StarknetProvider(#[from] starknet::providers::ProviderError), #[error("latest_processed_block regression from {0} to {1}")] StateError(u64, u64), - #[error("Proof verification failed from {0} to {1}")] - ProofVerificationFailed(u64, u64), #[error("New MMR root hash cannot be zero")] StateRootError, #[error("Database file does not exist at path: {0}")] @@ -56,7 +52,7 @@ impl LightClient { // Load environment variables let starknet_rpc_url = get_env_var("STARKNET_RPC_URL")?; let l2_store_addr = felt(&get_env_var("FOSSIL_STORE")?)?; - let verifier_addr = get_env_var("STARKNET_VERIFIER")?; + let verifier_addr = get_env_var("FOSSIL_VERIFIER")?; let starknet_private_key = get_env_var("STARKNET_PRIVATE_KEY")?; let starknet_account_address = get_env_var("STARKNET_ACCOUNT_ADDRESS")?; @@ -64,7 +60,7 @@ impl LightClient { let starknet_provider = StarknetProvider::new(&starknet_rpc_url)?; // Set up the database file path - let current_dir = ensure_directory_exists("db-store")?; + let current_dir = ensure_directory_exists("../../db-instances")?; let db_file = create_database_file(¤t_dir, 0)?; if !std::path::Path::new(&db_file).exists() { @@ -172,18 +168,19 @@ impl LightClient { .await?; // Fetch latest MMR state from L2 - let (latest_mmr_block, _latest_mmr_state) = self + let latest_mmr_state = self .starknet_provider .get_latest_mmr_state(&self.l2_store_addr) .await?; info!( latest_relayed_block, - latest_mmr_block, "State fetched from Starknet" + latest_mmr_block = latest_mmr_state.latest_block_number(), + "State fetched from Starknet" ); // Update MMR and verify proofs - 
self.update_mmr(latest_mmr_block, latest_relayed_block) + self.update_mmr(latest_mmr_state.latest_block_number(), latest_relayed_block) .await?; Ok(()) @@ -196,32 +193,24 @@ impl LightClient { latest_mmr_block: u64, latest_relayed_block: u64, ) -> Result<(), LightClientError> { - info!("Starting proof verification..."); - - let (proof_verified, new_mmr_state) = update_mmr_and_verify_onchain( - &self.db_file, - latest_mmr_block + 1, - latest_relayed_block, - &self.starknet_provider.rpc_url(), - &self.verifier_addr, - ) - .await?; - - if proof_verified { - self.update_mmr_state_on_starknet(latest_relayed_block, new_mmr_state) - .await?; - } else { + if latest_mmr_block >= latest_relayed_block { error!( - from_block = latest_mmr_block + 1, - to_block = latest_relayed_block, - "Proof verification failed" + latest_mmr_block, + latest_relayed_block, "Latest MMR block is greater than the latest relayed block" ); - // Return an error instead of proceeding - return Err(LightClientError::ProofVerificationFailed( - latest_mmr_block + 1, + return Err(LightClientError::StateError( + latest_mmr_block, latest_relayed_block, )); } + info!("Starting proof verification..."); + + let (new_mmr_state, proof) = + publisher::prove_mmr_update(&self.db_file, latest_mmr_block + 1, latest_relayed_block) + .await?; + + self.update_mmr_state_on_starknet(latest_relayed_block, new_mmr_state, proof) + .await?; Ok(()) } @@ -231,8 +220,9 @@ impl LightClient { &self, latest_relayed_block: u64, new_mmr_state: MmrState, + proof: Vec, ) -> Result<(), LightClientError> { - if new_mmr_state.root_hash() == Felt::ZERO { + if new_mmr_state.root_hash() == U256::from(0_u8) { error!("New MMR root hash cannot be zero"); return Err(LightClientError::StateRootError); } @@ -244,7 +234,7 @@ impl LightClient { )?; starknet_account - .update_mmr_state(self.l2_store_addr, latest_relayed_block, &new_mmr_state) + .verify_mmr_proof(&self.verifier_addr, &new_mmr_state, proof) .await?; info!( diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 03066a5..9ad078b 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -4,11 +4,11 @@ version = "0.1.0" edition = "2021" [dependencies] -eyre = { workspace = true } thiserror = { workspace = true } dotenv = { workspace = true } starknet-crypto = { workspace = true } +tracing = { workspace = true } -tracing-subscriber = "0.3.18" +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } alloy-contract = { version = "0.6.4" } ruint = "1.12.3" \ No newline at end of file diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index 718324d..e89c329 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -1,11 +1,9 @@ #![deny(unused_crate_dependencies)] -use eyre::Result; use starknet_crypto::Felt; -use std::str::FromStr; -use thiserror::Error; +use std::{path::PathBuf, str::FromStr}; -#[derive(Debug, Error)] +#[derive(thiserror::Error, Debug)] pub enum UtilsError { #[error("Environment variable {0} not set")] EnvVarNotSet(String), @@ -17,38 +15,72 @@ pub enum UtilsError { AlloyContractError(#[from] alloy_contract::Error), #[error("Failed to convert Uint to u64")] UintError(#[from] ruint::FromUintError), + #[error("Environment variable error: {0}")] + EnvVarError(#[from] dotenv::Error), + #[error("Parse error: {0}")] + ParseStringError(String), + #[error("Felt conversion error: {0}")] + FeltError(String), } /// Retrieves an environment variable or returns an error if not set. 
pub fn get_env_var(key: &str) -> Result { - dotenv::var(key).map_err(|_| UtilsError::EnvVarNotSet(key.to_string())) + Ok(dotenv::var(key)?) } /// Parses an environment variable into the desired type or returns an error. pub fn get_var(name: &str) -> Result where - T::Err: std::error::Error + Send + Sync + 'static, + T::Err: std::fmt::Display, { let var_value = get_env_var(name)?; var_value - .parse::() + .parse() .map_err(|e| UtilsError::ParseError(format!("{}: {}", name, e))) } /// Function to initialize logging and environment variables pub fn initialize_logger_and_env() -> Result<(), UtilsError> { dotenv::dotenv().ok(); - tracing_subscriber::fmt().init(); + + let filter = tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| { + let directive = match "sqlx=off".parse() { + Ok(d) => d, + Err(e) => { + tracing::warn!("Failed to parse sqlx filter directive: {}", e); + Default::default() + } + }; + tracing_subscriber::EnvFilter::new("info").add_directive(directive) + }); + + tracing_subscriber::fmt() + .with_env_filter(filter) + .with_file(true) + .init(); Ok(()) } pub fn string_array_to_felt_array(string_array: Vec) -> Result, UtilsError> { - string_array - .iter() - .map(|s| felt(s).map_err(|_| UtilsError::ParseError(s.clone()))) - .collect() + string_array.iter().map(|s| felt(s)).collect() } pub fn felt(str: &str) -> Result { - Felt::from_hex(str).map_err(|_| UtilsError::ParseError(str.to_string())) + Felt::from_hex(str).map_err(|_| UtilsError::FeltError(format!("Invalid hex string: {}", str))) +} + +pub fn get_db_path() -> Result { + // Get path to the db-instances directory relative to the test file + let test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .parent() + .ok_or_else(|| UtilsError::ParseError("Missing parent directory".to_string()))? + .parent() + .ok_or_else(|| UtilsError::ParseError("Missing root directory".to_string()))? + .join("db-instances"); + + let binding = test_dir.join("0.db"); + let store_path = binding + .to_str() + .ok_or_else(|| UtilsError::ParseError("Invalid path".to_string()))?; + Ok(store_path.to_string()) } diff --git a/crates/ethereum/src/lib.rs b/crates/ethereum/src/lib.rs index 78de752..7d6172b 100644 --- a/crates/ethereum/src/lib.rs +++ b/crates/ethereum/src/lib.rs @@ -1,7 +1,7 @@ #![deny(unused_crate_dependencies)] use alloy::{providers::ProviderBuilder, sol}; -use common::{get_env_var, initialize_logger_and_env, UtilsError}; +use common::{get_env_var, UtilsError}; // Codegen from embedded Solidity code and precompiled bytecode. sol! { @@ -20,8 +20,6 @@ sol! 
{ #[allow(dead_code)] pub async fn get_finalized_block_hash() -> Result<(u64, String), UtilsError> { - initialize_logger_and_env()?; - let rpc_url = get_env_var("ETH_RPC_URL")?; let provider = ProviderBuilder::new() .with_recommended_fillers() diff --git a/crates/guest-mmr/Cargo.toml b/crates/guest-mmr/Cargo.toml new file mode 100644 index 0000000..56cfa3a --- /dev/null +++ b/crates/guest-mmr/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "guest-mmr" +version = "0.1.0" +edition = "2021" + +[dependencies] +guest-types = { path = "../guest-types" } + +serde = { version = "1.0.215", features = ["derive"] } +hex = "0.4" +sha2 = { git = 'https://github.com/risc0/RustCrypto-hashes.git', tag = 'sha2-v0.10.8-risczero.0', features = ['compress'] } +num-bigint = "0.4.4" +num-traits = "0.2.19" +thiserror = "2.0.3" + +[dev-dependencies] +mmr-utils = { path = "../mmr-utils" } +common = { path = "../common" } + +tokio = { version = "1.39.0", features = ["full"] } \ No newline at end of file diff --git a/crates/guest-mmr/src/core.rs b/crates/guest-mmr/src/core.rs new file mode 100644 index 0000000..8e965ea --- /dev/null +++ b/crates/guest-mmr/src/core.rs @@ -0,0 +1,407 @@ +use guest_types::AppendResult; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, VecDeque}; +use thiserror::Error; + +use crate::formatting::{FormattingError, ProofOptions}; +use crate::helper::{ + element_index_to_leaf_index, find_peaks, find_siblings, get_peak_info, hasher, + leaf_count_to_append_no_merges, leaf_count_to_peaks_count, mmr_size_to_leaf_count, +}; + +#[derive(Error, Debug)] +pub enum MMRError { + #[error("No hash found for index {0}")] + NoHashFoundForIndex(usize), + #[error("Insufficient peaks for merge")] + InsufficientPeaksForMerge, + #[error("From hex error: {0}")] + FromHexError(#[from] hex::FromHexError), + #[error("Parse big int error: {0}")] + ParseBigIntError(#[from] num_bigint::ParseBigIntError), + #[error("Hash error")] + HashError, + #[error("Invalid element index")] + InvalidElementIndex, + #[error("Invalid element count")] + InvalidElementCount, + #[error("Formatting error: {0}")] + FormattingError(#[from] FormattingError), + #[error("Invalid peaks count")] + InvalidPeaksCount, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Proof { + element_index: usize, + element_hash: String, + siblings_hashes: Vec, + peaks_hashes: Vec, + elements_count: usize, +} + +#[derive(Debug)] +pub struct GuestMMR { + hashes: HashMap, + elements_count: usize, + leaves_count: usize, + root_hash: String, +} + +impl GuestMMR { + pub fn new(initial_peaks: Vec, elements_count: usize, leaves_count: usize) -> Self { + let mut hashes = HashMap::new(); + + // Initialize hashes with the peaks at their correct positions + let peak_positions = find_peaks(elements_count); + for (peak, pos) in initial_peaks.into_iter().zip(peak_positions) { + hashes.insert(pos, peak); + } + + Self { + elements_count, + leaves_count, + hashes, + root_hash: "".to_string(), + } + } + + pub fn get_elements_count(&self) -> usize { + self.elements_count + } + + pub fn get_leaves_count(&self) -> usize { + self.leaves_count + } + + pub fn append(&mut self, value: String) -> Result { + let elements_count = self.elements_count; + + let mut peaks = self.retrieve_peaks_hashes(find_peaks(elements_count))?; + + let mut last_element_idx = self.elements_count + 1; + let leaf_element_index = last_element_idx; + + // Store the new leaf in the hash map + self.hashes.insert(last_element_idx, value.clone()); + + peaks.push(value.clone()); + + let 
no_merges = leaf_count_to_append_no_merges(self.leaves_count); + + for _ in 0..no_merges { + if peaks.len() < 2 { + return Err(MMRError::InsufficientPeaksForMerge); + } + + last_element_idx += 1; + + // Pop the last two peaks to merge + let right_hash = peaks.pop().ok_or(MMRError::InsufficientPeaksForMerge)?; + let left_hash = peaks.pop().ok_or(MMRError::InsufficientPeaksForMerge)?; + + let parent_hash = hasher(vec![left_hash, right_hash])?; + self.hashes.insert(last_element_idx, parent_hash.clone()); + + peaks.push(parent_hash); + } + + self.elements_count = last_element_idx; + self.leaves_count += 1; + + let root_hash = self.calculate_root_hash(last_element_idx)?; + self.root_hash = root_hash; + + Ok(AppendResult::new( + self.leaves_count, + last_element_idx, + leaf_element_index, + value, + )) + } + + pub fn get_proof(&self, element_index: usize) -> Result { + if element_index == 0 { + return Err(MMRError::InvalidElementIndex); + } + + let tree_size = self.elements_count; + + if element_index > tree_size { + return Err(MMRError::InvalidElementIndex); + } + + let peaks = find_peaks(tree_size); + + let siblings = find_siblings(element_index, tree_size)?; + + let peaks_hashes = self.retrieve_peaks_hashes(peaks)?; + + let siblings_hashes = self.get_many_hashes(&siblings)?; + + let element_hash = self + .hashes + .get(&element_index) + .ok_or(MMRError::NoHashFoundForIndex(element_index))?; + + Ok(Proof { + element_index, + element_hash: element_hash.clone(), + siblings_hashes, + peaks_hashes, + elements_count: tree_size, + }) + } + + pub fn verify_proof( + &self, + mut proof: Proof, + element_value: String, + options: Option, + ) -> Result { + let options = options.unwrap_or_default(); + let tree_size = match options.elements_count { + Some(count) => count, + None => self.elements_count, + }; + + let leaf_count = mmr_size_to_leaf_count(tree_size); + let peaks_count = leaf_count_to_peaks_count(leaf_count); + + if peaks_count as usize != proof.peaks_hashes.len() { + return Err(MMRError::InvalidPeaksCount); + } + + if let Some(formatting_opts) = options.formatting_opts { + let proof_format_null_value = &formatting_opts.proof.null_value; + let peaks_format_null_value = &formatting_opts.peaks.null_value; + + let proof_null_values_count = proof + .siblings_hashes + .iter() + .filter(|&s| s == proof_format_null_value) + .count(); + proof + .siblings_hashes + .truncate(proof.siblings_hashes.len() - proof_null_values_count); + + let peaks_null_values_count = proof + .peaks_hashes + .iter() + .filter(|&s| s == peaks_format_null_value) + .count(); + proof + .peaks_hashes + .truncate(proof.peaks_hashes.len() - peaks_null_values_count); + } + let element_index = proof.element_index; + + if element_index == 0 { + return Err(MMRError::InvalidElementIndex); + } + + if element_index > tree_size { + return Err(MMRError::InvalidElementIndex); + } + + let (peak_index, peak_height) = get_peak_info(tree_size, element_index); + if proof.siblings_hashes.len() != peak_height { + return Ok(false); + } + + let mut hash = element_value.clone(); + let mut leaf_index = element_index_to_leaf_index(element_index)?; + + for proof_hash in proof.siblings_hashes.iter() { + let is_right = leaf_index % 2 == 1; + leaf_index /= 2; + + hash = if is_right { + hasher(vec![proof_hash.clone(), hash.clone()])? + } else { + hasher(vec![hash.clone(), proof_hash.clone()])? 
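+                // left child: the running hash stays on the left and the sibling goes on the right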
+ }; + } + + let peak_hashes = self.retrieve_peaks_hashes(find_peaks(tree_size))?; + + Ok(peak_hashes[peak_index] == hash) + } + + fn retrieve_peaks_hashes(&self, peak_idxs: Vec) -> Result, MMRError> { + let mut peaks = Vec::new(); + + for &idx in &peak_idxs { + // Use `idx` directly since `self.hashes` expects a `usize` key + if let Some(hash) = self.hashes.get(&idx) { + peaks.push(hash.clone()); + } else { + return Err(MMRError::NoHashFoundForIndex(idx)); + } + } + + Ok(peaks) + } + + pub fn bag_the_peaks(&self) -> Result { + let peaks_idxs = find_peaks(self.elements_count); + + let peaks_hashes = self.retrieve_peaks_hashes(peaks_idxs)?; + + match peaks_hashes.len() { + 0 => Ok("0x0".to_string()), + 1 => Ok(peaks_hashes[0].clone()), + _ => { + let mut peaks_hashes: VecDeque = peaks_hashes.into(); + let last = peaks_hashes + .pop_back() + .ok_or(MMRError::InsufficientPeaksForMerge)?; + let second_last = peaks_hashes + .pop_back() + .ok_or(MMRError::InsufficientPeaksForMerge)?; + let root0 = hasher(vec![second_last, last])?; + + peaks_hashes + .into_iter() + .rev() + .try_fold(root0, |prev, cur| hasher(vec![cur, prev])) + } + } + } + + pub fn calculate_root_hash(&self, elements_count: usize) -> Result { + let bag = self.bag_the_peaks()?; + + match hasher(vec![elements_count.to_string(), bag.to_string()]) { + Ok(root_hash) => Ok(root_hash), + Err(_) => Err(MMRError::HashError), + } + } + + pub fn get_all_hashes(&self) -> Vec<(usize, String)> { + let mut hashes: Vec<_> = self + .hashes + .iter() + .map(|(&index, hash)| (index, hash.clone())) + .collect(); + hashes.sort_by_key(|(index, _)| *index); // Sort by index + hashes + } + + pub fn get_many_hashes(&self, idxs: &[usize]) -> Result, MMRError> { + let mut hashes = Vec::new(); + for &idx in idxs { + hashes.push( + self.hashes + .get(&idx) + .cloned() + .ok_or(MMRError::NoHashFoundForIndex(idx))?, + ); + } + Ok(hashes) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + const INITIAL_PEAK_VALUE: &str = + "0x0000000000000000000000000000000000000000000000000000000000000001"; + const APPEND_VALUE: &str = "0x0000000000000000000000000000000000000000000000000000000000000002"; + + fn create_test_mmr() -> GuestMMR { + // Use a properly formatted hex string for the initial peak + let initial_peaks = vec![INITIAL_PEAK_VALUE.to_string()]; + GuestMMR::new(initial_peaks, 1, 1) + } + + #[test] + fn test_new_mmr() { + let mmr = create_test_mmr(); + assert_eq!(mmr.get_elements_count(), 1); + assert_eq!(mmr.get_leaves_count(), 1); + } + + #[test] + fn test_append() { + let mut mmr = create_test_mmr(); + + let result = mmr.append(APPEND_VALUE.to_string()).unwrap(); + + assert_eq!(result.leaves_count(), 2); + assert_eq!(result.value(), APPEND_VALUE); + assert_eq!(mmr.get_leaves_count(), 2); + } + + #[test] + fn test_get_proof() { + let mut mmr = create_test_mmr(); + mmr.append(APPEND_VALUE.to_string()).unwrap(); + + let proof = mmr.get_proof(1).unwrap(); + + assert_eq!(proof.element_index, 1); + assert_eq!(proof.elements_count, mmr.get_elements_count()); + } + + #[test] + fn test_verify_proof() { + let mut mmr = create_test_mmr(); + mmr.append(APPEND_VALUE.to_string()).unwrap(); + + let proof = mmr.get_proof(1).unwrap(); + println!("proof: {:?}", proof); + let is_valid = mmr + .verify_proof(proof, INITIAL_PEAK_VALUE.to_string(), None) + .unwrap(); + + assert!(is_valid); + } + + #[test] + fn test_invalid_element_index() { + let mmr = create_test_mmr(); + + let result = mmr.get_proof(0); + assert!(matches!(result, Err(MMRError::InvalidElementIndex))); + + 
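+        // An element index past `elements_count` (999 vs. the single element here) must be rejected as well.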
let result = mmr.get_proof(999); + assert!(matches!(result, Err(MMRError::InvalidElementIndex))); + } + + #[test] + fn test_bag_the_peaks() { + let mut mmr = create_test_mmr(); + mmr.append(APPEND_VALUE.to_string()).unwrap(); + + let result = mmr.bag_the_peaks(); + assert!(result.is_ok()); + } + + #[test] + fn test_get_many_hashes() { + let mmr = create_test_mmr(); + + // Test with valid index + let result = mmr.get_many_hashes(&[1]); + assert!(result.is_ok()); + + // Test with invalid index + let result = mmr.get_many_hashes(&[999]); + assert!(matches!(result, Err(MMRError::NoHashFoundForIndex(_)))); + } + + #[test] + fn test_get_all_hashes() { + let mut mmr = create_test_mmr(); + + mmr.append(APPEND_VALUE.to_string()).unwrap(); + + let hashes = mmr.get_all_hashes(); + + assert!(!hashes.is_empty()); + assert_eq!(hashes[0].1, INITIAL_PEAK_VALUE); + assert_eq!(hashes[1].1, APPEND_VALUE); + } +} diff --git a/crates/guest-mmr/src/formatting.rs b/crates/guest-mmr/src/formatting.rs new file mode 100644 index 0000000..db56591 --- /dev/null +++ b/crates/guest-mmr/src/formatting.rs @@ -0,0 +1,30 @@ +use thiserror::Error; + +#[derive(Clone)] +pub struct FormattingOptions { + pub output_size: usize, + pub null_value: String, +} + +pub type ProofFormattingOptions = FormattingOptions; +pub type PeaksFormattingOptions = FormattingOptions; + +#[derive(Clone)] +pub struct FormattingOptionsBundle { + pub proof: ProofFormattingOptions, + pub peaks: PeaksFormattingOptions, +} + +#[derive(Clone, Default)] +pub struct ProofOptions { + pub elements_count: Option, + pub formatting_opts: Option, +} + +#[derive(Error, Debug)] +pub enum FormattingError { + #[error("Formatting: Expected peaks output size is smaller than the actual size")] + PeaksOutputSizeError, + #[error("Formatting: Expected proof output size is smaller than the actual size")] + ProofOutputSizeError, +} diff --git a/crates/guest-mmr/src/helper.rs b/crates/guest-mmr/src/helper.rs new file mode 100644 index 0000000..f1948e1 --- /dev/null +++ b/crates/guest-mmr/src/helper.rs @@ -0,0 +1,176 @@ +use num_bigint::BigInt; +use num_traits::Num; +use sha2::{Digest, Sha256}; +use std::str::FromStr; + +use crate::core::MMRError; + +pub fn find_peaks(mut elements_count: usize) -> Vec { + let mut mountain_elements_count = (1 << bit_length(elements_count)) - 1; + let mut mountain_index_shift = 0; + let mut peaks = Vec::new(); + + while mountain_elements_count > 0 { + if mountain_elements_count <= elements_count { + mountain_index_shift += mountain_elements_count; + peaks.push(mountain_index_shift); + elements_count -= mountain_elements_count; + } + mountain_elements_count >>= 1; + } + + if elements_count > 0 { + return Vec::new(); + } + + peaks +} + +pub fn leaf_count_to_peaks_count(leaf_count: usize) -> u32 { + count_ones(leaf_count) as u32 +} + +pub(crate) fn count_ones(mut value: usize) -> usize { + let mut ones_count = 0; + while value > 0 { + value &= value - 1; + ones_count += 1; + } + ones_count +} + +fn bit_length(num: usize) -> usize { + (std::mem::size_of::() * 8) - num.leading_zeros() as usize +} + +pub fn leaf_count_to_append_no_merges(leaf_count: usize) -> usize { + if leaf_count == 0 { + return 0; + } + (!leaf_count).trailing_zeros() as usize +} + +pub fn hasher(data: Vec) -> Result { + let mut sha2 = Sha256::new(); + + //? 
We deliberately don't validate the size of the elements here, because we want to allow hashing of the RLP encoded block to get a block hash + if data.is_empty() { + sha2.update([]); + } else if data.len() == 1 { + let no_prefix = data[0].strip_prefix("0x").unwrap_or(&data[0]); + sha2.update(&hex::decode(no_prefix)?); + } else { + let mut result: Vec = Vec::new(); + + for e in data.iter() { + let bigint = if e.starts_with("0x") || e.starts_with("0X") { + // Parse hexadecimal + BigInt::from_str_radix(&e[2..], 16)? + } else { + // Parse decimal + BigInt::from_str(e)? + }; + + let hex = format!("{:0>64}", bigint.to_str_radix(16)); + let bytes = hex::decode(hex)?; + result.extend(bytes); + } + + sha2.update(&result); + } + + let hash = sha2.finalize(); + Ok(format!("0x{:0>64}", hex::encode(hash))) +} + +pub fn find_siblings(element_index: usize, elements_count: usize) -> Result, MMRError> { + let mut leaf_index = element_index_to_leaf_index(element_index)?; + println!("leaf index: {}", leaf_index); + let mut height = 0; + let mut siblings = Vec::new(); + let mut current_element_index = element_index; + + while current_element_index <= elements_count { + let siblings_offset = (2 << height) - 1; + println!("siblings offset: {}", siblings_offset); + if leaf_index % 2 == 1 { + // right child + siblings.push(current_element_index - siblings_offset); + current_element_index += 1; + } else { + // left child + siblings.push(current_element_index + siblings_offset); + current_element_index += siblings_offset + 1; + } + leaf_index /= 2; + height += 1; + } + + siblings.pop(); + Ok(siblings) +} + +pub fn element_index_to_leaf_index(element_index: usize) -> Result { + if element_index == 0 { + return Err(MMRError::InvalidElementIndex); + } + elements_count_to_leaf_count(element_index - 1) +} + +pub fn elements_count_to_leaf_count(elements_count: usize) -> Result { + let mut leaf_count = 0; + let mut mountain_leaf_count = 1 << bit_length(elements_count); + let mut current_elements_count = elements_count; + + while mountain_leaf_count > 0 { + let mountain_elements_count = 2 * mountain_leaf_count - 1; + if mountain_elements_count <= current_elements_count { + leaf_count += mountain_leaf_count; + current_elements_count -= mountain_elements_count; + } + mountain_leaf_count >>= 1; + } + + if current_elements_count > 0 { + Err(MMRError::InvalidElementCount) + } else { + Ok(leaf_count) + } +} + +pub fn mmr_size_to_leaf_count(mmr_size: usize) -> usize { + let mut remaining_size = mmr_size; + let bits = bit_length(remaining_size + 1); + let mut mountain_tips = 1 << (bits - 1); // Using bitwise shift to calculate 2^(bits-1) + let mut leaf_count = 0; + + while mountain_tips != 0 { + let mountain_size = 2 * mountain_tips - 1; + if mountain_size <= remaining_size { + remaining_size -= mountain_size; + leaf_count += mountain_tips; + } + mountain_tips >>= 1; // Using bitwise shift for division by 2 + } + + leaf_count +} + +pub fn get_peak_info(mut elements_count: usize, mut element_index: usize) -> (usize, usize) { + let mut mountain_height = bit_length(elements_count); + let mut mountain_elements_count = (1 << mountain_height) - 1; + let mut mountain_index = 0; + + loop { + if mountain_elements_count <= elements_count { + if element_index <= mountain_elements_count { + return (mountain_index, mountain_height - 1); + } + elements_count -= mountain_elements_count; + element_index -= mountain_elements_count; + mountain_index += 1; + } + mountain_elements_count >>= 1; + mountain_height -= 1; + } +} diff --git 
a/crates/guest-mmr/src/lib.rs b/crates/guest-mmr/src/lib.rs new file mode 100644 index 0000000..266ac97 --- /dev/null +++ b/crates/guest-mmr/src/lib.rs @@ -0,0 +1,3 @@ +pub mod core; +pub(crate) mod formatting; +pub(crate) mod helper; diff --git a/crates/guest-mmr/tests/mmr_test.rs b/crates/guest-mmr/tests/mmr_test.rs new file mode 100644 index 0000000..8c18e4a --- /dev/null +++ b/crates/guest-mmr/tests/mmr_test.rs @@ -0,0 +1,31 @@ +// use mmr::MMR; +use common::get_db_path; +use mmr_utils::initialize_mmr; + +#[tokio::test] +#[ignore = "TODO: add .db file to test"] +async fn test_mmr_proofs() { + // Get path to the db-instances directory relative to the test file + let store_path = get_db_path().unwrap(); + + let (store_manager, mmr, pool) = initialize_mmr(&store_path).await.unwrap(); + + let indices = vec![1, 2, 4, 5, 8, 9, 11, 12]; + + let mut hashes = vec![]; + for index in indices.iter() { + let hash = store_manager + .get_value_for_element_index(&pool, *index) + .await + .unwrap(); + hashes.push(hash.unwrap()); + } + + for (i, index) in indices.iter().enumerate() { + let proof = mmr.get_proof(*index, None).await.unwrap(); + assert!(mmr + .verify_proof(proof, hashes[i].clone(), None) + .await + .unwrap()); + } +} diff --git a/crates/guest-types/src/lib.rs b/crates/guest-types/src/lib.rs index aa95fc8..e760f9a 100644 --- a/crates/guest-types/src/lib.rs +++ b/crates/guest-types/src/lib.rs @@ -25,7 +25,7 @@ pub struct AppendResult { leaves_count: usize, elements_count: usize, element_index: usize, - root_hash: String, + value: String, } impl AppendResult { @@ -33,18 +33,18 @@ impl AppendResult { leaves_count: usize, elements_count: usize, element_index: usize, - root_hash: String, + value: String, ) -> Self { Self { leaves_count, elements_count, element_index, - root_hash, + value, } } - pub fn root_hash(&self) -> &str { - &self.root_hash + pub fn value(&self) -> &str { + &self.value } pub fn element_index(&self) -> usize { @@ -63,21 +63,24 @@ impl AppendResult { // GuestOutput #[derive(Debug, Serialize, Deserialize)] pub struct GuestOutput { - final_peaks: Vec, + root_hash: String, elements_count: usize, leaves_count: usize, + all_hashes: Vec<(usize, String)>, append_results: Vec, } impl GuestOutput { pub fn new( - final_peaks: Vec, + root_hash: String, elements_count: usize, leaves_count: usize, + all_hashes: Vec<(usize, String)>, append_results: Vec, ) -> Self { Self { - final_peaks, + root_hash, + all_hashes, elements_count, leaves_count, append_results, @@ -92,8 +95,8 @@ impl GuestOutput { &self.append_results } - pub fn final_peaks(&self) -> Vec { - self.final_peaks.clone() + pub fn all_hashes(&self) -> Vec<(usize, String)> { + self.all_hashes.clone() } pub fn leaves_count(&self) -> usize { @@ -105,40 +108,52 @@ impl GuestOutput { #[derive(Debug, Serialize, Deserialize, Clone)] pub struct CombinedInput { headers: Vec, - mmr_input: GuestInput, + mmr_input: MMRInput, + skip_proof_verification: bool, } impl CombinedInput { - pub fn new(headers: Vec, mmr_input: GuestInput) -> Self { - Self { headers, mmr_input } + pub fn new( + headers: Vec, + mmr_input: MMRInput, + skip_proof_verification: bool, + ) -> Self { + Self { + headers, + mmr_input, + skip_proof_verification, + } } pub fn headers(&self) -> &Vec { &self.headers } - pub fn mmr_input(&self) -> &GuestInput { + pub fn mmr_input(&self) -> &MMRInput { &self.mmr_input } + + pub fn skip_proof_verification(&self) -> bool { + self.skip_proof_verification + } } -// GuestInput #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct 
GuestInput { +pub struct MMRInput { initial_peaks: Vec, elements_count: usize, leaves_count: usize, - new_elements: Vec, - previous_proofs: Vec, + new_elements: Option>, + previous_proofs: Option>, } -impl GuestInput { +impl MMRInput { pub fn new( initial_peaks: Vec, elements_count: usize, leaves_count: usize, - new_elements: Vec, - previous_proofs: Vec, + new_elements: Option>, + previous_proofs: Option>, ) -> Self { Self { initial_peaks, @@ -149,8 +164,8 @@ impl GuestInput { } } - pub fn previous_proofs(&self) -> &Vec { - &self.previous_proofs + pub fn previous_proofs(&self) -> Option<&Vec> { + self.previous_proofs.as_ref() } pub fn initial_peaks(&self) -> Vec { @@ -190,3 +205,51 @@ impl BatchProof { self.method_id } } + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct FinalHash { + hash: String, + index: usize, +} + +impl FinalHash { + pub fn new(hash: String, index: usize) -> Self { + Self { hash, index } + } + + pub fn hash(&self) -> &str { + &self.hash + } + + pub fn index(&self) -> usize { + self.index + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BlocksValidityInput { + headers: Vec, + mmr_input: MMRInput, + hash_indexes: Vec, +} +impl BlocksValidityInput { + pub fn new(headers: Vec, mmr_input: MMRInput, hash_indexes: Vec) -> Self { + Self { + headers, + mmr_input, + hash_indexes, + } + } + + pub fn headers(&self) -> &Vec { + &self.headers + } + + pub fn hash_indexes(&self) -> &Vec { + &self.hash_indexes + } + + pub fn mmr_input(&self) -> &MMRInput { + &self.mmr_input + } +} diff --git a/crates/host/bin/update_mmr.rs b/crates/host/bin/update_mmr.rs deleted file mode 100644 index 13607a3..0000000 --- a/crates/host/bin/update_mmr.rs +++ /dev/null @@ -1,47 +0,0 @@ -use clap::Parser; -use common::get_env_var; -use eyre::{eyre, Result}; -use host::{db_access::get_store_path, update_mmr_and_verify_onchain}; - -#[derive(Parser, Debug)] -#[command(version, about, long_about = None)] -struct Args { - /// Path to the SQLite database file. 
- #[arg(short, long)] - db_file: Option, - - /// Start block - #[arg(short, long)] - start: u64, - - /// End block - #[arg(short, long)] - end: u64, - - /// RPC URL - #[arg(short, long)] - rpc_url: String, - - /// Verifier address - #[arg(short, long)] - verifier: String, -} - -#[tokio::main] -async fn main() -> Result<()> { - let args = Args::parse(); - - let rpc_url = get_env_var("STARKNET_RPC_URL")?; - let verifier = get_env_var("STARKNET_VERIFIER")?; - - let store_path = get_store_path(args.db_file).map_err(|e| eyre!(e))?; - - let (proof_verified, new_mmr_root) = - update_mmr_and_verify_onchain(&store_path, args.start, args.end, &rpc_url, &verifier) - .await?; - - println!("Proof verified: {:?}", proof_verified); - println!("New MMR root: {:?}", new_mmr_root); - - Ok(()) -} diff --git a/crates/methods/Cargo.toml b/crates/methods/Cargo.toml index f22f763..1654ce4 100644 --- a/crates/methods/Cargo.toml +++ b/crates/methods/Cargo.toml @@ -7,4 +7,4 @@ edition = "2021" risc0-build = { version = "1.1.3" } [package.metadata.risc0] -methods = ["guest"] +methods = ["mmr-append", "blocks-validity"] diff --git a/crates/methods/guest/Cargo.toml b/crates/methods/blocks-validity/Cargo.toml similarity index 72% rename from crates/methods/guest/Cargo.toml rename to crates/methods/blocks-validity/Cargo.toml index b23662b..7dab9f3 100644 --- a/crates/methods/guest/Cargo.toml +++ b/crates/methods/blocks-validity/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "mmr-guest" +name = "blocks-validity" version = "0.1.0" edition = "2021" @@ -7,14 +7,10 @@ edition = "2021" [dependencies] guest-types = { path = "../../guest-types" } +guest-mmr = { path = "../../guest-mmr" } common = { path = "../../common" } eth-rlp-types = { git = "https://github.com/ametel01/eth-rlp-verify.git" } eth-rlp-verify = { git = "https://github.com/ametel01/eth-rlp-verify.git" } -thiserror = "2.0.3" risc0-zkvm = { version = "1.1.3", default-features = false, features = ['std'] } -sha3 = "0.10" -serde = { version = "1.0.215", features = ["derive"] } -hex = "0.4" -starknet-crypto = "0.7.3" diff --git a/crates/methods/blocks-validity/src/main.rs b/crates/methods/blocks-validity/src/main.rs new file mode 100644 index 0000000..7999a5a --- /dev/null +++ b/crates/methods/blocks-validity/src/main.rs @@ -0,0 +1,31 @@ +// main.rs +use eth_rlp_verify::are_blocks_and_chain_valid; +use risc0_zkvm::guest::env; +use guest_mmr::core::GuestMMR; +use guest_types::BlocksValidityInput; + +fn main() { + // Read combined input + let input: BlocksValidityInput = env::read(); + + // Verify block headers + if !are_blocks_and_chain_valid(&input.headers()) { + env::commit(&false); + } + // Initialize MMR with previous state + let mmr = GuestMMR::new( + input.mmr_input().initial_peaks(), + input.mmr_input().elements_count(), + input.mmr_input().leaves_count(), + ); + // Append block hashes to MMR + for (i, header) in input.headers().iter().enumerate() { + let block_hash = header.block_hash.clone(); + let proof = mmr.get_proof(input.hash_indexes()[i]).unwrap(); + if !mmr.verify_proof(proof, block_hash, None).unwrap() { + env::commit(&false); + } + } + + env::commit(&true); +} diff --git a/crates/methods/guest/src/guest_mmr.rs b/crates/methods/guest/src/guest_mmr.rs deleted file mode 100644 index 3362d6b..0000000 --- a/crates/methods/guest/src/guest_mmr.rs +++ /dev/null @@ -1,261 +0,0 @@ -use guest_types::{AppendResult, PeaksFormattingOptions, PeaksOptions}; -use serde::{Deserialize, Serialize}; -use starknet_crypto::{poseidon_hash, poseidon_hash_many, 
poseidon_hash_single, Felt}; -use std::collections::{HashMap, VecDeque}; -use thiserror::Error; -use common::felt; - -#[derive(Error, Debug)] -pub enum FormattingError { - #[error("Formatting: Expected peaks output size is smaller than the actual size")] - PeaksOutputSizeError, -} - -#[derive(Error, Debug)] -pub enum MMRError { - NoHashFoundForIndex(usize), - Formatting(FormattingError), - InsufficientPeaksForMerge, - HashError, -} - -pub struct GuestMMR { - hashes: HashMap, - elements_count: usize, - leaves_count: usize, -} - -impl GuestMMR { - pub fn new(initial_peaks: Vec, elements_count: usize, leaves_count: usize) -> Self { - let mut hashes = HashMap::new(); - - // Initialize hashes with the peaks at their correct positions - let peak_positions = find_peaks(elements_count); - for (peak, pos) in initial_peaks.into_iter().zip(peak_positions) { - hashes.insert(pos, peak); - } - - Self { - elements_count, - leaves_count, - hashes, - } - } - - pub fn get_elements_count(&self) -> usize { - self.elements_count - } - - pub fn get_leaves_count(&self) -> usize { - self.leaves_count - } - - pub fn append(&mut self, value: String) -> Result { - let elements_count = self.elements_count; - - let mut peaks = self.retrieve_peaks_hashes(find_peaks(elements_count))?; - - let mut last_element_idx = self.elements_count + 1; - let leaf_element_index = last_element_idx; - - // Store the new leaf in the hash map - self.hashes.insert(last_element_idx, value.clone()); - - peaks.push(value); - - let no_merges = leaf_count_to_append_no_merges(self.leaves_count); - - for _ in 0..no_merges { - if peaks.len() < 2 { - return Err(MMRError::InsufficientPeaksForMerge); - } - - last_element_idx += 1; - - // Pop the last two peaks to merge - let right_hash = peaks.pop().unwrap(); - let left_hash = peaks.pop().unwrap(); - - let parent_hash = hash(vec![left_hash, right_hash])?; - self.hashes.insert(last_element_idx, parent_hash.clone()); - - peaks.push(parent_hash); - } - - self.elements_count = last_element_idx; - self.leaves_count += 1; - - let bag = self.bag_the_peaks()?; - let root_hash = self.calculate_root_hash(&bag, last_element_idx)?; - - Ok(AppendResult::new( - self.leaves_count, - last_element_idx, - leaf_element_index, - root_hash, - )) - } - - fn retrieve_peaks_hashes(&self, peak_idxs: Vec) -> Result, MMRError> { - let mut peaks = Vec::new(); - - for &idx in &peak_idxs { - // Use `idx` directly since `self.hashes` expects a `usize` key - if let Some(hash) = self.hashes.get(&idx) { - peaks.push(hash.clone()); - } else { - return Err(MMRError::NoHashFoundForIndex(idx)); - } - } - - Ok(peaks) - } - - fn bag_the_peaks(&self) -> Result { - let peaks_idxs = find_peaks(self.elements_count); - - let peaks_hashes = self.retrieve_peaks_hashes(peaks_idxs)?; - - match peaks_hashes.len() { - 0 => Ok("0x0".to_string()), - 1 => Ok(peaks_hashes[0].clone()), - _ => { - let mut peaks_hashes: VecDeque = peaks_hashes.into(); - let last = peaks_hashes.pop_back().unwrap(); - let second_last = peaks_hashes.pop_back().unwrap(); - let root0 = hash(vec![second_last, last])?; - - let final_root = peaks_hashes - .into_iter() - .rev() - .fold(root0, |prev: String, cur: String| { - hash(vec![cur, prev]).unwrap() - }); - - Ok(final_root) - } - } - } - - pub fn calculate_root_hash( - &self, - bag: &str, - elements_count: usize, - ) -> Result { - match hash(vec![elements_count.to_string(), bag.to_string()]) { - Ok(root_hash) => Ok(root_hash), - Err(_) => Err(MMRError::HashError), - } - } - - pub fn get_peaks(&self, option: PeaksOptions) -> 
Result, MMRError> { - let tree_size = match option.elements_count { - Some(count) => count, - None => self.elements_count, - }; - - let peaks_indices = find_peaks(tree_size); - let peaks = self.retrieve_peaks_hashes(peaks_indices)?; - - if let Some(formatting_opts) = option.formatting_opts { - match format_peaks(peaks, &formatting_opts) { - Ok(formatted_peaks) => Ok(formatted_peaks), - Err(e) => Err(MMRError::Formatting(e)), - } - } else { - Ok(peaks) - } - } -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct Proof { - element_index: usize, - element_hash: String, - siblings_hashes: Vec, - peaks_hashes: Vec, - elements_count: usize, -} - -impl std::fmt::Display for MMRError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - MMRError::NoHashFoundForIndex(idx) => write!(f, "No hash found for index {}", idx), - MMRError::Formatting(e) => write!(f, "Formatting error: {}", e), - MMRError::InsufficientPeaksForMerge => write!(f, "Insufficient peaks for merge"), - MMRError::HashError => write!(f, "Hash error"), - } - } -} - -pub fn format_peaks( - mut peaks: Vec, - formatting_opts: &PeaksFormattingOptions, -) -> Result, FormattingError> { - if peaks.len() > formatting_opts.output_size { - return Err(FormattingError::PeaksOutputSizeError); - } - - let expected_peaks_size_remainder = formatting_opts.output_size - peaks.len(); - let peaks_null_values: Vec = - vec![formatting_opts.null_value.clone(); expected_peaks_size_remainder]; - - peaks.extend(peaks_null_values); - - Ok(peaks) -} - -// Add this function at the bottom with other helper functions -pub fn find_peaks(mut elements_count: usize) -> Vec { - let mut mountain_elements_count = (1 << bit_length(elements_count)) - 1; - let mut mountain_index_shift = 0; - let mut peaks = Vec::new(); - - while mountain_elements_count > 0 { - if mountain_elements_count <= elements_count { - mountain_index_shift += mountain_elements_count; - peaks.push(mountain_index_shift); - elements_count -= mountain_elements_count; - } - mountain_elements_count >>= 1; - } - - if elements_count > 0 { - return Vec::new(); - } - - peaks -} - -fn bit_length(num: usize) -> usize { - (std::mem::size_of::() * 8) - num.leading_zeros() as usize -} - -fn leaf_count_to_append_no_merges(leaf_count: usize) -> usize { - if leaf_count == 0 { - return 0; - } - (!leaf_count).trailing_zeros() as usize -} - -fn hash(data: Vec) -> Result { - // for element in &data { - // self.is_element_size_valid(element)?; - // } - - let field_elements: Vec = data.iter().map(|e| felt(e).unwrap_or_default()).collect(); - - let hash_core = match field_elements.len() { - 0 => return Err(MMRError::HashError), - 1 => poseidon_hash_single(field_elements[0]), - 2 => poseidon_hash(field_elements[0], field_elements[1]), - _ => poseidon_hash_many(&field_elements), - }; - - let hash = format!("{:x}", hash_core); - // if self.should_pad { - // hash = format!("{:0>63}", hash); - // } - let hash = format!("0x{}", hash); - Ok(hash) -} diff --git a/crates/methods/guest/src/lib.rs b/crates/methods/guest/src/lib.rs deleted file mode 100644 index 506ade4..0000000 --- a/crates/methods/guest/src/lib.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod guest_mmr; diff --git a/crates/methods/mmr-append/Cargo.toml b/crates/methods/mmr-append/Cargo.toml new file mode 100644 index 0000000..7e46248 --- /dev/null +++ b/crates/methods/mmr-append/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "mmr-append" +version = "0.1.0" +edition = "2021" + +[workspace] + +[dependencies] +guest-types = { path = 
"../../guest-types" } +guest-mmr = { path = "../../guest-mmr" } +common = { path = "../../common" } + +eth-rlp-types = { git = "https://github.com/ametel01/eth-rlp-verify.git" } +eth-rlp-verify = { git = "https://github.com/ametel01/eth-rlp-verify.git" } + +risc0-zkvm = { version = "1.1.3", default-features = false, features = ['std'] } diff --git a/crates/methods/guest/src/main.rs b/crates/methods/mmr-append/src/main.rs similarity index 67% rename from crates/methods/guest/src/main.rs rename to crates/methods/mmr-append/src/main.rs index bca660d..836c156 100644 --- a/crates/methods/guest/src/main.rs +++ b/crates/methods/mmr-append/src/main.rs @@ -1,21 +1,23 @@ // main.rs use eth_rlp_verify::are_blocks_and_chain_valid; use risc0_zkvm::guest::env; -mod guest_mmr; -use guest_mmr::GuestMMR; +use guest_mmr::core::GuestMMR; use guest_types::{CombinedInput, GuestOutput}; fn main() { // Read combined input let input: CombinedInput = env::read(); - // Verify previous batch proofs - for proof in input.mmr_input().previous_proofs() { - // Verify each previous proof - proof - .receipt() - .verify(proof.method_id()) - .expect("Invalid previous proof"); + // Only verify proofs if skip_proof_verification is false + if !input.skip_proof_verification() { + if let Some(proofs) = input.mmr_input().previous_proofs() { + for proof in proofs { + proof + .receipt() + .verify(proof.method_id()) + .expect("Invalid previous proof"); + } + } } // Verify block headers @@ -23,14 +25,12 @@ fn main() { are_blocks_and_chain_valid(&input.headers()), "Invalid block headers" ); - // Initialize MMR with previous state let mut mmr = GuestMMR::new( input.mmr_input().initial_peaks(), input.mmr_input().elements_count(), input.mmr_input().leaves_count(), ); - let mut append_results = Vec::new(); // Append block hashes to MMR for (_, header) in input.headers().iter().enumerate() { @@ -45,23 +45,16 @@ fn main() { } } - // Get final peaks - let final_peaks = match mmr.get_peaks(Default::default()) { - Ok(peaks) => peaks, - Err(e) => { - assert!(false, "Failed to get final peaks: {:?}", e); - vec![] // This line will never be reached due to assert - } - }; + let root_hash = mmr.calculate_root_hash(mmr.get_elements_count()).unwrap(); // Create output let output = GuestOutput::new( - final_peaks, + root_hash, mmr.get_elements_count(), mmr.get_leaves_count(), + mmr.get_all_hashes(), append_results, ); - // Commit the output env::commit(&output); } diff --git a/crates/mmr-utils/Cargo.toml b/crates/mmr-utils/Cargo.toml index f63a51f..3df262a 100644 --- a/crates/mmr-utils/Cargo.toml +++ b/crates/mmr-utils/Cargo.toml @@ -9,6 +9,9 @@ thiserror = { workspace = true } uuid = "1.1.2" -store = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "workspace" } -hasher = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "workspace" } -mmr = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "workspace" } +store = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher" } +hasher = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher", features = ["sha256"] } +mmr = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher" } + +[dev-dependencies] +tokio = { workspace = true, features = ["full"] } diff --git a/crates/mmr-utils/src/lib.rs b/crates/mmr-utils/src/lib.rs index 3b6d24d..f2dd45b 100644 --- a/crates/mmr-utils/src/lib.rs +++ b/crates/mmr-utils/src/lib.rs @@ -1,19 +1,18 @@ 
#![deny(unused_crate_dependencies)] -use hasher::stark_poseidon::StarkPoseidonHasher; +use hasher::sha2::Sha2Hasher; use mmr::MMR; use sqlx::{Row, SqlitePool}; -use std::fs::File; -use std::path::Path; -use std::sync::Arc; -use std::sync::Mutex; -use std::{collections::HashMap, path::PathBuf}; -use std::{env, fs}; +use std::{ + collections::HashMap, + env, + fs::{self, File}, + path::{Path, PathBuf}, + sync::{Arc, Mutex}, +}; use store::{sqlite::SQLiteStore, StoreError}; -use thiserror::Error; -use uuid::Uuid; -#[derive(Error, Debug)] +#[derive(thiserror::Error, Debug)] pub enum MMRUtilsError { #[error("Store error: {0}")] Store(#[from] StoreError), @@ -31,9 +30,7 @@ pub struct StoreFactory; #[allow(dead_code)] impl StoreFactory { pub async fn create_store(path: &str, id: Option<&str>) -> Result { - SQLiteStore::new(path, Some(true), id) - .await - .map_err(StoreError::SQLite) + Ok(SQLiteStore::new(path, Some(true), id).await?) } } @@ -101,6 +98,13 @@ impl StoreManager { Ok(()) } + pub async fn get_all_elements(&self, pool: &SqlitePool) -> Result, sqlx::Error> { + let rows = sqlx::query("SELECT value FROM value_index_map") + .fetch_all(pool) + .await?; + Ok(rows.iter().map(|r| r.get("value")).collect()) + } + /// Retrieves the element index based on the given hash value #[allow(dead_code)] pub async fn get_element_index_for_value( @@ -165,12 +169,12 @@ pub async fn initialize_mmr( let mmr_id = if let Some(id) = get_mmr_id(&pool).await? { id } else { - let new_id = Uuid::new_v4().to_string(); + let new_id = uuid::Uuid::new_v4().to_string(); save_mmr_id(&pool, &new_id).await?; new_id }; - let hasher = Arc::new(StarkPoseidonHasher::new(Some(false))); + let hasher = Arc::new(Sha2Hasher::new()); let mmr = MMR::new(store, hasher, Some(mmr_id.clone())); Ok((store_manager, mmr, pool)) @@ -222,3 +226,151 @@ pub fn create_database_file( Ok(store_path_str.to_string()) } + +#[cfg(test)] +mod tests { + use super::*; + + async fn setup_test_db() -> (StoreManager, SqlitePool) { + // Use in-memory SQLite database for testing + let db_url = "sqlite::memory:"; + let pool = SqlitePool::connect(db_url).await.unwrap(); + + // Create mmr_metadata table first + sqlx::query( + r#" + CREATE TABLE IF NOT EXISTS mmr_metadata ( + mmr_id TEXT PRIMARY KEY + ) + "#, + ) + .execute(&pool) + .await + .unwrap(); + + // Now create the manager which will create value_index_map table + let manager = StoreManager::new(&format!("sqlite:{}", db_url)) + .await + .unwrap(); + + // Initialize the value-to-index table explicitly with the same pool + manager.initialize_value_index_table(&pool).await.unwrap(); + + (manager, pool) + } + + #[tokio::test] + async fn test_store_manager_initialization() { + let (_, pool) = setup_test_db().await; + + // Verify the tables were created + let result = sqlx::query( + "SELECT name FROM sqlite_master WHERE type='table' AND name='value_index_map'", + ) + .fetch_optional(&pool) + .await + .unwrap(); + + assert!(result.is_some(), "value_index_map table should exist"); + } + + #[tokio::test] + async fn test_insert_and_get_value_index_mapping() { + let (manager, pool) = setup_test_db().await; + + // Test inserting a mapping + let test_value = "test_hash"; + let test_index = 42; + + manager + .insert_value_index_mapping(&pool, test_value, test_index) + .await + .unwrap(); + + // Test retrieving the mapping + let result = manager + .get_element_index_for_value(&pool, test_value) + .await + .unwrap(); + + assert_eq!(result, Some(test_index)); + } + + #[tokio::test] + async fn test_get_all_elements() { + 
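+        // Insert three value-to-index mappings, then expect `get_all_elements` to return all three.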
let (manager, pool) = setup_test_db().await; + manager + .insert_value_index_mapping(&pool, "test_hash", 42) + .await + .unwrap(); + manager + .insert_value_index_mapping(&pool, "test_hash2", 43) + .await + .unwrap(); + manager + .insert_value_index_mapping(&pool, "test_hash3", 44) + .await + .unwrap(); + let elements = manager.get_all_elements(&pool).await.unwrap(); + assert_eq!(elements.len(), 3); + } + + #[tokio::test] + async fn test_get_nonexistent_value_index() { + let (manager, pool) = setup_test_db().await; + + let result = manager + .get_element_index_for_value(&pool, "nonexistent") + .await + .unwrap(); + + assert_eq!(result, None); + } + + #[tokio::test] + async fn test_get_value_for_element_index() { + let (manager, pool) = setup_test_db().await; + + // First, create the store table that would normally exist + sqlx::query("CREATE TABLE IF NOT EXISTS store (key TEXT PRIMARY KEY, value TEXT NOT NULL)") + .execute(&pool) + .await + .unwrap(); + + // Insert a test value + let test_index = 123; + let test_value = "test_stored_value"; + sqlx::query("INSERT INTO store (key, value) VALUES (?, ?)") + .bind(format!("test:hashes:{}", test_index)) + .bind(test_value) + .execute(&pool) + .await + .unwrap(); + + // Test retrieving the value + let result = manager + .get_value_for_element_index(&pool, test_index) + .await + .unwrap(); + + assert_eq!(result, Some(test_value.to_string())); + } + + #[tokio::test] + async fn test_get_nonexistent_value_for_element_index() { + let (manager, pool) = setup_test_db().await; + + // Create store table + sqlx::query("CREATE TABLE IF NOT EXISTS store (key TEXT PRIMARY KEY, value TEXT NOT NULL)") + .execute(&pool) + .await + .unwrap(); + + let result = manager + .get_value_for_element_index(&pool, 999) + .await + .unwrap(); + + assert_eq!(result, None); + } +} diff --git a/crates/host/Cargo.toml b/crates/publisher/Cargo.toml similarity index 89% rename from crates/host/Cargo.toml rename to crates/publisher/Cargo.toml index d6ac8a9..ef7ba13 100644 --- a/crates/host/Cargo.toml +++ b/crates/publisher/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "host" +name = "publisher" version = "0.1.0" edition = "2021" @@ -22,8 +22,8 @@ starknet-handler = { path = "../starknet-handler" } eth-rlp-types = { workspace = true } garaga_rs = { git = "https://github.com/ametel01/garaga.git" } -mmr = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "workspace" } -store = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "workspace" } +mmr = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher" } +store = { git = "https://github.com/ametel01/rust-accumulators.git", branch = "feat/sha2-hasher" } eyre = { workspace = true } thiserror = { workspace = true } @@ -35,6 +35,7 @@ starknet-crypto = { workspace = true } clap = { workspace = true, features = ["derive"] } risc0-zkvm = { version = "1.1.3" } +# risc0-zkvm-platform = { version = "1.1.3" } serde = "1.0" risc0-ethereum-contracts = { git = "https://github.com/risc0/risc0-ethereum", tag = "v1.1.4" } risc0-groth16 = "1.1.2" diff --git a/crates/host/bin/build_mmr.rs b/crates/publisher/bin/build_mmr.rs similarity index 53% rename from crates/host/bin/build_mmr.rs rename to crates/publisher/bin/build_mmr.rs index 8f51732..a9a0577 100644 --- a/crates/host/bin/build_mmr.rs +++ b/crates/publisher/bin/build_mmr.rs @@ -1,9 +1,9 @@ use clap::Parser; use common::{get_env_var, initialize_logger_and_env}; -use eyre::{eyre, Result}; -use host::{db_access::get_store_path, 
AccumulatorBuilder, ProofGenerator, ProofType}; -use methods::{MMR_GUEST_ELF, MMR_GUEST_ID}; -use starknet_handler::provider::StarknetProvider; +use eyre::Result; +use methods::{MMR_APPEND_ELF, MMR_APPEND_ID}; +use publisher::{db_access::get_store_path, AccumulatorBuilder, ProofGenerator, ProofType}; +use starknet_handler::{account::StarknetAccount, provider::StarknetProvider}; use tracing::info; #[derive(Parser, Debug)] @@ -20,6 +20,10 @@ struct Args { /// Number of batches to process. If not specified, processes until block #0. #[arg(short, long)] num_batches: Option, + + /// Skip proof verification + #[arg(short, long, default_value_t = false)] + skip_proof: bool, } #[tokio::main] @@ -27,23 +31,29 @@ async fn main() -> Result<()> { initialize_logger_and_env()?; let rpc_url = get_env_var("STARKNET_RPC_URL")?; - let verifier_address = get_env_var("STARKNET_VERIFIER")?; + let verifier_address = get_env_var("FOSSIL_VERIFIER")?; + let private_key = get_env_var("STARKNET_PRIVATE_KEY")?; + let account_address = get_env_var("STARKNET_ACCOUNT_ADDRESS")?; info!("Starting Publisher..."); // Parse CLI arguments let args = Args::parse(); - let store_path = get_store_path(args.db_file).map_err(|e| eyre!(e))?; + let store_path = get_store_path(args.db_file)?; info!("Initializing proof generator..."); // Initialize proof generator - let proof_generator = ProofGenerator::new(MMR_GUEST_ELF, MMR_GUEST_ID); - + let proof_generator = ProofGenerator::new(MMR_APPEND_ELF, MMR_APPEND_ID, args.skip_proof); info!("Initializing accumulator builder..."); // Initialize accumulator builder with the batch size - let mut builder = - AccumulatorBuilder::new(&store_path, proof_generator, args.batch_size).await?; + let mut builder = AccumulatorBuilder::new( + &store_path, + proof_generator, + args.batch_size, + args.skip_proof, + ) + .await?; info!("Building MMR..."); // Build MMR from finalized block to block #0 or up to the specified number of batches @@ -61,16 +71,23 @@ async fn main() -> Result<()> { result.start_block(), result.end_block() ); + + let new_mmr_state = result.new_mmr_state(); + match result.proof() { Some(ProofType::Stark { .. }) => info!("Generated STARK proof"), Some(ProofType::Groth16 { calldata, .. }) => { - info!("Generated Groth16 proof"); - let provider = StarknetProvider::new(&rpc_url)?; - let result = provider.verify_groth16_proof_onchain(&verifier_address, &calldata); - info!( - "Proof verification result: {:?}", - result.await.map_err(|e| eyre!(e))? 
- ); + if !args.skip_proof { + info!("Verifying Groth16 proof on Starknet..."); + let provider = StarknetProvider::new(&rpc_url)?; + let account = + StarknetAccount::new(provider.provider(), &private_key, &account_address)?; + let (tx_hash, new_mmr_state) = account + .verify_mmr_proof(&verifier_address, &new_mmr_state, calldata) + .await?; + info!("Final proof verified on Starknet, tx hash: {:?}", tx_hash); + info!("New MMR state: {:?}", new_mmr_state); + } } None => info!("No proof generated"), } diff --git a/crates/publisher/bin/update_mmr.rs b/crates/publisher/bin/update_mmr.rs new file mode 100644 index 0000000..5df5bcb --- /dev/null +++ b/crates/publisher/bin/update_mmr.rs @@ -0,0 +1,47 @@ +use clap::Parser; +use common::{get_env_var, initialize_logger_and_env}; +use eyre::Result; +use publisher::{db_access::get_store_path, prove_mmr_update}; +use starknet_handler::{account::StarknetAccount, provider::StarknetProvider}; + +#[derive(Parser, Debug)] +#[command(version, about, long_about = None)] +struct Args { + /// Path to the SQLite database file. + #[arg(short, long)] + db_file: Option, + + /// Start block + #[arg(short, long)] + start: u64, + + /// End block + #[arg(short, long)] + end: u64, +} + +#[tokio::main] +async fn main() -> Result<()> { + initialize_logger_and_env()?; + + let args = Args::parse(); + + let rpc_url = get_env_var("STARKNET_RPC_URL")?; + let private_key = get_env_var("STARKNET_PRIVATE_KEY")?; + let account_address = get_env_var("STARKNET_ACCOUNT_ADDRESS")?; + let verifier = get_env_var("FOSSIL_VERIFIER")?; + + let store_path = get_store_path(args.db_file)?; + + let (new_mmr_state, proof) = prove_mmr_update(&store_path, args.start, args.end).await?; + + let provider = StarknetProvider::new(&rpc_url)?; + + let account = StarknetAccount::new(provider.provider(), &private_key, &account_address)?; + + account + .verify_mmr_proof(&verifier, &new_mmr_state, proof) + .await?; + + Ok(()) +} diff --git a/crates/host/bin/verify_block_inclusion.rs b/crates/publisher/bin/verify_block_inclusion.rs similarity index 100% rename from crates/host/bin/verify_block_inclusion.rs rename to crates/publisher/bin/verify_block_inclusion.rs diff --git a/crates/host/src/accumulator.rs b/crates/publisher/src/accumulator.rs similarity index 78% rename from crates/host/src/accumulator.rs rename to crates/publisher/src/accumulator.rs index ef13340..b642127 100644 --- a/crates/host/src/accumulator.rs +++ b/crates/publisher/src/accumulator.rs @@ -2,13 +2,13 @@ use crate::db_access::{get_block_headers_by_block_range, DbConnection}; use crate::proof_generator::{ProofGenerator, ProofGeneratorError}; use crate::types::{BatchResult, ProofType}; -use common::{felt, string_array_to_felt_array, UtilsError}; +use common::UtilsError; use ethereum::get_finalized_block_hash; -use guest_types::{BatchProof, CombinedInput, GuestInput, GuestOutput}; -use mmr::{find_peaks, InStoreTableError, MMRError, PeaksOptions, MMR}; +use guest_types::{BatchProof, CombinedInput, GuestOutput, MMRInput}; +use mmr::{InStoreTableError, MMRError, PeaksOptions, MMR}; use mmr_utils::{initialize_mmr, MMRUtilsError, StoreManager}; use starknet_crypto::Felt; -use starknet_handler::MmrState; +use starknet_handler::{u256_from_hex, MmrState}; use store::{SqlitePool, StoreError, SubKey}; use thiserror::Error; use tracing::{debug, info, warn}; @@ -22,7 +22,7 @@ pub enum AccumulatorError { #[error("Expected Groth16 proof but got {got:?}")] ExpectedGroth16Proof { got: ProofType }, #[error("MMR root is not a valid Starknet field element: 
{0}")] - InvalidFeltHex(String), + InvalidU256Hex(String), #[error("SQLx error: {0}")] Sqlx(#[from] sqlx::Error), #[error("Utils error: {0}")] @@ -37,6 +37,8 @@ pub enum AccumulatorError { MMRUtils(#[from] MMRUtilsError), #[error("InStoreTable error: {0}")] InStoreTable(#[from] InStoreTableError), + #[error("StarknetHandler error: {0}")] + StarknetHandler(#[from] starknet_handler::StarknetHandlerError), } pub struct AccumulatorBuilder { @@ -44,17 +46,19 @@ pub struct AccumulatorBuilder { store_manager: StoreManager, mmr: MMR, pool: SqlitePool, - proof_generator: ProofGenerator, + proof_generator: ProofGenerator, total_batches: u64, current_batch: u64, previous_proofs: Vec, + skip_proof_verification: bool, } impl AccumulatorBuilder { pub async fn new( store_path: &str, - proof_generator: ProofGenerator, + proof_generator: ProofGenerator, batch_size: u64, + skip_proof_verification: bool, ) -> Result { let (store_manager, mmr, pool) = initialize_mmr(store_path).await?; debug!("MMR initialized at {}", store_path); @@ -68,6 +72,7 @@ impl AccumulatorBuilder { total_batches: 0, current_batch: 0, previous_proofs: Vec::new(), + skip_proof_verification, }) } @@ -98,26 +103,27 @@ impl AccumulatorBuilder { let current_leaves_count = self.mmr.leaves_count.get().await?; // Prepare guest input - let mmr_input = GuestInput::new( + let mmr_input = MMRInput::new( current_peaks.clone(), current_elements_count, current_leaves_count, - headers.iter().map(|h| h.block_hash.clone()).collect(), - self.previous_proofs.clone(), // Use the stored proofs + Some(headers.iter().map(|h| h.block_hash.clone()).collect()), + Some(self.previous_proofs.clone()), // Use the stored proofs ); - let combined_input = CombinedInput::new(headers.clone(), mmr_input); + let combined_input = + CombinedInput::new(headers.clone(), mmr_input, self.skip_proof_verification); // Generate appropriate proof let proof = if self.current_batch == self.total_batches - 1 { debug!("Generating final Groth16 proof for batch"); self.proof_generator - .generate_groth16_proof(&combined_input) + .generate_groth16_proof(combined_input) .await? } else { debug!("Generating intermediate STARK proof for batch"); self.proof_generator - .generate_stark_proof(&combined_input) + .generate_stark_proof(combined_input) .await? 
}; @@ -125,7 +131,7 @@ impl AccumulatorBuilder { let guest_output: GuestOutput = self.proof_generator.decode_journal(&proof)?; // TODO: Remove this and update MMR state after the proof is verified onchain - let new_mmr_state = self.update_mmr_state(&guest_output).await?; + let new_mmr_state = self.update_mmr_state(end_block, &guest_output).await?; // If this is a STARK proof, add it to previous_proofs for the next batch if let ProofType::Stark { @@ -141,12 +147,6 @@ impl AccumulatorBuilder { )); } - // Verify state after update - let final_peaks = self.mmr.get_peaks(PeaksOptions::default()).await?; - if final_peaks != guest_output.final_peaks().to_vec() { - return Err(AccumulatorError::PeaksVerificationError.into()); - } - self.current_batch += 1; debug!("Batch processing completed successfully"); @@ -160,6 +160,7 @@ impl AccumulatorBuilder { async fn update_mmr_state( &mut self, + latest_block_number: u64, guest_output: &GuestOutput, ) -> Result { debug!( @@ -190,34 +191,17 @@ impl AccumulatorBuilder { .await?; // Update all hashes in the store - for result in guest_output.append_results() { + for (index, hash) in guest_output.all_hashes() { // Store the hash in MMR - self.mmr - .hashes - .set(result.root_hash(), SubKey::Usize(result.element_index())) - .await?; + self.mmr.hashes.set(&hash, SubKey::Usize(index)).await?; // Update the mapping self.store_manager - .insert_value_index_mapping(&self.pool, result.root_hash(), result.element_index()) - .await?; - } - - // Update peaks - let peaks_indices = find_peaks(guest_output.elements_count()); - for (peak_hash, &peak_idx) in guest_output.final_peaks().iter().zip(peaks_indices.iter()) { - self.mmr - .hashes - .set(peak_hash, SubKey::Usize(peak_idx)) + .insert_value_index_mapping(&self.pool, &hash, index) .await?; } // Verify the state was properly updated - let stored_peaks = self.mmr.get_peaks(PeaksOptions::default()).await?; - - if stored_peaks != guest_output.final_peaks() { - return Err(AccumulatorError::PeaksVerificationError.into()); - } let bag = self.mmr.bag_the_peaks(None).await?; @@ -225,13 +209,13 @@ impl AccumulatorBuilder { .mmr .calculate_root_hash(&bag, self.mmr.elements_count.get().await?)?; - validate_felt_hex(&new_mmr_root_hash)?; + validate_u256_hex(&new_mmr_root_hash)?; let new_mmr_state = MmrState::new( - felt(&new_mmr_root_hash)?, + latest_block_number, + u256_from_hex(new_mmr_root_hash.trim_start_matches("0x"))?, guest_output.elements_count() as u64, guest_output.leaves_count() as u64, - string_array_to_felt_array(guest_output.final_peaks().to_vec())?, ); debug!("MMR state updated successfully"); @@ -321,23 +305,23 @@ impl AccumulatorBuilder { } } -/// Validates that a hex string represents a valid Starknet field element (252 bits) -fn validate_felt_hex(hex_str: &str) -> Result<(), AccumulatorError> { +/// Validates that a hex string represents a valid U256 (256-bit unsigned integer) +fn validate_u256_hex(hex_str: &str) -> Result<(), AccumulatorError> { // Check if it's a valid hex string with '0x' prefix if !hex_str.starts_with("0x") { - return Err(AccumulatorError::InvalidFeltHex(hex_str.to_string()).into()); + return Err(AccumulatorError::InvalidU256Hex(hex_str.to_string()).into()); } // Remove '0x' prefix and check if remaining string is valid hex let hex_value = &hex_str[2..]; if !hex_value.chars().all(|c| c.is_ascii_hexdigit()) { - return Err(AccumulatorError::InvalidFeltHex(hex_str.to_string()).into()); + return Err(AccumulatorError::InvalidU256Hex(hex_str.to_string()).into()); } - // Check length - maximum 63 
hex chars (252 bits = 63 hex digits) + // Check length - maximum 64 hex chars (256 bits = 64 hex digits) // Note: we allow shorter values as they're valid smaller numbers - if hex_value.len() > 63 { - return Err(AccumulatorError::InvalidFeltHex(hex_str.to_string()).into()); + if hex_value.len() > 64 { + return Err(AccumulatorError::InvalidU256Hex(hex_str.to_string()).into()); } Ok(()) diff --git a/crates/host/src/db_access.rs b/crates/publisher/src/db_access.rs similarity index 98% rename from crates/host/src/db_access.rs rename to crates/publisher/src/db_access.rs index 5d0c1b6..379a7cf 100644 --- a/crates/host/src/db_access.rs +++ b/crates/publisher/src/db_access.rs @@ -6,7 +6,7 @@ use eth_rlp_types::BlockHeader; use mmr_utils::{create_database_file, ensure_directory_exists}; use sqlx::{postgres::PgPoolOptions, PgPool, Pool, Postgres}; -use crate::{accumulator::AccumulatorError, HostError}; +use crate::{accumulator::AccumulatorError, PublisherError}; #[derive(Debug)] pub struct DbConnection { @@ -137,7 +137,7 @@ pub async fn get_block_headers_by_block_range( Ok(headers) } -pub fn get_store_path(db_file: Option) -> Result { +pub fn get_store_path(db_file: Option) -> Result { // Load the database file path from the environment or use the provided argument let store_path = if let Some(db_file) = db_file { db_file diff --git a/crates/host/src/lib.rs b/crates/publisher/src/lib.rs similarity index 56% rename from crates/host/src/lib.rs rename to crates/publisher/src/lib.rs index 01d8860..bafa529 100644 --- a/crates/host/src/lib.rs +++ b/crates/publisher/src/lib.rs @@ -9,17 +9,19 @@ pub mod accumulator; pub mod db_access; pub mod proof_generator; pub mod types; +pub mod validator; pub use accumulator::AccumulatorBuilder; -use methods::{MMR_GUEST_ELF, MMR_GUEST_ID}; +use methods::{MMR_APPEND_ELF, MMR_APPEND_ID}; use mmr_utils::MMRUtilsError; pub use proof_generator::{ProofGenerator, ProofType}; use starknet_crypto::Felt; -use starknet_handler::{provider::StarknetProvider, MmrState, StarknetHandlerError}; +use starknet_handler::{MmrState, StarknetHandlerError}; use thiserror::Error; +pub use validator::{ValidatorBuilder, ValidatorError}; #[derive(Error, Debug)] -pub enum HostError { +pub enum PublisherError { #[error("Verification result is empty")] VerificationError, #[error("Accumulator error: {0}")] @@ -28,17 +30,17 @@ pub enum HostError { StarknetHandler(#[from] StarknetHandlerError), #[error("MMRUtils error: {0}")] MMRUtils(#[from] MMRUtilsError), + #[error("Headers Validator error: {0}")] + Validator(#[from] ValidatorError), } -pub async fn update_mmr_and_verify_onchain( +pub async fn prove_mmr_update( db_file: &str, start_block: u64, end_block: u64, - rpc_url: &str, - verifier_address: &str, -) -> Result<(bool, MmrState), HostError> { - let proof_generator = ProofGenerator::new(MMR_GUEST_ELF, MMR_GUEST_ID); - let mut builder = AccumulatorBuilder::new(db_file, proof_generator, 1024).await?; +) -> Result<(MmrState, Vec), PublisherError> { + let proof_generator = ProofGenerator::new(MMR_APPEND_ELF, MMR_APPEND_ID, false); + let mut builder = AccumulatorBuilder::new(db_file, proof_generator, 1024, false).await?; tracing::debug!( db_file, @@ -56,23 +58,22 @@ pub async fn update_mmr_and_verify_onchain( "Successfully generated proof for block range" ); - let provider = StarknetProvider::new(rpc_url)?; - tracing::debug!(verifier_address, "Submitting proof for verification"); - - let verification_result = provider - .verify_groth16_proof_onchain(verifier_address, &proof_calldata) - .await?; + 
Ok((new_mmr_state, proof_calldata)) +} - let verified = *verification_result - .first() - .ok_or_else(|| HostError::VerificationError)? - == Felt::from(1); +pub async fn prove_headers_validity_and_inclusion( + headers: &Vec, + skip_proof_verification: Option, +) -> Result { + let skip_proof = match skip_proof_verification { + Some(skip) => skip, + None => false, + }; + let validator = ValidatorBuilder::new(skip_proof).await?; - if verified { - tracing::info!("Proof verification successful on-chain"); - } else { - tracing::warn!("Proof verification failed on-chain"); - } + let result = validator + .verify_blocks_validity_and_inclusion(headers) + .await?; - Ok((verified, new_mmr_state)) + Ok(result) } diff --git a/crates/host/src/proof_generator.rs b/crates/publisher/src/proof_generator.rs similarity index 55% rename from crates/host/src/proof_generator.rs rename to crates/publisher/src/proof_generator.rs index bc4fdb4..e09ee48 100644 --- a/crates/host/src/proof_generator.rs +++ b/crates/publisher/src/proof_generator.rs @@ -5,10 +5,10 @@ use garaga_rs::{ }, definitions::CurveID, }; -use guest_types::CombinedInput; use risc0_ethereum_contracts::encode_seal; use risc0_zkvm::{compute_image_id, default_prover, ExecutorEnv, ProverOpts, VerifierContext}; use serde::Deserialize; +use starknet_crypto::Felt; use thiserror::Error; use tokio::task; use tracing::{debug, info, warn}; @@ -27,70 +27,80 @@ pub enum ProofGeneratorError { CalldataError(String), #[error("Failed to spawn blocking task: {0}")] SpawnBlocking(String), - #[error("Task join error: {0}")] + #[error("Tokio task join error: {0}")] Join(#[from] tokio::task::JoinError), #[error("Risc0 serde error: {0}")] Risc0Serde(#[from] risc0_zkvm::serde::Error), } -pub struct ProofGenerator { +pub struct ProofGenerator { method_elf: &'static [u8], method_id: [u32; 8], + skip_proof_verification: bool, + _phantom: std::marker::PhantomData, } -impl ProofGenerator { - pub fn new(method_elf: &'static [u8], method_id: [u32; 8]) -> Self { +impl ProofGenerator +where + T: serde::Serialize + Clone + Send + 'static, +{ + pub fn new( + method_elf: &'static [u8], + method_id: [u32; 8], + skip_proof_verification: bool, + ) -> Self { Self { method_elf, method_id, + skip_proof_verification, + _phantom: std::marker::PhantomData, } } /// Generate a standard Stark proof for intermediate batches - pub async fn generate_stark_proof( - &self, - input: &CombinedInput, - ) -> Result { + pub async fn generate_stark_proof(&self, input: T) -> Result { info!("Generating STARK proof for intermediate batch"); - debug!("Input size: {} bytes", std::mem::size_of_val(input)); - - let method_elf = self.method_elf; - let method_id = self.method_id; - let input = input.clone(); - - let proof = task::spawn_blocking(move || -> eyre::Result { - debug!("Building executor environment"); - let env = ExecutorEnv::builder() - .write(&input) - .map_err(|e| { - warn!("Failed to write input to executor env: {}", e); - ProofGeneratorError::ExecutorEnvError(e.to_string()) - })? - .build() - .map_err(|e| { - warn!("Failed to build executor env: {}", e); - ProofGeneratorError::ExecutorEnvError(e.to_string()) - })?; - - debug!("Generating STARK proof with default prover"); - let receipt = default_prover() - .prove(env, method_elf) - .map_err(|e| { - warn!("Failed to generate STARK proof: {}", e); - ProofGeneratorError::ReceiptError(e.to_string()) - })? 
- .receipt; - - debug!("Computing image ID"); - let image_id = compute_image_id(method_elf) - .map_err(|e| ProofGeneratorError::ImageIdError(e.to_string()))?; - - info!("Successfully generated STARK proof"); - Ok(ProofType::Stark { - receipt, - image_id: image_id.as_bytes().to_vec(), - method_id, - }) + debug!("Input size: {} bytes", std::mem::size_of_val(&input)); + + let proof = task::spawn_blocking({ + let method_elf = self.method_elf; + let method_id = self.method_id; + let input = input.clone(); + + move || -> eyre::Result { + debug!("Building executor environment"); + let env = ExecutorEnv::builder() + .write(&input) + .map_err(|e| { + warn!("Failed to write input to executor env: {}", e); + ProofGeneratorError::ExecutorEnvError(e.to_string()) + })? + .build() + .map_err(|e| { + warn!("Failed to build executor env: {}", e); + ProofGeneratorError::ExecutorEnvError(e.to_string()) + })?; + + debug!("Generating STARK proof with default prover"); + let receipt = default_prover() + .prove(env, method_elf) + .map_err(|e| { + warn!("Failed to generate STARK proof: {}", e); + ProofGeneratorError::ReceiptError(e.to_string()) + })? + .receipt; + + debug!("Computing image ID"); + let image_id = compute_image_id(method_elf) + .map_err(|e| ProofGeneratorError::ImageIdError(e.to_string()))?; + + info!("Successfully generated STARK proof"); + Ok(ProofType::Stark { + receipt, + image_id: image_id.as_bytes().to_vec(), + method_id, + }) + } }) .await? .map_err(|e| ProofGeneratorError::SpawnBlocking(e.to_string()))?; @@ -99,15 +109,13 @@ impl ProofGenerator { } /// Generate a Groth16 proof for the final batch - pub async fn generate_groth16_proof( - &self, - input: &CombinedInput, - ) -> Result { + pub async fn generate_groth16_proof(&self, input: T) -> Result { info!("Generating Groth16 proof for final batch"); - debug!("Input size: {} bytes", std::mem::size_of_val(input)); + debug!("Input size: {} bytes", std::mem::size_of_val(&input)); let method_elf = self.method_elf; let input = input.clone(); + let skip_proof_verification = self.skip_proof_verification; let proof = task::spawn_blocking(move || -> eyre::Result { debug!("Building executor environment"); @@ -151,15 +159,23 @@ impl ProofGenerator { debug!("Journal size: {} bytes", journal.len()); debug!("Converting to Groth16 proof"); - let groth16_proof = - Groth16Proof::from_risc0(encoded_seal, image_id.as_bytes().to_vec(), journal); + let groth16_proof = if !skip_proof_verification { + Groth16Proof::from_risc0(encoded_seal, image_id.as_bytes().to_vec(), journal) + } else { + Default::default() + }; debug!("Generating calldata"); - let calldata = get_groth16_calldata(&groth16_proof, &get_risc0_vk(), CurveID::BN254) - .map_err(|e| { - warn!("Failed to generate calldata: {}", e); - ProofGeneratorError::CalldataError(e.to_string()) - })?; + let calldata = if !skip_proof_verification { + get_groth16_calldata(&groth16_proof, &get_risc0_vk(), CurveID::BN254).map_err( + |e| { + warn!("Failed to generate calldata: {}", e); + ProofGeneratorError::CalldataError(e.to_string()) + }, + )? + } else { + vec![Felt::ZERO] + }; info!("Successfully generated Groth16 proof"); Ok(ProofType::Groth16 { receipt, calldata }) @@ -170,10 +186,10 @@ impl ProofGenerator { Ok(proof) } - pub fn decode_journal Deserialize<'a>>( + pub fn decode_journal Deserialize<'a>>( &self, proof: &ProofType, - ) -> Result { + ) -> Result { let receipt = match proof { ProofType::Groth16 { receipt, .. } | ProofType::Stark { receipt, .. 
} => receipt, }; diff --git a/crates/host/src/types.rs b/crates/publisher/src/types.rs similarity index 100% rename from crates/host/src/types.rs rename to crates/publisher/src/types.rs diff --git a/crates/publisher/src/validator.rs b/crates/publisher/src/validator.rs new file mode 100644 index 0000000..dac91d2 --- /dev/null +++ b/crates/publisher/src/validator.rs @@ -0,0 +1,89 @@ +use crate::proof_generator::{ProofGenerator, ProofGeneratorError}; +use common::get_db_path; +use guest_types::{BlocksValidityInput, MMRInput}; +use methods::{BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID}; +use mmr::{MMRError, PeaksOptions, MMR}; +use mmr_utils::{initialize_mmr, StoreManager}; +use store::SqlitePool; + +#[derive(thiserror::Error, Debug)] +pub enum ValidatorError { + #[error("Utils error: {0}")] + Utils(#[from] common::UtilsError), + #[error("MMR error: {0}")] + MMRUtils(#[from] mmr_utils::MMRUtilsError), + #[error("Store error: {0}")] + Sqlx(#[from] sqlx::Error), + #[error("Store error: {0}")] + Store(#[from] store::StoreError), + #[error("MMR error: {0}")] + MMRError(#[from] MMRError), + #[error("ProofGenerator error: {0}")] + ProofGenerator(#[from] ProofGeneratorError), +} + +pub struct ValidatorBuilder { + store: StoreManager, + mmr: MMR, + pool: SqlitePool, + proof_generator: ProofGenerator, +} + +impl ValidatorBuilder { + pub async fn new(skip_proof: bool) -> Result { + let proof_generator = + ProofGenerator::new(BLOCKS_VALIDITY_ELF, BLOCKS_VALIDITY_ID, skip_proof); + + let store_path = get_db_path()?; + let (store, mmr, pool) = initialize_mmr(&store_path).await?; + + Ok(Self { + store, + mmr, + pool, + proof_generator, + }) + } + + pub async fn verify_blocks_validity_and_inclusion( + &self, + headers: &Vec, + ) -> Result { + let mut block_indexes = Vec::new(); + + for header in headers.iter() { + let index = self + .store + .get_element_index_for_value(&self.pool, &header.block_hash) + .await? 
+ .ok_or(ValidatorError::Store(store::StoreError::GetError))?; + block_indexes.push(index); + } + + // Get and verify current MMR state + let current_peaks = self.mmr.get_peaks(PeaksOptions::default()).await?; + let current_elements_count = self.mmr.elements_count.get().await?; + let current_leaves_count = self.mmr.leaves_count.get().await?; + + // Prepare guest input + let mmr_input = MMRInput::new( + current_peaks.clone(), + current_elements_count, + current_leaves_count, + None, + None, + ); + + let blocks_validity_input = + BlocksValidityInput::new(headers.clone(), mmr_input, block_indexes); + + let proof = self + .proof_generator + .generate_groth16_proof(blocks_validity_input) + .await?; + + let guest_output: bool = self.proof_generator.decode_journal(&proof)?; + + Ok(guest_output) + } +} diff --git a/crates/publisher/tests/0.db b/crates/publisher/tests/0.db new file mode 100644 index 0000000..64346c9 Binary files /dev/null and b/crates/publisher/tests/0.db differ diff --git a/crates/publisher/tests/mmr_proofs_test.rs b/crates/publisher/tests/mmr_proofs_test.rs new file mode 100644 index 0000000..862f7e2 --- /dev/null +++ b/crates/publisher/tests/mmr_proofs_test.rs @@ -0,0 +1,35 @@ +// use mmr::MMR; +use mmr_utils::initialize_mmr; +use std::fs; +use std::path::PathBuf; + +#[tokio::test] +async fn test_mmr_proofs() { + // Get path to the test file's directory + let test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests"); + fs::create_dir_all(&test_dir).expect("Failed to create test directory"); + + let binding = test_dir.join("0.db"); + let store_path = binding.to_str().unwrap(); + let (store_manager, mmr, pool) = initialize_mmr(store_path).await.unwrap(); + + let indexs = vec![1, 2, 4, 5, 8, 9, 11, 12, 16, 17, 19, 20, 23, 24, 26, 27]; + + let mut hashes = vec![]; + for index in indexs.iter() { + let hash = store_manager + .get_value_for_element_index(&pool, *index) + .await + .unwrap(); + hashes.push(hash.unwrap()); + } + + for (i, index) in indexs.iter().enumerate() { + let proof = mmr.get_proof(*index, None).await.unwrap(); + let result = mmr + .verify_proof(proof, hashes[i].clone(), None) + .await + .unwrap(); + assert!(result); + } +} diff --git a/crates/relayer/src/relayer.rs b/crates/relayer/src/relayer.rs index fc4f693..f65cf84 100644 --- a/crates/relayer/src/relayer.rs +++ b/crates/relayer/src/relayer.rs @@ -1,6 +1,10 @@ use alloy::{ - network::EthereumWallet, primitives::U256, providers::ProviderBuilder, - signers::local::PrivateKeySigner, sol_types::sol, + network::EthereumWallet, + primitives::U256, + providers::ProviderBuilder, + signers::local::PrivateKeySigner, + sol_types::sol, + transports::{RpcError, TransportErrorKind}, }; use common::{get_env_var, get_var, UtilsError}; // use eyre::Result; @@ -9,12 +13,12 @@ use tracing::info; #[derive(Debug, Error)] pub enum RelayerError { - #[error("Ethereum provider initialization failed: {0}")] - ProviderError(String), - #[error("Transaction failed: {0}")] - TransactionError(String), #[error("Utils error: {0}")] Utils(#[from] UtilsError), + #[error("RPC error: {0}")] + RpcError(#[from] RpcError), + #[error("Alloy contract error: {0}")] + AlloyContract(#[from] alloy_contract::Error), } sol!( @@ -53,8 +57,7 @@ impl Relayer { .with_recommended_fillers() .wallet(self.wallet.clone()) .on_builtin(&provider_url) - .await - .map_err(|e| RelayerError::ProviderError(e.to_string()))?; + .await?; info!("Connected to Ethereum provider at {}", provider_url); // Load the contract address and initialize the contract @@ -76,10 +79,7 
@@ impl Relayer { self.l2_recipient_addr ); - let pending_tx = call_builder - .send() - .await - .map_err(|e| RelayerError::TransactionError(e.to_string()))?; + let pending_tx = call_builder.send().await?; info!( "Transaction sent successfully. Tx hash: {:?}", pending_tx.tx_hash() diff --git a/crates/starknet-handler/Cargo.toml b/crates/starknet-handler/Cargo.toml index c830659..9c78a9c 100644 --- a/crates/starknet-handler/Cargo.toml +++ b/crates/starknet-handler/Cargo.toml @@ -7,6 +7,8 @@ edition = "2021" common = { path = "../common" } thiserror = { workspace = true } -tracing = { workspace = true } starknet = { workspace = true } -starknet-crypto = { workspace = true } \ No newline at end of file +starknet-crypto = { workspace = true } + +crypto-bigint = "0.5.5" +url = "2.5.4" diff --git a/crates/starknet-handler/src/account.rs b/crates/starknet-handler/src/account.rs index fe51f8e..ddb8d60 100644 --- a/crates/starknet-handler/src/account.rs +++ b/crates/starknet-handler/src/account.rs @@ -43,28 +43,28 @@ impl StarknetAccount { self.account.clone() } - pub async fn update_mmr_state( + pub async fn verify_mmr_proof( &self, - store_address: Felt, - latest_mmr_block: u64, + verifier_address: &str, new_mmr_state: &MmrState, - ) -> Result { - let selector = selector!("update_mmr_state"); + proof: Vec, + ) -> Result<(Felt, MmrState), StarknetHandlerError> { + let selector = selector!("verify_mmr_proof"); let mut calldata = vec![]; - calldata.push(Felt::from(latest_mmr_block)); new_mmr_state.encode(&mut calldata)?; + calldata.extend(proof.iter().cloned()); let tx = self .account .execute_v1(vec![starknet::core::types::Call { selector, calldata, - to: store_address, + to: felt(verifier_address)?, }]) .send() .await?; - Ok(tx.transaction_hash) + Ok((tx.transaction_hash, new_mmr_state.clone())) } } diff --git a/crates/starknet-handler/src/lib.rs b/crates/starknet-handler/src/lib.rs index ebb7dd0..91e7d6b 100644 --- a/crates/starknet-handler/src/lib.rs +++ b/crates/starknet-handler/src/lib.rs @@ -1,20 +1,19 @@ #![deny(unused_crate_dependencies)] +use crypto_bigint::U256 as CryptoBigIntU256; pub mod account; pub mod provider; use starknet::accounts::single_owner::SignError; use starknet::accounts::AccountError; -use starknet::core::{ - codec::{Decode, Encode}, - types::Felt, -}; +use starknet::core::codec::{Decode, Encode}; +use starknet::core::types::U256; use starknet::signers::local_wallet::SignError as LocalWalletSignError; use thiserror::Error; #[derive(Error, Debug)] pub enum StarknetHandlerError { #[error("Failed to parse: {0}")] - ParseError(String), + ParseError(#[from] url::ParseError), #[error("Failed to create selector: {0}")] SelectorError(String), #[error("Failed to execute transaction: {0}")] @@ -27,31 +26,87 @@ pub enum StarknetHandlerError { Utils(#[from] common::UtilsError), #[error("Encode error: {0}")] Encode(#[from] starknet::core::codec::Error), + #[error("Error parsing int: {0}")] + ParseIntError(#[from] std::num::ParseIntError), + #[error("Provider error: {0}")] + Provider(#[from] starknet::providers::ProviderError), } #[derive(Clone, Debug, Encode, Decode)] pub struct MmrState { - root_hash: Felt, + latest_block_number: u64, + root_hash: U256, elements_count: u64, leaves_count: u64, - peaks: Vec, } impl MmrState { - pub fn new(root_hash: Felt, elements_count: u64, leaves_count: u64, peaks: Vec) -> Self { + pub fn new( + latest_block_number: u64, + root_hash: U256, + elements_count: u64, + leaves_count: u64, + ) -> Self { Self { + latest_block_number, root_hash, 
elements_count, leaves_count, - peaks, } } - pub fn root_hash(&self) -> Felt { + pub fn latest_block_number(&self) -> u64 { + self.latest_block_number + } + + pub fn root_hash(&self) -> U256 { self.root_hash } + pub fn elements_count(&self) -> u64 { + self.elements_count + } + pub fn leaves_count(&self) -> u64 { self.leaves_count } } + +pub fn u256_from_hex(hex: &str) -> Result { + let crypto_bigint = CryptoBigIntU256::from_be_hex(hex); + + Ok(U256::from(crypto_bigint)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_u256_from_hex() { + // Test valid hex string + let result = + u256_from_hex("ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80") + .unwrap(); + assert_eq!( + result.to_string(), + "77814517325470205911140941194401928579557062014761831930645393041380819009408" + ); + + // Test max value + let result = + u256_from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff") + .unwrap(); + assert_eq!( + result.to_string(), + "115792089237316195423570985008687907853269984665640564039457584007913129639935" + ); + } + + #[test] + #[should_panic] + fn test_u256_from_hex_invalid_input() { + // Test invalid hex string (contains non-hex characters) + u256_from_hex("0xghijkl").unwrap(); + } +} diff --git a/crates/starknet-handler/src/provider.rs b/crates/starknet-handler/src/provider.rs index aec80f5..81a6f97 100644 --- a/crates/starknet-handler/src/provider.rs +++ b/crates/starknet-handler/src/provider.rs @@ -1,7 +1,5 @@ -use std::sync::Arc; - -use common::felt; use starknet::providers::Provider; +use std::sync::Arc; use crate::{MmrState, StarknetHandlerError}; use starknet::macros::selector; @@ -21,8 +19,7 @@ pub struct StarknetProvider { impl StarknetProvider { pub fn new(rpc_url: &str) -> Result { - let parsed_url = Url::parse(rpc_url) - .map_err(|_| StarknetHandlerError::ParseError(rpc_url.to_string()))?; + let parsed_url = Url::parse(rpc_url)?; Ok(Self { provider: Arc::new(JsonRpcClient::new(HttpTransport::new(parsed_url))), rpc_url: rpc_url.to_string(), @@ -37,36 +34,36 @@ impl StarknetProvider { self.provider.clone() } - pub async fn verify_groth16_proof_onchain( - &self, - verifier_address: &str, - calldata: &[Felt], - ) -> Result, StarknetHandlerError> { - tracing::info!("Verifying Groth16 proof onchain..."); - let contract_address = felt(verifier_address)?; - - let entry_point_selector = selector!("verify_groth16_proof_bn254"); - - let result = self - .provider - .call( - FunctionCall { - contract_address, - entry_point_selector, - calldata: calldata.to_vec(), - }, - BlockId::Tag(BlockTag::Latest), - ) - .await - .map_err(|e| StarknetHandlerError::TransactionError(e.to_string()))?; - - Ok(result) - } + // pub async fn verify_groth16_proof_onchain( + // &self, + // verifier_address: &str, + // calldata: &[Felt], + // ) -> Result, StarknetHandlerError> { + // tracing::info!("Verifying Groth16 proof onchain..."); + // let contract_address = felt(verifier_address)?; + + // let entry_point_selector = selector!("verify_groth16_proof_bn254"); + + // let result = self + // .provider + // .call( + // FunctionCall { + // contract_address, + // entry_point_selector, + // calldata: calldata.to_vec(), + // }, + // BlockId::Tag(BlockTag::Latest), + // ) + // .await + // .map_err(|e| StarknetHandlerError::TransactionError(e.to_string()))?; + + // Ok(result) + // } pub async fn get_latest_mmr_state( &self, l2_store_address: &Felt, - ) -> Result<(u64, MmrState), StarknetHandlerError> { + ) -> Result { let entry_point_selector = 
selector!("get_mmr_state"); let data = self @@ -79,16 +76,11 @@ impl StarknetProvider { }, BlockId::Tag(BlockTag::Latest), ) - .await - .map_err(|e| StarknetHandlerError::TransactionError(e.to_string()))?; - - let latest_mmr_block = - u64::from_str_radix(data[0].to_hex_string().trim_start_matches("0x"), 16) - .map_err(|_| StarknetHandlerError::ParseError(data[0].to_hex_string()))?; + .await?; - let mmr_state = MmrState::decode(&data[1..])?; + let mmr_state = MmrState::decode(&data)?; - Ok((latest_mmr_block, mmr_state)) + Ok(mmr_state) } pub async fn get_latest_relayed_block( @@ -107,12 +99,10 @@ impl StarknetProvider { }, BlockId::Tag(BlockTag::Latest), ) - .await - .map_err(|e| StarknetHandlerError::TransactionError(e.to_string()))?; + .await?; let block_number = - u64::from_str_radix(data[0].to_hex_string().trim_start_matches("0x"), 16) - .map_err(|_| StarknetHandlerError::ParseError(data[0].to_hex_string()))?; + u64::from_str_radix(data[0].to_hex_string().trim_start_matches("0x"), 16)?; Ok(block_number) } diff --git a/db-store/0.db b/db-store/0.db deleted file mode 100644 index e69de29..0000000 diff --git a/scripts/deploy.sh b/scripts/deploy.sh index a24810f..ccad1c9 100755 --- a/scripts/deploy.sh +++ b/scripts/deploy.sh @@ -3,13 +3,13 @@ # Ensure the script stops on the first error set -e -source ../.env +source .env -ETHEREUM_DIR="../contracts/ethereum" +ETHEREUM_DIR="contracts/ethereum" cd $ETHEREUM_DIR && forge script script/LocalTesting.s.sol:LocalSetup --broadcast --rpc-url $ANVIL_URL -L1_MESSAGE_SENDER=0xF94AB55a20B32AC37c3A105f12dB535986697945 +L1_MESSAGE_SENDER=0x364C7188028348566E38D762f6095741c49f492B # Function to wait for Katana to be ready # wait_for_katana() { @@ -37,7 +37,7 @@ scarb build # Declare and deploy Fossil Store contract echo "Declaring Fossil Store contract..." -FOSSILSTORE_HASH=$(starkli declare ./target/dev/store_Store.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +FOSSILSTORE_HASH=$(starkli declare ./target/dev/fossil_store_Store.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) echo "Class hash declared: $FOSSILSTORE_HASH" echo "Deploying Fossil Store contract..." @@ -67,24 +67,32 @@ echo "Deploying Groth16 Verifier contract..." VERIFIER_ADDRESS=$(starkli deploy $VERIFIER_HASH $ECIP_HASH --salt 1 | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) echo "Contract deployed at: $VERIFIER_ADDRESS" +echo "Declaring Fossil Verifier contract..." +FOSSIL_VERIFIER_HASH=$(starkli declare ./target/dev/verifier_FossilVerifier.contract_class.json --compiler-version 2.8.2 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo "Class hash declared: $FOSSIL_VERIFIER_HASH" + +echo "Deploying Fossil Verifier contract..." +FOSSIL_VERIFIER_ADDRESS=$(starkli deploy $FOSSIL_VERIFIER_HASH $VERIFIER_ADDRESS $FOSSILSTORE_ADDRESS --salt 1 -w | grep -o '0x[a-fA-F0-9]\{64\}' | head -1) +echo "Contract deployed at: $FOSSIL_VERIFIER_ADDRESS" + echo "All contracts deployed!" 
-# Fetch the current Ethereum block number using `cast` -ETH_BLOCK=$(cast block-number) -echo "Current Ethereum block number: $ETH_BLOCK" +# # Fetch the current Ethereum block number using `cast` +# ETH_BLOCK=$(cast block-number) +# echo "Current Ethereum block number: $ETH_BLOCK" -# Ensure `ETH_BLOCK` is a valid number before performing arithmetic -if [[ $ETH_BLOCK =~ ^[0-9]+$ ]]; then - # Subtract 256 from the current block number - ETH_BLOCK=$((ETH_BLOCK - 256)) - echo "Updated Ethereum block number: $ETH_BLOCK" +# # Ensure `ETH_BLOCK` is a valid number before performing arithmetic +# if [[ $ETH_BLOCK =~ ^[0-9]+$ ]]; then +# # Subtract 256 from the current block number +# ETH_BLOCK=$((ETH_BLOCK - 256)) +# echo "Updated Ethereum block number: $ETH_BLOCK" - # Run the Starkli command with the updated block number - starkli invoke $FOSSILSTORE_ADDRESS update_mmr_state $ETH_BLOCK 0x0 0x0 0x0 0x0 - echo "Updated MMR state on Starknet for testing with block number: $ETH_BLOCK" -else - echo "Failed to retrieve a valid block number from 'cast'." -fi +# # Run the Starkli command with the updated block number +# starkli invoke $FOSSILSTORE_ADDRESS update_mmr_state $ETH_BLOCK 0x0 0x0 0x0 0x0 +# echo "Updated MMR state on Starknet for testing with block number: $ETH_BLOCK" +# else +# echo "Failed to retrieve a valid block number from 'cast'." +# fi # Path to the .env file ENV_FILE="../../.env" @@ -106,7 +114,7 @@ update_env_var() { update_env_var "L2_MSG_PROXY" "$L1MESSAGEPROXY_ADDRESS" update_env_var "FOSSIL_STORE" "$FOSSILSTORE_ADDRESS" update_env_var "STARKNET_VERIFIER" "$VERIFIER_ADDRESS" - +update_env_var "FOSSIL_VERIFIER" "$FOSSIL_VERIFIER_ADDRESS" pwd source ../../.env diff --git a/scripts/lint.sh b/scripts/lint.sh index 3776477..755bd8a 100755 --- a/scripts/lint.sh +++ b/scripts/lint.sh @@ -7,6 +7,7 @@ cargo clippy \ -p mmr-utils \ -p relayer \ -p starknet-handler \ + -p guest-mmr \ -- \ -W clippy::single_match \ -W clippy::single_match_else \ diff --git a/scripts/run_relayer.sh b/scripts/run_relayer.sh index 8d795f4..4a0a02d 100755 --- a/scripts/run_relayer.sh +++ b/scripts/run_relayer.sh @@ -2,10 +2,10 @@ set -e -cd ../crates/relayer +cd crates/relayer while true; do - cargo run + cargo run --release echo "Waiting 180 seconds before next run..." sleep 180 done
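The accumulator hunk above widens root-hash validation from a 252-bit felt (at most 63 hex digits) to a full 256-bit U256 (at most 64 hex digits). A minimal, dependency-free sketch of that rule follows; the error type is a placeholder standing in for AccumulatorError::InvalidU256Hex, not the crate's actual type.

// Standalone sketch of the 256-bit hex validation introduced by this patch.
#[derive(Debug)]
struct HexError(String);

fn validate_u256_hex(hex_str: &str) -> Result<(), HexError> {
    // Must carry the "0x" prefix.
    let hex_value = hex_str
        .strip_prefix("0x")
        .ok_or_else(|| HexError(hex_str.to_string()))?;
    // Every remaining character must be a hex digit.
    if !hex_value.chars().all(|c| c.is_ascii_hexdigit()) {
        return Err(HexError(hex_str.to_string()));
    }
    // 256 bits = 64 hex digits at most; shorter strings are smaller values and stay valid.
    if hex_value.len() > 64 {
        return Err(HexError(hex_str.to_string()));
    }
    Ok(())
}

fn main() {
    // 64 digits are accepted under the new U256 bound (the old felt bound stopped at 63).
    assert!(validate_u256_hex(&format!("0x{}", "f".repeat(64))).is_ok());
    // 65 digits, a missing prefix, or non-hex characters are still rejected.
    assert!(validate_u256_hex(&format!("0x{}", "f".repeat(65))).is_err());
    assert!(validate_u256_hex("deadbeef").is_err());
    assert!(validate_u256_hex("0xghij").is_err());
}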
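The proof_generator rework makes ProofGenerator generic over its input and hands state to the blocking closure through a block expression, so only copies and clones cross into spawn_blocking rather than a borrow of self. A sketch of that capture pattern under the tokio runtime the crate already uses, with placeholder types and a dummy workload standing in for the real prover:

use std::marker::PhantomData;
use tokio::task;

static ELF: [u8; 4] = [0, 1, 2, 3];

struct Prover<T> {
    method_elf: &'static [u8],
    method_id: [u32; 8],
    _phantom: PhantomData<T>,
}

impl<T> Prover<T>
where
    T: Clone + Send + 'static,
{
    async fn prove(&self, input: T) -> Result<usize, task::JoinError> {
        task::spawn_blocking({
            // Copy/clone everything the closure needs here, so `self` itself
            // never has to satisfy the 'static bound of spawn_blocking.
            let method_elf = self.method_elf;
            let method_id = self.method_id;
            let input = input.clone();
            move || {
                // Dummy stand-in for the proving work done on the blocking thread.
                method_elf.len() + method_id.len() + std::mem::size_of_val(&input)
            }
        })
        .await
    }
}

#[tokio::main]
async fn main() {
    let prover: Prover<Vec<u8>> = Prover {
        method_elf: &ELF,
        method_id: [0; 8],
        _phantom: PhantomData,
    };
    let units = prover.prove(vec![1, 2, 3]).await.unwrap();
    println!("blocking task returned {units}");
}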
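Several hunks (RelayerError, StarknetHandlerError, the provider's block-number parsing) replace string-wrapping error variants and map_err closures with #[from] conversions, letting `?` propagate the source error directly. A small sketch of that pattern, assuming thiserror as a dependency; the enum and its variant are illustrative, not the crates' actual definitions:

use thiserror::Error;

#[derive(Debug, Error)]
enum HandlerError {
    // `?` converts a std::num::ParseIntError into this variant automatically.
    #[error("Error parsing int: {0}")]
    ParseInt(#[from] std::num::ParseIntError),
}

fn latest_relayed_block(hex: &str) -> Result<u64, HandlerError> {
    // Mirrors the provider change: no map_err closure, just `?`.
    Ok(u64::from_str_radix(hex.trim_start_matches("0x"), 16)?)
}

fn main() {
    assert_eq!(latest_relayed_block("0x10").unwrap(), 16);
    assert!(latest_relayed_block("0xzz").is_err());
}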