diff --git a/.gitignore b/.gitignore index efd7916b050..5ba61ed9b8f 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,5 @@ genesis.ssz # VSCode /.vscode + +.DS_Store diff --git a/Cargo.lock b/Cargo.lock index 481fe71df06..319ffb14049 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -37,7 +37,7 @@ dependencies = [ "eth2_keystore", "eth2_wallet", "filesystem", - "rand 0.9.0", + "rand 0.9.2", "regex", "rpassword", "serde", @@ -48,20 +48,31 @@ dependencies = [ "zeroize", ] +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + [[package]] name = "addr2line" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", ] [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aead" @@ -113,21 +124,21 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy 0.7.35", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] @@ -150,7 +161,7 @@ dependencies = [ "alloy-serde", "alloy-trie", "alloy-tx-macros", - "auto_impl 1.2.1", + "auto_impl 1.3.0", "c-kzg", "derive_more 2.0.1", "either", @@ -160,7 +171,7 @@ dependencies = [ "secp256k1", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -190,7 +201,7 @@ dependencies = [ "itoa", "serde", "serde_json", - "winnow", + "winnow 0.7.13", ] [[package]] @@ -203,14 +214,14 @@ dependencies = [ "alloy-rlp", "crc", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "alloy-eip2930" -version = "0.2.0" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbe3e16484669964c26ac48390245d84c410b1a5f968976076c17184725ef235" +checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -219,21 +230,21 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d4769c6ffddca380b0070d71c8b7f30bed375543fe76bb2f74ec0acf4b7cd16" +checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" dependencies = [ "alloy-primitives", "alloy-rlp", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "alloy-eips" -version = "1.0.42" +version = "1.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "07d9a64522a0db6ebcc4ff9c904e329e77dd737c2c25d30f1bdc32ca6c6ce334" +checksum = "7e867b5fd52ed0372a95016f3a37cbff95a9d5409230fbaef2d8ea00e8618098" dependencies = [ "alloy-eip2124", "alloy-eip2930", @@ -241,14 +252,14 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "alloy-serde", - "auto_impl 1.2.1", + "auto_impl 1.3.0", "c-kzg", "derive_more 2.0.1", "either", "serde", "serde_with", - "sha2 0.10.8", - "thiserror 2.0.12", + "sha2 0.10.9", + "thiserror 2.0.17", ] [[package]] @@ -289,16 +300,16 @@ dependencies = [ "const-hex", "derive_more 2.0.1", "foldhash 0.2.0", - "getrandom 0.3.1", + "getrandom 0.3.4", "hashbrown 0.16.0", - "indexmap 2.8.0", + "indexmap 2.12.0", "itoa", "k256 0.13.4", "keccak-asm", "paste", "proptest", "proptest-derive", - "rand 0.9.0", + "rand 0.9.2", "ruint", "rustc-hash 2.1.1", "serde", @@ -325,7 +336,7 @@ checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -345,14 +356,14 @@ dependencies = [ "itertools 0.14.0", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "alloy-serde" -version = "1.0.42" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "596cfa360922ba9af901cc7370c68640e4f72adb6df0ab064de32f21fec498d7" +checksum = "01e856112bfa0d9adc85bd7c13db03fad0e71d1d6fb4c2010e475b6718108236" dependencies = [ "alloy-primitives", "serde", @@ -370,7 +381,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -382,11 +393,11 @@ dependencies = [ "alloy-sol-macro-input", "const-hex", "heck 0.5.0", - "indexmap 2.8.0", + "indexmap 2.12.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "syn-solidity", "tiny-keccak", ] @@ -403,7 +414,7 @@ dependencies = [ "macro-string", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "syn-solidity", ] @@ -414,7 +425,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "954d1b2533b9b2c7959652df3076954ecb1122a28cc740aa84e7b0a49f6ac0a9" dependencies = [ "serde", - "winnow", + "winnow 0.7.13", ] [[package]] @@ -447,22 +458,16 @@ dependencies = [ [[package]] name = "alloy-tx-macros" -version = "1.0.42" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab54221eccefa254ce9f65b079c097b1796e48c21c7ce358230f8988d75392fb" +checksum = "7ccf423f6de62e8ce1d6c7a11fb7508ae3536d02e0d68aaeb05c8669337d0937" dependencies = [ "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -478,11 +483,20 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", "anstyle-parse", @@ -495,50 +509,50 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.7" +version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", - "once_cell", - "windows-sys 0.59.0", + "once_cell_polyfill", + "windows-sys 0.61.2", ] [[package]] name = "anyhow" -version = "1.0.97" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" dependencies = [ "derive_arbitrary", ] @@ -569,7 +583,7 @@ dependencies = [ "ark-serialize 0.3.0", "ark-std 0.3.0", "derivative", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "paste", "rustc_version 0.3.3", @@ -589,7 +603,7 @@ dependencies = [ "derivative", "digest 0.10.7", "itertools 0.10.5", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "paste", "rustc_version 0.4.1", @@ -610,7 +624,7 @@ dependencies = [ "digest 0.10.7", "educe", "itertools 0.13.0", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "paste", "zeroize", @@ -643,7 +657,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -652,7 +666,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-traits", "quote", "syn 1.0.109", @@ -664,7 +678,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-traits", "proc-macro2", "quote", @@ -677,11 
+691,11 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-traits", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -702,7 +716,7 @@ checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" dependencies = [ "ark-std 0.4.0", "digest 0.10.7", - "num-bigint", + "num-bigint 0.4.6", ] [[package]] @@ -714,7 +728,7 @@ dependencies = [ "ark-std 0.5.0", "arrayvec", "digest 0.10.7", - "num-bigint", + "num-bigint 0.4.6", ] [[package]] @@ -780,7 +794,7 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -792,7 +806,7 @@ checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "synstructure", ] @@ -804,7 +818,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -836,9 +850,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -848,32 +862,20 @@ dependencies = [ [[package]] name = "async-io" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ - "async-lock", + "autocfg", "cfg-if", "concurrent-queue", "futures-io", "futures-lite", "parking", "polling", - "rustix 0.38.44", + "rustix 1.1.2", "slab", - "tracing", - "windows-sys 0.59.0", -] - -[[package]] -name = "async-lock" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" -dependencies = [ - "event-listener 5.4.0", - "event-listener-strategy", - "pin-project-lite", + "windows-sys 0.61.2", ] [[package]] @@ -895,18 +897,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "async-trait" -version = "0.1.87" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d556ec1359574147ec0c4fc5eb525f3f23263a592b1a9c07e0a75b427de55c97" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -946,7 +948,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e2cdb6d5ed835199484bb92bb8b3edd526effe995c61732580439c1a67e2e9" dependencies = [ "base64 0.22.1", - "http 1.3.0", + "http 1.3.1", "log", "url", ] @@ -965,20 +967,20 @@ dependencies = [ [[package]] name = "auto_impl" -version = "1.2.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e12882f59de5360c748c4cbf569a042d5fb0eb515f7bea9c1f470b47f6ffbd73" +checksum = 
"ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "autocfg" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "axum" @@ -990,9 +992,11 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", + "hyper 1.8.1", + "hyper-util", "itoa", "matchit", "memchr", @@ -1001,10 +1005,15 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", "sync_wrapper 1.0.2", + "tokio", "tower 0.5.2", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -1016,7 +1025,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", "mime", @@ -1025,13 +1034,14 @@ dependencies = [ "sync_wrapper 1.0.2", "tower-layer", "tower-service", + "tracing", ] [[package]] name = "backtrace" -version = "0.3.74" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", @@ -1039,7 +1049,8 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets 0.52.6", + "serde", + "windows-link", ] [[package]] @@ -1060,6 +1071,16 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base256emoji" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" +dependencies = [ + "const-str", + "match-lookup", +] + [[package]] name = "base58" version = "0.1.0" @@ -1102,9 +1123,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb97d56060ee67d285efb8001fec9d2a4c710c32efd2e14b5cbb5ba71930fc2d" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "beacon_chain" @@ -1135,15 +1156,15 @@ dependencies = [ "lru", "maplit", "merkle_proof", - "metrics", - "mockall", + "metrics 0.2.0", + "mockall 0.13.1", "mockall_double", "once_cell", "oneshot_broadcast", "operation_pool", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "proto_array", - "rand 0.9.0", + "rand 0.9.2", "rayon", "safe_arith", "sensitive_url", @@ -1155,7 +1176,7 @@ dependencies = [ "ssz_types", "state_processing", "store", - "strum", + "strum 0.24.1", "superstruct", "task_executor", "tempfile", @@ -1165,6 +1186,7 @@ dependencies = [ "tree_hash", "tree_hash_derive", "types", + "zkvm_execution_layer", "zstd 0.13.3", ] @@ -1178,14 +1200,15 @@ dependencies = [ "clap_utils", "client", "directory", - "dirs", + "dirs 3.0.2", + "dummy_el", "environment", "eth2_config", "execution_layer", "genesis", "hex", "http_api", - "hyper 1.6.0", + "hyper 1.8.1", "lighthouse_network", "monitoring_api", "network_utils", @@ -1194,10 +1217,11 @@ dependencies = [ "serde_json", 
"slasher", "store", - "strum", + "strum 0.24.1", "task_executor", "tracing", "types", + "zkvm_execution_layer", ] [[package]] @@ -1211,7 +1235,7 @@ dependencies = [ "sensitive_url", "serde", "slot_clock", - "strum", + "strum 0.24.1", "task_executor", "tokio", "tracing", @@ -1229,12 +1253,12 @@ dependencies = [ "itertools 0.10.5", "lighthouse_network", "logging", - "metrics", + "metrics 0.2.0", "num_cpus", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "serde", "slot_clock", - "strum", + "strum 0.24.1", "task_executor", "tokio", "tokio-util", @@ -1263,7 +1287,7 @@ version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "cexpr", "clang-sys", "itertools 0.12.1", @@ -1276,7 +1300,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.100", + "syn 2.0.110", "which", ] @@ -1295,6 +1319,30 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" +[[package]] +name = "bitcode" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "648bd963d2e5d465377acecfb4b827f9f553b6bc97a8f61715779e9ed9e52b74" +dependencies = [ + "arrayvec", + "bitcode_derive", + "bytemuck", + "glam", + "serde", +] + +[[package]] +name = "bitcode_derive" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffebfc2d28a12b262c303cb3860ee77b91bd83b1f20f0bd2a9693008e2f55a9e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "bitcoin-io" version = "0.1.3" @@ -1319,9 +1367,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "bitvec" @@ -1354,6 +1402,30 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "blake2b_simd" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06e903a20b159e944f91ec8499fe1e55651480c541ea0a584f5d967c49ad9d99" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq 0.3.1", +] + +[[package]] +name = "blake3" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq 0.3.1", +] + [[package]] name = "block-buffer" version = "0.7.3" @@ -1393,6 +1465,15 @@ dependencies = [ "byte-tools", ] +[[package]] +name = "block2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] + [[package]] name = "bls" version = "0.2.0" @@ -1405,13 +1486,26 @@ dependencies = [ "ethereum_ssz", "fixed_bytes", "hex", - "rand 0.9.0", + "rand 0.9.2", "safe_arith", "serde", "tree_hash", "zeroize", ] +[[package]] +name = "bls12_381" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3c196a77437e7cc2fb515ce413a6401291578b5afc8ecb29a3c7ab957f05941" +dependencies = [ + 
"ff 0.12.1", + "group 0.12.1", + "pairing 0.22.0", + "rand_core 0.6.4", + "subtle", +] + [[package]] name = "blst" version = "0.3.16" @@ -1434,7 +1528,7 @@ dependencies = [ "byte-slice-cast", "ff 0.13.1", "group 0.13.0", - "pairing", + "pairing 0.23.0", "rand_core 0.6.4", "serde", "subtle", @@ -1494,9 +1588,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "byte-slice-cast" @@ -1510,6 +1604,26 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" +[[package]] +name = "bytemuck" +version = "1.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "byteorder" version = "1.5.0" @@ -1518,9 +1632,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" dependencies = [ "serde", ] @@ -1562,11 +1676,11 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.9" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -1586,10 +1700,10 @@ checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", - "semver 1.0.26", + "semver 1.0.27", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -1600,10 +1714,11 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.16" +version = "1.2.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" +checksum = "b97463e1064cb1b1c1384ad0a0b9c8abd0988e2a91f52606c80ef14aadb63e36" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -1620,9 +1735,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "cfg_aliases" @@ -1656,14 +1771,14 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.40" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" 
+checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", + "serde", "wasm-bindgen", "windows-link", ] @@ -1728,9 +1843,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.32" +version = "4.5.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83" +checksum = "aa8120877db0e5c011242f96806ce3c94e0737ab8108532a76a3300a01db2ab8" dependencies = [ "clap_builder", "clap_derive", @@ -1738,9 +1853,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.32" +version = "4.5.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8" +checksum = "02576b399397b659c26064fbc92a75fede9d18ffd5f80ca1cd74ddab167016e1" dependencies = [ "anstream", "anstyle", @@ -1751,21 +1866,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "clap_utils" @@ -1773,7 +1888,7 @@ version = "0.1.0" dependencies = [ "alloy-primitives", "clap", - "dirs", + "dirs 3.0.2", "eth2_network_config", "ethereum_ssz", "hex", @@ -1790,7 +1905,8 @@ dependencies = [ "beacon_chain", "beacon_processor", "directory", - "dirs", + "dirs 3.0.2", + "dummy_el", "environment", "eth2", "eth2_config", @@ -1803,11 +1919,12 @@ dependencies = [ "kzg", "lighthouse_network", "logging", - "metrics", + "metrics 0.2.0", "monitoring_api", "network", "operation_pool", - "rand 0.9.0", + "proof_generation_service", + "rand 0.9.2", "sensitive_url", "serde", "serde_json", @@ -1824,6 +1941,7 @@ dependencies = [ "tracing", "tracing-subscriber", "types", + "zkvm_execution_layer", ] [[package]] @@ -1845,12 +1963,12 @@ dependencies = [ "bs58 0.4.0", "coins-core", "digest 0.10.7", - "getrandom 0.2.15", + "getrandom 0.2.16", "hmac 0.12.1", "k256 0.11.6", "lazy_static", "serde", - "sha2 0.10.8", + "sha2 0.10.9", "thiserror 1.0.69", ] @@ -1862,12 +1980,12 @@ checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" dependencies = [ "bitvec 0.17.4", "coins-bip32", - "getrandom 0.2.15", + "getrandom 0.2.16", "hex", "hmac 0.12.1", "pbkdf2 0.11.0", "rand 0.8.5", - "sha2 0.10.8", + "sha2 0.10.9", "thiserror 1.0.69", ] @@ -1887,16 +2005,16 @@ dependencies = [ "ripemd", "serde", "serde_derive", - "sha2 0.10.8", + "sha2 0.10.9", "sha3", "thiserror 1.0.69", ] [[package]] name = "colorchoice" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "colored" @@ -1977,15 +2095,14 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.14.0" +version = "1.17.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0485bab839b018a8f1723fc5391819fea5f8f0f32288ef8a735fd096b6160c" +checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" dependencies = [ "cfg-if", "cpufeatures", - "hex", "proptest", - "serde", + "serde_core", ] [[package]] @@ -1994,11 +2111,17 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "const-str" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" + [[package]] name = "const_format" -version = "0.2.34" +version = "0.2.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" +checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" dependencies = [ "const_format_proc_macros", ] @@ -2020,6 +2143,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "context_deserialize" version = "0.1.0" @@ -2091,6 +2220,27 @@ dependencies = [ "memchr", ] +[[package]] +name = "core_affinity" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a034b3a7b624016c6e13f5df875747cc25f884156aad2abd12b6c46797971342" +dependencies = [ + "libc", + "num_cpus", + "winapi", +] + +[[package]] +name = "cpu-time" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9e393a7668fe1fad3075085b86c781883000b4ede868f43627b34a87c8b7ded" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "cpufeatures" version = "0.2.17" @@ -2102,9 +2252,9 @@ dependencies = [ [[package]] name = "crc" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" dependencies = [ "crc-catalog", ] @@ -2117,9 +2267,9 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] @@ -2166,6 +2316,19 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" +[[package]] +name = "crossbeam" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" +dependencies = [ + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -2194,6 +2357,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-queue" 
+version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -2202,9 +2374,9 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-bigint" @@ -2232,9 +2404,9 @@ dependencies = [ [[package]] name = "crypto-common" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array 0.14.7", "rand_core 0.6.4", @@ -2251,6 +2423,27 @@ dependencies = [ "subtle", ] +[[package]] +name = "csv" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52cd9d68cf7efc6ddfaaee42e7288d3a99d613d4b50f76ce9827ae0c6e14f938" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde_core", +] + +[[package]] +name = "csv-core" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704a3c26996a80471189265814dbc2c257598b96b8a7feae2d31ace646bb9782" +dependencies = [ + "memchr", +] + [[package]] name = "ctr" version = "0.8.0" @@ -2271,12 +2464,13 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.4.5" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3" +checksum = "73736a89c4aff73035ba2ed2e565061954da00d4970fc9ac25dcc85a2a20d790" dependencies = [ - "nix 0.29.0", - "windows-sys 0.59.0", + "dispatch2", + "nix 0.30.1", + "windows-sys 0.61.2", ] [[package]] @@ -2303,7 +2497,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2318,12 +2512,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core 0.20.10", - "darling_macro 0.20.10", + "darling_core 0.20.11", + "darling_macro 0.20.11", ] [[package]] @@ -2352,16 +2546,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2376,7 +2570,7 @@ dependencies = [ "quote", "serde", "strsim 0.11.1", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2392,13 +2586,13 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core 0.20.10", + "darling_core 0.20.11", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2409,7 +2603,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core 0.21.3", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2433,44 +2627,136 @@ dependencies = [ ] [[package]] -name = "data-encoding" -version = "2.8.0" +name = "dashmap" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "575f75dfd25738df5b91b8e43e14d44bda14637a58fae779fd2b064f8bf3e010" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core 0.9.12", +] [[package]] -name = "data-encoding-macro" -version = "0.1.17" +name = "dashu" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f9724adfcf41f45bf652b3995837669d73c4d49a1b5ac1ff82905ac7d9b5558" +checksum = "85b3e5ac1e23ff1995ef05b912e2b012a8784506987a2651552db2c73fb3d7e0" dependencies = [ - "data-encoding", - "data-encoding-macro-internal", + "dashu-base", + "dashu-float", + "dashu-int", + "dashu-macros", + "dashu-ratio", + "rustversion", ] [[package]] -name = "data-encoding-macro-internal" -version = "0.1.15" +name = "dashu-base" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0b80bf6b85aa68c58ffea2ddb040109943049ce3fbdf4385d0380aef08ef289" + +[[package]] +name = "dashu-float" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e4fdb82bd54a12e42fb58a800dcae6b9e13982238ce2296dc3570b92148e1f" +checksum = "85078445a8dbd2e1bd21f04a816f352db8d333643f0c9b78ca7c3d1df71063e7" dependencies = [ - "data-encoding", - "syn 2.0.100", + "dashu-base", + "dashu-int", + "num-modular", + "num-order", + "rustversion", + "static_assertions", ] [[package]] -name = "database_manager" -version = "0.1.0" +name = "dashu-int" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee99d08031ca34a4d044efbbb21dff9b8c54bb9d8c82a189187c0651ffdb9fbf" dependencies = [ - "beacon_chain", - "beacon_node", - "clap", - "clap_utils", - "environment", - "hex", - "serde", + "cfg-if", + "dashu-base", + "num-modular", + "num-order", + "rustversion", + "static_assertions", +] + +[[package]] +name = "dashu-macros" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93381c3ef6366766f6e9ed9cf09e4ef9dec69499baf04f0c60e70d653cf0ab10" +dependencies = [ + "dashu-base", + "dashu-float", + "dashu-int", + "dashu-ratio", + "paste", + "proc-macro2", + "quote", + "rustversion", +] + +[[package]] +name = "dashu-ratio" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e33b04dd7ce1ccf8a02a69d3419e354f2bbfdf4eb911a0b7465487248764c9" +dependencies = [ + "dashu-base", + "dashu-float", + "dashu-int", + "num-modular", + "num-order", + "rustversion", +] + +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + 
+[[package]] +name = "data-encoding-macro" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47ce6c96ea0102f01122a185683611bd5ac8d99e62bc59dd12e6bda344ee673d" +dependencies = [ + "data-encoding", + "data-encoding-macro-internal", +] + +[[package]] +name = "data-encoding-macro-internal" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" +dependencies = [ + "data-encoding", + "syn 2.0.110", +] + +[[package]] +name = "database_manager" +version = "0.1.0" +dependencies = [ + "beacon_chain", + "beacon_node", + "clap", + "clap_utils", + "environment", + "hex", + "serde", "store", - "strum", + "strum 0.24.1", "tracing", "types", ] @@ -2481,11 +2767,32 @@ version = "0.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b72465f46d518f6015d9cf07f7f3013a95dd6b9c2747c3d65ae0cce43929d14f" +[[package]] +name = "deepsize2" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b5184084af9beed35eecbf4c36baf6e26b9dc47b61b74e02f930c72a58e71b" +dependencies = [ + "deepsize_derive2", + "hashbrown 0.14.5", +] + +[[package]] +name = "deepsize_derive2" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f8817865cacf3b93b943ca06b0fc5fd8e99eabfdb7ea5d296efcbc4afc4f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "delay_map" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df941644b671f05f59433e481ba0d31ac10e3667de725236a4c0d587c496fba1" +checksum = "88e365f083a5cb5972d50ce8b1b2c9f125dc5ec0f50c0248cfb568ae59efcf0b" dependencies = [ "futures", "tokio", @@ -2520,11 +2827,12 @@ dependencies = [ [[package]] name = "der" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", + "pem-rfc7468", "zeroize", ] @@ -2537,18 +2845,19 @@ dependencies = [ "asn1-rs", "displaydoc", "nom", - "num-bigint", + "num-bigint 0.4.6", "num-traits", "rusticata-macros", ] [[package]] name = "deranged" -version = "0.3.11" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" dependencies = [ "powerfmt", + "serde_core", ] [[package]] @@ -2562,28 +2871,61 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive-new" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "derive-new" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cdc8d50f426189eef89dac62fabfa0abb27d5cc008f25bf4156a0203325becc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "derive-where" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" +dependencies = [ + 
"proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "derive_arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "derive_more" -version = "0.99.19" +version = "0.99.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" dependencies = [ "convert_case 0.4.0", "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2612,7 +2954,8 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", + "unicode-xid", ] [[package]] @@ -2623,7 +2966,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "unicode-xid", ] @@ -2672,7 +3015,16 @@ version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" dependencies = [ - "dirs-sys", + "dirs-sys 0.3.7", +] + +[[package]] +name = "dirs" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" +dependencies = [ + "dirs-sys 0.4.1", ] [[package]] @@ -2686,11 +3038,23 @@ dependencies = [ "winapi", ] +[[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.48.0", +] + [[package]] name = "discv5" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20b702c8491b3325866a4935d0b5101e49144d74540384243b6293794aad6fa" +checksum = "f170f4f6ed0e1df52bf43b403899f0081917ecf1500bfe312505cc3b515a8899" dependencies = [ "aes 0.8.4", "aes-gcm", @@ -2709,16 +3073,28 @@ dependencies = [ "lru", "more-asserts", "multiaddr", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "rand 0.8.5", "smallvec", - "socket2", + "socket2 0.5.10", "tokio", "tracing", "uint 0.10.0", "zeroize", ] +[[package]] +name = "dispatch2" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags 2.10.0", + "block2", + "libc", + "objc2", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -2727,7 +3103,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2739,7 +3115,7 @@ dependencies = [ "eth2", "futures", "logging", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "slot_clock", "task_executor", "tokio", @@ -2754,18 +3130,46 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" +[[package]] +name = "downcast-rs" +version = "1.2.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + [[package]] name = "dtoa" version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04" +[[package]] +name = "dummy_el" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum", + "clap", + "hex", + "jsonwebtoken", + "serde", + "serde_json", + "tokio", + "tracing", + "tracing-subscriber", +] + [[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + [[package]] name = "ecdsa" version = "0.14.8" @@ -2784,7 +3188,7 @@ version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ - "der 0.7.9", + "der 0.7.10", "digest 0.10.7", "elliptic-curve 0.13.8", "rfc6979 0.4.0", @@ -2805,15 +3209,15 @@ dependencies = [ [[package]] name = "ed25519-dalek" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" dependencies = [ "curve25519-dalek", "ed25519", "rand_core 0.6.4", "serde", - "sha2 0.10.8", + "sha2 0.10.9", "subtle", "zeroize", ] @@ -2827,7 +3231,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2878,7 +3282,7 @@ dependencies = [ "itertools 0.14.0", "serde", "serde_json", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -2912,7 +3316,7 @@ dependencies = [ "blstrs", "ff 0.13.1", "group 0.13.0", - "pairing", + "pairing 0.23.0", "subtle", ] @@ -2941,7 +3345,7 @@ dependencies = [ "ekzg-bls12-381", "ekzg-maybe-rayon", "ekzg-polynomial", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -2988,6 +3392,12 @@ dependencies = [ "serde_json", ] +[[package]] +name = "elf" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4445909572dbd556c457c849c4ca58623d84b27c8fff1e74b0b4227d8b90d17b" + [[package]] name = "elliptic-curve" version = "0.12.3" @@ -3020,6 +3430,8 @@ dependencies = [ "ff 0.13.1", "generic-array 0.14.7", "group 0.13.0", + "hkdf", + "pem-rfc7468", "pkcs8 0.10.2", "rand_core 0.6.4", "sec1 0.7.3", @@ -3037,6 +3449,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "endian-type" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" + [[package]] name = "enr" version = "0.13.0" @@ -3065,27 +3483,39 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "enum-ordinalize" -version = "4.3.0" +version = "4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea0dcfa4e54eeb516fe454635a95753ddd39acda650ce703031c6973e315dd5" +checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" dependencies = [ "enum-ordinalize-derive", ] [[package]] name = "enum-ordinalize-derive" -version = "4.3.1" 
+version = "4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" +checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", +] + +[[package]] +name = "enum_dispatch" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.110", ] [[package]] @@ -3128,12 +3558,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.10" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3152,7 +3582,7 @@ dependencies = [ "scrypt 0.10.0", "serde", "serde_json", - "sha2 0.10.8", + "sha2 0.10.9", "sha3", "thiserror 1.0.69", "uuid 0.8.2", @@ -3177,7 +3607,7 @@ dependencies = [ "multiaddr", "pretty_reqwest_error", "proto_array", - "rand 0.9.0", + "rand 0.9.2", "reqwest 0.11.27", "reqwest-eventsource", "sensitive_url", @@ -3206,7 +3636,7 @@ dependencies = [ "bls", "ethereum_hashing", "hex", - "num-bigint", + "num-bigint 0.4.6", "serde", "serde_yaml", ] @@ -3233,7 +3663,7 @@ dependencies = [ "hex", "hmac 0.11.0", "pbkdf2 0.8.0", - "rand 0.9.0", + "rand 0.9.2", "scrypt 0.7.0", "serde", "serde_json", @@ -3274,7 +3704,7 @@ dependencies = [ "eth2_key_derivation", "eth2_keystore", "hex", - "rand 0.9.0", + "rand 0.9.2", "serde", "serde_json", "serde_repr", @@ -3348,7 +3778,7 @@ checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" dependencies = [ "cpufeatures", "ring", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -3366,9 +3796,9 @@ dependencies = [ [[package]] name = "ethereum_ssz" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca8ba45b63c389c6e115b095ca16381534fdcc03cf58176a3f8554db2dbe19b" +checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" dependencies = [ "alloy-primitives", "arbitrary", @@ -3382,14 +3812,14 @@ dependencies = [ [[package]] name = "ethereum_ssz_derive" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd55d08012b4e0dfcc92b8d6081234df65f2986ad34cc76eeed69c5e2ce7506" +checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -3431,7 +3861,7 @@ dependencies = [ "rlp-derive", "serde", "serde_json", - "strum", + "strum 0.24.1", "syn 1.0.109", "thiserror 1.0.69", "tiny-keccak", @@ -3445,9 +3875,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9713f525348e5dde025d09b0a4217429f8074e8ff22c886263cc191e87d8216" dependencies = [ "ethers-core", - "getrandom 0.2.15", + "getrandom 0.2.16", "reqwest 0.11.27", - "semver 1.0.26", + "semver 1.0.27", "serde", "serde-aux", "serde_json", @@ -3488,13 +3918,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a1a9e0597aa6b2fdc810ff58bc95e4eeaa2c219b3e615ed025106ecb027407d8" dependencies = [ "async-trait", - "auto_impl 1.2.1", + "auto_impl 1.3.0", "base64 0.13.1", "ethers-core", "futures-core", "futures-timer", "futures-util", - "getrandom 0.2.15", + "getrandom 0.2.16", "hashers", "hex", "http 0.2.12", @@ -3530,7 +3960,7 @@ dependencies = [ "ethers-core", "hex", "rand 0.8.5", - "sha2 0.10.8", + "sha2 0.10.9", "thiserror 1.0.69", ] @@ -3542,9 +3972,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "5.4.0" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", @@ -3553,11 +3983,11 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "event-listener 5.4.0", + "event-listener 5.4.1", "pin-project-lite", ] @@ -3622,10 +4052,10 @@ dependencies = [ "lighthouse_version", "logging", "lru", - "metrics", - "parking_lot 0.12.3", + "metrics 0.2.0", + "parking_lot 0.12.5", "pretty_reqwest_error", - "rand 0.9.0", + "rand 0.9.2", "reqwest 0.11.27", "sensitive_url", "serde", @@ -3634,7 +4064,7 @@ dependencies = [ "slot_clock", "ssz_types", "state_processing", - "strum", + "strum 0.24.1", "superstruct", "task_executor", "tempfile", @@ -3649,6 +4079,16 @@ dependencies = [ "zeroize", ] +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + [[package]] name = "fake-simd" version = "0.1.2" @@ -3680,7 +4120,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" dependencies = [ "arrayvec", - "auto_impl 1.2.1", + "auto_impl 1.3.0", "bytes", ] @@ -3691,7 +4131,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" dependencies = [ "arrayvec", - "auto_impl 1.2.1", + "auto_impl 1.3.0", "bytes", ] @@ -3711,6 +4151,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ + "bitvec 1.0.1", "rand_core 0.6.4", "subtle", ] @@ -3722,10 +4163,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" dependencies = [ "bitvec 1.0.1", + "byteorder", + "ff_derive", "rand_core 0.6.4", "subtle", ] +[[package]] +name = "ff_derive" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f10d12652036b0e99197587c6ba87a8fc3031986499973c030d8b44fcc151b60" +dependencies = [ + "addchain", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "ffi-opaque" version = "2.0.1" @@ -3748,6 +4206,16 @@ dependencies = [ "rustc_version 0.4.1", ] +[[package]] +name 
= "fields" +version = "0.12.0" +source = "git+https://github.com/0xPolygonHermez/pil2-proofman.git?tag=v0.12.0#3e8a367a50193803b09a6d2d272d9ab01129ad44" +dependencies = [ + "num-bigint 0.4.6", + "paste", + "serde", +] + [[package]] name = "filesystem" version = "0.1.0" @@ -3756,6 +4224,12 @@ dependencies = [ "windows-acl", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" + [[package]] name = "fixed-hash" version = "0.8.0" @@ -3778,9 +4252,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.1.0" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" +checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" dependencies = [ "crc32fast", "libz-sys", @@ -3795,9 +4269,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "foldhash" @@ -3828,7 +4302,7 @@ dependencies = [ "ethereum_ssz", "ethereum_ssz_derive", "logging", - "metrics", + "metrics 0.2.0", "proto_array", "state_processing", "store", @@ -3840,9 +4314,9 @@ dependencies = [ [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -3930,9 +4404,9 @@ checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" dependencies = [ "futures-core", "pin-project-lite", @@ -3956,7 +4430,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -3966,7 +4440,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" dependencies = [ "futures-io", - "rustls 0.23.23", + "rustls 0.23.35", "rustls-pki-types", ] @@ -4016,17 +4490,16 @@ dependencies = [ ] [[package]] -name = "generator" -version = "0.8.4" +name = "gcd" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" -dependencies = [ - "cfg-if", - "libc", - "log", - "rustversion", - "windows 0.58.0", -] +checksum = "1d758ba1b47b00caf47f24925c0074ecb20d6dfcffe7f6d53395c0465674841a" + +[[package]] +name = "gen_ops" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "304de19db7028420975a296ab0fcbbc8e69438c4ed254a1e41e2a7f37d5f0e0a" [[package]] name = "generic-array" @@ -4065,27 +4538,41 @@ dependencies = [ [[package]] name = "getrandom" 
-version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", + "js-sys", "libc", - "wasi 0.13.3+wasi-0.2.2", - "windows-targets 0.52.6", + "r-efi", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "getset" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf0fc11e47561d47397154977bc219f4cf809b2974facc3ccb3b89e2436f912" +dependencies = [ + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.110", ] [[package]] @@ -4100,15 +4587,21 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.1" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" + +[[package]] +name = "glam" +version = "0.30.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "bd47b05dddf0005d850e5644cae7f2b14ac3df487979dbfff3b56f20b1a6ae46" [[package]] name = "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "graffiti_file" @@ -4129,6 +4622,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff 0.12.1", + "memuse", "rand_core 0.6.4", "subtle", ] @@ -4148,9 +4642,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.26" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" dependencies = [ "bytes", "fnv", @@ -4158,7 +4652,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.8.0", + "indexmap 2.12.0", "slab", "tokio", "tokio-util", @@ -4167,17 +4661,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.8" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.3.0", - "indexmap 2.8.0", + "http 1.3.1", + "indexmap 2.12.0", "slab", "tokio", "tokio-util", @@ -4186,32 +4680,129 @@ dependencies = [ [[package]] name = "half" -version = "2.4.1" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" 
dependencies = [ "cfg-if", "crunchy", + "zerocopy", ] [[package]] -name = "hash-db" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" - -[[package]] -name = "hash256-std-hasher" -version = "0.15.2" +name = "halo2" +version = "0.1.0-beta.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92c171d55b98633f4ed3860808f004099b36c1cc29c42cfc53aa8591b21efcf2" +checksum = "2a23c779b38253fe1538102da44ad5bd5378495a61d2c4ee18d64eaa61ae5995" dependencies = [ - "crunchy", + "halo2_proofs", ] [[package]] -name = "hashbrown" -version = "0.12.3" +name = "halo2_proofs" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e925780549adee8364c7f2b685c753f6f3df23bde520c67416e93bf615933760" +dependencies = [ + "blake2b_simd", + "ff 0.12.1", + "group 0.12.1", + "pasta_curves 0.4.1", + "rand_core 0.6.4", + "rayon", +] + +[[package]] +name = "halo2curves" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d380afeef3f1d4d3245b76895172018cfb087d9976a7cabcd5597775b2933e07" +dependencies = [ + "blake2", + "digest 0.10.7", + "ff 0.13.1", + "group 0.13.0", + "halo2derive", + "hex", + "lazy_static", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "pairing 0.23.0", + "pasta_curves 0.5.1", + "paste", + "rand 0.8.5", + "rand_core 0.6.4", + "rayon", + "serde", + "serde_arrays", + "sha2 0.10.9", + "static_assertions", + "subtle", + "unroll", +] + +[[package]] +name = "halo2curves" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b756596082144af6e57105a20403b7b80fe9dccd085700b74fae3af523b74dba" +dependencies = [ + "blake2", + "digest 0.10.7", + "ff 0.13.1", + "group 0.13.0", + "halo2derive", + "hex", + "lazy_static", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "pairing 0.23.0", + "paste", + "rand 0.8.5", + "rand_core 0.6.4", + "rayon", + "serde", + "serde_arrays", + "sha2 0.10.9", + "static_assertions", + "subtle", + "unroll", +] + +[[package]] +name = "halo2derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdb99e7492b4f5ff469d238db464131b86c2eaac814a78715acba369f64d2c76" +dependencies = [ + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "hash-db" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" + +[[package]] +name = "hash256-std-hasher" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92c171d55b98633f4ed3860808f004099b36c1cc29c42cfc53aa8591b21efcf2" +dependencies = [ + "crunchy", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" @@ -4223,17 +4814,18 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", + "serde", ] [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" 
dependencies = [ "allocator-api2", "equivalent", - "foldhash 0.1.4", + "foldhash 0.1.5", ] [[package]] @@ -4279,7 +4871,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "hashbrown 0.15.2", + "hashbrown 0.15.5", ] [[package]] @@ -4324,7 +4916,7 @@ name = "health_metrics" version = "0.1.0" dependencies = [ "eth2", - "metrics", + "metrics 0.2.0", "procfs", "psutil", ] @@ -4349,15 +4941,9 @@ checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hermit-abi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" - -[[package]] -name = "hermit-abi" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" @@ -4399,10 +4985,10 @@ dependencies = [ "idna", "ipnet", "once_cell", - "rand 0.9.0", + "rand 0.9.2", "ring", - "socket2", - "thiserror 2.0.12", + "socket2 0.5.10", + "thiserror 2.0.17", "tinyvec", "tokio", "tracing", @@ -4421,11 +5007,11 @@ dependencies = [ "ipconfig", "moka", "once_cell", - "parking_lot 0.12.3", - "rand 0.9.0", + "parking_lot 0.12.5", + "rand 0.9.2", "resolv-conf", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", ] @@ -4460,22 +5046,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "hostname" -version = "0.3.1" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" dependencies = [ - "libc", - "match_cfg", - "winapi", + "windows-sys 0.61.2", ] [[package]] @@ -4491,9 +5066,9 @@ dependencies = [ [[package]] name = "http" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a761d192fbf18bdef69f5ceedd0d1333afcbda0ee23840373b8317570d23c65" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", @@ -4518,7 +5093,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.3.0", + "http 1.3.1", ] [[package]] @@ -4529,7 +5104,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "pin-project-lite", ] @@ -4557,13 +5132,13 @@ dependencies = [ "lighthouse_version", "logging", "lru", - "metrics", + "metrics 0.2.0", "network", "network_utils", "operation_pool", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "proto_array", - "rand 0.9.0", + "rand 0.9.2", "safe_arith", "sensitive_url", "serde", @@ -4571,7 +5146,7 @@ dependencies = [ "slot_clock", "state_processing", "store", - "sysinfo", + "sysinfo 0.26.9", "system_health", "task_executor", "tokio", @@ -4593,7 +5168,7 @@ dependencies = 
[ "lighthouse_version", "logging", "malloc_utils", - "metrics", + "metrics 0.2.0", "network_utils", "reqwest 0.11.27", "serde", @@ -4620,9 +5195,18 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" -version = "2.1.0" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" + +[[package]] +name = "hybrid-array" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +checksum = "f2d35805454dc9f8662a98d6d61886ffe26bd465f5960e0e55345c70d5c0d2a9" +dependencies = [ + "typenum", +] [[package]] name = "hyper" @@ -4634,14 +5218,14 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.3.26", + "h2 0.3.27", "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -4650,20 +5234,22 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", - "h2 0.4.8", - "http 1.3.0", + "futures-core", + "h2 0.4.12", + "http 1.3.1", "http-body 1.0.1", "httparse", "httpdate", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -4689,7 +5275,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "pin-project-lite", "tokio", @@ -4711,18 +5297,23 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", - "hyper 1.6.0", + "hyper 1.8.1", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -4730,16 +5321,17 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.61" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", - "windows-core 0.52.0", + "windows-core 0.62.2", ] [[package]] @@ -4753,21 +5345,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] 
[[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -4776,99 +5369,61 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" dependencies = [ - "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", + "icu_locale_core", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -4877,9 +5432,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -4888,9 +5443,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -4939,12 +5494,12 @@ dependencies = [ "attohttpc", "bytes", "futures", - "http 1.3.0", + "http 1.3.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "log", - "rand 0.9.0", + "rand 0.9.2", "tokio", "url", "xmltree", @@ -4985,9 +5540,15 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] +[[package]] +name = "indenter" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5" + [[package]] name = "indexmap" version = "1.9.3" @@ -4996,18 +5557,20 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", + "serde", ] [[package]] name = "indexmap" -version = "2.8.0" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" dependencies = [ "arbitrary", "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.16.0", "serde", + "serde_core", ] [[package]] @@ -5020,9 +5583,9 @@ dependencies = [ "eth2_keystore", "filesystem", "lockfile", - "metrics", - "parking_lot 0.12.3", - "rand 0.9.0", + "metrics 0.2.0", + "parking_lot 0.12.5", + "rand 0.9.2", "reqwest 0.11.27", "serde", "serde_json", @@ -5092,8 +5655,8 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2", - "widestring 1.1.0", + "socket2 0.5.10", + "widestring 1.2.1", "windows-sys 0.48.0", "winreg", ] @@ -5104,22 +5667,32 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "iri-string" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" -version = "0.4.16" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ - "hermit-abi 0.5.0", + "hermit-abi 0.5.2", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = 
"itertools" @@ -5165,18 +5738,19 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jobserver" -version = "0.1.32" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ + "getrandom 0.3.4", "libc", ] [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" dependencies = [ "once_cell", "wasm-bindgen", @@ -5197,6 +5771,20 @@ dependencies = [ "simple_asn1", ] +[[package]] +name = "jubjub" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a575df5f985fe1cd5b2b05664ff6accfc46559032b954529fd225a2168d27b0f" +dependencies = [ + "bitvec 1.0.1", + "bls12_381", + "ff 0.12.1", + "group 0.12.1", + "rand_core 0.6.4", + "subtle", +] + [[package]] name = "k256" version = "0.11.6" @@ -5206,7 +5794,7 @@ dependencies = [ "cfg-if", "ecdsa 0.14.8", "elliptic-curve 0.12.3", - "sha2 0.10.8", + "sha2 0.10.9", "sha3", ] @@ -5221,7 +5809,7 @@ dependencies = [ "elliptic-curve 0.13.8", "once_cell", "serdect", - "sha2 0.10.8", + "sha2 0.10.9", "signature 2.2.0", ] @@ -5352,25 +5940,25 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.175" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libloading" -version = "0.8.6" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-link", ] [[package]] name = "libm" -version = "0.2.11" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libmdbx" @@ -5379,11 +5967,11 @@ source = "git+https://github.com/sigp/libmdbx-rs?rev=e6ff4b9377c1619bcf0bfdf52be dependencies = [ "bitflags 1.3.2", "byteorder", - "derive_more 0.99.19", + "derive_more 0.99.20", "indexmap 1.9.3", "libc", "mdbx-sys", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "thiserror 1.0.69", ] @@ -5397,7 +5985,7 @@ dependencies = [ "either", "futures", "futures-timer", - "getrandom 0.2.15", + "getrandom 0.2.16", "libp2p-allow-block-list", "libp2p-connection-limits", "libp2p-core", @@ -5416,7 +6004,7 @@ dependencies = [ "multiaddr", "pin-project", "rw-stream-sink", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -5455,12 +6043,12 @@ dependencies = [ "multiaddr", "multihash", "multistream-select", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "pin-project", "quick-protobuf", "rand 0.8.5", "rw-stream-sink", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "unsigned-varint 0.8.0", "web-time", @@ -5477,7 +6065,7 @@ dependencies = [ "hickory-resolver", "libp2p-core", "libp2p-identity", - "parking_lot 
0.12.3", + "parking_lot 0.12.5", "smallvec", "tracing", ] @@ -5487,7 +6075,7 @@ name = "libp2p-gossipsub" version = "0.50.0" source = "git+https://github.com/sigp/rust-libp2p.git?rev=5acdf89a65d64098f9346efa5769e57bcd19dea9#5acdf89a65d64098f9346efa5769e57bcd19dea9" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "asynchronous-codec", "base64 0.22.1", "byteorder", @@ -5496,7 +6084,7 @@ dependencies = [ "fnv", "futures", "futures-timer", - "getrandom 0.2.15", + "getrandom 0.2.16", "hashlink 0.10.0", "hex_fmt", "libp2p-core", @@ -5507,7 +6095,7 @@ dependencies = [ "quick-protobuf-codec", "rand 0.8.5", "regex", - "sha2 0.10.8", + "sha2 0.10.9", "tracing", "web-time", ] @@ -5529,7 +6117,7 @@ dependencies = [ "quick-protobuf", "quick-protobuf-codec", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", ] @@ -5547,8 +6135,8 @@ dependencies = [ "multihash", "quick-protobuf", "rand 0.8.5", - "sha2 0.10.8", - "thiserror 2.0.12", + "sha2 0.10.9", + "thiserror 2.0.17", "tracing", "zeroize", ] @@ -5567,7 +6155,7 @@ dependencies = [ "libp2p-swarm", "rand 0.8.5", "smallvec", - "socket2", + "socket2 0.5.10", "tokio", "tracing", ] @@ -5590,9 +6178,9 @@ dependencies = [ [[package]] name = "libp2p-mplex" -version = "0.43.0" +version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8aaa6fee3722e355443058472fc4705d78681bc2d8e447a0bdeb3fecf40cd197" +checksum = "95a4019ba30c4e42b776113e9778071691fe3f34bf23b6b3bf0dfcf29d801f3d" dependencies = [ "asynchronous-codec", "bytes", @@ -5600,7 +6188,7 @@ dependencies = [ "libp2p-core", "libp2p-identity", "nohash-hasher", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "rand 0.8.5", "smallvec", "tracing", @@ -5624,7 +6212,7 @@ dependencies = [ "rand 0.8.5", "snow", "static_assertions", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "x25519-dalek", "zeroize", @@ -5661,9 +6249,9 @@ dependencies = [ "quinn", "rand 0.8.5", "ring", - "rustls 0.23.23", - "socket2", - "thiserror 2.0.12", + "rustls 0.23.35", + "socket2 0.5.10", + "thiserror 2.0.17", "tokio", "tracing", ] @@ -5698,7 +6286,7 @@ checksum = "dd297cf53f0cb3dee4d2620bb319ae47ef27c702684309f682bdb7e55a18ae9c" dependencies = [ "heck 0.5.0", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -5712,7 +6300,7 @@ dependencies = [ "if-watch", "libc", "libp2p-core", - "socket2", + "socket2 0.5.10", "tokio", "tracing", ] @@ -5729,9 +6317,9 @@ dependencies = [ "libp2p-identity", "rcgen", "ring", - "rustls 0.23.23", - "rustls-webpki 0.103.4", - "thiserror 2.0.12", + "rustls 0.23.35", + "rustls-webpki 0.103.8", + "thiserror 2.0.17", "x509-parser", "yasna", ] @@ -5760,19 +6348,19 @@ dependencies = [ "either", "futures", "libp2p-core", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "yamux 0.12.1", - "yamux 0.13.4", + "yamux 0.13.8", ] [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "libc", ] @@ -5789,9 +6377,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.21" +version = "1.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" +checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" dependencies = [ "cc", 
"pkg-config", @@ -5825,7 +6413,7 @@ dependencies = [ "lighthouse_version", "logging", "malloc_utils", - "metrics", + "metrics 0.2.0", "network_utils", "opentelemetry", "opentelemetry-otlp", @@ -5859,7 +6447,7 @@ dependencies = [ "bytes", "delay_map", "directory", - "dirs", + "dirs 3.0.2", "discv5", "either", "eth2", @@ -5877,20 +6465,20 @@ dependencies = [ "logging", "lru", "lru_cache", - "metrics", + "metrics 0.2.0", "network_utils", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "prometheus-client", "quickcheck", "quickcheck_macros", - "rand 0.9.0", + "rand 0.9.2", "regex", "serde", "sha2 0.9.9", "smallvec", "snap", "ssz_types", - "strum", + "strum 0.24.1", "superstruct", "task_executor", "tempfile", @@ -5919,7 +6507,7 @@ dependencies = [ "futures", "initialized_validators", "logging", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "serde", "signing_method", "slashing_protection", @@ -5953,15 +6541,15 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" -version = "0.9.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "lmdb-rkv" @@ -5986,23 +6574,22 @@ dependencies = [ [[package]] name = "local-ip-address" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3669cf5561f8d27e8fc84cc15e58350e70f557d4d65f70e3154e54cd2f8e1782" +checksum = "656b3b27f8893f7bbf9485148ff9a65f019e3f33bd5cdc87c83cab16b3fd9ec8" dependencies = [ "libc", "neli", - "thiserror 1.0.69", + "thiserror 2.0.17", "windows-sys 0.59.0", ] [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] @@ -6014,11 +6601,17 @@ dependencies = [ "tempfile", ] +[[package]] +name = "lockfree-object-pool" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e" + [[package]] name = "log" -version = "0.4.26" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" [[package]] name = "logging" @@ -6026,7 +6619,7 @@ version = "0.2.0" dependencies = [ "chrono", "logroller", - "metrics", + "metrics 0.2.0", "serde", "serde_json", "tokio", @@ -6050,28 +6643,21 @@ dependencies = [ "thiserror 1.0.69", ] -[[package]] -name = "loom" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" -dependencies = [ - "cfg-if", - "generator", - "scoped-tls", - "tracing", - "tracing-subscriber", -] - [[package]] name = "lru" version = "0.12.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.2", + "hashbrown 0.15.5", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lru_cache" version = "0.1.0" @@ -6082,9 +6668,9 @@ dependencies = [ [[package]] name = "mach2" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" +checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44" dependencies = [ "libc", ] @@ -6097,7 +6683,7 @@ checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6105,8 +6691,8 @@ name = "malloc_utils" version = "0.1.0" dependencies = [ "libc", - "metrics", - "parking_lot 0.12.3", + "metrics 0.2.0", + "parking_lot 0.12.5", "tikv-jemalloc-ctl", "tikv-jemallocator", ] @@ -6118,10 +6704,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] -name = "match_cfg" -version = "0.1.0" +name = "match-lookup" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" +checksum = "1265724d8cb29dbbc2b0f06fffb8bf1a8c0cf73a78eede9ba73a4a66c52a981e" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] [[package]] name = "matchers" @@ -6163,9 +6754,18 @@ checksum = "33746aadcb41349ec291e7f2f0a3aa6834d1d7c58066fb4b01f68efc4c4b7631" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "memmap2" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" +dependencies = [ + "libc", +] [[package]] name = "memoffset" @@ -6176,6 +6776,12 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memuse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d97bbf43eb4f088f8ca469930cde17fa036207c9a5e02ccc5107c4e8b17c964" + [[package]] name = "merkle_proof" version = "0.2.0" @@ -6219,19 +6825,65 @@ dependencies = [ ] [[package]] -name = "milhouse" -version = "0.7.0" +name = "metrics" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bdb104e38d3a8c5ffb7e9d2c43c522e6bcc34070edbadba565e722f0dee56c7" +checksum = "3045b4193fbdc5b5681f32f11070da9be3609f189a79f3390706d42587f46bb5" dependencies = [ - "alloy-primitives", + "ahash", + "portable-atomic", +] + +[[package]] +name = "metrics-tracing-context" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62a6a1f7141f1d9bc7a886b87536bbfc97752e08b369e1e0453a9acfab5f5da4" +dependencies = [ + "indexmap 2.12.0", + "itoa", + "lockfree-object-pool", + "metrics 0.23.1", + "metrics-util", + "once_cell", + "tracing", + "tracing-core", + "tracing-subscriber", +] 
+ +[[package]] +name = "metrics-util" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4259040465c955f9f2f1a4a8a16dc46726169bca0f88e8fb2dbeced487c3e828" +dependencies = [ + "aho-corasick", + "crossbeam-epoch", + "crossbeam-utils", + "hashbrown 0.14.5", + "indexmap 2.12.0", + "metrics 0.23.1", + "num_cpus", + "ordered-float 4.6.0", + "quanta", + "radix_trie", + "sketches-ddsketch", +] + +[[package]] +name = "milhouse" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bdb104e38d3a8c5ffb7e9d2c43c522e6bcc34070edbadba565e722f0dee56c7" +dependencies = [ + "alloy-primitives", "arbitrary", "educe", "ethereum_hashing", "ethereum_ssz", "ethereum_ssz_derive", "itertools 0.13.0", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "rayon", "serde", "smallvec", @@ -6265,22 +6917,23 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.5" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", + "simd-adler32", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi", + "windows-sys 0.61.2", ] [[package]] @@ -6289,6 +6942,21 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9366861eb2a2c436c20b12c8dbec5f798cea6b47ad99216be0282942e2c81ea0" +[[package]] +name = "mockall" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43766c2b5203b10de348ffe19f7e54564b64f3d6018ff7648d1e2d6d3a0f0a48" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "lazy_static", + "mockall_derive 0.12.1", + "predicates", + "predicates-tree", +] + [[package]] name = "mockall" version = "0.13.1" @@ -6298,11 +6966,23 @@ dependencies = [ "cfg-if", "downcast", "fragile", - "mockall_derive", + "mockall_derive 0.13.1", "predicates", "predicates-tree", ] +[[package]] +name = "mockall_derive" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cbce79ec385a1d4f54baa90a76401eb15d9cab93685f62e7e9f942aa00ae2" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "mockall_derive" version = "0.13.1" @@ -6312,7 +6992,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6324,7 +7004,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6337,13 +7017,13 @@ dependencies = [ "bytes", "colored", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "log", - "rand 0.9.0", + "rand 0.9.2", "regex", "serde_json", "serde_urlencoded", @@ -6353,21 +7033,20 @@ dependencies = [ [[package]] name = "moka" -version = "0.12.10" +version = "0.12.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926" +checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077" dependencies = [ "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", - "loom", - "parking_lot 0.12.3", + "equivalent", + "parking_lot 0.12.5", "portable-atomic", "rustc_version 0.4.1", "smallvec", "tagptr", - "thiserror 1.0.69", - "uuid 1.15.1", + "uuid 1.18.1", ] [[package]] @@ -6377,7 +7056,7 @@ dependencies = [ "eth2", "health_metrics", "lighthouse_version", - "metrics", + "metrics 0.2.0", "regex", "reqwest 0.11.27", "sensitive_url", @@ -6416,11 +7095,12 @@ dependencies = [ [[package]] name = "multibase" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b3539ec3c1f04ac9748a260728e855f261b4977f5c3406612c884564f329404" +checksum = "8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77" dependencies = [ "base-x", + "base256emoji", "data-encoding", "data-encoding-macro", ] @@ -6539,7 +7219,7 @@ dependencies = [ "log", "netlink-packet-core", "netlink-sys", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6586,11 +7266,11 @@ dependencies = [ "logging", "lru_cache", "matches", - "metrics", + "metrics 0.2.0", "operation_pool", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "rand 0.8.5", - "rand 0.9.0", + "rand 0.9.2", "rand_chacha 0.3.1", "rand_chacha 0.9.0", "serde_json", @@ -6598,7 +7278,7 @@ dependencies = [ "smallvec", "ssz_types", "store", - "strum", + "strum 0.24.1", "task_executor", "tokio", "tokio-stream", @@ -6615,13 +7295,22 @@ dependencies = [ "hex", "libp2p-identity", "lru_cache", - "metrics", + "metrics 0.2.0", "multiaddr", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "serde", "tiny-keccak", ] +[[package]] +name = "nibble_vec" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" +dependencies = [ + "smallvec", +] + [[package]] name = "nix" version = "0.24.3" @@ -6646,11 +7335,11 @@ dependencies = [ [[package]] name = "nix" -version = "0.29.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "cfg-if", "cfg_aliases", "libc", @@ -6700,11 +7389,36 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.50.1" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint 0.4.6", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", ] [[package]] @@ -6715,15 +7429,15 @@ checksum = 
"a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", + "rand 0.8.5", ] [[package]] name = "num-bigint-dig" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ - "byteorder", "lazy_static", "libm", "num-integer", @@ -6735,6 +7449,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -6761,6 +7484,32 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-modular" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17bb261bf36fa7d83f4c294f834e91256769097b3cb505d44831e0a179ac647f" + +[[package]] +name = "num-order" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537b596b97c40fcf8056d153049eb22f481c17ebce72a513ec9286e4986d1bb6" +dependencies = [ + "num-modular", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint 0.4.6", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -6773,14 +7522,47 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi 0.5.2", "libc", ] +[[package]] +name = "num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "nums" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3c74f925fb8cfc49a8022f2afce48a0683b70f9e439885594e84c5edbf5b01" +dependencies = [ + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "rand 0.8.5", +] + [[package]] name = "nybbles" version = "0.4.6" @@ -6793,11 +7575,26 @@ dependencies = [ "smallvec", ] +[[package]] +name = "objc2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" +dependencies = [ + "objc2-encode", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + [[package]] name = "object" -version = "0.36.7" +version = "0.37.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] @@ -6813,19 +7610,25 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.21.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde51589ab56b20a6f686b2c68f7a0bd6add753d697abf720d63f8db3ab7b1ad" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" dependencies = [ "critical-section", "portable-atomic", ] +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + [[package]] name = "oneshot_broadcast" version = "0.1.0" dependencies = [ - "parking_lot 0.12.3", + "parking_lot 0.12.5", ] [[package]] @@ -6853,7 +7656,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" dependencies = [ "arrayvec", - "auto_impl 1.2.1", + "auto_impl 1.3.0", "bytes", "ethereum-types", "open-fastrlp-derive", @@ -6873,11 +7676,11 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.72" +version = "0.10.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "cfg-if", "foreign-types", "libc", @@ -6894,7 +7697,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6905,18 +7708,18 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-src" -version = "300.4.2+3.4.1" +version = "300.5.4+3.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168ce4e058f975fe43e89d9ccf78ca668601887ae736090aacc23ae353c298e2" +checksum = "a507b3792995dae9b0df8a1c1e3771e8418b7c2d9f0baeba32e6fe8b06c7cb72" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.107" +version = "0.9.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" dependencies = [ "cc", "libc", @@ -6935,7 +7738,7 @@ dependencies = [ "futures-sink", "js-sys", "pin-project-lite", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", ] @@ -6947,9 +7750,9 @@ checksum = "50f6639e842a97dbea8886e3439710ae463120091e2e064518ba8e716e6ac36d" dependencies = [ "async-trait", "bytes", - "http 1.3.0", + "http 1.3.1", "opentelemetry", - "reqwest 0.12.15", + "reqwest 0.12.24", ] [[package]] @@ -6958,14 +7761,14 @@ version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbee664a43e07615731afc539ca60c6d9f1a9425e25ca09c57bc36c87c55852b" dependencies = [ - "http 1.3.0", + "http 1.3.1", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", "opentelemetry_sdk", "prost", - "reqwest 0.12.15", - "thiserror 2.0.12", + "reqwest 0.12.24", + "thiserror 2.0.17", "tokio", "tonic 0.13.1", "tracing", @@ -6994,108 +7797,1529 @@ dependencies = [ "futures-util", 
"opentelemetry", "percent-encoding", - "rand 0.9.0", + "rand 0.9.2", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] -name = "operation_pool" -version = "0.2.0" +name = "openvm-circuit" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "beacon_chain", - "bitvec 1.0.1", - "educe", - "ethereum_ssz", - "ethereum_ssz_derive", - "itertools 0.10.5", - "maplit", - "metrics", - "parking_lot 0.12.3", - "rand 0.9.0", - "rayon", + "backtrace", + "cfg-if", + "dashmap", + "derivative", + "derive-new 0.6.0", + "derive_more 1.0.0", + "enum_dispatch", + "eyre", + "getset", + "itertools 0.14.0", + "libc", + "memmap2", + "openvm-circuit-derive", + "openvm-circuit-primitives", + "openvm-circuit-primitives-derive", + "openvm-instructions", + "openvm-poseidon2-air", + "openvm-stark-backend", + "openvm-stark-sdk", + "p3-baby-bear 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "rustc-hash 2.1.1", "serde", - "state_processing", - "store", - "tokio", - "types", + "serde-big-array", + "static_assertions", + "thiserror 1.0.69", + "tracing", ] [[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +name = "openvm-circuit-derive" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "num-traits", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn 2.0.110", ] [[package]] -name = "pairing" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fec4625e73cf41ef4bb6846cafa6d44736525f442ba45e407c4a000a13996f" +name = "openvm-circuit-primitives" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "group 0.13.0", + "derive-new 0.6.0", + "itertools 0.14.0", + "num-bigint 0.4.6", + "num-traits", + "openvm-circuit-primitives-derive", + "openvm-cuda-builder", + "openvm-stark-backend", + "rand 0.8.5", + "tracing", ] [[package]] -name = "parity-scale-codec" -version = "3.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9fde3d0718baf5bc92f577d652001da0f8d54cd03a7974e118d04fc888dc23d" +name = "openvm-circuit-primitives-derive" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "arrayvec", - "bitvec 1.0.1", - "byte-slice-cast", - "const_format", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "rustversion", - "serde", + "itertools 0.14.0", + "quote", + "syn 2.0.110", ] [[package]] -name = "parity-scale-codec-derive" -version = "3.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581c837bb6b9541ce7faa9377c20616e4fb7650f6b0f68bc93c827ee504fb7b3" +name = "openvm-continuations" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.100", + 
"derivative", + "openvm-circuit", + "openvm-native-compiler", + "openvm-native-recursion", + "openvm-stark-backend", + "openvm-stark-sdk", + "serde", + "static_assertions", ] [[package]] -name = "parking" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" +name = "openvm-cuda-builder" +version = "1.2.1" +source = "git+https://github.com/openvm-org/stark-backend.git?tag=v1.2.1#dde6cdaf105cc57d1609fd49568c7bce0a066cc2" +dependencies = [ + "cc", + "glob", +] [[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +name = "openvm-custom-insn" +version = "0.1.0" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", + "proc-macro2", + "quote", + "syn 2.0.110", ] [[package]] -name = "parking_lot" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +name = "openvm-instructions" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" dependencies = [ - "lock_api", - "parking_lot_core 0.9.10", + "backtrace", + "derive-new 0.6.0", + "itertools 0.14.0", + "num-bigint 0.4.6", + "num-traits", + "openvm-instructions-derive", + "openvm-stark-backend", + "serde", + "strum 0.26.3", + "strum_macros 0.26.4", ] [[package]] -name = "parking_lot_core" +name = "openvm-instructions-derive" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "quote", + "syn 2.0.110", +] + +[[package]] +name = "openvm-native-circuit" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "cfg-if", + "derive-new 0.6.0", + "derive_more 1.0.0", + "eyre", + "itertools 0.14.0", + "openvm-circuit", + "openvm-circuit-derive", + "openvm-circuit-primitives", + "openvm-circuit-primitives-derive", + "openvm-instructions", + "openvm-native-compiler", + "openvm-poseidon2-air", + "openvm-rv32im-circuit", + "openvm-rv32im-transpiler", + "openvm-stark-backend", + "openvm-stark-sdk", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "static_assertions", + "strum 0.26.3", +] + +[[package]] +name = "openvm-native-compiler" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "backtrace", + "itertools 0.14.0", + "num-bigint 0.4.6", + "num-integer", + "openvm-circuit", + "openvm-instructions", + "openvm-instructions-derive", + "openvm-native-compiler-derive", + "openvm-rv32im-transpiler", + "openvm-stark-backend", + "openvm-stark-sdk", + "serde", + "strum 0.26.3", + "strum_macros 0.26.4", + "zkhash 0.2.0 (git+https://github.com/HorizenLabs/poseidon2.git?rev=bb476b9)", +] + +[[package]] +name = "openvm-native-compiler-derive" +version = "1.4.1" +source = 
"git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "quote", + "syn 2.0.110", +] + +[[package]] +name = "openvm-native-recursion" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "cfg-if", + "itertools 0.14.0", + "lazy_static", + "openvm-circuit", + "openvm-native-circuit", + "openvm-native-compiler", + "openvm-native-compiler-derive", + "openvm-stark-backend", + "openvm-stark-sdk", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-fri 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-merkle-tree 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "openvm-platform" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "openvm-custom-insn", + "openvm-rv32im-guest", +] + +[[package]] +name = "openvm-poseidon2-air" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "derivative", + "lazy_static", + "openvm-cuda-builder", + "openvm-stark-backend", + "openvm-stark-sdk", + "p3-monty-31 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2-air", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "zkhash 0.2.0 (git+https://github.com/HorizenLabs/poseidon2.git?rev=bb476b9)", +] + +[[package]] +name = "openvm-rv32im-circuit" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "cfg-if", + "derive-new 0.6.0", + "derive_more 1.0.0", + "eyre", + "num-bigint 0.4.6", + "num-integer", + "openvm-circuit", + "openvm-circuit-derive", + "openvm-circuit-primitives", + "openvm-circuit-primitives-derive", + "openvm-instructions", + "openvm-rv32im-transpiler", + "openvm-stark-backend", + "rand 0.8.5", + "serde", + "strum 0.26.3", +] + +[[package]] +name = "openvm-rv32im-guest" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "openvm-custom-insn", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "strum_macros 0.26.4", +] + +[[package]] +name = "openvm-rv32im-transpiler" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "openvm-instructions", + "openvm-instructions-derive", + "openvm-rv32im-guest", + "openvm-stark-backend", + "openvm-transpiler", + "rrs-lib", + "serde", + "strum 0.26.3", + "tracing", +] + +[[package]] +name = "openvm-stark-backend" +version = "1.2.1" +source = 
"git+https://github.com/openvm-org/stark-backend.git?tag=v1.2.1#dde6cdaf105cc57d1609fd49568c7bce0a066cc2" +dependencies = [ + "bitcode", + "cfg-if", + "derivative", + "derive-new 0.7.0", + "eyre", + "itertools 0.14.0", + "p3-air 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-uni-stark 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rustc-hash 2.1.1", + "serde", + "serde_json", + "thiserror 1.0.69", + "tracing", +] + +[[package]] +name = "openvm-stark-sdk" +version = "1.2.1" +source = "git+https://github.com/openvm-org/stark-backend.git?tag=v1.2.1#dde6cdaf105cc57d1609fd49568c7bce0a066cc2" +dependencies = [ + "dashmap", + "derivative", + "derive_more 1.0.0", + "ff 0.13.1", + "itertools 0.14.0", + "metrics 0.23.1", + "metrics-tracing-context", + "metrics-util", + "openvm-stark-backend", + "p3-baby-bear 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-blake3 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-bn254-fr 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-fri 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-goldilocks 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-keccak 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-koala-bear 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-merkle-tree 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "serde_json", + "static_assertions", + "toml", + "tracing", + "tracing-forest", + "tracing-subscriber", + "zkhash 0.2.0 (git+https://github.com/HorizenLabs/poseidon2.git?rev=bb476b9)", +] + +[[package]] +name = "openvm-transpiler" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "elf", + "eyre", + "openvm-instructions", + "openvm-platform", + "openvm-stark-backend", + "rrs-lib", + "thiserror 1.0.69", +] + +[[package]] +name = "operation_pool" +version = "0.2.0" +dependencies = [ + "beacon_chain", + "bitvec 1.0.1", + "educe", + "ethereum_ssz", + "ethereum_ssz_derive", 
+ "itertools 0.10.5", + "maplit", + "metrics 0.2.0", + "parking_lot 0.12.5", + "rand 0.9.2", + "rayon", + "serde", + "state_processing", + "store", + "tokio", + "types", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ordered-float" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951" +dependencies = [ + "num-traits", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa 0.16.9", + "elliptic-curve 0.13.8", + "primeorder", + "sha2 0.10.9", +] + +[[package]] +name = "p3-air" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", +] + +[[package]] +name = "p3-air" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", +] + +[[package]] +name = "p3-air" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", +] + +[[package]] +name = "p3-baby-bear" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "num-bigint 0.4.6", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-baby-bear" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-monty-31 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 
(git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-baby-bear" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-monty-31 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-baby-bear" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7521838ecab2ddf4f7bc4ceebad06ec02414729598485c1ada516c39900820e8" +dependencies = [ + "num-bigint 0.4.6", + "p3-field 0.2.3-succinct", + "p3-mds 0.2.3-succinct", + "p3-poseidon2 0.2.3-succinct", + "p3-symmetric 0.2.3-succinct", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-blake3" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "blake3", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", +] + +[[package]] +name = "p3-blake3" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "blake3", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", +] + +[[package]] +name = "p3-bn254-fr" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "ff 0.13.1", + "num-bigint 0.4.6", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-bn254-fr" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "ff 0.13.1", + "halo2curves 0.8.0", + "num-bigint 0.4.6", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-bn254-fr" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "ff 0.13.1", + "halo2curves 0.7.0", + "num-bigint 0.4.6", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + 
"p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-challenger" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "tracing", +] + +[[package]] +name = "p3-challenger" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "tracing", +] + +[[package]] +name = "p3-challenger" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "tracing", +] + +[[package]] +name = "p3-circle" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-challenger 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-commit 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-fri 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "serde", + "tracing", +] + +[[package]] +name = "p3-commit" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", +] + +[[package]] +name = "p3-commit" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + 
"p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "serde", +] + +[[package]] +name = "p3-commit" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-challenger 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "serde", +] + +[[package]] +name = "p3-dft" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "tracing", +] + +[[package]] +name = "p3-dft" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "tracing", +] + +[[package]] +name = "p3-dft" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "tracing", +] + +[[package]] +name = "p3-dft" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46414daedd796f1eefcdc1811c0484e4bced5729486b6eaba9521c572c76761a" +dependencies = [ + "p3-field 0.2.3-succinct", + "p3-matrix 0.2.3-succinct", + "p3-maybe-rayon 0.2.3-succinct", + "p3-util 0.2.3-succinct", + "tracing", +] + +[[package]] +name = "p3-field" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "num-bigint 0.4.6", 
+ "num-traits", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-field" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "nums", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-field" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "nums", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-field" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48948a0516b349e9d1cdb95e7236a6ee010c44e68c5cc78b4b92bf1c4022a0d9" +dependencies = [ + "itertools 0.12.1", + "num-bigint 0.4.6", + "num-traits", + "p3-util 0.2.3-succinct", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-fri" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-interpolation 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "tracing", +] + +[[package]] +name = "p3-fri" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-interpolation 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "tracing", +] + 
+[[package]] +name = "p3-fri" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-challenger 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-commit 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-interpolation 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-goldilocks" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "num-bigint 0.4.6", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-goldilocks" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "num-bigint 0.4.6", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-interpolation" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "p3-interpolation" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 
(git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", +] + +[[package]] +name = "p3-interpolation" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", +] + +[[package]] +name = "p3-keccak" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "tiny-keccak", +] + +[[package]] +name = "p3-keccak" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "tiny-keccak", +] + +[[package]] +name = "p3-keccak-air" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "p3-air 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "tracing", +] + +[[package]] +name = "p3-koala-bear" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "num-bigint 0.4.6", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-koala-bear" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-monty-31 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2 
0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-koala-bear" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-monty-31 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-matrix" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-matrix" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "tracing", + "transpose", +] + +[[package]] +name = "p3-matrix" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", + "tracing", + "transpose", +] + +[[package]] +name = "p3-matrix" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e4de3f373589477cb735ea58e125898ed20935e03664b4614c7fac258b3c42f" +dependencies = [ + "itertools 0.12.1", + "p3-field 0.2.3-succinct", + "p3-maybe-rayon 0.2.3-succinct", + "p3-util 0.2.3-succinct", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-maybe-rayon" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "rayon", +] + +[[package]] +name = "p3-maybe-rayon" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" + +[[package]] +name = "p3-maybe-rayon" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "rayon", +] + +[[package]] +name = "p3-maybe-rayon" 
+version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3968ad1160310296eb04f91a5f4edfa38fe1d6b2b8cd6b5c64e6f9b7370979e" + +[[package]] +name = "p3-mds" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", +] + +[[package]] +name = "p3-mds" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", +] + +[[package]] +name = "p3-mds" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", +] + +[[package]] +name = "p3-mds" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2356b1ed0add6d5dfbf7a338ce534a6fde827374394a52cec16a0840af6e97c9" +dependencies = [ + "itertools 0.12.1", + "p3-dft 0.2.3-succinct", + "p3-field 0.2.3-succinct", + "p3-matrix 0.2.3-succinct", + "p3-symmetric 0.2.3-succinct", + "p3-util 0.2.3-succinct", + "rand 0.8.5", +] + +[[package]] +name = "p3-merkle-tree" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "tracing", +] + +[[package]] +name = "p3-merkle-tree" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + 
"itertools 0.14.0", + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-merkle-tree" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-commit 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", + "tracing", +] + +[[package]] +name = "p3-mersenne-31" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "num-bigint 0.4.6", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-monty-31" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "num-bigint 0.4.6", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 
(git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "serde", + "tracing", + "transpose", +] + +[[package]] +name = "p3-monty-31" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "num-bigint 0.4.6", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", + "serde", + "tracing", + "transpose", +] + +[[package]] +name = "p3-poseidon" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", +] + +[[package]] +name = "p3-poseidon2" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "gcd", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "p3-poseidon2" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "gcd", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-mds 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", +] + +[[package]] +name = "p3-poseidon2" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "gcd", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "rand 0.8.5", +] + +[[package]] +name = "p3-poseidon2" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da1eec7e1b6900581bedd95e76e1ef4975608dd55be9872c9d257a8a9651c3a" +dependencies = [ + "gcd", + "p3-field 0.2.3-succinct", + "p3-mds 0.2.3-succinct", + "p3-symmetric 0.2.3-succinct", + "rand 0.8.5", + "serde", +] + 
+[[package]] +name = "p3-poseidon2-air" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "p3-air 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-poseidon2 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "rand 0.8.5", + "tikv-jemallocator", + "tracing", +] + +[[package]] +name = "p3-symmetric" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", +] + +[[package]] +name = "p3-symmetric" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "serde", +] + +[[package]] +name = "p3-symmetric" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "serde", +] + +[[package]] +name = "p3-symmetric" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edb439bea1d822623b41ff4b51e3309e80d13cadf8b86d16ffd5e6efb9fdc360" +dependencies = [ + "itertools 0.12.1", + "p3-field 0.2.3-succinct", + "serde", +] + +[[package]] +name = "p3-uni-stark" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "itertools 0.12.1", + "p3-air 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "tracing", +] + +[[package]] +name = "p3-uni-stark" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "itertools 0.14.0", + "p3-air 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-commit 0.1.0 
(git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "serde", + "tracing", +] + +[[package]] +name = "p3-uni-stark" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "itertools 0.13.0", + "p3-air 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-challenger 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-commit 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "serde", + "tracing", +] + +[[package]] +name = "p3-util" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6#ce9cdfa52326beb93d77669cee52e23287fdb16d" +dependencies = [ + "serde", +] + +[[package]] +name = "p3-util" +version = "0.1.0" +source = "git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb#539bbc84085efb609f4f62cb03cf49588388abdb" +dependencies = [ + "serde", +] + +[[package]] +name = "p3-util" +version = "0.1.0" +source = "git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b#a4d376babf5d09497f1fab1df7f1ffce01260973" +dependencies = [ + "serde", +] + +[[package]] +name = "p3-util" +version = "0.2.3-succinct" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c2c2010678b9332b563eaa38364915b585c1a94b5ca61e2c7541c087ddda5c" +dependencies = [ + "serde", +] + +[[package]] +name = "pairing" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135590d8bdba2b31346f9cd1fb2a912329f5135e832a4f422942eb6ead8b6b3b" +dependencies = [ + "group 0.12.1", +] + +[[package]] +name = "pairing" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fec4625e73cf41ef4bb6846cafa6d44736525f442ba45e407c4a000a13996f" +dependencies = [ + "group 0.13.0", +] + +[[package]] +name = "parity-scale-codec" +version = "3.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" +dependencies = [ + "arrayvec", + "bitvec 1.0.1", + "byte-slice-cast", + "const_format", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "rustversion", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" +dependencies = [ + 
"proc-macro-crate 3.4.0", + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.12", +] + +[[package]] +name = "parking_lot_core" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" @@ -7110,15 +9334,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.10", + "redox_syscall 0.5.18", "smallvec", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -7132,6 +9356,38 @@ dependencies = [ "subtle", ] +[[package]] +name = "pasta_curves" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc65faf8e7313b4b1fbaa9f7ca917a0eed499a9663be71477f87993604341d8" +dependencies = [ + "blake2b_simd", + "ff 0.12.1", + "group 0.12.1", + "lazy_static", + "rand 0.8.5", + "static_assertions", + "subtle", +] + +[[package]] +name = "pasta_curves" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e57598f73cc7e1b2ac63c79c517b31a0877cd7c402cdcaa311b5208de7a095" +dependencies = [ + "blake2b_simd", + "ff 0.13.1", + "group 0.13.0", + "hex", + "lazy_static", + "rand 0.8.5", + "serde", + "static_assertions", + "subtle", +] + [[package]] name = "paste" version = "1.0.15" @@ -7156,33 +9412,41 @@ dependencies = [ "digest 0.10.7", "hmac 0.12.1", "password-hash", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] name = "pem" -version = "3.0.5" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" dependencies = [ "base64 0.22.1", - "serde", + "serde_core", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", ] [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.7.15" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" +checksum = 
"989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" dependencies = [ "memchr", - "thiserror 2.0.12", "ucd-trie", ] @@ -7196,6 +9460,101 @@ dependencies = [ "rustc_version 0.4.1", ] +[[package]] +name = "pico-derive" +version = "0.1.0" +source = "git+https://github.com/ethproofs/pico.git?branch=ethproofs-pico-prism#5fa05fc4e105fd65bb01912e2d1ffce92c836a86" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pico-vm" +version = "1.1.6" +source = "git+https://github.com/ethproofs/pico.git?branch=ethproofs-pico-prism#5fa05fc4e105fd65bb01912e2d1ffce92c836a86" +dependencies = [ + "anyhow", + "arrayref", + "backtrace", + "bincode", + "bytemuck", + "cfg-if", + "clap", + "core_affinity", + "cpu-time", + "crossbeam", + "csv", + "curve25519-dalek", + "dashmap", + "dashu", + "derive_more 2.0.1", + "elf", + "elliptic-curve 0.13.8", + "eyre", + "ff 0.13.1", + "halo2curves 0.7.0", + "hashbrown 0.14.5", + "hex", + "hybrid-array", + "itertools 0.13.0", + "k256 0.13.4", + "lazy_static", + "log", + "num", + "num-bigint 0.4.6", + "num-traits", + "num_cpus", + "once_cell", + "p256", + "p3-air 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-baby-bear 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-blake3 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-bn254-fr 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-challenger 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-circle", + "p3-commit 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-dft 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-field 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-fri 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-goldilocks 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-keccak 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-keccak-air", + "p3-koala-bear 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-matrix 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-maybe-rayon 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mds 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-merkle-tree 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-mersenne-31", + "p3-poseidon2 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-symmetric 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-uni-stark 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "p3-util 0.1.0 (git+https://github.com/brevis-network/Plonky3.git?rev=a4d376b)", + "paste", + "pico-derive", + "rand 0.8.5", + "rayon", + "rayon-scan", + "rrs-succinct", + "serde", + "serde_json", + "serde_with", + "snowbridge-amcl", + "static_assertions", + "strum 0.26.3", + "strum_macros 0.26.4", + "sysinfo 0.30.13", + "thiserror 1.0.69", + "tiny-keccak", + "tracing", + "tracing-forest", + "tracing-subscriber", + "typenum", + "vec_map", + "zkhash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "pin-project" version = "1.1.10" @@ -7213,7 +9572,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.100", + "syn 2.0.110", ] [[package]] @@ -7244,7 +9603,7 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ - "der 0.7.9", + "der 0.7.10", "spki 0.7.3", ] @@ -7290,17 +9649,16 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.4" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi 0.5.2", "pin-project-lite", - "rustix 0.38.44", - "tracing", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.61.2", ] [[package]] @@ -7328,9 +9686,18 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.11.0" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "potential_utf" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] [[package]] name = "powerfmt" @@ -7344,7 +9711,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.8.23", + "zerocopy", ] [[package]] @@ -7383,12 +9750,21 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.30" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1ccf34da56fc294e7d4ccf69a85992b7dfb826b7cf57bac6a70bba3494cc08a" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.100", + "syn 2.0.110", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve 0.13.8", ] [[package]] @@ -7407,11 +9783,21 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.3.0" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ - "toml_edit", + "toml_edit 0.23.7", ] [[package]] @@ -7457,14 +9843,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "proc-macro2" -version = "1.0.94" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] @@ -7494,7 +9880,7 @@ dependencies = [ "fnv", 
"lazy_static", "memchr", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "thiserror 1.0.69", ] @@ -7506,7 +9892,7 @@ checksum = "cf41c1a7c32ed72abe5082fb19505b969095c12da9f5732a4bc9878757fd087c" dependencies = [ "dtoa", "itoa", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "prometheus-client-derive-encode", ] @@ -7518,23 +9904,47 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", +] + +[[package]] +name = "proof_generation_service" +version = "0.1.0" +dependencies = [ + "beacon_chain", + "lighthouse_network", + "logging", + "network", + "tokio", + "tracing", + "types", + "zkvm_execution_layer", +] + +[[package]] +name = "proofman-verifier" +version = "0.12.0" +source = "git+https://github.com/0xPolygonHermez/pil2-proofman.git?tag=v0.12.0#3e8a367a50193803b09a6d2d272d9ab01129ad44" +dependencies = [ + "bytemuck", + "fields", + "tracing", + "zstd 0.13.3", ] [[package]] name = "proptest" -version = "1.6.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" +checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.9.0", - "lazy_static", + "bitflags 2.10.0", "num-traits", - "rand 0.8.5", - "rand_chacha 0.3.1", - "rand_xorshift 0.3.0", + "rand 0.9.2", + "rand_chacha 0.9.0", + "rand_xorshift 0.4.0", "regex-syntax", "rusty-fork", "tempfile", @@ -7549,7 +9959,7 @@ checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -7572,7 +9982,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -7605,7 +10015,7 @@ checksum = "5e617cc9058daa5e1fe5a0d23ed745773a5ee354111dad1ec0235b0cc16b6730" dependencies = [ "cfg-if", "darwin-libproc", - "derive_more 0.99.19", + "derive_more 0.99.20", "glob", "mach2", "nix 0.24.3", @@ -7616,6 +10026,21 @@ dependencies = [ "unescape", ] +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi", + "web-sys", + "winapi", +] + [[package]] name = "quick-error" version = "1.2.3" @@ -7656,49 +10081,52 @@ dependencies = [ [[package]] name = "quickcheck_macros" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9" +checksum = "f71ee38b42f8459a88d3362be6f9b841ad2d5421844f61eb1c59c11bff3ac14a" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.110", ] [[package]] name = "quinn" -version = "0.11.6" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" dependencies = [ "bytes", + "cfg_aliases", "futures-io", "pin-project-lite", "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls 0.23.23", - "socket2", - "thiserror 2.0.12", + "rustls 0.23.35", + "socket2 0.5.10", + "thiserror 2.0.17", "tokio", "tracing", + "web-time", ] [[package]] name = "quinn-proto" -version = "0.11.9" 
+version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" dependencies = [ "bytes", - "getrandom 0.2.15", - "rand 0.8.5", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", "ring", "rustc-hash 2.1.1", - "rustls 0.23.23", + "rustls 0.23.35", "rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 2.0.17", "tinyvec", "tracing", "web-time", @@ -7706,27 +10134,33 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.10" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e46f3055866785f6b92bc6164b76be02ca8f2eb4b002c0354b28cf4c119e5944" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2", + "socket2 0.5.10", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] name = "quote" -version = "1.0.39" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "r2d2" version = "0.8.10" @@ -7734,7 +10168,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ "log", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "scheduled-thread-pool", ] @@ -7760,6 +10194,16 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "radix_trie" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" +dependencies = [ + "endian-type", + "nibble_vec", +] + [[package]] name = "rand" version = "0.8.5" @@ -7774,14 +10218,13 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", "serde", - "zerocopy 0.8.23", ] [[package]] @@ -7810,7 +10253,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -7819,7 +10262,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.1", + "getrandom 0.3.4", "serde", ] @@ -7841,11 +10284,32 @@ dependencies = [ "rand_core 0.9.3", ] +[[package]] +name = "range-set-blaze" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8421b5d459262eabbe49048d362897ff3e3830b44eac6cfe341d6acb2f0f13d2" +dependencies = 
[ + "gen_ops", + "itertools 0.12.1", + "num-integer", + "num-traits", +] + +[[package]] +name = "raw-cpuid" +version = "11.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" +dependencies = [ + "bitflags 2.10.0", +] + [[package]] name = "rayon" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -7853,14 +10317,23 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", ] +[[package]] +name = "rayon-scan" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f87cc11a0140b4b0da0ffc889885760c61b13672d80a908920b2c0df078fa14" +dependencies = [ + "rayon", +] + [[package]] name = "rcgen" version = "0.13.2" @@ -7876,9 +10349,9 @@ dependencies = [ [[package]] name = "redb" -version = "2.4.0" +version = "2.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0a72cd7140de9fc3e318823b883abf819c20d478ec89ce880466dc2ef263c6" +checksum = "8eca1e9d98d5a7e9002d0013e18d5a9b000aee942eb134883a82f06ebffb6c01" dependencies = [ "libc", ] @@ -7894,11 +10367,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.10" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", ] [[package]] @@ -7907,16 +10380,36 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", "thiserror 1.0.69", ] +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -7926,9 +10419,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" 
dependencies = [ "aho-corasick", "memchr", @@ -7937,9 +10430,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" @@ -7952,7 +10445,7 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", + "h2 0.3.27", "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", @@ -7989,25 +10482,22 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", - "ipnet", "js-sys", "log", - "mime", - "once_cell", "percent-encoding", "pin-project-lite", "serde", @@ -8016,12 +10506,12 @@ dependencies = [ "sync_wrapper 1.0.2", "tokio", "tower 0.5.2", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "windows-registry", ] [[package]] @@ -8042,13 +10532,9 @@ dependencies = [ [[package]] name = "resolv-conf" -version = "0.7.0" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" -dependencies = [ - "hostname", - "quick-error", -] +checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" [[package]] name = "rfc6979" @@ -8079,7 +10565,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", @@ -8134,6 +10620,27 @@ dependencies = [ "archery", ] +[[package]] +name = "rrs-lib" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4382d3af3a4ebdae7f64ba6edd9114fff92c89808004c4943b393377a25d001" +dependencies = [ + "downcast-rs", + "paste", +] + +[[package]] +name = "rrs-succinct" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3372685893a9f67d18e98e792d690017287fd17379a83d798d958e517d380fa9" +dependencies = [ + "downcast-rs", + "num_enum", + "paste", +] + [[package]] name = "rtnetlink" version = "0.13.1" @@ -8166,14 +10673,14 @@ dependencies = [ "bytes", "fastrlp 0.3.1", "fastrlp 0.4.0", - "num-bigint", + "num-bigint 0.4.6", "num-integer", "num-traits", "parity-scale-codec", "primitive-types", "proptest", "rand 0.8.5", - "rand 0.9.0", + "rand 0.9.2", "rlp", "ruint-macro", "serde_core", @@ -8220,9 +10727,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" [[package]] name = "rustc-hash" @@ -8257,7 +10764,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ - 
"semver 1.0.26", + "semver 1.0.27", ] [[package]] @@ -8289,7 +10796,7 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "errno", "libc", "linux-raw-sys 0.4.15", @@ -8298,15 +10805,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.2" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "errno", "libc", - "linux-raw-sys 0.9.2", - "windows-sys 0.59.0", + "linux-raw-sys 0.11.0", + "windows-sys 0.52.0", ] [[package]] @@ -8337,29 +10844,29 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.23" +version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.102.8", + "rustls-webpki 0.103.8", "subtle", "zeroize", ] [[package]] name = "rustls-native-certs" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework 3.3.0", + "security-framework 3.5.1", ] [[package]] @@ -8382,9 +10889,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a" dependencies = [ "web-time", "zeroize", @@ -8413,9 +10920,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.4" +version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ "ring", "rustls-pki-types", @@ -8424,15 +10931,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "rusty-fork" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" dependencies = [ "fnv", "quick-error", @@ -8508,28 +11015,52 @@ version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" dependencies = [ - "proc-macro-crate", + "proc-macro-crate 3.4.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "schannel" 
-version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "scheduled-thread-pool" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +dependencies = [ + "parking_lot 0.12.5", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" dependencies = [ - "parking_lot 0.12.3", + "dyn-clone", + "ref-cast", + "serde", + "serde_json", ] [[package]] @@ -8565,7 +11096,7 @@ dependencies = [ "hmac 0.12.1", "pbkdf2 0.11.0", "salsa20 0.10.2", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -8599,7 +11130,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct 0.2.0", - "der 0.7.9", + "der 0.7.10", "generic-array 0.14.7", "pkcs8 0.10.2", "serdect", @@ -8634,7 +11165,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "core-foundation 0.9.4", "core-foundation-sys", "libc", @@ -8643,11 +11174,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.3.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "core-foundation 0.10.1", "core-foundation-sys", "libc", @@ -8656,9 +11187,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -8675,11 +11206,12 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", + "serde_core", ] [[package]] @@ -8719,22 +11251,31 @@ dependencies = [ [[package]] name = "serde-aux" -version = "4.6.0" +version = "4.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5290c39c5f6992b9dddbda28541d965dba46468294e6018a408fa297e6c602de" +checksum = "207f67b28fe90fb596503a9bf0bf1ea5e831e21307658e177c5dfcdfc3ab8a0a" dependencies = 
[ "serde", "serde-value", "serde_json", ] +[[package]] +name = "serde-big-array" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f" +dependencies = [ + "serde", +] + [[package]] name = "serde-value" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ - "ordered-float", + "ordered-float 2.10.1", "serde", ] @@ -8748,6 +11289,15 @@ dependencies = [ "serde_urlencoded", ] +[[package]] +name = "serde_arrays" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38636132857f68ec3d5f3eb121166d2af33cb55174c4d5ff645db6165cbef0fd" +dependencies = [ + "serde", +] + [[package]] name = "serde_core" version = "1.0.228" @@ -8765,19 +11315,31 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", ] [[package]] @@ -8788,7 +11350,16 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", ] [[package]] @@ -8805,24 +11376,33 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" +checksum = "10574371d41b0d9b2cff89418eda27da52bcaff2cc8741db26382a77c29131f1" dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.12.0", + "schemars 0.9.0", + "schemars 1.1.0", "serde_core", + "serde_json", "serde_with_macros", + "time", ] [[package]] name = "serde_with_macros" -version = "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" +checksum = "08a72d8216842fdd57820dc78d840bef99248e35fb2554ff923319e60f2d686b" dependencies = [ "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8831,7 +11411,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.8.0", + "indexmap 2.12.0", "itoa", "ryu", "serde", @@ -8886,9 +11466,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -8932,9 +11512,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" dependencies = [ "libc", ] @@ -8966,7 +11546,7 @@ dependencies = [ "eth2_keystore", "ethereum_serde_utils", "lockfile", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "reqwest 0.11.27", "serde", "task_executor", @@ -8975,6 +11555,12 @@ dependencies = [ "validator_metrics", ] +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + [[package]] name = "similar" version = "2.7.0" @@ -8987,9 +11573,9 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ - "num-bigint", + "num-bigint 0.4.6", "num-traits", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -9004,7 +11590,7 @@ dependencies = [ "kzg", "logging", "node_test_rig", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "rayon", "sensitive_url", "serde_json", @@ -9014,14 +11600,17 @@ dependencies = [ "types", ] +[[package]] +name = "sketches-ddsketch" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" + [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "slasher" @@ -9039,15 +11628,15 @@ dependencies = [ "lmdb-rkv-sys", "lru", "maplit", - "metrics", - "parking_lot 0.12.3", - "rand 0.9.0", + "metrics 0.2.0", + "parking_lot 0.12.5", + "rand 0.9.2", "rayon", "redb", "safe_arith", "serde", "ssz_types", - "strum", + "strum 0.24.1", "tempfile", "tracing", "tree_hash", @@ -9056,55 +11645,457 @@ dependencies = [ ] [[package]] -name = "slasher_service" -version = "0.1.0" +name = "slasher_service" +version = "0.1.0" +dependencies = [ + "beacon_chain", + "directory", + "lighthouse_network", + "network", + "slasher", + "slot_clock", + "state_processing", + "task_executor", + "tokio", + "tracing", + "types", +] + +[[package]] +name = "slashing_protection" +version = "0.1.0" +dependencies = [ + "arbitrary", + "eip_3076", + "ethereum_serde_utils", + "filesystem", + "r2d2", + "r2d2_sqlite", + "rayon", + "rusqlite", + "serde", + "serde_json", + "tempfile", + "tracing", + "types", +] + +[[package]] +name = "slop-air" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-air 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-algebra" +version = "6.0.0" +source = 
"git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "itertools 0.13.0", + "p3-field 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", +] + +[[package]] +name = "slop-alloc" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "serde", + "slop-algebra", + "thiserror 1.0.69", +] + +[[package]] +name = "slop-baby-bear" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "lazy_static", + "p3-baby-bear 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-algebra", + "slop-challenger", + "slop-poseidon2", + "slop-symmetric", +] + +[[package]] +name = "slop-basefold" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "derive-where", + "itertools 0.13.0", + "serde", + "slop-algebra", + "slop-alloc", + "slop-baby-bear", + "slop-bn254", + "slop-challenger", + "slop-koala-bear", + "slop-merkle-tree", + "slop-multilinear", + "slop-primitives", + "slop-tensor", + "slop-utils", + "thiserror 1.0.69", +] + +[[package]] +name = "slop-basefold-prover" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "derive-where", + "itertools 0.13.0", + "rand 0.8.5", + "serde", + "slop-algebra", + "slop-alloc", + "slop-baby-bear", + "slop-basefold", + "slop-bn254", + "slop-challenger", + "slop-commit", + "slop-dft", + "slop-fri", + "slop-futures", + "slop-koala-bear", + "slop-merkle-tree", + "slop-multilinear", + "slop-tensor", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "slop-bn254" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "ff 0.13.1", + "p3-bn254-fr 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-algebra", + "slop-challenger", + "slop-poseidon2", + "slop-symmetric", + "zkhash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "slop-challenger" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "futures", + "p3-challenger 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-algebra", + "slop-symmetric", +] + +[[package]] +name = "slop-commit" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-commit 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-alloc", +] + +[[package]] +name = "slop-dft" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-dft 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-algebra", + "slop-alloc", + "slop-matrix", + "slop-tensor", +] + +[[package]] +name = "slop-fri" +version = "6.0.0" +source = 
"git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-fri 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-futures" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "crossbeam", + "futures", + "pin-project", + "rayon", + "thiserror 1.0.69", + "tokio", + "tracing", +] + +[[package]] +name = "slop-jagged" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "derive-where", + "futures", + "itertools 0.13.0", + "num_cpus", + "rand 0.8.5", + "rayon", + "serde", + "slop-algebra", + "slop-alloc", + "slop-baby-bear", + "slop-basefold", + "slop-basefold-prover", + "slop-bn254", + "slop-challenger", + "slop-commit", + "slop-futures", + "slop-koala-bear", + "slop-multilinear", + "slop-stacked", + "slop-sumcheck", + "slop-symmetric", + "slop-tensor", + "slop-utils", + "thiserror 1.0.69", + "tokio", + "tracing", +] + +[[package]] +name = "slop-koala-bear" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "lazy_static", + "p3-koala-bear 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-algebra", + "slop-challenger", + "slop-poseidon2", + "slop-symmetric", +] + +[[package]] +name = "slop-matrix" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-matrix 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-maybe-rayon" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-maybe-rayon 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-merkle-tree" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "derive-where", + "ff 0.13.1", + "itertools 0.13.0", + "p3-merkle-tree 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "serde", + "slop-algebra", + "slop-alloc", + "slop-baby-bear", + "slop-bn254", + "slop-challenger", + "slop-commit", + "slop-futures", + "slop-koala-bear", + "slop-matrix", + "slop-poseidon2", + "slop-symmetric", + "slop-tensor", + "thiserror 1.0.69", + "tokio", + "zkhash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "slop-multilinear" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "derive-where", + "futures", + "num_cpus", + "rand 0.8.5", + "rayon", + "serde", + "slop-algebra", + "slop-alloc", + "slop-challenger", + "slop-commit", + "slop-futures", + "slop-matrix", + "slop-tensor", + "tokio", +] + +[[package]] +name = "slop-poseidon2" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-poseidon2 0.1.0 
(git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-primitives" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "slop-algebra", +] + +[[package]] +name = "slop-stacked" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "derive-where", + "futures", + "itertools 0.13.0", + "serde", + "slop-algebra", + "slop-alloc", + "slop-challenger", + "slop-commit", + "slop-futures", + "slop-multilinear", + "slop-tensor", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "slop-sumcheck" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "futures", + "itertools 0.13.0", + "rayon", + "serde", + "slop-algebra", + "slop-alloc", + "slop-baby-bear", + "slop-challenger", + "slop-multilinear", + "thiserror 1.0.69", +] + +[[package]] +name = "slop-symmetric" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-symmetric 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-tensor" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "arrayvec", + "derive-where", + "itertools 0.13.0", + "rand 0.8.5", + "rayon", + "serde", + "slop-algebra", + "slop-alloc", + "slop-futures", + "slop-matrix", + "thiserror 1.0.69", + "tokio", + "transpose", +] + +[[package]] +name = "slop-uni-stark" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "p3-uni-stark 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", +] + +[[package]] +name = "slop-utils" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" dependencies = [ - "beacon_chain", - "directory", - "lighthouse_network", - "network", - "slasher", - "slot_clock", - "state_processing", - "task_executor", - "tokio", - "tracing", - "types", + "p3-util 0.1.0 (git+https://github.com/Plonky3/Plonky3/?branch=sp1-v6)", + "tracing-forest", + "tracing-subscriber", ] [[package]] -name = "slashing_protection" -version = "0.1.0" +name = "slop-whir" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" dependencies = [ - "arbitrary", - "eip_3076", - "ethereum_serde_utils", - "filesystem", - "r2d2", - "r2d2_sqlite", + "derive-where", + "futures", + "itertools 0.13.0", + "rand 0.8.5", "rayon", - "rusqlite", "serde", - "serde_json", - "tempfile", - "tracing", - "types", + "slop-algebra", + "slop-alloc", + "slop-baby-bear", + "slop-basefold", + "slop-challenger", + "slop-commit", + "slop-dft", + "slop-jagged", + "slop-koala-bear", + "slop-matrix", + "slop-merkle-tree", + "slop-multilinear", + "slop-stacked", + "slop-tensor", + "slop-utils", + "thiserror 1.0.69", ] [[package]] name = "slot_clock" version = "0.2.0" dependencies = [ - "metrics", - "parking_lot 0.12.3", + "metrics 
0.2.0", + "parking_lot 0.12.5", "types", ] [[package]] name = "smallvec" -version = "1.14.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" dependencies = [ "arbitrary", "serde", @@ -9129,20 +12120,220 @@ dependencies = [ "rand_core 0.6.4", "ring", "rustc_version 0.4.1", - "sha2 0.10.8", + "sha2 0.10.9", "subtle", ] +[[package]] +name = "snowbridge-amcl" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460a9ed63cdf03c1b9847e8a12a5f5ba19c4efd5869e4a737e05be25d7c427e5" +dependencies = [ + "parity-scale-codec", + "scale-info", +] + [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", ] +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "sp1-derive" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "sp1-hypercube" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "arrayref", + "deepsize2", + "derive-where", + "futures", + "hashbrown 0.14.5", + "itertools 0.13.0", + "num-bigint 0.4.6", + "num-traits", + "num_cpus", + "rayon", + "rayon-scan", + "serde", + "slop-air", + "slop-algebra", + "slop-alloc", + "slop-basefold", + "slop-basefold-prover", + "slop-challenger", + "slop-commit", + "slop-futures", + "slop-jagged", + "slop-koala-bear", + "slop-matrix", + "slop-merkle-tree", + "slop-multilinear", + "slop-poseidon2", + "slop-sumcheck", + "slop-symmetric", + "slop-tensor", + "slop-uni-stark", + "slop-whir", + "sp1-derive", + "sp1-primitives 6.0.0", + "strum 0.27.2", + "thiserror 1.0.69", + "thousands", + "tokio", + "tracing", +] + +[[package]] +name = "sp1-lib" +version = "5.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb1a9935d58cb1dcd757a1b10d727090f5b718f1f03b512d48f0c1952e6ead00" +dependencies = [ + "bincode", + "elliptic-curve 0.13.8", + "serde", + "sp1-primitives 5.2.3", +] + +[[package]] +name = "sp1-primitives" +version = "5.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7d2a6187e394c30097ea7a975a4832f172918690dc89a979f0fad67422d3a8b" +dependencies = [ + "bincode", + "blake3", + "cfg-if", + "hex", + "lazy_static", + "num-bigint 0.4.6", + "p3-baby-bear 0.2.3-succinct", + "p3-field 0.2.3-succinct", + "p3-poseidon2 0.2.3-succinct", + "p3-symmetric 0.2.3-succinct", + "serde", + "sha2 0.10.9", +] + +[[package]] +name = "sp1-primitives" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "bincode", + "blake3", + "elf", + "hex", + "itertools 0.13.0", + 
"lazy_static", + "num-bigint 0.4.6", + "serde", + "sha2 0.10.9", + "slop-algebra", + "slop-bn254", + "slop-challenger", + "slop-koala-bear", + "slop-poseidon2", + "slop-primitives", + "slop-symmetric", +] + +[[package]] +name = "sp1-recursion-executor" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "backtrace", + "cfg-if", + "hashbrown 0.14.5", + "itertools 0.13.0", + "range-set-blaze", + "serde", + "slop-algebra", + "slop-maybe-rayon", + "slop-poseidon2", + "slop-symmetric", + "smallvec", + "sp1-derive", + "sp1-hypercube", + "static_assertions", + "thiserror 1.0.69", + "tracing", +] + +[[package]] +name = "sp1-recursion-machine" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "itertools 0.13.0", + "rand 0.8.5", + "slop-air", + "slop-algebra", + "slop-basefold", + "slop-matrix", + "slop-maybe-rayon", + "slop-symmetric", + "sp1-derive", + "sp1-hypercube", + "sp1-primitives 6.0.0", + "sp1-recursion-executor", + "strum 0.27.2", + "tracing", + "zkhash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "sp1-verifier" +version = "6.0.0" +source = "git+https://github.com/succinctlabs/hypercube-verifier.git?branch=ethproofs_demo#7058050bfb410cb47dff982049f964c33d59d585" +dependencies = [ + "bincode", + "blake3", + "cfg-if", + "dirs 5.0.1", + "hex", + "lazy_static", + "serde", + "sha2 0.10.9", + "slop-algebra", + "slop-primitives", + "slop-symmetric", + "sp1-hypercube", + "sp1-primitives 6.0.0", + "sp1-recursion-executor", + "sp1-recursion-machine", + "strum 0.27.2", + "substrate-bn-succinct", + "thiserror 2.0.17", +] + [[package]] name = "spin" version = "0.9.8" @@ -9166,7 +12357,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", - "der 0.7.9", + "der 0.7.10", ] [[package]] @@ -9188,9 +12379,9 @@ dependencies = [ [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "state_processing" @@ -9207,8 +12398,8 @@ dependencies = [ "integer-sqrt", "itertools 0.10.5", "merkle_proof", - "metrics", - "rand 0.9.0", + "metrics 0.2.0", + "rand 0.9.2", "rayon", "safe_arith", "smallvec", @@ -9252,16 +12443,16 @@ dependencies = [ "leveldb", "logging", "lru", - "metrics", - "parking_lot 0.12.3", - "rand 0.9.0", + "metrics 0.2.0", + "parking_lot 0.12.5", + "rand 0.9.2", "redb", "safe_arith", "serde", "smallvec", "ssz_types", "state_processing", - "strum", + "strum 0.24.1", "superstruct", "tempfile", "tracing", @@ -9271,6 +12462,12 @@ dependencies = [ "zstd 0.13.3", ] +[[package]] +name = "strength_reduce" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82" + [[package]] name = "strsim" version = "0.10.0" @@ -9289,7 +12486,25 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 
0.24.3", +] + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros 0.26.4", +] + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros 0.27.2", ] [[package]] @@ -9305,6 +12520,48 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.110", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.110", +] + +[[package]] +name = "substrate-bn-succinct" +version = "0.6.0-v5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ba32f1b74728f92887c3ad17c42bf82998eb52c9091018f35294e9cd388b0c8" +dependencies = [ + "bytemuck", + "byteorder", + "cfg-if", + "crunchy", + "lazy_static", + "num-bigint 0.4.6", + "rand 0.8.5", + "rustc-hex", + "sp1-lib", +] + [[package]] name = "subtle" version = "2.6.1" @@ -9317,12 +12574,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b986e4a629907f20a2c2a639a75bc22a8b5d99b444e0d83c395f4cb309022bf" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "itertools 0.13.0", "proc-macro2", "quote", "smallvec", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -9348,9 +12605,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.100" +version = "2.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" dependencies = [ "proc-macro2", "quote", @@ -9366,7 +12623,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -9386,13 +12643,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -9410,6 +12667,21 @@ dependencies = [ "winapi", ] +[[package]] +name = "sysinfo" +version = "0.30.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3" +dependencies = [ + "cfg-if", + "core-foundation-sys", + "libc", + "ntapi", + "once_cell", + "rayon", + "windows 0.52.0", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -9427,7 +12699,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "core-foundation 0.9.4", 
"system-configuration-sys 0.6.0", ] @@ -9457,11 +12729,11 @@ name = "system_health" version = "0.1.0" dependencies = [ "lighthouse_network", - "metrics", + "metrics 0.2.0", "network_utils", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "serde", - "sysinfo", + "sysinfo 0.26.9", "types", ] @@ -9490,7 +12762,7 @@ version = "0.1.0" dependencies = [ "async-channel 1.9.0", "futures", - "metrics", + "metrics 0.2.0", "num_cpus", "rayon", "tokio", @@ -9499,26 +12771,25 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.18.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c317e0a526ee6120d8dabad239c8dadca62b24b6f168914bbbc8e2fb1f0e567" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ - "cfg-if", "fastrand", - "getrandom 0.3.1", + "getrandom 0.3.4", "once_cell", - "rustix 1.0.2", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.52.0", ] [[package]] name = "terminal_size" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" dependencies = [ - "rustix 1.0.2", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.60.2", ] [[package]] @@ -9546,11 +12817,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.17", ] [[package]] @@ -9561,28 +12832,33 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] +[[package]] +name = "thousands" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820" + [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -9596,9 +12872,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-ctl" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f21f216790c8df74ce3ab25b534e0718da5a1916719771d3fec23315c99e468b" +checksum = "661f1f6a57b3a36dc9174a2c10f19513b4866816e13425d3e418b11cc37bc24c" dependencies = [ "libc", "paste", @@ -9607,9 +12883,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-sys" -version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" +checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" dependencies = [ "cc", "libc", @@ -9617,9 +12893,9 @@ dependencies = [ [[package]] name = "tikv-jemallocator" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" +checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" dependencies = [ "libc", "tikv-jemalloc-sys", @@ -9627,9 +12903,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.39" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad298b01a40a23aac4580b67e3dbedb7cc8402f3592d7f49469de2ea4aecdd8" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -9642,15 +12918,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c97a5b985b7c11d7bc27fa927dc4fe6af3a6dfb021d28deb60d3bf51e76ef" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.20" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8093bc3e81c3bc5f7879de09619d06c9a5a5e45ca44dfeeb7225bae38005c5c" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -9679,7 +12955,7 @@ dependencies = [ "pbkdf2 0.11.0", "rand 0.8.5", "rustc-hash 1.1.0", - "sha2 0.10.8", + "sha2 0.10.9", "thiserror 1.0.69", "unicode-normalization", "wasm-bindgen", @@ -9697,9 +12973,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", @@ -9717,9 +12993,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -9732,32 +13008,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.44.0" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9975ea0f48b5aa3972bf2d888c238182458437cc2a19374b81b25cdf1023fb3a" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", "libc", "mio", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.6.1", "tokio-macros", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = 
[ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -9793,11 +13068,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ - "rustls 0.23.23", + "rustls 0.23.35", "tokio", ] @@ -9815,9 +13090,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.13" +version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" dependencies = [ "bytes", "futures-core", @@ -9828,23 +13103,88 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.12.0", + "toml_datetime 0.6.11", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.12.0", + "serde", + "serde_spanned", + "toml_datetime 0.6.11", + "toml_write", + "winnow 0.7.13", +] + +[[package]] +name = "toml_edit" +version = "0.23.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" +dependencies = [ + "indexmap 2.12.0", + "toml_datetime 0.7.3", + "toml_parser", + "winnow 0.7.13", +] [[package]] -name = "toml_edit" -version = "0.22.24" +name = "toml_parser" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" dependencies = [ - "indexmap 2.8.0", - "toml_datetime", - "winnow", + "winnow 0.7.13", ] +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + [[package]] name = "tonic" version = "0.12.3" @@ -9856,17 +13196,17 @@ dependencies = [ "axum", "base64 0.22.1", "bytes", - "h2 0.4.8", - "http 1.3.0", + "h2 0.4.12", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 
1.8.1", "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", "prost", - "socket2", + "socket2 0.5.10", "tokio", "tokio-stream", "tower 0.4.13", @@ -9884,10 +13224,10 @@ dependencies = [ "async-trait", "base64 0.22.1", "bytes", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-timeout", "hyper-util", "percent-encoding", @@ -9895,7 +13235,7 @@ dependencies = [ "prost", "rustls-native-certs", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tokio-stream", "tower 0.5.2", "tower-layer", @@ -9931,7 +13271,7 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", - "indexmap 2.8.0", + "indexmap 2.12.0", "pin-project-lite", "slab", "sync_wrapper 1.0.2", @@ -9942,6 +13282,24 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.10.0", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -9980,25 +13338,38 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", ] +[[package]] +name = "tracing-forest" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee40835db14ddd1e3ba414292272eddde9dad04d3d4b65509656414d1c42592f" +dependencies = [ + "ansi_term", + "smallvec", + "thiserror 1.0.69", + "tracing", + "tracing-subscriber", +] + [[package]] name = "tracing-futures" version = "0.2.5" @@ -10069,6 +13440,16 @@ dependencies = [ "tracing-serde", ] +[[package]] +name = "transpose" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad61aed86bc3faea4300c7aee358b4c6d0c8d6ccc36524c96e4c92ccf26e77e" +dependencies = [ + "num-integer", + "strength_reduce", +] + [[package]] name = "tree_hash" version = "0.10.0" @@ -10088,10 +13469,10 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bee2ea1551f90040ab0e34b6fb7f2fa3bad8acc925837ac654f2c78a13e3089" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -10106,9 +13487,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" +checksum = "dd69c5aa8f924c7519d6372789a74eac5b94fb0f8fcf0d4a97eb0bfc3e785f39" dependencies = [ "serde", "stable_deref_trait", @@ -10122,9 +13503,9 @@ checksum = 
"e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "types" @@ -10153,9 +13534,9 @@ dependencies = [ "merkle_proof", "metastruct", "milhouse", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "paste", - "rand 0.9.0", + "rand 0.9.2", "rand_xorshift 0.4.0", "rayon", "regex", @@ -10228,15 +13609,15 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-normalization" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" dependencies = [ "tinyvec", ] @@ -10263,6 +13644,16 @@ dependencies = [ "subtle", ] +[[package]] +name = "unroll" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad948c1cb799b1a70f836077721a92a35ac177d4daddf4c20a633786d4cf618" +dependencies = [ + "quote", + "syn 1.0.109", +] + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -10294,21 +13685,16 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -10327,17 +13713,19 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "serde", ] [[package]] name = "uuid" -version = "1.15.1" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f540e3240398cce6128b64ba83fdbdd86129c16a3aa1a3a252efd66eb3d587" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ - "getrandom 0.3.1", + "getrandom 0.3.4", + "js-sys", + "wasm-bindgen", ] [[package]] @@ -10349,18 +13737,18 @@ dependencies = [ "clap", "clap_utils", "directory", - "dirs", + "dirs 3.0.2", "doppelganger_service", "environment", "eth2", "fdlimit", "graffiti_file", - "hyper 1.6.0", + "hyper 1.8.1", "initialized_validators", "lighthouse_validator_store", - "metrics", + "metrics 0.2.0", "monitoring_api", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "reqwest 0.11.27", "sensitive_url", "serde", @@ -10387,7 +13775,7 @@ dependencies = [ "filesystem", "hex", "lockfile", - "rand 
0.9.0", + "rand 0.9.2", "tempfile", "tree_hash", "types", @@ -10402,7 +13790,7 @@ dependencies = [ "bls", "deposit_contract", "directory", - "dirs", + "dirs 3.0.2", "doppelganger_service", "eth2", "eth2_keystore", @@ -10416,15 +13804,15 @@ dependencies = [ "lighthouse_validator_store", "lighthouse_version", "logging", - "parking_lot 0.12.3", - "rand 0.9.0", + "parking_lot 0.12.5", + "rand 0.9.2", "sensitive_url", "serde", "serde_json", "signing_method", "slashing_protection", "slot_clock", - "sysinfo", + "sysinfo 0.26.9", "system_health", "task_executor", "tempfile", @@ -10450,8 +13838,8 @@ dependencies = [ "lighthouse_version", "logging", "malloc_utils", - "metrics", - "parking_lot 0.12.3", + "metrics 0.2.0", + "parking_lot 0.12.5", "serde", "slot_clock", "tracing", @@ -10494,7 +13882,7 @@ dependencies = [ name = "validator_metrics" version = "0.1.0" dependencies = [ - "metrics", + "metrics 0.2.0", ] [[package]] @@ -10508,7 +13896,7 @@ dependencies = [ "futures", "graffiti_file", "logging", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "safe_arith", "slot_clock", "task_executor", @@ -10559,6 +13947,26 @@ name = "vec_map" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" +dependencies = [ + "serde", +] + +[[package]] +name = "verify-stark" +version = "1.4.1" +source = "git+https://github.com/openvm-org/openvm.git?branch=feat%2Fv1-verify-stark#cdb0029cd0d07197adb8f2b9da1a01ca484c6a18" +dependencies = [ + "bitcode", + "eyre", + "openvm-circuit", + "openvm-continuations", + "openvm-native-compiler", + "openvm-native-recursion", + "p3-fri 0.1.0 (git+https://github.com/Plonky3/Plonky3.git?rev=539bbc84085efb609f4f62cb03cf49588388abdb)", + "serde", + "thiserror 1.0.69", + "zstd 0.13.3", +] [[package]] name = "version_check" @@ -10641,50 +14049,37 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.13.3+wasi-0.2.2" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.100", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" 
+checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" dependencies = [ "cfg-if", "js-sys", @@ -10695,9 +14090,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10705,22 +14100,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn 2.0.100", - "wasm-bindgen-backend", + "syn 2.0.110", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" dependencies = [ "unicode-ident", ] @@ -10755,9 +14150,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" dependencies = [ "js-sys", "wasm-bindgen", @@ -10787,7 +14182,7 @@ dependencies = [ "initialized_validators", "lighthouse_validator_store", "logging", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "reqwest 0.11.27", "serde", "serde_json", @@ -10829,9 +14224,9 @@ checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c" [[package]] name = "widestring" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" [[package]] name = "winapi" @@ -10851,11 +14246,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -10866,21 +14261,21 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.53.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efc5cf48f83140dcaab716eeaea345f9e93d0018fb81162753a3f76c3397b538" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ - "windows-core 0.53.0", + "windows-core 0.52.0", "windows-targets 0.52.6", ] [[package]] name = "windows" -version = "0.58.0" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +checksum = 
"efc5cf48f83140dcaab716eeaea345f9e93d0018fb81162753a3f76c3397b538" dependencies = [ - "windows-core 0.58.0", + "windows-core 0.53.0", "windows-targets 0.52.6", ] @@ -10917,55 +14312,44 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.58.0" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", - "windows-result 0.2.0", - "windows-strings 0.1.0", - "windows-targets 0.52.6", + "windows-link", + "windows-result 0.4.1", + "windows-strings", ] [[package]] name = "windows-implement" -version = "0.58.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "windows-interface" -version = "0.58.0" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "windows-link" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" - -[[package]] -name = "windows-registry" -version = "0.4.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result 0.3.1", - "windows-strings 0.3.1", - "windows-targets 0.53.2", -] +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-result" @@ -10978,37 +14362,18 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" -version = "0.3.1" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" -dependencies = [ - "windows-result 0.2.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-strings" -version = "0.3.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ "windows-link", ] @@ -11049,6 +14414,24 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -11097,18 +14480,19 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.2" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", ] [[package]] @@ -11131,9 +14515,9 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" @@ -11155,9 +14539,9 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" @@ -11179,9 +14563,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -11191,9 +14575,9 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" @@ -11215,9 +14599,9 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = 
"windows_x86_64_gnu" @@ -11239,9 +14623,9 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" @@ -11263,9 +14647,9 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" @@ -11287,15 +14671,24 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" -version = "0.7.3" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -11311,13 +14704,10 @@ dependencies = [ ] [[package]] -name = "wit-bindgen-rt" -version = "0.33.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" -dependencies = [ - "bitflags 2.9.0", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "workspace_members" @@ -11327,23 +14717,17 @@ dependencies = [ "quote", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "ws_stream_wasm" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" +checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" dependencies = [ "async_io_stream", "futures", @@ -11352,7 +14736,7 @@ dependencies = [ "pharos", "rustc_version 0.4.1", "send_wrapper", - "thiserror 1.0.69", + "thiserror 2.0.17", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -11392,7 +14776,7 @@ dependencies = [ "nom", "oid-registry", "rusticata-macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -11412,9 +14796,9 
@@ dependencies = [ [[package]] name = "xml-rs" -version = "0.8.25" +version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b940ebc25896e71dd073bad2dbaa2abfe97b0a391415e22ad1326d9c54e3c4" +checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" [[package]] name = "xmltree" @@ -11445,7 +14829,7 @@ dependencies = [ "futures", "log", "nohash-hasher", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "pin-project", "rand 0.8.5", "static_assertions", @@ -11453,16 +14837,16 @@ dependencies = [ [[package]] name = "yamux" -version = "0.13.4" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17610762a1207ee816c6fadc29220904753648aba0a9ed61c7b8336e80a559c4" +checksum = "deab71f2e20691b4728b349c6cee8fc7223880fa67b6b4f92225ec32225447e5" dependencies = [ "futures", "log", "nohash-hasher", - "parking_lot 0.12.3", + "parking_lot 0.12.5", "pin-project", - "rand 0.8.5", + "rand 0.9.2", "static_assertions", "web-time", ] @@ -11478,11 +14862,10 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -11490,54 +14873,34 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "zerocopy-derive 0.7.35", -] - -[[package]] -name = "zerocopy" -version = "0.8.23" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ - "zerocopy-derive 0.8.23", + "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -11557,15 +14920,15 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum 
= "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" dependencies = [ "serde", "zeroize_derive", @@ -11579,14 +14942,25 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", ] [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -11595,13 +14969,13 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -11613,7 +14987,7 @@ dependencies = [ "aes 0.8.4", "byteorder", "bzip2", - "constant_time_eq", + "constant_time_eq 0.1.5", "crc32fast", "crossbeam-utils", "flate2", @@ -11624,6 +14998,87 @@ dependencies = [ "zstd 0.11.2+zstd.1.5.2", ] +[[package]] +name = "zkhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4352d1081da6922701401cdd4cbf29a2723feb4cfabb5771f6fee8e9276da1c7" +dependencies = [ + "ark-ff 0.4.2", + "ark-std 0.4.0", + "bitvec 1.0.1", + "blake2", + "bls12_381", + "byteorder", + "cfg-if", + "group 0.12.1", + "group 0.13.0", + "halo2", + "hex", + "jubjub", + "lazy_static", + "pasta_curves 0.5.1", + "rand 0.8.5", + "serde", + "sha2 0.10.9", + "sha3", + "subtle", +] + +[[package]] +name = "zkhash" +version = "0.2.0" +source = "git+https://github.com/HorizenLabs/poseidon2.git?rev=bb476b9#bb476b9ca38198cf5092487283c8b8c5d4317c4e" +dependencies = [ + "ark-ff 0.4.2", + "ark-std 0.4.0", + "bitvec 1.0.1", + "blake2", + "bls12_381", + "byteorder", + "cfg-if", + "group 0.12.1", + "group 0.13.0", + "halo2", + "hex", + "jubjub", + "lazy_static", + "pasta_curves 0.5.1", + "rand 0.8.5", + "serde", + "sha2 0.10.9", + "sha3", + "subtle", +] + +[[package]] +name = "zkvm_execution_layer" +version = "0.1.0" +dependencies = [ + "async-trait", + "bincode", + "bitcode", + "execution_layer", + "hashbrown 0.15.5", + "lru", + "mockall 0.12.1", + "once_cell", + "pico-vm", + "proofman-verifier", + "rand 0.9.2", + "reqwest 0.11.27", + "serde", + "serde_json", + "sp1-verifier", + "tempfile", + "thiserror 2.0.17", + "tokio", + "tracing", + "types", + "uuid 0.8.2", + "verify-stark", +] + [[package]] name = "zstd" version = "0.11.2+zstd.1.5.2" @@ -11639,7 +15094,7 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" dependencies = [ - "zstd-safe 7.2.3", + "zstd-safe 7.2.4", ] [[package]] @@ -11654,18 +15109,18 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.2.3" +version = "7.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3051792fbdc2e1e143244dc28c60f73d8470e93f3f9cbd0ead44da5ed802722" +checksum = 
"8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.14+zstd.1.5.7" +version = "2.0.16+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb060d4926e4ac3a3ad15d864e99ceb5f343c6b34f5bd6d81ae6ed417311be5" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 03116b3db18..f9f6b3bf86a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ members = [ "beacon_node/lighthouse_tracing", "beacon_node/network", "beacon_node/operation_pool", + "beacon_node/proof_generation_service", "beacon_node/store", "beacon_node/timer", "boot_node", @@ -63,6 +64,7 @@ members = [ "crypto/eth2_wallet", "crypto/kzg", "database_manager", + "dummy_el", "lcli", "lighthouse", "lighthouse/environment", @@ -88,6 +90,7 @@ members = [ "validator_client/validator_metrics", "validator_client/validator_services", "validator_manager", + "zkvm_execution_layer", ] resolver = "2" diff --git a/Dockerfile b/Dockerfile index f925836e48e..50bf1e5898e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,20 @@ FROM rust:1.88.0-bullseye AS builder RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev -COPY . lighthouse +WORKDIR /lighthouse + ARG FEATURES ARG PROFILE=release ARG CARGO_USE_GIT_CLI=true ENV FEATURES=$FEATURES ENV PROFILE=$PROFILE ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_USE_GIT_CLI -RUN cd lighthouse && make +ENV CARGO_INCREMENTAL=1 + +COPY . . +# Persist the registry and target file across builds. See: https://docs.docker.com/build/cache/optimize/#use-cache-mounts +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/lighthouse/target \ + make FROM ubuntu:22.04 RUN apt-get update && apt-get -y upgrade && apt-get install -y --no-install-recommends \ @@ -15,4 +22,4 @@ RUN apt-get update && apt-get -y upgrade && apt-get install -y --no-install-reco ca-certificates \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -COPY --from=builder /usr/local/cargo/bin/lighthouse /usr/local/bin/lighthouse +COPY --from=builder /usr/local/cargo/bin/lighthouse /usr/local/bin/lighthouse \ No newline at end of file diff --git a/README.md b/README.md index 147a06e5040..ffae6ca06e7 100644 --- a/README.md +++ b/README.md @@ -79,3 +79,4 @@ hard and we're grateful for the donations we receive from the community via: - [Gitcoin Grants](https://gitcoin.co/grants/25/lighthouse-ethereum-20-client). - Ethereum address: `0x25c4a76E7d118705e7Ea2e9b7d8C59930d8aCD3b` (donation.sigmaprime.eth). 
+ diff --git a/beacon_node/Cargo.toml b/beacon_node/Cargo.toml index fd013559785..aecbb7dbf34 100644 --- a/beacon_node/Cargo.toml +++ b/beacon_node/Cargo.toml @@ -28,6 +28,9 @@ dirs = { workspace = true } environment = { workspace = true } eth2_config = { workspace = true } execution_layer = { workspace = true } +# TODO(zkproofs): add as a workspace dependency +zkvm_execution_layer = { path = "../zkvm_execution_layer" } +dummy_el = { path = "../dummy_el" } genesis = { workspace = true } hex = { workspace = true } http_api = { workspace = true } diff --git a/beacon_node/beacon_chain/Cargo.toml b/beacon_node/beacon_chain/Cargo.toml index e889f53bb01..7d7332da575 100644 --- a/beacon_node/beacon_chain/Cargo.toml +++ b/beacon_node/beacon_chain/Cargo.toml @@ -26,6 +26,8 @@ ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } execution_layer = { workspace = true } +# TODO(zkproofs): add as a workspace dependency +zkvm_execution_layer = { path = "../../zkvm_execution_layer" } fork_choice = { workspace = true } futures = { workspace = true } genesis = { workspace = true } diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 5ffdf951ac1..af70b006add 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -31,6 +31,9 @@ use crate::early_attester_cache::EarlyAttesterCache; use crate::errors::{BeaconChainError as Error, BlockProductionError}; use crate::events::ServerSentEventHandler; use crate::execution_payload::{NotifyExecutionLayer, PreparePayloadHandle, get_execution_payload}; +use crate::execution_proof_verification::{ + GossipExecutionProofError, GossipVerifiedExecutionProof, +}; use crate::fetch_blobs::EngineGetBlobsOutput; use crate::fork_choice_signal::{ForkChoiceSignalRx, ForkChoiceSignalTx, ForkChoiceWaitResult}; use crate::graffiti_calculator::GraffitiCalculator; @@ -55,6 +58,7 @@ use crate::observed_attesters::{ }; use crate::observed_block_producers::ObservedBlockProducers; use crate::observed_data_sidecars::ObservedDataSidecars; +use crate::observed_execution_proofs::ObservedExecutionProofs; use crate::observed_operations::{ObservationOutcome, ObservedOperations}; use crate::observed_slashable::ObservedSlashable; use crate::persisted_beacon_chain::PersistedBeaconChain; @@ -126,6 +130,7 @@ use store::{ KeyValueStore, KeyValueStoreOp, StoreItem, StoreOp, }; use task_executor::{RayonPoolType, ShutdownReason, TaskExecutor}; +use tokio::sync::mpsc::UnboundedSender; use tokio_stream::Stream; use tracing::{Span, debug, debug_span, error, info, info_span, instrument, trace, warn}; use tree_hash::TreeHash; @@ -133,6 +138,7 @@ use types::blob_sidecar::FixedBlobSidecarList; use types::data_column_sidecar::ColumnIndex; use types::payload::BlockProductionVersion; use types::*; +use zkvm_execution_layer::GeneratorRegistry; pub type ForkChoiceError = fork_choice::Error; @@ -343,6 +349,8 @@ pub enum BlockProcessStatus { pub type LightClientProducerEvent = (Hash256, Slot, SyncAggregate); +pub type ProofGenerationEvent = (Hash256, Slot, Arc>); + pub type BeaconForkChoice = ForkChoice< BeaconForkChoiceStore< ::EthSpec, @@ -414,6 +422,8 @@ pub struct BeaconChain { pub observed_blob_sidecars: RwLock>>, /// Maintains a record of column sidecars seen over the gossip network. pub observed_column_sidecars: RwLock>>, + /// Maintains a record of execution proofs seen over the gossip network. 
+ pub observed_execution_proofs: RwLock, /// Maintains a record of slashable message seen over the gossip network or RPC. pub observed_slashable: RwLock>, /// Maintains a record of which validators have submitted voluntary exits. @@ -482,6 +492,10 @@ pub struct BeaconChain { pub kzg: Arc, /// RNG instance used by the chain. Currently used for shuffling column sidecars in block publishing. pub rng: Arc>>, + /// Registry of zkVM proof generators for altruistic proof generation + pub zkvm_generator_registry: Option>, + /// Sender to notify proof generation service of blocks needing proofs + pub proof_generation_tx: Option>>, } pub enum BeaconBlockResponseWrapper { @@ -2208,6 +2222,15 @@ impl BeaconChain { }) } + #[instrument(skip_all, level = "trace")] + pub fn verify_execution_proof_for_gossip( + self: &Arc, + execution_proof: Arc, + ) -> Result, GossipExecutionProofError> { + // TODO(zkproofs): Add metrics + GossipVerifiedExecutionProof::new(execution_proof, self) + } + #[instrument(skip_all, level = "trace")] pub fn verify_blob_sidecar_for_gossip( self: &Arc, @@ -3045,6 +3068,33 @@ impl BeaconChain { self.check_gossip_blob_availability_and_import(blob).await } + /// Process a gossip-verified execution proof by storing it in the DA checker. + /// + /// This method takes an execution proof that has already been validated via gossip + /// and stores it in the DataAvailabilityChecker. If all components for a block are + /// now available, the block will be imported to fork choice. + #[instrument(skip_all, level = "debug")] + pub async fn process_gossip_execution_proof( + self: &Arc, + execution_proof: GossipVerifiedExecutionProof, + publish_fn: impl FnOnce() -> Result<(), BlockError>, + ) -> Result { + let block_root = execution_proof.block_root(); + + // If this block has already been imported to forkchoice it must have been available, so + // we don't need to process its execution proofs again. + if self + .canonical_head + .fork_choice_read_lock() + .contains_block(&block_root) + { + return Err(BlockError::DuplicateFullyImported(block_root)); + } + + self.check_gossip_execution_proof_availability_and_import(execution_proof, publish_fn) + .await + } + /// Cache the data columns in the processing cache, process it, then evict it from the cache if it was /// imported or errors. #[instrument(skip_all, level = "debug")] @@ -3128,6 +3178,45 @@ impl BeaconChain { .await } + /// Process execution proofs retrieved via RPC and returns the `AvailabilityProcessingStatus`. + /// + /// This method handles execution proofs received from peers during block sync. The proofs + /// are verified and stored in the data availability checker. If all required components + /// (block, blobs/columns, and proofs) are available, the block is imported into fork choice. + pub async fn process_rpc_execution_proofs( + self: &Arc, + slot: Slot, + block_root: Hash256, + execution_proofs: Vec>, + ) -> Result { + // If this block has already been imported to forkchoice it must have been available, so + // we don't need to process its execution proofs again. 
+ if self + .canonical_head + .fork_choice_read_lock() + .contains_block(&block_root) + { + return Err(BlockError::DuplicateFullyImported(block_root)); + } + + // Validate that all proofs are for the expected block_root + for proof in &execution_proofs { + if proof.block_root != block_root { + return Err(BlockError::AvailabilityCheck( + AvailabilityCheckError::Unexpected(format!( + "Proof block_root mismatch: expected {}, got {}", + block_root, proof.block_root + )), + )); + } + } + + // TODO(zkproofs): We can't emit SSE events for execution proofs yet + + self.check_rpc_execution_proof_availability_and_import(slot, block_root, execution_proofs) + .await + } + /// Process blobs retrieved from the EL and returns the `AvailabilityProcessingStatus`. pub async fn process_engine_blobs( self: &Arc, @@ -3570,6 +3659,30 @@ impl BeaconChain { .await } + /// Checks if the provided execution proof can make any cached blocks available, and imports + /// immediately if so, otherwise caches the proof in the data availability checker. + async fn check_gossip_execution_proof_availability_and_import( + self: &Arc, + execution_proof: GossipVerifiedExecutionProof, + publish_fn: impl FnOnce() -> Result<(), BlockError>, + ) -> Result { + let block_root = execution_proof.block_root(); + let slot = execution_proof.slot(); + + // TODO(zkproofs): Can we avoid the clone? + let proof_arc = execution_proof.into_inner(); + let proof = (*proof_arc).clone(); + + // Store the proof in the DA checker + let availability = self + .data_availability_checker + .put_verified_execution_proofs(block_root, std::iter::once(proof)) + .map_err(BlockError::AvailabilityCheck)?; + + self.process_availability(slot, availability, publish_fn) + .await + } + fn check_blob_header_signature_and_slashability<'a>( self: &Arc, block_root: Hash256, @@ -3674,6 +3787,28 @@ impl BeaconChain { .await } + /// Checks if the provided execution proofs can make any cached blocks available, and imports + /// immediately if so, otherwise caches the proofs in the data availability checker. + async fn check_rpc_execution_proof_availability_and_import( + self: &Arc, + slot: Slot, + block_root: Hash256, + execution_proofs: Vec>, + ) -> Result { + // TODO(zkproofs): For optional proofs, they are currently not signed, + // so we can't add any slashability checks here. We also don't want this, + // because it could cause issues where we slash a validator for giving us bad + // proofs, but for nodes that don't need proofs (most of the network), they will + not see this slashing or care about it.
+ + let availability = self + .data_availability_checker + .put_rpc_execution_proofs(block_root, execution_proofs)?; + + self.process_availability(slot, availability, || Ok(())) + .await + } + fn check_data_column_sidecar_header_signature_and_slashability<'a>( self: &Arc, block_root: Hash256, @@ -4053,6 +4188,20 @@ impl BeaconChain { current_slot, ); + // Notify proof generation service for altruistic proof generation + if let Some(ref proof_gen_tx) = self.proof_generation_tx { + let slot = signed_block.slot(); + let event = (block_root, slot, signed_block.clone()); + + if let Err(e) = proof_gen_tx.send(event) { + debug!( + error = ?e, + ?block_root, + "Failed to send proof generation event" + ); + } + } + Ok(block_root) } @@ -7402,6 +7551,34 @@ impl BeaconChain { && self.spec.is_peer_das_enabled_for_epoch(block_epoch) } + /// Returns true if epoch is within the execution proof retention boundary + pub fn execution_proof_check_required_for_epoch(&self, epoch: Epoch) -> bool { + self.data_availability_checker + .execution_proof_check_required_for_epoch(epoch) + } + + /// Returns true if we should fetch execution proofs for this block + pub fn should_fetch_execution_proofs(&self, block_epoch: Epoch) -> bool { + // Check if ZK-VM mode is enabled + if self.min_execution_proofs_required().is_none() { + return false; + } + + // Only fetch proofs within retention window + self.execution_proof_check_required_for_epoch(block_epoch) + } + + /// Returns the minimum number of execution proofs required + pub fn min_execution_proofs_required(&self) -> Option { + self.data_availability_checker + .min_execution_proofs_required() + } + + /// Returns the execution proof retention boundary epoch + pub fn execution_proof_boundary(&self) -> Option { + self.data_availability_checker.execution_proof_boundary() + } + /// Gets the `LightClientBootstrap` object for a requested block root. /// /// Returns `None` when the state or block is not found in the database. @@ -7501,6 +7678,59 @@ impl BeaconChain { .custody_context() .custody_columns_for_epoch(epoch_opt, &self.spec) } + + /// Returns a deterministic list of execution proof subnet IDs to request for a block in the given epoch. + /// + /// The selection is deterministic based on the epoch, ensuring all nodes request the same + /// subnets for blocks in the same epoch. Different epochs will result in different subnet + /// selections, providing rotation over time. + /// + /// # Arguments + /// * `epoch` - The epoch of the block + /// * `count` - Number of subnets to select (typically min_execution_proofs_required) + /// + /// # Returns + /// A vector of `count` subnet IDs, deterministically selected based on the epoch. 
+ pub fn execution_proof_subnets_for_epoch( + &self, + epoch: Epoch, + count: usize, + ) -> Vec { + use types::EXECUTION_PROOF_TYPE_COUNT; + + let total_subnets = EXECUTION_PROOF_TYPE_COUNT as usize; + let count = std::cmp::min(count, total_subnets); + + if count == 0 { + return vec![]; + } + + // Use epoch as a deterministic seed + // Hash the epoch to get a pseudo-random but deterministic ordering + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + use std::hash::{Hash, Hasher}; + epoch.hash(&mut hasher); + let seed = hasher.finish(); + + // Create a deterministic permutation of subnet IDs based on the seed + let mut subnet_ids: Vec = (0..EXECUTION_PROOF_TYPE_COUNT).collect(); + + // Simple deterministic shuffle using the seed + // This is a Fisher-Yates shuffle variant using deterministic randomness + for i in (1..subnet_ids.len()).rev() { + // Use seed + i for deterministic pseudo-random index + let j = ((seed.wrapping_add(i as u64).wrapping_mul(2654435761)) % ((i + 1) as u64)) + as usize; + subnet_ids.swap(i, j); + } + + // Take the first `count` subnet IDs and convert to ExecutionProofId + subnet_ids + .into_iter() + .take(count) + .filter_map(|id| types::ExecutionProofId::new(id).ok()) + .collect() + } } impl Drop for BeaconChain { diff --git a/beacon_node/beacon_chain/src/builder.rs b/beacon_node/beacon_chain/src/builder.rs index 719c24b9561..522cf18c63a 100644 --- a/beacon_node/beacon_chain/src/builder.rs +++ b/beacon_node/beacon_chain/src/builder.rs @@ -2,6 +2,7 @@ use crate::ChainConfig; use crate::CustodyContext; use crate::beacon_chain::{ BEACON_CHAIN_DB_KEY, CanonicalHead, LightClientProducerEvent, OP_POOL_DB_KEY, + ProofGenerationEvent, }; use crate::beacon_proposer_cache::BeaconProposerCache; use crate::custody_context::NodeCustodyType; @@ -13,6 +14,7 @@ use crate::kzg_utils::build_data_column_sidecars; use crate::light_client_server_cache::LightClientServerCache; use crate::migrate::{BackgroundMigrator, MigratorConfig}; use crate::observed_data_sidecars::ObservedDataSidecars; +use crate::observed_execution_proofs::ObservedExecutionProofs; use crate::persisted_beacon_chain::PersistedBeaconChain; use crate::persisted_custody::load_custody_context; use crate::shuffling_cache::{BlockShufflingIds, ShufflingCache}; @@ -39,6 +41,7 @@ use std::sync::Arc; use std::time::Duration; use store::{Error as StoreError, HotColdDB, ItemStore, KeyValueStoreOp}; use task_executor::{ShutdownReason, TaskExecutor}; +use tokio::sync::mpsc::UnboundedSender; use tracing::{debug, error, info}; use types::{ BeaconBlock, BeaconState, BlobSidecarList, ChainSpec, DataColumnSidecarList, Epoch, EthSpec, @@ -103,6 +106,16 @@ pub struct BeaconChainBuilder { validator_monitor_config: Option, node_custody_type: NodeCustodyType, rng: Option>, + /// ZK-VM execution layer configuration. + /// + /// TODO(zkproofs): When this is Some(_), the traditional ExecutionLayer should + /// be replaced with ZkVmEngineApi from zkvm_execution_layer. This would allow the + /// --execution-endpoint CLI flag to be optional when running in ZK-VM mode. 
+ zkvm_execution_layer_config: Option, + /// Registry of zkVM proof generators for currently altruistic proof generation + zkvm_generator_registry: Option>, + /// Sender to notify proof generation service of blocks needing proofs + proof_generation_tx: Option>>, } impl @@ -142,6 +155,9 @@ where validator_monitor_config: None, node_custody_type: NodeCustodyType::Fullnode, rng: None, + zkvm_execution_layer_config: None, + zkvm_generator_registry: None, + proof_generation_tx: None, } } @@ -647,6 +663,16 @@ where self } + /// Sets the ZK-VM execution layer configuration. + /// When set, enables ZK-VM execution proof verification mode. + pub fn zkvm_execution_layer_config( + mut self, + config: Option, + ) -> Self { + self.zkvm_execution_layer_config = config; + self + } + /// Sets the `BeaconChain` event handler backend. /// /// For example, provide `ServerSentEventHandler` as a `handler`. @@ -682,6 +708,21 @@ where self } + /// Sets the zkVM generator registry for altruistic proof generation. + pub fn zkvm_generator_registry( + mut self, + registry: Arc, + ) -> Self { + self.zkvm_generator_registry = Some(registry); + self + } + + /// Sets a `Sender` to notify the proof generation service of new blocks. + pub fn proof_generation_tx(mut self, sender: UnboundedSender>) -> Self { + self.proof_generation_tx = Some(sender); + self + } + /// Creates a new, empty operation pool. fn empty_op_pool(mut self) -> Self { self.op_pool = Some(OperationPool::new()); @@ -952,6 +993,9 @@ where }; debug!(?custody_context, "Loaded persisted custody context"); + let has_execution_layer_and_proof_gen = + self.execution_layer.is_some() && self.zkvm_generator_registry.is_some(); + let beacon_chain = BeaconChain { spec: self.spec.clone(), config: self.chain_config, @@ -984,6 +1028,7 @@ where observed_block_producers: <_>::default(), observed_column_sidecars: RwLock::new(ObservedDataSidecars::new(self.spec.clone())), observed_blob_sidecars: RwLock::new(ObservedDataSidecars::new(self.spec.clone())), + observed_execution_proofs: RwLock::new(ObservedExecutionProofs::default()), observed_slashable: <_>::default(), observed_voluntary_exits: <_>::default(), observed_proposer_slashings: <_>::default(), @@ -1029,11 +1074,22 @@ where store, Arc::new(custody_context), self.spec, + // Create verifier registry if zkvm mode is enabled + // For now, we use dummy verifiers for all subnets + self.zkvm_execution_layer_config + .as_ref() + .map(|_| Arc::new(zkvm_execution_layer::registry_proof_verification::VerifierRegistry::new_with_dummy_verifiers())), + // Pass whether this node has an execution layer AND generates proofs + // Nodes with EL+proof-gen validate via traditional execution + // Nodes with EL but no proof-gen wait for proofs (lightweight verifier) + has_execution_layer_and_proof_gen, ) .map_err(|e| format!("Error initializing DataAvailabilityChecker: {:?}", e))?, ), kzg: self.kzg.clone(), rng: Arc::new(Mutex::new(rng)), + zkvm_generator_registry: self.zkvm_generator_registry, + proof_generation_tx: self.proof_generation_tx, }; let head = beacon_chain.head_snapshot(); diff --git a/beacon_node/beacon_chain/src/canonical_head.rs b/beacon_node/beacon_chain/src/canonical_head.rs index 7dd4c88c513..228e5eb2d27 100644 --- a/beacon_node/beacon_chain/src/canonical_head.rs +++ b/beacon_node/beacon_chain/src/canonical_head.rs @@ -951,6 +951,13 @@ impl BeaconChain { .start_slot(T::EthSpec::slots_per_epoch()), ); + self.observed_execution_proofs.write().prune( + new_view + .finalized_checkpoint + .epoch + 
.start_slot(T::EthSpec::slots_per_epoch()), + ); + self.observed_slashable.write().prune( new_view .finalized_checkpoint diff --git a/beacon_node/beacon_chain/src/data_availability_checker.rs b/beacon_node/beacon_chain/src/data_availability_checker.rs index 644c4716985..77325feccb2 100644 --- a/beacon_node/beacon_chain/src/data_availability_checker.rs +++ b/beacon_node/beacon_chain/src/data_availability_checker.rs @@ -18,12 +18,13 @@ use std::num::NonZeroUsize; use std::sync::Arc; use std::time::Duration; use task_executor::TaskExecutor; -use tracing::{debug, error, instrument}; +use tracing::{debug, error, instrument, warn}; use types::blob_sidecar::{BlobIdentifier, BlobSidecar, FixedBlobSidecarList}; use types::{ BlobSidecarList, BlockImportSource, ChainSpec, DataColumnSidecar, DataColumnSidecarList, Epoch, - EthSpec, Hash256, SignedBeaconBlock, Slot, + EthSpec, ExecutionProof, ExecutionProofId, Hash256, SignedBeaconBlock, Slot, }; +use zkvm_execution_layer::registry_proof_verification::VerifierRegistry; mod error; mod overflow_lru_cache; @@ -86,6 +87,8 @@ pub struct DataAvailabilityChecker { kzg: Arc, custody_context: Arc>, spec: Arc, + /// Registry of proof verifiers for different zkVM proof IDs. + verifier_registry: Option>, } pub type AvailabilityAndReconstructedColumns = (Availability, DataColumnSidecarList); @@ -118,6 +121,7 @@ impl Debug for Availability { } impl DataAvailabilityChecker { + #[allow(clippy::too_many_arguments)] pub fn new( complete_blob_backfill: bool, slot_clock: T::SlotClock, @@ -125,12 +129,15 @@ impl DataAvailabilityChecker { store: BeaconStore, custody_context: Arc>, spec: Arc, + verifier_registry: Option>, + has_execution_layer_and_proof_gen: bool, ) -> Result { let inner = DataAvailabilityCheckerInner::new( OVERFLOW_LRU_CAPACITY_NON_ZERO, store, custody_context.clone(), spec.clone(), + has_execution_layer_and_proof_gen, )?; Ok(Self { complete_blob_backfill, @@ -139,6 +146,7 @@ impl DataAvailabilityChecker { kzg, custody_context, spec, + verifier_registry, }) } @@ -169,6 +177,54 @@ impl DataAvailabilityChecker { }) } + /// Return the set of cached execution proof IDs for `block_root`. Returns None if there is + /// no block component for `block_root`. + pub fn cached_execution_proof_subnet_ids( + &self, + block_root: &Hash256, + ) -> Option> { + self.availability_cache + .peek_pending_components(block_root, |components| { + components.map(|components| { + components + .get_cached_execution_proofs() + .iter() + .map(|proof| proof.proof_id) + .collect::>() + }) + }) + } + + /// Get proof IDs we already have for a block. + /// Used when creating RPC requests to tell peers what we don't need. + pub fn get_existing_proof_ids(&self, block_root: &Hash256) -> Option> { + self.availability_cache + .peek_pending_components(block_root, |components| { + components.map(|components| { + components + .get_cached_execution_proofs() + .iter() + .map(|proof| proof.proof_id) + .collect::>() + }) + }) + } + + /// Get all execution proofs we have for a block. + /// Used when responding to RPC requests. + pub fn get_execution_proofs(&self, block_root: &Hash256) -> Option>> { + self.availability_cache + .peek_pending_components(block_root, |components| { + components.map(|components| { + components + .get_cached_execution_proofs() + .iter() + .map(|proof| Arc::new(proof.clone())) + .collect::>() + }) + }) + } + /// Return the set of cached custody column indexes for `block_root`. Returns None if there is /// no block component for `block_root`. 
pub fn cached_data_column_indexes(&self, block_root: &Hash256) -> Option> { @@ -193,6 +249,63 @@ impl DataAvailabilityChecker { }) } + /// Check if an execution proof is already cached in the availability cache. + /// + /// We usually call this method if the proof was made available via RPC, and we later receive it via gossip. + /// If it exists in the cache, we know it has already passed validation, + /// even though this particular instance may not have been seen/published on gossip yet. + pub fn is_execution_proof_cached( + &self, + block_root: &Hash256, + execution_proof: &ExecutionProof, + ) -> bool { + self.availability_cache + .peek_pending_components(block_root, |components| { + components.is_some_and(|components| { + components + .get_cached_execution_proofs() + .iter() + .any(|cached| cached == execution_proof) + }) + }) + } + + /// Verify a single execution proof for gossip. + /// + /// This performs cryptographic verification of the proof without requiring the full block. + /// + /// Returns: + /// - Ok(true) if proof is valid + /// - Ok(false) if proof is invalid + /// - Err if no verifier is configured or verification fails + pub fn verify_execution_proof_for_gossip( + &self, + proof: &ExecutionProof, + ) -> Result { + let Some(verifier_registry) = &self.verifier_registry else { + // No verifier configured but receiving proofs - this is a configuration error. + // If the chain spec enables execution proofs, the node must have the --execution-proofs flag set. + return Err(AvailabilityCheckError::ProofVerificationError( + "Node is receiving execution proofs but proof verification is not enabled. \ + Use --execution-proofs flag to enable proof verification." + .to_string(), + )); + }; + + let subnet_id = proof.proof_id; + let verifier = verifier_registry.get_verifier(subnet_id).ok_or_else(|| { + warn!(?subnet_id, "No verifier registered for subnet"); + AvailabilityCheckError::UnsupportedProofID(subnet_id) + })?; + + verifier.verify(proof).map_err(|e| { + AvailabilityCheckError::ProofVerificationError(format!( + "Proof verification failed: {:?}", + e + )) + }) + } + /// Get a blob from the availability cache. pub fn get_blob( &self, @@ -269,6 +382,117 @@ .put_kzg_verified_data_columns(block_root, verified_custody_columns) } + /// Put a list of execution proofs received via RPC into the availability cache. + /// This performs cryptographic verification on the proofs.
+ #[instrument(skip_all, level = "trace")] + pub fn put_rpc_execution_proofs( + &self, + block_root: Hash256, + proofs: Vec>, + ) -> Result, AvailabilityCheckError> { + debug!( + ?block_root, + num_proofs = proofs.len(), + "Verifying and storing execution proofs in DA checker" + ); + + // If no verifier registry is configured, skip verification + let Some(verifier_registry) = &self.verifier_registry else { + debug!( + ?block_root, + "No verifier registry configured, storing proofs without verification" + ); + let owned_proofs = proofs.iter().map(|p| (**p).clone()); + return self + .availability_cache + .put_verified_execution_proofs(block_root, owned_proofs); + }; + + // Get the execution payload hash from the block + let execution_payload_hash = self + .availability_cache + .peek_pending_components(&block_root, |components| { + components.and_then(|c| c.block.as_ref().and_then(|b| b.execution_payload_hash())) + }) + .ok_or_else(|| { + warn!( + ?block_root, + "Cannot verify proofs: block not in cache or has no execution payload" + ); + AvailabilityCheckError::MissingExecutionPayload + })?; + + debug!( + ?block_root, + ?execution_payload_hash, + "Got execution payload hash for proof verification" + ); + + let mut verified_proofs = Vec::new(); + for proof in proofs { + let proof_id = proof.proof_id; + + // Check that the proof's block_hash matches the execution payload hash + if proof.block_hash != execution_payload_hash { + warn!( + ?block_root, + ?proof_id, + proof_hash = ?proof.block_hash, + ?execution_payload_hash, + "Proof execution payload hash mismatch" + ); + return Err(AvailabilityCheckError::ExecutionPayloadHashMismatch { + proof_hash: proof.block_hash, + block_hash: execution_payload_hash, + }); + } + + let verifier = verifier_registry.get_verifier(proof_id).ok_or_else(|| { + warn!(?proof_id, "No verifier registered for proof ID"); + AvailabilityCheckError::UnsupportedProofID(proof_id) + })?; + + // Verify the proof (proof contains block_hash internally) + match verifier.verify(&proof) { + Ok(true) => { + debug!(?proof_id, ?block_root, "Proof verification succeeded"); + verified_proofs.push((*proof).clone()); + } + Ok(false) => { + debug!( + ?proof_id, + ?block_root, + "Proof verification failed: proof is invalid" + ); + return Err(AvailabilityCheckError::InvalidProof { + proof_id, + reason: "Proof verification returns false".to_string(), + }); + } + Err(e) => { + warn!( + ?proof_id, + ?block_root, + error = ?e, + "Proof verification error" + ); + return Err(AvailabilityCheckError::ProofVerificationError( + e.to_string(), + )); + } + } + } + + debug!( + ?block_root, + verified_count = verified_proofs.len(), + "All proofs verified successfully" + ); + + self.availability_cache + .put_verified_execution_proofs(block_root, verified_proofs) + } + /// Check if we've cached other blobs for this block. If it completes a set and we also /// have a block cached, return the `Availability` variant triggering block import. /// Otherwise cache the blob sidecar. @@ -338,6 +562,20 @@ impl DataAvailabilityChecker { .put_kzg_verified_data_columns(block_root, custody_columns) } + /// Put execution proofs into the availability cache as pending components. + /// + /// Returns `Availability` which has information about whether all components have been + /// received or more are required. 
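+ ///
+ /// Unlike `put_rpc_execution_proofs`, this method performs no cryptographic verification;
+ /// callers are expected to pass proofs that have already been verified elsewhere (e.g. during
+ /// gossip verification).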
+ #[instrument(skip_all, level = "trace")] + pub fn put_verified_execution_proofs>( + &self, + block_root: Hash256, + execution_proofs: I, + ) -> Result, AvailabilityCheckError> { + self.availability_cache + .put_verified_execution_proofs(block_root, execution_proofs) + }
+ /// Check if we have all the blobs for a block. Returns `Availability` which has information /// about whether all components have been received or more are required. pub fn put_executed_block( @@ -566,6 +804,44 @@ impl DataAvailabilityChecker { }) }
+ /// The epoch at which we require execution proofs for block processing.
+ ///
+ /// Note: This follows the same pattern as blob retention: proofs are required starting from
+ /// the zkvm_fork epoch, but only retained for a configured number of epochs.
+ /// For example, with `zkvm_fork_epoch = 100`, `current_epoch = 110` and
+ /// `min_epochs_for_execution_proof_requests = 4`, the boundary is `max(100, 110 - 4) = 106`.
+ ///
+ /// TODO(zkproofs): We don't store proofs forever, and we also don't store
+ /// blobs forever. Perhaps we should, because once the blobs disappear we may not
+ /// be able to remake the proof when we put blobs in blocks.
+ /// We don't for now because proofs are quite large at the moment.
+ ///
+ /// Returns `None` if ZK-VM mode is disabled.
+ pub fn execution_proof_boundary(&self) -> Option { + let zkvm_fork_epoch = self.spec.zkvm_fork_epoch()?; +
+ let current_epoch = self.slot_clock.now()?.epoch(T::EthSpec::slots_per_epoch()); +
+ // Calculate retention boundary + let proof_retention_epoch = + current_epoch.saturating_sub(self.spec.min_epochs_for_execution_proof_requests); +
+ // Return max of fork epoch and retention boundary + // This ensures: + // 1. Proofs are never required before the zkvm fork + // 2. Proofs are only retained for the configured number of epochs + Some(std::cmp::max(zkvm_fork_epoch, proof_retention_epoch)) + } +
+ /// Returns true if the given epoch lies within the proof retention boundary.
+ pub fn execution_proof_check_required_for_epoch(&self, block_epoch: Epoch) -> bool { + self.execution_proof_boundary() + .is_some_and(|boundary_epoch| block_epoch >= boundary_epoch) + } +
+ /// Returns the minimum number of execution proofs required for ZK-VM mode.
+ pub fn min_execution_proofs_required(&self) -> Option { + self.spec.zkvm_min_proofs_required() + }
+ /// Collects metrics from the data availability checker. pub fn metrics(&self) -> DataAvailabilityCheckerMetrics { DataAvailabilityCheckerMetrics { @@ -1207,6 +1483,8 @@ mod test { store, custody_context, spec, + None, + false, ) .expect("should initialise data availability checker") } diff --git a/beacon_node/beacon_chain/src/data_availability_checker/error.rs b/beacon_node/beacon_chain/src/data_availability_checker/error.rs index c9efb7a4149..e5158827479 100644 --- a/beacon_node/beacon_chain/src/data_availability_checker/error.rs +++ b/beacon_node/beacon_chain/src/data_availability_checker/error.rs @@ -1,5 +1,5 @@ use kzg::{Error as KzgError, KzgCommitment}; -use types::{BeaconStateError, ColumnIndex, Hash256}; +use types::{BeaconStateError, ColumnIndex, ExecutionProofId, Hash256}; #[derive(Debug)] pub enum Error { @@ -22,6 +22,27 @@ pub enum Error { BlockReplayError(state_processing::BlockReplayError), RebuildingStateCaches(BeaconStateError), SlotClockError,
+ /// Execution proof verification failed - proof is invalid.
+ /// Penalize the peer; a peer should not forward invalid proofs.
+ InvalidProof { + proof_id: ExecutionProofId, + reason: String, + },
+ /// No verifier registered for this proof ID.
+ /// Internal error; no peer penalization.
+ UnsupportedProofID(ExecutionProofId),
+ /// Error during proof verification process.
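+ /// This covers cases where the verifier could not complete, as opposed to returning a
+ /// definitive invalid result (see `InvalidProof`).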
+ /// Internal error; no peer penalization. + ProofVerificationError(String), + /// Could not extract execution payload from block. + /// Internal error; no peer penalization. + MissingExecutionPayload, + /// Execution payload hash mismatch between proof and block. + /// Penalize peer, similar to an invalid proof. + ExecutionPayloadHashMismatch { + proof_hash: types::ExecutionBlockHash, + block_hash: types::ExecutionBlockHash, + }, } #[derive(PartialEq, Eq)] @@ -44,13 +65,18 @@ impl Error { | Error::ParentStateMissing(_) | Error::BlockReplayError(_) | Error::RebuildingStateCaches(_) - | Error::SlotClockError => ErrorCategory::Internal, + | Error::SlotClockError + | Error::UnsupportedProofID(_) + | Error::ProofVerificationError(_) + | Error::MissingExecutionPayload => ErrorCategory::Internal, Error::InvalidBlobs { .. } | Error::InvalidColumn { .. } | Error::ReconstructColumnsError { .. } | Error::BlobIndexInvalid(_) | Error::DataColumnIndexInvalid(_) - | Error::KzgCommitmentMismatch { .. } => ErrorCategory::Malicious, + | Error::KzgCommitmentMismatch { .. } + | Error::InvalidProof { .. } + | Error::ExecutionPayloadHashMismatch { .. } => ErrorCategory::Malicious, } } } diff --git a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs index 5e6322ae95a..499f8354232 100644 --- a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs +++ b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs @@ -15,7 +15,7 @@ use parking_lot::{MappedRwLockReadGuard, RwLock, RwLockReadGuard, RwLockWriteGua use std::cmp::Ordering; use std::num::NonZeroUsize; use std::sync::Arc; -use tracing::{Span, debug, debug_span}; +use tracing::{Span, debug, debug_span, info}; use types::beacon_block_body::KzgCommitments; use types::blob_sidecar::BlobIdentifier; use types::{ @@ -55,6 +55,16 @@ impl CachedBlock { .blob_kzg_commitments() .map_or(0, |commitments| commitments.len()) } + + /// Get the execution payload hash if this block has an execution payload + pub fn execution_payload_hash(&self) -> Option { + self.as_block() + .message() + .body() + .execution_payload() + .ok() + .map(|payload| payload.execution_payload_ref().block_hash()) + } } /// This represents the components of a partially available block @@ -74,6 +84,7 @@ pub struct PendingComponents { pub block_root: Hash256, pub verified_blobs: RuntimeFixedVector>>, pub verified_data_columns: Vec>, + pub verified_execution_proofs: Vec, pub block: Option>, pub reconstruction_started: bool, span: Span, @@ -199,6 +210,50 @@ impl PendingComponents { Ok(()) } + /// Returns an immutable reference to the cached execution proofs. + pub fn get_cached_execution_proofs(&self) -> &[types::ExecutionProof] { + &self.verified_execution_proofs + } + + /// Check if we have a specific proof + pub fn has_proof_with_id(&self, proof_id: types::ExecutionProofId) -> bool { + self.verified_execution_proofs + .iter() + .any(|proof| proof.proof_id == proof_id) + } + + /// Get the number of unique subnet proofs we have + pub fn execution_proof_subnet_count(&self) -> usize { + self.verified_execution_proofs.len() + } + + /// Merges a single execution proof into the cache. + /// + /// Proofs are only inserted if: + /// 1. We don't already have a proof from this subnet for this block + /// 2. 
The proof's block_hash matches the cached block's execution payload hash (currently a TODO; not yet enforced here, see below)
+ pub fn merge_execution_proof(&mut self, proof: types::ExecutionProof) {
+ // TODO(zkproofs): verify that proof.block_hash matches the cached block's execution
+ // payload hash once the block is present; RPC-received proofs are already checked in
+ // `put_rpc_execution_proofs`.
+
+ // Don't insert duplicate proofs + if self.has_proof_with_id(proof.proof_id) { + return; + } +
+ self.verified_execution_proofs.push(proof); + } +
+ /// Merges a given set of execution proofs into the cache. + pub fn merge_execution_proofs>( + &mut self, + execution_proofs: I, + ) { + for proof in execution_proofs { + self.merge_execution_proof(proof); + } + }
+ /// Inserts a new block and revalidates the existing blobs against it. /// /// Blobs that don't match the new block's commitments are evicted. @@ -213,10 +268,11 @@ impl PendingComponents { /// /// WARNING: This function can potentially take a lot of time if the state needs to be /// reconstructed from disk. Ensure you are not holding any write locks while calling this. - pub fn make_available( + fn make_available( + &self, + spec: &Arc, + num_expected_columns_opt: Option, + has_execution_layer_and_proof_gen: bool, + recover: R, + ) -> Result>, AvailabilityCheckError> + where @@ -294,6 +350,42 @@ impl PendingComponents { return Ok(None); };
+ // Check if this node needs execution proofs to validate blocks.
+ // Nodes that have EL and generate proofs validate via EL execution.
+ // Nodes that have EL but DON'T generate proofs are lightweight verifiers and wait for proofs.
+ // TODO(zkproofs): This is a technicality mainly because we cannot remove the EL on kurtosis,
+ // i.e. each CL is coupled with an EL
+ let needs_execution_proofs = + spec.zkvm_min_proofs_required().is_some() && !has_execution_layer_and_proof_gen; +
+ if needs_execution_proofs { + let min_proofs = spec.zkvm_min_proofs_required().unwrap(); + let num_proofs = self.execution_proof_subnet_count(); + if num_proofs < min_proofs { + // Not enough execution proofs yet + return Ok(None); + } +
+ // Log when minimum proofs requirement is met + let proof_ids: Vec<_> = self + .verified_execution_proofs + .iter() + .map(|p| p.proof_id.as_u8().to_string()) + .collect(); +
+ let slot = self.verified_execution_proofs.first().map(|p| p.slot); +
+ self.span.in_scope(|| { + if let Some(slot) = slot { + info!("[Ethproofs] Minimum required execution proofs received: {}/{} proof_ids=[{}] slot={}", + num_proofs, min_proofs, proof_ids.join(", "), slot); + } else { + info!("[Ethproofs] Minimum required execution proofs received: {}/{} proof_ids=[{}]", + num_proofs, min_proofs, proof_ids.join(", ")); + } + }); + } +
// Block is available, construct `AvailableExecutedBlock` let blobs_available_timestamp = match blob_data { @@ -323,6 +415,21 @@ impl PendingComponents { }; self.span.in_scope(|| { + let proof_count = self.execution_proof_subnet_count(); + if proof_count > 0 { + let slot = self.block.as_ref().map(|b| b.as_block().slot()); + if let Some(slot) = slot { + info!( + "[Ethproofs] Block ready for validation with {} execution proofs slot={}", + proof_count, slot + ); + } else { + info!( + "[Ethproofs] Block ready for validation with {} execution proofs", + proof_count + ); + } + } debug!("Block and all data components are available"); }); Ok(Some(AvailableExecutedBlock::new( @@ -340,6 +447,7 @@ impl PendingComponents { block_root, verified_blobs: RuntimeFixedVector::new(vec![None; max_len]), verified_data_columns: vec![], + verified_execution_proofs: vec![], block: None, reconstruction_started: false, span, @@ -372,7 +480,9 @@ impl
PendingComponents { pub fn status_str(&self, num_expected_columns_opt: Option) -> String { let block_count = if self.block.is_some() { 1 } else { 0 }; - if let Some(num_expected_columns) = num_expected_columns_opt { + let proof_count = self.execution_proof_subnet_count(); + + let base_status = if let Some(num_expected_columns) = num_expected_columns_opt { format!( "block {} data_columns {}/{}", block_count, @@ -391,6 +501,13 @@ impl PendingComponents { self.verified_blobs.iter().flatten().count(), num_expected_blobs ) + }; + + // Append execution proof count if we have any + if proof_count > 0 { + format!("{} proofs {}", base_status, proof_count) + } else { + base_status } } } @@ -405,6 +522,10 @@ pub struct DataAvailabilityCheckerInner { state_cache: StateLRUCache, custody_context: Arc>, spec: Arc, + /// Whether this node has an execution layer AND generates proofs. + /// - true: Node has EL and generates proofs → validates via EL execution + /// - false: Node either has no EL, or has EL but doesn't generate → waits for proofs (lightweight verifier) + has_execution_layer_and_proof_gen: bool, } // This enum is only used internally within the crate in the reconstruction function to improve @@ -422,12 +543,14 @@ impl DataAvailabilityCheckerInner { beacon_store: BeaconStore, custody_context: Arc>, spec: Arc, + has_execution_layer_and_proof_gen: bool, ) -> Result { Ok(Self { critical: RwLock::new(LruCache::new(capacity)), state_cache: StateLRUCache::new(beacon_store, spec.clone()), custody_context, spec, + has_execution_layer_and_proof_gen, }) } @@ -575,6 +698,65 @@ impl DataAvailabilityCheckerInner { ) } + /// Puts execution proofs into the availability cache as pending components. + pub fn put_verified_execution_proofs>( + &self, + block_root: Hash256, + execution_proofs: I, + ) -> Result, AvailabilityCheckError> { + let mut execution_proofs = execution_proofs.into_iter().peekable(); + + if execution_proofs.peek().is_none() { + // No proofs to process + return Ok(Availability::MissingComponents(block_root)); + } + + // TODO(ethproofs): Added for demo. 
+ // Check if we already have the minimum required proofs + // If so, don't add more to avoid unnecessary processing + if let Some(min_proofs) = self.spec.zkvm_min_proofs_required() { + if let Some(pending) = self.critical.read().peek(&block_root) { + if pending.execution_proof_subnet_count() >= min_proofs { + // Already have minimum required proofs, skip adding more + return Ok(Availability::MissingComponents(block_root)); + } + } + } + + // Try to get epoch from existing pending components (if block already arrived) + // Otherwise use Epoch::new(0) as placeholder (will be corrected when block arrives) + // Also the component cannot be marked as available, if the block is missing + let epoch = self + .critical + .read() + .peek(&block_root) + .and_then(|pending| pending.epoch()) + .unwrap_or_else(|| types::Epoch::new(0)); + + let pending_components = + self.update_or_insert_pending_components(block_root, epoch, |pending_components| { + pending_components.merge_execution_proofs(execution_proofs); + Ok(()) + })?; + + let num_expected_columns_opt = self.get_num_expected_columns(epoch); + + pending_components.span.in_scope(|| { + debug!( + component = "execution_proofs", + status = pending_components.status_str(num_expected_columns_opt), + num_proofs = pending_components.execution_proof_subnet_count(), + "Component added to data availability checker" + ); + }); + + self.check_availability_and_cache_components( + block_root, + pending_components, + num_expected_columns_opt, + ) + } + fn check_availability_and_cache_components( &self, block_root: Hash256, @@ -584,6 +766,7 @@ impl DataAvailabilityCheckerInner { if let Some(available_block) = pending_components.make_available( &self.spec, num_expected_columns_opt, + self.has_execution_layer_and_proof_gen, |block, span| self.state_cache.recover_pending_executed_block(block, span), )? { // Explicitly drop read lock before acquiring write lock @@ -1030,6 +1213,7 @@ mod test { test_store, custody_context, spec.clone(), + false, ) .expect("should create cache"), ); diff --git a/beacon_node/beacon_chain/src/errors.rs b/beacon_node/beacon_chain/src/errors.rs index 9dc6e897fb1..0effe0ec9d2 100644 --- a/beacon_node/beacon_chain/src/errors.rs +++ b/beacon_node/beacon_chain/src/errors.rs @@ -98,6 +98,7 @@ pub enum BeaconChainError { ObservedAttestersError(ObservedAttestersError), ObservedBlockProducersError(ObservedBlockProducersError), ObservedDataSidecarsError(ObservedDataSidecarsError), + ObservedExecutionProofError(String), AttesterCacheError(AttesterCacheError), PruningError(PruningError), ArithError(ArithError), diff --git a/beacon_node/beacon_chain/src/execution_proof_verification.rs b/beacon_node/beacon_chain/src/execution_proof_verification.rs new file mode 100644 index 00000000000..e80e95caf64 --- /dev/null +++ b/beacon_node/beacon_chain/src/execution_proof_verification.rs @@ -0,0 +1,625 @@ +use crate::observed_data_sidecars::{ObservationStrategy, Observe}; +use crate::{BeaconChain, BeaconChainError, BeaconChainTypes}; +use slot_clock::SlotClock; +use std::marker::PhantomData; +use std::sync::Arc; +use tracing::{debug, error}; +use types::{ChainSpec, EthSpec, ExecutionProof, ExecutionProofId, Hash256, Slot}; + +/// An error occurred while validating a gossip execution proof. +#[derive(Debug)] +pub enum GossipExecutionProofError { + /// There was an error whilst processing the execution proof. It is not known if it is + /// valid or invalid. + /// + /// ## Peer scoring + /// + /// We were unable to process this proof due to an internal error. 
It's unclear if the proof + /// is valid. + BeaconChainError(Box), + + /// The execution proof is from a slot that is later than the current slot (with respect to + /// the gossip clock disparity). + /// + /// ## Peer scoring + /// + /// Assuming the local clock is correct, the peer has sent an invalid message. + FutureSlot { + message_slot: Slot, + latest_permissible_slot: Slot, + }, + + /// The proof corresponds to a slot older than the finalized head slot. + /// + /// ## Peer scoring + /// + /// It's unclear if this proof is valid, but this proof is for a finalized slot and is + /// therefore useless to us. + PastFinalizedSlot { + proof_slot: Slot, + finalized_slot: Slot, + }, + + /// The proof's parent block is unknown. + /// + /// ## Peer scoring + /// + /// We cannot process the proof without validating its parent, the peer isn't necessarily + /// faulty. + ParentUnknown { parent_root: Hash256 }, + + /// The proof conflicts with finalization, no need to propagate. + /// + /// ## Peer scoring + /// + /// It's unclear if this proof is valid, but it conflicts with finality and shouldn't be + /// imported. + NotFinalizedDescendant { block_parent_root: Hash256 }, + + /// An execution proof has already been seen for the given `(proof.block_root, + /// proof_id)` tuple over gossip or no gossip sources. + /// + /// ## Peer scoring + /// + /// The peer isn't faulty, but we do not forward it over gossip. + PriorKnown { + slot: Slot, + block_root: Hash256, + proof_id: ExecutionProofId, + }, + + /// An execution proof has already been processed from non-gossip source and has not yet been + /// seen on the gossip network. This proof should be accepted and forwarded over gossip. + PriorKnownUnpublished, + + /// The proof verification failed (invalid zkVM proof). + /// + /// ## Peer scoring + /// + /// The proof is invalid and the peer is faulty. + ProofVerificationFailed(String), + + /// The proof size exceeds the maximum allowed size. + /// + /// ## Peer scoring + /// + /// The proof is invalid and the peer is faulty. + ProofTooLarge { size: usize, max_size: usize }, + + /// The block for this proof is not yet available. + /// + /// ## Peer scoring + /// + /// The peer may have sent a proof before we've seen the block. Not necessarily faulty. + BlockNotAvailable { block_root: Hash256 }, +} + +impl From for GossipExecutionProofError { + fn from(e: BeaconChainError) -> Self { + GossipExecutionProofError::BeaconChainError(Box::new(e)) + } +} + +/// A wrapper around an `ExecutionProof` that has been verified for propagation on the gossip +/// network. 
+pub struct GossipVerifiedExecutionProof { + block_root: Hash256, + execution_proof: Arc, + _phantom: PhantomData<(T, O)>, +} +
+impl std::fmt::Debug + for GossipVerifiedExecutionProof +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("GossipVerifiedExecutionProof") + .field("block_root", &self.block_root) + .field("execution_proof", &self.execution_proof) + .finish() + } +} +
+impl Clone for GossipVerifiedExecutionProof { + fn clone(&self) -> Self { + Self { + block_root: self.block_root, + execution_proof: self.execution_proof.clone(), + _phantom: PhantomData, + } + } +} +
+impl GossipVerifiedExecutionProof { + pub fn new( + execution_proof: Arc, + chain: &BeaconChain, + ) -> Result { + validate_execution_proof_for_gossip::(execution_proof, chain) + } +
+ pub fn slot(&self) -> Slot { + self.execution_proof.slot + } +
+ pub fn block_root(&self) -> Hash256 { + self.block_root + } +
+ pub fn execution_proof(&self) -> &Arc { + &self.execution_proof + } +
+ pub fn subnet_id(&self) -> ExecutionProofId { + self.execution_proof.proof_id + } +
+ /// Consume the wrapper, returning the inner execution proof.
+ pub fn into_inner(self) -> Arc { + self.execution_proof + } +} +
+/// Validate an execution proof for gossip.
+pub fn validate_execution_proof_for_gossip( + execution_proof: Arc, + chain: &BeaconChain, +) -> Result, GossipExecutionProofError> { + let block_root = execution_proof.block_root; + let proof_slot = execution_proof.slot; +
+ // 1. Verify proof is not from the future + verify_proof_not_from_future_slot(chain, proof_slot)?; +
+ // 2. Verify proof slot is greater than finalized slot + verify_slot_greater_than_latest_finalized_slot(chain, proof_slot)?; +
+ // 3. Check if proof is already known via gossip + verify_is_unknown_execution_proof(chain, &execution_proof)?; +
+ // 4. Check if the proof is already in the DA checker cache + // If it exists in the cache, we know it has already passed validation. + if chain + .data_availability_checker + .is_execution_proof_cached(&block_root, &execution_proof) + { + if O::observe() { + observe_gossip_execution_proof(&execution_proof, chain)?; + } + return Err(GossipExecutionProofError::PriorKnownUnpublished); + } +
+ // 5. Verify proof size limits + verify_proof_size(&execution_proof, &chain.spec)?; +
+ // Note: We intentionally do NOT verify the block exists yet.
+ // Execution proofs can arrive via gossip before their corresponding blocks,
+ // so we cache them in the DA checker and match them up when the block arrives.
+ // This is similar to how blob sidecars are handled.
+
+ // 6. Run zkVM proof verification + verify_zkvm_proof(&execution_proof, chain)?; +
+ // 7. Observe the proof to prevent reprocessing + if O::observe() { + observe_gossip_execution_proof(&execution_proof, chain)?; + } +
+ Ok(GossipVerifiedExecutionProof { + block_root, + execution_proof, + _phantom: PhantomData, + }) +} +
+/// Verify that this execution proof has not been seen before via gossip
+fn verify_is_unknown_execution_proof( + chain: &BeaconChain, + execution_proof: &ExecutionProof, +) -> Result<(), GossipExecutionProofError> { + let block_root = execution_proof.block_root; + let proof_id = execution_proof.proof_id; + let slot = execution_proof.slot; +
+ if chain + .observed_execution_proofs + .read() + .is_known(slot, block_root, proof_id) + .map_err(|e| { + GossipExecutionProofError::BeaconChainError(Box::new( + BeaconChainError::ObservedExecutionProofError(format!("{:?}", e)), + )) + })?
{ + return Err(GossipExecutionProofError::PriorKnown { + slot, + block_root, + proof_id, + }); + } +
+ Ok(()) +} +
+/// Verify that the proof size is within acceptable limits.
+fn verify_proof_size( + execution_proof: &ExecutionProof, + _spec: &ChainSpec, +) -> Result<(), GossipExecutionProofError> { + use types::MAX_PROOF_DATA_BYTES; +
+ let proof_size = execution_proof.proof_data.len(); + if proof_size > MAX_PROOF_DATA_BYTES { + return Err(GossipExecutionProofError::ProofTooLarge { + size: proof_size, + max_size: MAX_PROOF_DATA_BYTES, + }); + } +
+ Ok(()) +} +
+/// Mark this execution proof as observed in gossip, to prevent reprocessing.
+fn observe_gossip_execution_proof( + execution_proof: &ExecutionProof, + chain: &BeaconChain, +) -> Result<(), GossipExecutionProofError> { + let block_root = execution_proof.block_root; + let proof_id = execution_proof.proof_id; + let slot = execution_proof.slot; +
+ chain + .observed_execution_proofs + .write() + .observe_proof(slot, block_root, proof_id) + .map_err(|e| { + GossipExecutionProofError::BeaconChainError(Box::new( + BeaconChainError::ObservedExecutionProofError(format!("{:?}", e)), + )) + })?; +
+ debug!( + %block_root, + %proof_id, + %slot, + "Marked execution proof as observed" + ); +
+ Ok(()) +} +
+/// Verify that the execution proof is not from a future slot.
+fn verify_proof_not_from_future_slot( + chain: &BeaconChain, + proof_slot: Slot, +) -> Result<(), GossipExecutionProofError> { + let latest_permissible_slot = chain + .slot_clock + .now_with_future_tolerance(chain.spec.maximum_gossip_clock_disparity()) + .ok_or(BeaconChainError::UnableToReadSlot)?; +
+ if proof_slot > latest_permissible_slot { + return Err(GossipExecutionProofError::FutureSlot { + message_slot: proof_slot, + latest_permissible_slot, + }); + } +
+ Ok(()) +} +
+/// Verify that the execution proof slot is greater than the latest finalized slot.
+fn verify_slot_greater_than_latest_finalized_slot( + chain: &BeaconChain, + proof_slot: Slot, +) -> Result<(), GossipExecutionProofError> { + let latest_finalized_slot = chain + .head() + .finalized_checkpoint() + .epoch + .start_slot(T::EthSpec::slots_per_epoch()); +
+ if proof_slot <= latest_finalized_slot { + return Err(GossipExecutionProofError::PastFinalizedSlot { + proof_slot, + finalized_slot: latest_finalized_slot, + }); + } +
+ Ok(()) +} +
+/// Verify the zkVM proof.
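+/// Dispatches to the verifier registered for this proof's `proof_id` in the DA checker's
+/// verifier registry.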
+/// +/// Note: This is expensive +fn verify_zkvm_proof( + execution_proof: &ExecutionProof, + chain: &BeaconChain, +) -> Result<(), GossipExecutionProofError> { + let block_root = execution_proof.block_root; + let subnet_id = execution_proof.proof_id; + + match chain + .data_availability_checker + .verify_execution_proof_for_gossip(execution_proof) + { + Ok(true) => { + debug!(%block_root, %subnet_id, "Proof verification succeeded"); + Ok(()) + } + Ok(false) => { + debug!(%block_root, %subnet_id, "Proof verification failed: proof is invalid"); + Err(GossipExecutionProofError::ProofVerificationFailed(format!( + "zkVM proof verification failed for block_root={}, subnet_id={}", + block_root, subnet_id + ))) + } + Err(e) => { + error!(%block_root, %subnet_id, ?e, "Proof verification error"); + Err(GossipExecutionProofError::BeaconChainError(Box::new( + e.into(), + ))) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::{AttestationStrategy, BeaconChainHarness, BlockStrategy}; + use types::{ExecutionBlockHash, ForkName, MainnetEthSpec}; + + type E = MainnetEthSpec; + + /// Helper to create a test execution proof + fn create_test_execution_proof( + subnet_id: ExecutionProofId, + slot: Slot, + block_root: Hash256, + ) -> ExecutionProof { + let block_hash = ExecutionBlockHash::zero(); + let proof_data = vec![0u8; 32]; // Dummy proof data + ExecutionProof::new(subnet_id, slot, block_hash, block_root, proof_data) + .expect("Valid test proof") + } + + #[tokio::test] + async fn test_reject_future_slot() { + let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); + let harness = BeaconChainHarness::builder(E::default()) + .spec(spec.into()) + .deterministic_keypairs(64) + .fresh_ephemeral_store() + .mock_execution_layer() + .build(); + + let current_slot = harness.get_current_slot(); + let future_slot = current_slot + 100; + let proof_id = ExecutionProofId::new(0).expect("Valid proof id"); + let proof = create_test_execution_proof(proof_id, future_slot, Hash256::random()); + + let result = + validate_execution_proof_for_gossip::<_, Observe>(Arc::new(proof), &harness.chain); + + assert!(matches!( + result.err(), + Some(GossipExecutionProofError::FutureSlot { .. }) + )); + } + + #[tokio::test] + async fn test_reject_past_finalized_slot() { + let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); + let harness = BeaconChainHarness::builder(E::default()) + .spec(spec.into()) + .deterministic_keypairs(64) + .fresh_ephemeral_store() + .mock_execution_layer() + .build(); + + // Advance to slot 1 first + harness.advance_slot(); + + // Advance chain to create finalized slot + harness + .extend_chain( + 32, + BlockStrategy::OnCanonicalHead, + AttestationStrategy::AllValidators, + ) + .await; + + let finalized_slot = harness + .finalized_checkpoint() + .epoch + .start_slot(E::slots_per_epoch()); + // Create proof for slot before finalized + let old_slot = finalized_slot.saturating_sub(1u64); + let proof_id = ExecutionProofId::new(0).expect("Valid proof id"); + let proof = create_test_execution_proof(proof_id, old_slot, Hash256::random()); + + let result = + validate_execution_proof_for_gossip::<_, Observe>(Arc::new(proof), &harness.chain); + + assert!(matches!( + result.err(), + Some(GossipExecutionProofError::PastFinalizedSlot { .. 
}) + )); + } + + #[tokio::test] + async fn test_successful_validation() { + let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); + let harness = BeaconChainHarness::builder(E::default()) + .spec(spec.into()) + .deterministic_keypairs(64) + .fresh_ephemeral_store() + .mock_execution_layer() + .build(); + + harness.advance_slot(); + let current_slot = harness.get_current_slot(); + let proof_id = ExecutionProofId::new(0).expect("Valid subnet id"); + + // Use a realistic block root from the chain + let block_root = harness.chain.head_beacon_block_root(); + let proof = create_test_execution_proof(proof_id, current_slot, block_root); + + let result = + validate_execution_proof_for_gossip::<_, Observe>(Arc::new(proof), &harness.chain); + + match result { + Ok(_) => {} + Err(GossipExecutionProofError::FutureSlot { .. }) + | Err(GossipExecutionProofError::PastFinalizedSlot { .. }) => { + panic!("Should not fail basic validation checks"); + } + Err(_) => {} + } + } + + /// This test verifies that: + /// 1. First gossip proof is accepted and marked as observed + /// 2. Duplicate gossip proof is rejected with PriorKnown + /// 3. DoS protection: Expensive verification only happens once + #[tokio::test] + async fn test_gossip_duplicate_proof_rejected() { + let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); + let harness = BeaconChainHarness::builder(E::default()) + .spec(spec.into()) + .deterministic_keypairs(64) + .fresh_ephemeral_store() + .mock_execution_layer() + .zkvm_with_dummy_verifiers() + .build(); + + harness.advance_slot(); + let current_slot = harness.get_current_slot(); + let proof_id = ExecutionProofId::new(0).expect("Valid proof id"); + let block_root = Hash256::random(); + let proof = Arc::new(create_test_execution_proof( + proof_id, + current_slot, + block_root, + )); + + let result1 = + validate_execution_proof_for_gossip::<_, Observe>(proof.clone(), &harness.chain); + assert!(result1.is_ok()); + + // Should now be rejected as duplicate + let result2 = + validate_execution_proof_for_gossip::<_, Observe>(proof.clone(), &harness.chain); + + assert!( + matches!( + result2.err(), + Some(GossipExecutionProofError::PriorKnown { slot, block_root: br, proof_id: sid }) + if slot == current_slot && br == block_root && sid == proof_id + ), + "Duplicate proof must be rejected with PriorKnown error" + ); + + assert!( + harness + .chain + .observed_execution_proofs + .read() + .is_known(current_slot, block_root, proof_id) + .unwrap(), + "Proof should be marked as observed" + ); + } + + /// Test that proofs in the DA checker cache are detected and marked as observed. + /// + /// When a proof arrives via gossip but is already in the DA checker cache (from RPC), + /// we should: + /// 1. Accept it for gossip propagation + /// 2. Mark it as observed to prevent reprocessing + /// 3. 
Return PriorKnownUnpublished
+ #[tokio::test] + async fn test_da_cached_proof_accepted_and_observed() { + let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); + let harness = BeaconChainHarness::builder(E::default()) + .spec(spec.into()) + .deterministic_keypairs(64) + .fresh_ephemeral_store() + .mock_execution_layer() + .build(); +
+ harness.advance_slot(); + let subnet_id = ExecutionProofId::new(0).expect("Valid subnet id"); + let current_slot = harness.get_current_slot(); + let block_root = Hash256::random(); +
+ let proof = Arc::new(create_test_execution_proof( + subnet_id, + current_slot, + block_root, + )); +
+ // Put the proof directly into the DA checker cache (this can happen if it arrives via RPC)
+ harness + .chain + .data_availability_checker + .put_rpc_execution_proofs(block_root, vec![proof.clone()]) + .expect("Should put proof in DA cache"); +
+ // Verify it's in the cache + assert!( + harness + .chain + .data_availability_checker + .is_execution_proof_cached(&block_root, &proof), + "Proof should be in DA cache" + ); +
+ // Verify it's NOT in observed cache yet + assert!( + !harness + .chain + .observed_execution_proofs + .read() + .is_known(current_slot, block_root, subnet_id) + .unwrap(), + "Proof should not be in observed cache initially" + ); +
+ // Now it arrives via gossip + let result = + validate_execution_proof_for_gossip::<_, Observe>(proof.clone(), &harness.chain); +
+ // Should be rejected with PriorKnownUnpublished (safe to propagate) + assert!( + matches!( + result.as_ref().err(), + Some(GossipExecutionProofError::PriorKnownUnpublished) + ), + "DA cached proof should return PriorKnownUnpublished, got: {:?}", + result + ); +
+ // Should now be marked as observed + assert!( + harness + .chain + .observed_execution_proofs + .read() + .is_known(current_slot, block_root, subnet_id) + .unwrap(), + "Proof should be marked as observed after DA cache check" + ); +
+ // Second gossip attempt should be rejected as PriorKnown (not PriorKnownUnpublished) + let result2 = + validate_execution_proof_for_gossip::<_, Observe>(proof.clone(), &harness.chain); +
+ assert!( + matches!( + result2.err(), + Some(GossipExecutionProofError::PriorKnown { ..
}) + ), + "Second gossip should be rejected as PriorKnown (already observed)" + ); + } +} diff --git a/beacon_node/beacon_chain/src/lib.rs b/beacon_node/beacon_chain/src/lib.rs index 4ac3e54742d..686b56e63eb 100644 --- a/beacon_node/beacon_chain/src/lib.rs +++ b/beacon_node/beacon_chain/src/lib.rs @@ -24,6 +24,7 @@ mod early_attester_cache; mod errors; pub mod events; pub mod execution_payload; +pub mod execution_proof_verification; pub mod fetch_blobs; pub mod fork_choice_signal; pub mod fork_revert; @@ -41,6 +42,7 @@ pub mod observed_aggregates; mod observed_attesters; pub mod observed_block_producers; pub mod observed_data_sidecars; +pub mod observed_execution_proofs; pub mod observed_operations; mod observed_slashable; pub mod persisted_beacon_chain; @@ -64,7 +66,7 @@ pub use self::beacon_chain::{ BeaconBlockResponseWrapper, BeaconChain, BeaconChainTypes, BeaconStore, BlockProcessStatus, ChainSegmentResult, ForkChoiceError, INVALID_FINALIZED_MERGE_TRANSITION_BLOCK_SHUTDOWN_REASON, INVALID_JUSTIFIED_PAYLOAD_SHUTDOWN_REASON, LightClientProducerEvent, OverrideForkchoiceUpdate, - ProduceBlockVerification, StateSkipConfig, WhenSlotSkipped, + ProduceBlockVerification, ProofGenerationEvent, StateSkipConfig, WhenSlotSkipped, }; pub use self::beacon_snapshot::BeaconSnapshot; pub use self::chain_config::ChainConfig; diff --git a/beacon_node/beacon_chain/src/observed_execution_proofs.rs b/beacon_node/beacon_chain/src/observed_execution_proofs.rs new file mode 100644 index 00000000000..e927ecad68f --- /dev/null +++ b/beacon_node/beacon_chain/src/observed_execution_proofs.rs @@ -0,0 +1,402 @@ +//! Provides the `ObservedExecutionProofs` struct which allows for rejecting execution proofs +//! that we have already seen over the gossip network. +//! +//! This cache prevents DoS attacks where an attacker repeatedly gossips the same execution proof, +//! forcing expensive zkVM verification operations. Only proofs that have passed basic gossip +//! validation and proof verification should be added to this cache. +//! +//! TODO(zkproofs): we want the proofs to be signed and then we can just add them to the cache +//! once the signature has been verified like `observed_data_sidecars` + +use std::collections::{HashMap, HashSet}; +use types::{ExecutionProofId, Hash256, Slot}; + +#[derive(Debug, PartialEq)] +pub enum Error { + /// The slot of the provided execution proof is prior to finalization. + FinalizedExecutionProof { slot: Slot, finalized_slot: Slot }, +} + +/// Key for tracking observed execution proofs. +/// We track by (slot, block_root) to efficiently prune old entries. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +struct ProofKey { + slot: Slot, + block_root: Hash256, +} + +impl ProofKey { + fn new(slot: Slot, block_root: Hash256) -> Self { + Self { slot, block_root } + } +} + +/// Maintains a cache of seen execution proofs that were received over gossip. +/// +/// The cache tracks (slot, block_root, proof_id) tuples and prunes entries from finalized slots. +/// +/// ## DoS Resistance +/// +/// This cache is critical for preventing DoS attacks where an attacker repeatedly gossips +/// the same execution proof. zkVM verification is expensive (50-100ms), so we must avoid +/// re-verifying proofs we've already seen. +/// +/// ## Pruning +/// +/// Call `prune` on finalization to remove entries from finalized slots. This basically matches the +/// pattern used for observed blobs and data columns. +pub struct ObservedExecutionProofs { + /// The finalized slot. 
Proofs at or below this slot are rejected. + finalized_slot: Slot, + /// Map from (slot, block_root) to the set of subnet IDs we've seen for that block. + items: HashMap>, +} + +impl ObservedExecutionProofs { + /// Create a new cache with the given finalized slot. + /// + /// Proofs at or below `finalized_slot` will be rejected. + pub fn new(finalized_slot: Slot) -> Self { + Self { + finalized_slot, + items: HashMap::new(), + } + } + + /// Observe an execution proof from gossip. + /// + /// Returns `true` if the proof was already observed (duplicate), `false` if it's new. + /// + /// Returns an error if the proof's slot is at or below the finalized slot. + /// Note: This shouldn't happen because it means we've received a proof for + /// a finalized block + pub fn observe_proof( + &mut self, + slot: Slot, + block_root: Hash256, + proof_id: ExecutionProofId, + ) -> Result { + // Reject finalized proofs + if self.finalized_slot > 0 && slot <= self.finalized_slot { + return Err(Error::FinalizedExecutionProof { + slot, + finalized_slot: self.finalized_slot, + }); + } + + let key = ProofKey::new(slot, block_root); + let proof_ids = self.items.entry(key).or_default(); + + let was_duplicate = !proof_ids.insert(proof_id); + + Ok(was_duplicate) + } + + /// Check if we have already observed this proof. + /// + /// Returns `true` if the proof has been seen, `false` if it's new. + /// + /// Returns an error if the proof's slot is at or below the finalized slot. + pub fn is_known( + &self, + slot: Slot, + block_root: Hash256, + proof_id: ExecutionProofId, + ) -> Result { + // Reject finalized proofs + if self.finalized_slot > 0 && slot <= self.finalized_slot { + return Err(Error::FinalizedExecutionProof { + slot, + finalized_slot: self.finalized_slot, + }); + } + + let key = ProofKey::new(slot, block_root); + let is_known = self + .items + .get(&key) + .is_some_and(|proof_ids| proof_ids.contains(&proof_id)); + + Ok(is_known) + } + + /// Prune execution proof observations for slots less than or equal to the given slot. + /// + /// This matches the pruning behavior of observed blobs and data columns. + pub fn prune(&mut self, finalized_slot: Slot) { + if finalized_slot == 0 { + return; + } + + self.finalized_slot = finalized_slot; + self.items.retain(|key, _| key.slot > finalized_slot); + } + + /// Get the current finalized slot boundary. + /// + /// Proofs at or below this slot will be rejected. + pub fn finalized_slot(&self) -> Slot { + self.finalized_slot + } + + /// Get the number of unique (slot, block_root) keys being tracked. + pub fn len(&self) -> usize { + self.items.len() + } + + /// Check if the cache is empty. + pub fn is_empty(&self) -> bool { + self.items.is_empty() + } + + /// Clear all entries from the cache. 
+ #[cfg(test)] + pub fn clear(&mut self) { + self.items.clear(); + } +} + +impl Default for ObservedExecutionProofs { + fn default() -> Self { + Self::new(Slot::new(0)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use types::FixedBytesExtended; + + fn test_proof_key(slot: u64) -> (Slot, Hash256, ExecutionProofId) { + ( + Slot::new(slot), + Hash256::from_low_u64_be(slot), + ExecutionProofId::new(0).unwrap(), + ) + } + + #[test] + fn test_observe_new_proof() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + let (slot, block_root, subnet_id) = test_proof_key(10); + + // First observation should return false (not a duplicate) + assert_eq!( + cache.observe_proof(slot, block_root, subnet_id), + Ok(false), + "first observation should not be duplicate" + ); + + // Second observation should return true (is a duplicate) + assert_eq!( + cache.observe_proof(slot, block_root, subnet_id), + Ok(true), + "second observation should be duplicate" + ); + } + + #[test] + fn test_observe_different_subnets() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + let slot = Slot::new(10); + let block_root = Hash256::from_low_u64_be(10); + let proof_0 = ExecutionProofId::new(0).unwrap(); + let proof_1 = ExecutionProofId::new(1).unwrap(); + + assert_eq!( + cache.observe_proof(slot, block_root, proof_0), + Ok(false), + "proof 0 is new" + ); + + // Observe proof from subnet 1 (same block, different proofID) + assert_eq!( + cache.observe_proof(slot, block_root, proof_1), + Ok(false), + "proof 1 is new" + ); + + // Re-observe proof 0 + assert_eq!( + cache.observe_proof(slot, block_root, proof_0), + Ok(true), + "proof 0 is duplicate" + ); + + assert!(cache.is_known(slot, block_root, proof_0).unwrap()); + assert!(cache.is_known(slot, block_root, proof_1).unwrap()); + } + + #[test] + fn test_is_known() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + let (slot, block_root, proof_id) = test_proof_key(10); + + // Before observation + assert_eq!( + cache.is_known(slot, block_root, proof_id), + Ok(false), + "not yet observed" + ); + + // After observation + cache.observe_proof(slot, block_root, proof_id).unwrap(); + assert_eq!( + cache.is_known(slot, block_root, proof_id), + Ok(true), + "now observed" + ); + } + + #[test] + fn test_reject_finalized_proofs() { + let finalized_slot = Slot::new(100); + let mut cache = ObservedExecutionProofs::new(finalized_slot); + + let old_slot = Slot::new(100); + let block_root = Hash256::from_low_u64_be(100); + let proof_id = ExecutionProofId::new(0).unwrap(); + + // Observing finalized proof should error + assert_eq!( + cache.observe_proof(old_slot, block_root, proof_id), + Err(Error::FinalizedExecutionProof { + slot: old_slot, + finalized_slot, + }), + "finalized proofs should be rejected" + ); + + // Checking finalized proof should error + assert_eq!( + cache.is_known(old_slot, block_root, proof_id), + Err(Error::FinalizedExecutionProof { + slot: old_slot, + finalized_slot, + }), + "finalized proofs should be rejected in is_known" + ); + } + + #[test] + fn test_pruning() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + + // Add proofs at different slots + for slot in 0..100 { + let (s, br, pid) = test_proof_key(slot); + cache.observe_proof(s, br, pid).unwrap(); + } + + assert_eq!(cache.len(), 100, "should have 100 entries"); + + // Prune at finalized_slot = 50 + // Should remove slots <= 50, keep slots > 50 + let finalized_slot = Slot::new(50); + cache.prune(finalized_slot); + + assert_eq!( + cache.finalized_slot(), + 
finalized_slot, + "finalized slot should be updated" + ); + + // Check that finalized entries were removed + let old_slot = Slot::new(50); + let old_block_root = Hash256::from_low_u64_be(50); + let proof_id = ExecutionProofId::new(0).unwrap(); + + assert!( + cache.is_known(old_slot, old_block_root, proof_id).is_err(), + "finalized entries should be rejected after pruning" + ); + + // Check that non-finalized entries are still present + let recent_slot = Slot::new(51); + let recent_block_root = Hash256::from_low_u64_be(51); + assert!( + cache + .is_known(recent_slot, recent_block_root, proof_id) + .unwrap(), + "non-finalized entries should still be present" + ); + } + + #[test] + fn test_prune_removes_exact_boundary() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + + // Add proofs at slots 50, 51, 52 + for slot in 50..=52 { + let (s, br, pid) = test_proof_key(slot); + cache.observe_proof(s, br, pid).unwrap(); + } + + // Prune at finalized_slot = 50 + // Should remove slots <= 50, keep slots > 50 + cache.prune(Slot::new(50)); + + assert_eq!(cache.finalized_slot(), Slot::new(50)); + + let proof_id = ExecutionProofId::new(0).unwrap(); + + // Slot 50 should be rejected (finalized) + assert!( + cache + .is_known(Slot::new(50), Hash256::from_low_u64_be(50), proof_id) + .is_err() + ); + + // Slot 51 should still be present (> finalized) + assert!( + cache + .is_known(Slot::new(51), Hash256::from_low_u64_be(51), proof_id) + .unwrap() + ); + + // Slot 52 should still be present + assert!( + cache + .is_known(Slot::new(52), Hash256::from_low_u64_be(52), proof_id) + .unwrap() + ); + } + + #[test] + fn test_different_blocks_same_slot() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + let slot = Slot::new(10); + let block_root_a = Hash256::from_low_u64_be(100); + let block_root_b = Hash256::from_low_u64_be(200); + let proof_id = ExecutionProofId::new(0).unwrap(); + + // Observe proof for block A + cache.observe_proof(slot, block_root_a, proof_id).unwrap(); + + // Proof for block B should be new (different block_root) + assert_eq!( + cache.observe_proof(slot, block_root_b, proof_id), + Ok(false), + "different block_root should not be duplicate" + ); + + assert!(cache.is_known(slot, block_root_a, proof_id).unwrap()); + assert!(cache.is_known(slot, block_root_b, proof_id).unwrap()); + } + + #[test] + fn test_len_counts_blocks_not_subnets() { + let mut cache = ObservedExecutionProofs::new(Slot::new(0)); + let slot = Slot::new(10); + let block_root = Hash256::from_low_u64_be(10); + + // Add multiple proof IDs for same block + for i in 0..8 { + let proof_id = ExecutionProofId::new(i).unwrap(); + cache.observe_proof(slot, block_root, proof_id).unwrap(); + } + + // Length should be 1 (one unique (slot, block_root) key) + assert_eq!(cache.len(), 1, "len counts unique keys, not proofIDs"); + } +} diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 9601618e927..4b28964943a 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -220,6 +220,7 @@ pub struct Builder { testing_slot_clock: Option, validator_monitor_config: Option, genesis_state_builder: Option>, + zkvm_execution_layer_config: Option, node_custody_type: NodeCustodyType, runtime: TestRuntime, } @@ -366,6 +367,7 @@ where testing_slot_clock: None, validator_monitor_config: None, genesis_state_builder: None, + zkvm_execution_layer_config: None, node_custody_type: NodeCustodyType::Fullnode, runtime, } @@ -540,6 +542,13 
@@ where self } + /// Enable zkVM execution proof verification with dummy verifiers for testing. + pub fn zkvm_with_dummy_verifiers(mut self) -> Self { + self.zkvm_execution_layer_config = + Some(zkvm_execution_layer::ZKVMExecutionLayerConfig::default()); + self + } + pub fn with_genesis_state_builder( mut self, f: impl FnOnce(InteropGenesisBuilder) -> InteropGenesisBuilder, @@ -580,6 +589,12 @@ where .validator_monitor_config(validator_monitor_config) .rng(Box::new(StdRng::seed_from_u64(42))); + builder = if let Some(zkvm_config) = self.zkvm_execution_layer_config { + builder.zkvm_execution_layer_config(Some(zkvm_config)) + } else { + builder + }; + builder = if let Some(mutator) = self.initial_mutator { mutator(builder) } else { diff --git a/beacon_node/beacon_processor/src/lib.rs b/beacon_node/beacon_processor/src/lib.rs index 1cdf3693ff2..2bdec31d465 100644 --- a/beacon_node/beacon_processor/src/lib.rs +++ b/beacon_node/beacon_processor/src/lib.rs @@ -121,6 +121,7 @@ pub struct BeaconProcessorQueueLengths { gossip_block_queue: usize, gossip_blob_queue: usize, gossip_data_column_queue: usize, + gossip_execution_proof_queue: usize, delayed_block_queue: usize, status_queue: usize, block_brange_queue: usize, @@ -187,6 +188,7 @@ impl BeaconProcessorQueueLengths { gossip_block_queue: 1024, gossip_blob_queue: 1024, gossip_data_column_queue: 1024, + gossip_execution_proof_queue: 1024, delayed_block_queue: 1024, status_queue: 1024, block_brange_queue: 1024, @@ -579,6 +581,7 @@ pub enum Work { GossipBlock(AsyncFn), GossipBlobSidecar(AsyncFn), GossipDataColumnSidecar(AsyncFn), + GossipExecutionProof(AsyncFn), DelayedImportBlock { beacon_block_slot: Slot, beacon_block_root: Hash256, @@ -597,6 +600,9 @@ pub enum Work { RpcBlobs { process_fn: AsyncFn, }, + RpcExecutionProofs { + process_fn: AsyncFn, + }, RpcCustodyColumn(AsyncFn), ColumnReconstruction(AsyncFn), IgnoredRpcBlock { @@ -609,6 +615,7 @@ pub enum Work { BlocksByRootsRequest(AsyncFn), BlobsByRangeRequest(BlockingFn), BlobsByRootsRequest(BlockingFn), + ExecutionProofsByRootsRequest(BlockingFn), DataColumnsByRootsRequest(BlockingFn), DataColumnsByRangeRequest(BlockingFn), GossipBlsToExecutionChange(BlockingFn), @@ -641,6 +648,7 @@ pub enum WorkType { GossipBlock, GossipBlobSidecar, GossipDataColumnSidecar, + GossipExecutionProof, DelayedImportBlock, GossipVoluntaryExit, GossipProposerSlashing, @@ -651,6 +659,7 @@ pub enum WorkType { GossipLightClientOptimisticUpdate, RpcBlock, RpcBlobs, + RpcExecutionProofs, RpcCustodyColumn, ColumnReconstruction, IgnoredRpcBlock, @@ -661,6 +670,7 @@ pub enum WorkType { BlocksByRootsRequest, BlobsByRangeRequest, BlobsByRootsRequest, + ExecutionProofsByRootsRequest, DataColumnsByRootsRequest, DataColumnsByRangeRequest, GossipBlsToExecutionChange, @@ -688,6 +698,7 @@ impl Work { Work::GossipBlock(_) => WorkType::GossipBlock, Work::GossipBlobSidecar(_) => WorkType::GossipBlobSidecar, Work::GossipDataColumnSidecar(_) => WorkType::GossipDataColumnSidecar, + Work::GossipExecutionProof(_) => WorkType::GossipExecutionProof, Work::DelayedImportBlock { .. } => WorkType::DelayedImportBlock, Work::GossipVoluntaryExit(_) => WorkType::GossipVoluntaryExit, Work::GossipProposerSlashing(_) => WorkType::GossipProposerSlashing, @@ -701,6 +712,7 @@ impl Work { Work::GossipBlsToExecutionChange(_) => WorkType::GossipBlsToExecutionChange, Work::RpcBlock { .. } => WorkType::RpcBlock, Work::RpcBlobs { .. } => WorkType::RpcBlobs, + Work::RpcExecutionProofs { .. } => WorkType::RpcExecutionProofs, Work::RpcCustodyColumn { .. 
} => WorkType::RpcCustodyColumn, Work::ColumnReconstruction(_) => WorkType::ColumnReconstruction, Work::IgnoredRpcBlock { .. } => WorkType::IgnoredRpcBlock, @@ -711,6 +723,7 @@ impl Work { Work::BlocksByRootsRequest(_) => WorkType::BlocksByRootsRequest, Work::BlobsByRangeRequest(_) => WorkType::BlobsByRangeRequest, Work::BlobsByRootsRequest(_) => WorkType::BlobsByRootsRequest, + Work::ExecutionProofsByRootsRequest(_) => WorkType::ExecutionProofsByRootsRequest, Work::DataColumnsByRootsRequest(_) => WorkType::DataColumnsByRootsRequest, Work::DataColumnsByRangeRequest(_) => WorkType::DataColumnsByRangeRequest, Work::LightClientBootstrapRequest(_) => WorkType::LightClientBootstrapRequest, @@ -873,6 +886,8 @@ impl BeaconProcessor { let mut gossip_block_queue = FifoQueue::new(queue_lengths.gossip_block_queue); let mut gossip_blob_queue = FifoQueue::new(queue_lengths.gossip_blob_queue); let mut gossip_data_column_queue = FifoQueue::new(queue_lengths.gossip_data_column_queue); + let mut gossip_execution_proof_queue = + FifoQueue::new(queue_lengths.gossip_execution_proof_queue); let mut delayed_block_queue = FifoQueue::new(queue_lengths.delayed_block_queue); let mut status_queue = FifoQueue::new(queue_lengths.status_queue); @@ -1055,6 +1070,8 @@ impl BeaconProcessor { Some(item) } else if let Some(item) = gossip_data_column_queue.pop() { Some(item) + } else if let Some(item) = gossip_execution_proof_queue.pop() { + Some(item) } else if let Some(item) = column_reconstruction_queue.pop() { Some(item) // Check the priority 0 API requests after blocks and blobs, but before attestations. @@ -1325,6 +1342,9 @@ impl BeaconProcessor { Work::GossipDataColumnSidecar { .. } => { gossip_data_column_queue.push(work, work_id) } + Work::GossipExecutionProof { .. } => { + gossip_execution_proof_queue.push(work, work_id) + } Work::DelayedImportBlock { .. } => { delayed_block_queue.push(work, work_id) } @@ -1351,6 +1371,8 @@ impl BeaconProcessor { rpc_block_queue.push(work, work_id) } Work::RpcBlobs { .. } => rpc_blob_queue.push(work, work_id), + // TODO(zkproofs): Making a note that we are reusing the blob_queue + Work::RpcExecutionProofs { .. } => rpc_blob_queue.push(work, work_id), Work::RpcCustodyColumn { .. } => { rpc_custody_column_queue.push(work, work_id) } @@ -1399,6 +1421,9 @@ impl BeaconProcessor { Work::DataColumnsByRangeRequest { .. } => { dcbrange_queue.push(work, work_id) } + Work::ExecutionProofsByRootsRequest { .. } => { + blob_broots_queue.push(work, work_id) + } Work::UnknownLightClientOptimisticUpdate { .. 
} => { unknown_light_client_update_queue.push(work, work_id) } @@ -1424,6 +1449,7 @@ impl BeaconProcessor { WorkType::GossipBlock => gossip_block_queue.len(), WorkType::GossipBlobSidecar => gossip_blob_queue.len(), WorkType::GossipDataColumnSidecar => gossip_data_column_queue.len(), + WorkType::GossipExecutionProof => gossip_execution_proof_queue.len(), WorkType::DelayedImportBlock => delayed_block_queue.len(), WorkType::GossipVoluntaryExit => gossip_voluntary_exit_queue.len(), WorkType::GossipProposerSlashing => gossip_proposer_slashing_queue.len(), @@ -1437,7 +1463,9 @@ impl BeaconProcessor { lc_gossip_optimistic_update_queue.len() } WorkType::RpcBlock => rpc_block_queue.len(), - WorkType::RpcBlobs | WorkType::IgnoredRpcBlock => rpc_blob_queue.len(), + WorkType::RpcBlobs + | WorkType::RpcExecutionProofs + | WorkType::IgnoredRpcBlock => rpc_blob_queue.len(), WorkType::RpcCustodyColumn => rpc_custody_column_queue.len(), WorkType::ColumnReconstruction => column_reconstruction_queue.len(), WorkType::ChainSegment => chain_segment_queue.len(), @@ -1449,6 +1477,7 @@ impl BeaconProcessor { WorkType::BlobsByRootsRequest => blob_broots_queue.len(), WorkType::DataColumnsByRootsRequest => dcbroots_queue.len(), WorkType::DataColumnsByRangeRequest => dcbrange_queue.len(), + WorkType::ExecutionProofsByRootsRequest => blob_broots_queue.len(), WorkType::GossipBlsToExecutionChange => { gossip_bls_to_execution_change_queue.len() } @@ -1594,16 +1623,19 @@ impl BeaconProcessor { } => task_spawner.spawn_async(process_fn), Work::RpcBlock { process_fn } | Work::RpcBlobs { process_fn } + | Work::RpcExecutionProofs { process_fn } | Work::RpcCustodyColumn(process_fn) | Work::ColumnReconstruction(process_fn) => task_spawner.spawn_async(process_fn), Work::IgnoredRpcBlock { process_fn } => task_spawner.spawn_blocking(process_fn), Work::GossipBlock(work) | Work::GossipBlobSidecar(work) - | Work::GossipDataColumnSidecar(work) => task_spawner.spawn_async(async move { + | Work::GossipDataColumnSidecar(work) + | Work::GossipExecutionProof(work) => task_spawner.spawn_async(async move { work.await; }), Work::BlobsByRangeRequest(process_fn) | Work::BlobsByRootsRequest(process_fn) + | Work::ExecutionProofsByRootsRequest(process_fn) | Work::DataColumnsByRootsRequest(process_fn) | Work::DataColumnsByRangeRequest(process_fn) => { task_spawner.spawn_blocking(process_fn) diff --git a/beacon_node/client/Cargo.toml b/beacon_node/client/Cargo.toml index 3c4b2572c9a..ada47e888bd 100644 --- a/beacon_node/client/Cargo.toml +++ b/beacon_node/client/Cargo.toml @@ -14,6 +14,10 @@ eth2 = { workspace = true } eth2_config = { workspace = true } ethereum_ssz = { workspace = true } execution_layer = { workspace = true } +dummy_el = { path = "../../dummy_el" } +# TODO(zkproofs): add as a workspace dependency +zkvm_execution_layer = { path = "../../zkvm_execution_layer" } +proof_generation_service = { path = "../proof_generation_service" } futures = { workspace = true } genesis = { workspace = true } http_api = { workspace = true } diff --git a/beacon_node/client/src/builder.rs b/beacon_node/client/src/builder.rs index c3c827f0aae..f405d3705f5 100644 --- a/beacon_node/client/src/builder.rs +++ b/beacon_node/client/src/builder.rs @@ -4,6 +4,7 @@ use crate::compute_light_client_updates::{ }; use crate::config::{ClientGenesis, Config as ClientConfig}; use crate::notifier::spawn_notifier; +use beacon_chain::ProofGenerationEvent; use beacon_chain::attestation_simulator::start_attestation_simulator_service; use 
beacon_chain::data_availability_checker::start_availability_cache_maintenance_service; use beacon_chain::graffiti_calculator::start_engine_version_cache_refresh_service; @@ -19,6 +20,7 @@ use beacon_chain::{ use beacon_chain::{Kzg, LightClientProducerEvent}; use beacon_processor::{BeaconProcessor, BeaconProcessorChannels}; use beacon_processor::{BeaconProcessorConfig, BeaconProcessorQueueLengths}; +use dummy_el; use environment::RuntimeContext; use eth2::{ BeaconNodeHttpClient, Error as ApiError, Timeouts, @@ -31,6 +33,7 @@ use genesis::{DEFAULT_ETH1_BLOCK_HASH, interop_genesis_state}; use lighthouse_network::{NetworkGlobals, prometheus_client::registry::Registry}; use monitoring_api::{MonitoringHttpClient, ProcessType}; use network::{NetworkConfig, NetworkSenders, NetworkService}; +use proof_generation_service; use rand::SeedableRng; use rand::rngs::{OsRng, StdRng}; use slasher::Slasher; @@ -47,6 +50,7 @@ use types::{ BeaconState, BlobSidecarList, ChainSpec, EthSpec, ExecutionBlockHash, Hash256, SignedBeaconBlock, test_utils::generate_deterministic_keypairs, }; +use zkvm_execution_layer; /// Interval between polling the eth1 node for genesis information. pub const ETH1_GENESIS_UPDATE_INTERVAL_MILLIS: u64 = 7_000; @@ -88,6 +92,8 @@ pub struct ClientBuilder { beacon_processor_config: Option, beacon_processor_channels: Option>, light_client_server_rv: Option>>, + proof_generation_rx: + Option>>, eth_spec_instance: T::EthSpec, } @@ -122,6 +128,7 @@ where beacon_processor_config: None, beacon_processor_channels: None, light_client_server_rv: None, + proof_generation_rx: None, } } @@ -184,6 +191,40 @@ where None }; + // Spawn the dummy execution layer + if config.use_dummy_el { + info!("--execution-proofs: spawning in-process execution layer"); + + let dummy_el_config = dummy_el::DummyElConfig { + host: "127.0.0.1".to_string(), + engine_port: 8551, + rpc_port: 8545, + ws_port: 8546, + metrics_port: 9001, + p2p_port: 30303, + jwt_secret_path: None, + }; + + // Create a channel to wait for the dummy EL to be ready + let (ready_tx, ready_rx) = tokio::sync::oneshot::channel(); + + // Spawn the dummy EL in a background task + tokio::spawn(async move { + if let Err(e) = dummy_el::prepare_and_start_dummy_el(dummy_el_config, ready_tx).await + { + eprintln!("Error starting dummy execution layer: {:?}", e); + } + }); + + // Wait for the dummy EL to be ready before continuing + if let Err(_) = ready_rx.await { + return Err( + "Dummy execution layer failed to start or signal readiness".to_string(), + ); + } + info!("Dummy execution layer is ready"); + } + let kzg_err_msg = |e| format!("Failed to load trusted setup: {:?}", e); let kzg = if spec.is_peer_das_scheduled() { Kzg::new_from_trusted_setup(&config.trusted_setup).map_err(kzg_err_msg)? @@ -191,6 +232,18 @@ where Kzg::new_from_trusted_setup_no_precomp(&config.trusted_setup).map_err(kzg_err_msg)? 
}; + // Modify spec if zkvm mode is enabled via CLI + let spec = if let Some(zkvm_config) = &config.zkvm_execution_layer { + let mut modified_spec = (*spec).clone(); + + modified_spec.zkvm_enabled = true; + modified_spec.zkvm_min_proofs_required = zkvm_config.min_proofs_required; + + Arc::new(modified_spec) + } else { + spec + }; + let builder = BeaconChainBuilder::new(eth_spec_instance, Arc::new(kzg)) .store(store) .task_executor(context.executor.clone()) @@ -202,6 +255,7 @@ where .beacon_graffiti(beacon_graffiti) .event_handler(event_handler) .execution_layer(execution_layer) + .zkvm_execution_layer_config(config.zkvm_execution_layer.clone()) .node_custody_type(config.chain.node_custody_type) .validator_monitor_config(config.validator_monitor.clone()) .rng(Box::new( @@ -225,6 +279,44 @@ where builder }; + // Set up proof generation service if zkVM is configured with generation proof types + let builder = if let Some(ref zkvm_config) = config.zkvm_execution_layer { + if !zkvm_config.generation_proof_types.is_empty() { + // Validate that proof generation requires an execution layer + // Proof-generating nodes will validate blocks via EL execution, not proofs + if config.execution_layer.is_none() { + return Err( + "Proof generation requires an EL. \ + Nodes generating proofs must validate blocks via an execution layer. \ + To run a lightweight verifier node (without EL), omit --zkvm-generation-proof-types." + .into(), + ); + } + + // Create channel for proof generation events + let (proof_gen_tx, proof_gen_rx) = + tokio::sync::mpsc::unbounded_channel::>(); + + // Create generator registry with enabled proof types + let registry = Arc::new( + zkvm_execution_layer::GeneratorRegistry::new_with_dummy_generators( + zkvm_config.generation_proof_types.clone(), + ), + ); + + // Store receiver for later when we spawn the service + self.proof_generation_rx = Some(proof_gen_rx); + + builder + .zkvm_generator_registry(registry) + .proof_generation_tx(proof_gen_tx) + } else { + builder + } + } else { + builder + }; + let chain_exists = builder.store_contains_beacon_chain().unwrap_or(false); // If the client is expect to resume but there's no beacon chain in the database, @@ -777,6 +869,26 @@ where beacon_chain.task_executor.clone(), beacon_chain.clone(), ); + + // Start proof generation service if configured + if let Some(proof_gen_rx) = self.proof_generation_rx { + let network_tx = self + .network_senders + .as_ref() + .ok_or("proof_generation_service requires network_senders")? 
+ .network_send(); + + let service = proof_generation_service::ProofGenerationService::new( + beacon_chain.clone(), + proof_gen_rx, + network_tx, + ); + + runtime_context.executor.spawn( + async move { service.run().await }, + "proof_generation_service", + ); + } } Ok(Client { diff --git a/beacon_node/client/src/config.rs b/beacon_node/client/src/config.rs index aeaa196df86..534b4088f6e 100644 --- a/beacon_node/client/src/config.rs +++ b/beacon_node/client/src/config.rs @@ -69,6 +69,8 @@ pub struct Config { pub network: network::NetworkConfig, pub chain: beacon_chain::ChainConfig, pub execution_layer: Option, + pub use_dummy_el: bool, + pub zkvm_execution_layer: Option, pub trusted_setup: Vec, pub http_api: http_api::Config, pub http_metrics: http_metrics::Config, @@ -94,6 +96,8 @@ impl Default for Config { network: NetworkConfig::default(), chain: <_>::default(), execution_layer: None, + use_dummy_el: false, + zkvm_execution_layer: None, trusted_setup: get_trusted_setup(), beacon_graffiti: GraffitiOrigin::default(), http_api: <_>::default(), diff --git a/beacon_node/lighthouse_network/src/discovery/enr.rs b/beacon_node/lighthouse_network/src/discovery/enr.rs index 4c285ea86c8..f2268b39b26 100644 --- a/beacon_node/lighthouse_network/src/discovery/enr.rs +++ b/beacon_node/lighthouse_network/src/discovery/enr.rs @@ -29,6 +29,8 @@ pub const ATTESTATION_BITFIELD_ENR_KEY: &str = "attnets"; pub const SYNC_COMMITTEE_BITFIELD_ENR_KEY: &str = "syncnets"; /// The ENR field specifying the peerdas custody group count. pub const PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY: &str = "cgc"; +/// The ENR field specifying whether zkVM execution proofs are enabled. +pub const ZKVM_ENABLED_ENR_KEY: &str = "zkvm"; /// Extension trait for ENR's within Eth2. pub trait Eth2Enr { @@ -43,6 +45,9 @@ pub trait Eth2Enr { /// The peerdas custody group count associated with the ENR. fn custody_group_count(&self, spec: &ChainSpec) -> Result; + /// Whether zkVM execution proofs are enabled for this node. + fn zkvm_enabled(&self) -> bool; + /// The next fork digest associated with the ENR. fn next_fork_digest(&self) -> Result<[u8; 4], &'static str>; @@ -85,6 +90,13 @@ impl Eth2Enr for Enr { } } + fn zkvm_enabled(&self) -> bool { + // If the key exists and is true, zkVM is enabled, otherwise false + self.get_decodable::(ZKVM_ENABLED_ENR_KEY) + .and_then(|result| result.ok()) + .unwrap_or(false) + } + fn next_fork_digest(&self) -> Result<[u8; 4], &'static str> { self.get_decodable::<[u8; 4]>(NEXT_FORK_DIGEST_ENR_KEY) .ok_or("ENR next fork digest non-existent")? @@ -278,6 +290,10 @@ pub fn build_enr( &bitfield.as_ssz_bytes().into(), ); + if spec.is_zkvm_enabled() { + builder.add_value(ZKVM_ENABLED_ENR_KEY, &true); + } + // only set `cgc` and `nfd` if PeerDAS fork (Fulu) epoch has been scheduled if spec.is_peer_das_scheduled() { builder.add_value(PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY, &custody_group_count); @@ -308,11 +324,12 @@ fn compare_enr(local_enr: &Enr, disk_enr: &Enr) -> bool { && (local_enr.udp4().is_none() || local_enr.udp4() == disk_enr.udp4()) && (local_enr.udp6().is_none() || local_enr.udp6() == disk_enr.udp6()) // we need the ATTESTATION_BITFIELD_ENR_KEY and SYNC_COMMITTEE_BITFIELD_ENR_KEY and - // PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY key to match, otherwise we use a new ENR. This will - // likely only be true for non-validating nodes. + // PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY and ZKVM_ENABLED_ENR_KEY key to match, + // otherwise we use a new ENR. This will likely only be true for non-validating nodes. 
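+ // Note: an absent zkvm key decodes as "disabled" (zkvm_enabled() above falls back to false), + // so enabling or disabling zkVM support between restarts changes this comparison and + // results in a rebuilt ENR.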
&& local_enr.get_decodable::(ATTESTATION_BITFIELD_ENR_KEY) == disk_enr.get_decodable(ATTESTATION_BITFIELD_ENR_KEY) && local_enr.get_decodable::(SYNC_COMMITTEE_BITFIELD_ENR_KEY) == disk_enr.get_decodable(SYNC_COMMITTEE_BITFIELD_ENR_KEY) && local_enr.get_decodable::(PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY) == disk_enr.get_decodable(PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY) + && local_enr.get_decodable::(ZKVM_ENABLED_ENR_KEY) == disk_enr.get_decodable(ZKVM_ENABLED_ENR_KEY) } /// Loads enr from the given directory diff --git a/beacon_node/lighthouse_network/src/discovery/mod.rs b/beacon_node/lighthouse_network/src/discovery/mod.rs index 3589882ae9b..018bf580504 100644 --- a/beacon_node/lighthouse_network/src/discovery/mod.rs +++ b/beacon_node/lighthouse_network/src/discovery/mod.rs @@ -15,7 +15,10 @@ pub use libp2p::identity::{Keypair, PublicKey}; use network_utils::enr_ext::{CombinedKeyExt, EnrExt, peer_id_to_node_id}; use alloy_rlp::bytes::Bytes; -use enr::{ATTESTATION_BITFIELD_ENR_KEY, ETH2_ENR_KEY, SYNC_COMMITTEE_BITFIELD_ENR_KEY}; +use enr::{ + ATTESTATION_BITFIELD_ENR_KEY, ETH2_ENR_KEY, SYNC_COMMITTEE_BITFIELD_ENR_KEY, + ZKVM_ENABLED_ENR_KEY, +}; use futures::prelude::*; use futures::stream::FuturesUnordered; use libp2p::core::transport::PortUse; @@ -560,6 +563,12 @@ impl Discovery { } // Data column subnets are computed from node ID. No subnet bitfield in the ENR. Subnet::DataColumn(_) => return Ok(()), + // Execution proof uses a simple boolean flag in the ENR + Subnet::ExecutionProof => { + self.discv5 + .enr_insert(ZKVM_ENABLED_ENR_KEY, &value) + .map_err(|e| format!("{:?}", e))?; + } } // replace the global version @@ -904,6 +913,7 @@ impl Discovery { Subnet::Attestation(_) => "attestation", Subnet::SyncCommittee(_) => "sync_committee", Subnet::DataColumn(_) => "data_column", + Subnet::ExecutionProof => "execution_proof", }; if let Some(v) = metrics::get_int_counter( diff --git a/beacon_node/lighthouse_network/src/discovery/subnet_predicate.rs b/beacon_node/lighthouse_network/src/discovery/subnet_predicate.rs index 6e841c25a50..dc1ac54e97b 100644 --- a/beacon_node/lighthouse_network/src/discovery/subnet_predicate.rs +++ b/beacon_node/lighthouse_network/src/discovery/subnet_predicate.rs @@ -41,6 +41,10 @@ where false } } + Subnet::ExecutionProof => { + // Check if ENR advertises zkVM support + enr.zkvm_enabled() + } }); if !predicate { diff --git a/beacon_node/lighthouse_network/src/peer_manager/config.rs b/beacon_node/lighthouse_network/src/peer_manager/config.rs index b2ed6524861..57a5fa68a23 100644 --- a/beacon_node/lighthouse_network/src/peer_manager/config.rs +++ b/beacon_node/lighthouse_network/src/peer_manager/config.rs @@ -20,6 +20,8 @@ pub struct Config { pub metrics_enabled: bool, /// Whether quic is enabled. pub quic_enabled: bool, + /// Whether execution proofs are enabled. + pub execution_proof_enabled: bool, /// Target number of peers to connect to. pub target_peer_count: usize, @@ -40,6 +42,7 @@ impl Default for Config { discovery_enabled: true, metrics_enabled: false, quic_enabled: true, + execution_proof_enabled: false, target_peer_count: DEFAULT_TARGET_PEERS, status_interval: DEFAULT_STATUS_INTERVAL, ping_interval_inbound: DEFAULT_PING_INTERVAL_INBOUND, diff --git a/beacon_node/lighthouse_network/src/peer_manager/mod.rs b/beacon_node/lighthouse_network/src/peer_manager/mod.rs index ad16bb0421c..453224cbe22 100644 --- a/beacon_node/lighthouse_network/src/peer_manager/mod.rs +++ b/beacon_node/lighthouse_network/src/peer_manager/mod.rs @@ -1,7 +1,7 @@ //! 
Implementation of Lighthouse's peer management system. use crate::rpc::{GoodbyeReason, MetaData, Protocol, RPCError, RpcErrorResponse}; -use crate::service::TARGET_SUBNET_PEERS; +use crate::service::{TARGET_EXECUTION_PROOF_PEERS, TARGET_SUBNET_PEERS}; use crate::{Gossipsub, NetworkGlobals, PeerId, Subnet, SubnetDiscovery, metrics}; use delay_map::HashSetDelay; use discv5::Enr; @@ -113,6 +115,8 @@ pub struct PeerManager { /// discovery queries for subnet peers if we disconnect from existing sync /// committee subnet peers. sync_committee_subnets: HashMap, + /// Keeps track of whether this node has zkVM execution proof support enabled. + execution_proof_enabled: bool, /// A mapping of all custody groups to column subnets to avoid re-computation. subnets_by_custody_group: HashMap>, /// The heartbeat interval to perform routine maintenance. @@ -162,6 +164,7 @@ impl PeerManager { let config::Config { discovery_enabled, metrics_enabled, + execution_proof_enabled, target_peer_count, status_interval, ping_interval_inbound, @@ -199,6 +202,7 @@ impl PeerManager { target_peers: target_peer_count, temporary_banned_peers: LRUTimeCache::new(PEER_RECONNECTION_TIMEOUT), sync_committee_subnets: Default::default(), + execution_proof_enabled, subnets_by_custody_group, heartbeat, discovery_enabled, @@ -601,6 +605,7 @@ impl PeerManager { Protocol::BlobsByRoot => PeerAction::MidToleranceError, Protocol::DataColumnsByRoot => PeerAction::MidToleranceError, Protocol::DataColumnsByRange => PeerAction::MidToleranceError, + Protocol::ExecutionProofsByRoot => PeerAction::MidToleranceError, Protocol::Goodbye => PeerAction::LowToleranceError, Protocol::MetaData => PeerAction::LowToleranceError, Protocol::Status => PeerAction::LowToleranceError, @@ -621,6 +626,7 @@ impl PeerManager { Protocol::BlobsByRoot => return, Protocol::DataColumnsByRoot => return, Protocol::DataColumnsByRange => return, + Protocol::ExecutionProofsByRoot => return, Protocol::Goodbye => return, Protocol::LightClientBootstrap => return, Protocol::LightClientOptimisticUpdate => return, @@ -644,6 +650,7 @@ impl PeerManager { Protocol::BlobsByRoot => PeerAction::MidToleranceError, Protocol::DataColumnsByRoot => PeerAction::MidToleranceError, Protocol::DataColumnsByRange => PeerAction::MidToleranceError, + Protocol::ExecutionProofsByRoot => PeerAction::MidToleranceError, Protocol::LightClientBootstrap => return, Protocol::LightClientOptimisticUpdate => return, Protocol::LightClientFinalityUpdate => return, @@ -1004,6 +1011,46 @@ impl PeerManager { } } + /// Run discovery query for zkVM-enabled peers if we fall below `TARGET_EXECUTION_PROOF_PEERS`. + fn maintain_execution_proof_peers(&mut self) { + // Only maintain peers if zkVM is enabled + if !self.execution_proof_enabled { + return; + } + + // Check if we have enough zkVM-enabled peers + // Count peers subscribed to the execution_proof gossip topic + // TODO(zkproofs): Note that since peers do not advertise whether they are + // proof generating, we cannot favour them. This is fine for optional proofs; + // for mandatory proofs, a well-connected builder will propagate them to most + // of the network anyway.
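+ // The target here is TARGET_EXECUTION_PROOF_PEERS (1, defined in service/mod.rs below), + // deliberately lower than TARGET_SUBNET_PEERS (3), since few nodes are expected to serve + // proofs initially.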
+ let zkvm_peer_count = self + .network_globals + .peers + .read() + .connected_peers() + .filter(|(_, info)| { + // Check if peer is subscribed to ExecutionProof gossip topic + info.on_subnet_gossipsub(&Subnet::ExecutionProof) + }) + .count(); + + if zkvm_peer_count < TARGET_EXECUTION_PROOF_PEERS { + debug!( + current_peers = zkvm_peer_count, + target = TARGET_EXECUTION_PROOF_PEERS, + "Making discovery query for zkVM-enabled peers" + ); + self.events.push(PeerManagerEvent::DiscoverSubnetPeers(vec![ + SubnetDiscovery { + subnet: Subnet::ExecutionProof, + min_ttl: None, + }, + ])); + } + } + fn maintain_trusted_peers(&mut self) { let trusted_peers = self.trusted_peers.clone(); for trusted_peer in trusted_peers { @@ -1081,6 +1128,10 @@ impl PeerManager { Subnet::DataColumn(id) => { peer_info.custody_subnets.insert(id); } + Subnet::ExecutionProof => { + // ExecutionProof uses a single topic, not subnet-based + // So there is no subnet assignment to track + } } } @@ -1449,6 +1500,9 @@ impl PeerManager { // Maintain minimum count for sync committee peers. self.maintain_sync_committee_peers(); + // Maintain minimum count for zkVM-enabled peers (if zkVM is enabled). + self.maintain_execution_proof_peers(); + // Prune any excess peers back to our target in such a way that incentivises good scores and // a uniform distribution of subnets. self.prune_excess_peers(); diff --git a/beacon_node/lighthouse_network/src/peer_manager/peerdb/peer_info.rs b/beacon_node/lighthouse_network/src/peer_manager/peerdb/peer_info.rs index c289cb9a69c..483da11be0b 100644 --- a/beacon_node/lighthouse_network/src/peer_manager/peerdb/peer_info.rs +++ b/beacon_node/lighthouse_network/src/peer_manager/peerdb/peer_info.rs @@ -105,6 +105,14 @@ impl PeerInfo { Subnet::DataColumn(subnet_id) => { return self.is_assigned_to_custody_subnet(subnet_id); } + Subnet::ExecutionProof => { + // ExecutionProof capability is advertised via the ENR zkvm flag, not metadata. + // A node cannot dynamically change whether it supports execution proofs.
+ if let Some(enr) = self.enr.as_ref() { + return enr.zkvm_enabled(); + } + return false; + } } } false @@ -272,6 +280,11 @@ impl PeerInfo { return true; } + // Check if the peer has zkVM enabled (execution proof support) + if let Some(enr) = self.enr.as_ref() { + return enr.zkvm_enabled(); + } + false } diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index 77d2a34e16e..34c42fdd041 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -16,10 +16,10 @@ use std::marker::PhantomData; use std::sync::Arc; use tokio_util::codec::{Decoder, Encoder}; use types::{ - BlobSidecar, ChainSpec, DataColumnSidecar, DataColumnsByRootIdentifier, EthSpec, ForkContext, - ForkName, Hash256, LightClientBootstrap, LightClientFinalityUpdate, - LightClientOptimisticUpdate, LightClientUpdate, RuntimeVariableList, SignedBeaconBlock, - SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix, + BlobSidecar, ChainSpec, DataColumnSidecar, DataColumnsByRootIdentifier, EthSpec, + ExecutionProof, ForkContext, ForkName, Hash256, LightClientBootstrap, + LightClientFinalityUpdate, LightClientOptimisticUpdate, LightClientUpdate, RuntimeVariableList, + SignedBeaconBlock, SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix, SignedBeaconBlockCapella, SignedBeaconBlockDeneb, SignedBeaconBlockElectra, SignedBeaconBlockFulu, SignedBeaconBlockGloas, }; @@ -80,6 +80,7 @@ impl SSZSnappyInboundCodec { RpcSuccessResponse::BlobsByRoot(res) => res.as_ssz_bytes(), RpcSuccessResponse::DataColumnsByRoot(res) => res.as_ssz_bytes(), RpcSuccessResponse::DataColumnsByRange(res) => res.as_ssz_bytes(), + RpcSuccessResponse::ExecutionProofsByRoot(res) => res.as_ssz_bytes(), RpcSuccessResponse::LightClientBootstrap(res) => res.as_ssz_bytes(), RpcSuccessResponse::LightClientOptimisticUpdate(res) => res.as_ssz_bytes(), RpcSuccessResponse::LightClientFinalityUpdate(res) => res.as_ssz_bytes(), @@ -360,6 +361,7 @@ impl Encoder> for SSZSnappyOutboundCodec { RequestType::BlobsByRoot(req) => req.blob_ids.as_ssz_bytes(), RequestType::DataColumnsByRange(req) => req.as_ssz_bytes(), RequestType::DataColumnsByRoot(req) => req.data_column_ids.as_ssz_bytes(), + RequestType::ExecutionProofsByRoot(req) => req.as_ssz_bytes(), RequestType::Ping(req) => req.as_ssz_bytes(), RequestType::LightClientBootstrap(req) => req.as_ssz_bytes(), RequestType::LightClientUpdatesByRange(req) => req.as_ssz_bytes(), @@ -568,6 +570,16 @@ fn handle_rpc_request( )?, }, ))), + SupportedProtocol::ExecutionProofsByRootV1 => { + let request = ExecutionProofsByRootRequest::from_ssz_bytes(decoded_buffer) + .map_err(RPCError::SSZDecodeError)?; + + request + .validate(spec) + .map_err(RPCError::InvalidData)?; + + Ok(Some(RequestType::ExecutionProofsByRoot(request))) + } SupportedProtocol::PingV1 => Ok(Some(RequestType::Ping(Ping { data: u64::from_ssz_bytes(decoded_buffer)?, }))), @@ -731,6 +743,11 @@ fn handle_rpc_response( ), )), }, + SupportedProtocol::ExecutionProofsByRootV1 => { + Ok(Some(RpcSuccessResponse::ExecutionProofsByRoot(Arc::new( + ExecutionProof::from_ssz_bytes(decoded_buffer)?, + )))) + } SupportedProtocol::PingV1 => Ok(Some(RpcSuccessResponse::Pong(Ping { data: u64::from_ssz_bytes(decoded_buffer)?, }))), @@ -910,8 +927,8 @@ mod tests { use crate::types::{EnrAttestationBitfield, EnrSyncCommitteeBitfield}; use types::{ BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockHeader, - 
DataColumnsByRootIdentifier, EmptyBlock, Epoch, FixedBytesExtended, FullPayload, - KzgCommitment, KzgProof, Signature, SignedBeaconBlockHeader, Slot, + DataColumnsByRootIdentifier, EmptyBlock, Epoch, ExecutionProofId, FixedBytesExtended, + FullPayload, KzgCommitment, KzgProof, Signature, SignedBeaconBlockHeader, Slot, blob_sidecar::BlobIdentifier, data_column_sidecar::Cell, }; @@ -1109,6 +1126,18 @@ mod tests { .unwrap() } + fn execution_proofs_by_root_request( + _fork_name: ForkName, + _spec: &ChainSpec, + ) -> ExecutionProofsByRootRequest { + ExecutionProofsByRootRequest::new( + Hash256::zero(), + vec![ExecutionProofId::new(0).unwrap()], + 2, + ) + .unwrap() + } + fn ping_message() -> Ping { Ping { data: 1 } } @@ -1263,6 +1292,9 @@ mod tests { RequestType::DataColumnsByRange(dcbrange) => { assert_eq!(decoded, RequestType::DataColumnsByRange(dcbrange)) } + RequestType::ExecutionProofsByRoot(exec_proofs) => { + assert_eq!(decoded, RequestType::ExecutionProofsByRoot(exec_proofs)) + } RequestType::Ping(ping) => { assert_eq!(decoded, RequestType::Ping(ping)) } @@ -2004,6 +2036,10 @@ mod tests { RequestType::BlocksByRoot(bbroot_request_v1(fork_name, &chain_spec)), RequestType::BlocksByRoot(bbroot_request_v2(fork_name, &chain_spec)), RequestType::DataColumnsByRoot(dcbroot_request(fork_name, &chain_spec)), + RequestType::ExecutionProofsByRoot(execution_proofs_by_root_request( + fork_name, + &chain_spec, + )), ] }; for fork_name in ForkName::list_all() { diff --git a/beacon_node/lighthouse_network/src/rpc/config.rs b/beacon_node/lighthouse_network/src/rpc/config.rs index b0ee6fea64b..d23c16f8fa1 100644 --- a/beacon_node/lighthouse_network/src/rpc/config.rs +++ b/beacon_node/lighthouse_network/src/rpc/config.rs @@ -93,6 +93,7 @@ pub struct RateLimiterConfig { pub(super) blobs_by_root_quota: Quota, pub(super) data_columns_by_root_quota: Quota, pub(super) data_columns_by_range_quota: Quota, + pub(super) execution_proofs_by_root_quota: Quota, pub(super) light_client_bootstrap_quota: Quota, pub(super) light_client_optimistic_update_quota: Quota, pub(super) light_client_finality_update_quota: Quota, @@ -122,6 +123,9 @@ impl RateLimiterConfig { Quota::n_every(NonZeroU64::new(16384).unwrap(), 10); pub const DEFAULT_DATA_COLUMNS_BY_ROOT_QUOTA: Quota = Quota::n_every(NonZeroU64::new(16384).unwrap(), 10); + // TODO(zkproofs): Configure this to be less arbitrary + pub const DEFAULT_EXECUTION_PROOFS_BY_ROOT_QUOTA: Quota = + Quota::n_every(NonZeroU64::new(128).unwrap(), 10); pub const DEFAULT_LIGHT_CLIENT_BOOTSTRAP_QUOTA: Quota = Quota::one_every(10); pub const DEFAULT_LIGHT_CLIENT_OPTIMISTIC_UPDATE_QUOTA: Quota = Quota::one_every(10); pub const DEFAULT_LIGHT_CLIENT_FINALITY_UPDATE_QUOTA: Quota = Quota::one_every(10); @@ -141,6 +145,7 @@ impl Default for RateLimiterConfig { blobs_by_root_quota: Self::DEFAULT_BLOBS_BY_ROOT_QUOTA, data_columns_by_root_quota: Self::DEFAULT_DATA_COLUMNS_BY_ROOT_QUOTA, data_columns_by_range_quota: Self::DEFAULT_DATA_COLUMNS_BY_RANGE_QUOTA, + execution_proofs_by_root_quota: Self::DEFAULT_EXECUTION_PROOFS_BY_ROOT_QUOTA, light_client_bootstrap_quota: Self::DEFAULT_LIGHT_CLIENT_BOOTSTRAP_QUOTA, light_client_optimistic_update_quota: Self::DEFAULT_LIGHT_CLIENT_OPTIMISTIC_UPDATE_QUOTA, @@ -201,6 +206,7 @@ impl FromStr for RateLimiterConfig { let mut blobs_by_root_quota = None; let mut data_columns_by_root_quota = None; let mut data_columns_by_range_quota = None; + let mut execution_proofs_by_root_quota = None; let mut light_client_bootstrap_quota = None; let mut 
light_client_optimistic_update_quota = None; let mut light_client_finality_update_quota = None; @@ -222,6 +228,9 @@ impl FromStr for RateLimiterConfig { Protocol::DataColumnsByRange => { data_columns_by_range_quota = data_columns_by_range_quota.or(quota) } + Protocol::ExecutionProofsByRoot => { + execution_proofs_by_root_quota = execution_proofs_by_root_quota.or(quota) + } Protocol::Ping => ping_quota = ping_quota.or(quota), Protocol::MetaData => meta_data_quota = meta_data_quota.or(quota), Protocol::LightClientBootstrap => { @@ -257,6 +266,8 @@ impl FromStr for RateLimiterConfig { .unwrap_or(Self::DEFAULT_DATA_COLUMNS_BY_ROOT_QUOTA), data_columns_by_range_quota: data_columns_by_range_quota .unwrap_or(Self::DEFAULT_DATA_COLUMNS_BY_RANGE_QUOTA), + execution_proofs_by_root_quota: execution_proofs_by_root_quota + .unwrap_or(Self::DEFAULT_EXECUTION_PROOFS_BY_ROOT_QUOTA), light_client_bootstrap_quota: light_client_bootstrap_quota .unwrap_or(Self::DEFAULT_LIGHT_CLIENT_BOOTSTRAP_QUOTA), light_client_optimistic_update_quota: light_client_optimistic_update_quota diff --git a/beacon_node/lighthouse_network/src/rpc/methods.rs b/beacon_node/lighthouse_network/src/rpc/methods.rs index 9aab0799521..b297ce8f08f 100644 --- a/beacon_node/lighthouse_network/src/rpc/methods.rs +++ b/beacon_node/lighthouse_network/src/rpc/methods.rs @@ -16,9 +16,9 @@ use types::blob_sidecar::BlobIdentifier; use types::light_client_update::MAX_REQUEST_LIGHT_CLIENT_UPDATES; use types::{ ChainSpec, ColumnIndex, DataColumnSidecar, DataColumnsByRootIdentifier, Epoch, EthSpec, - ForkContext, Hash256, LightClientBootstrap, LightClientFinalityUpdate, - LightClientOptimisticUpdate, LightClientUpdate, RuntimeVariableList, SignedBeaconBlock, Slot, - blob_sidecar::BlobSidecar, + ExecutionProof, ExecutionProofId, ForkContext, Hash256, LightClientBootstrap, + LightClientFinalityUpdate, LightClientOptimisticUpdate, LightClientUpdate, RuntimeVariableList, + SignedBeaconBlock, Slot, blob_sidecar::BlobSidecar, }; /// Maximum length of error message. @@ -546,6 +546,63 @@ impl DataColumnsByRootRequest { } } +/// Request execution proofs by block root and proof IDs. +#[derive(Encode, Decode, Clone, Debug, PartialEq)] +pub struct ExecutionProofsByRootRequest { + /// The block root we need proofs for + pub block_root: Hash256, + /// How many additional proofs we need + /// TODO(zkproofs): Remove. 
This can be inferred since `MIN_PROOFS_REQUIRED` + /// is a global value + pub count_needed: u64, + /// Proof IDs we already have (responder should exclude these) + pub already_have: Vec, +} + +impl ExecutionProofsByRootRequest { + pub fn new( + block_root: Hash256, + already_have: Vec, + count_needed: usize, + ) -> Result { + if already_have.len() > types::execution_proof::MAX_PROOFS { + return Err("Too many proof IDs in already_have"); + } + if count_needed == 0 { + return Err("count_needed must be > 0"); + } + if count_needed > types::execution_proof::MAX_PROOFS { + return Err("count_needed too large"); + } + Ok(Self { + block_root, + count_needed: count_needed as u64, + already_have, + }) + } + + pub fn validate(&self, _spec: &ChainSpec) -> Result<(), String> { + if self.already_have.len() > types::execution_proof::MAX_PROOFS { + return Err("Too many proof IDs in already_have".to_string()); + } + if self.count_needed == 0 { + return Err("count_needed must be > 0".to_string()); + } + if self.count_needed > types::execution_proof::MAX_PROOFS as u64 { + return Err(format!( + "count_needed too large: {} > {}", + self.count_needed, + types::execution_proof::MAX_PROOFS + )); + } + Ok(()) + } + + pub fn max_requested(&self) -> usize { + self.count_needed as usize + } +} + /// Request a number of beacon data columns from a peer. #[derive(Encode, Decode, Clone, Debug, PartialEq)] pub struct LightClientUpdatesByRangeRequest { @@ -613,6 +670,9 @@ pub enum RpcSuccessResponse { /// A response to a get DATA_COLUMN_SIDECARS_BY_RANGE request. DataColumnsByRange(Arc>), + /// A response to a get EXECUTION_PROOFS_BY_ROOT request. + ExecutionProofsByRoot(Arc), + /// A PONG response to a PING request. Pong(Ping), @@ -641,6 +701,9 @@ pub enum ResponseTermination { /// Data column sidecars by range stream termination. DataColumnsByRange, + /// Execution proofs by root stream termination. + ExecutionProofsByRoot, + /// Light client updates by range stream termination. 
LightClientUpdatesByRange, } @@ -654,6 +717,7 @@ impl ResponseTermination { ResponseTermination::BlobsByRoot => Protocol::BlobsByRoot, ResponseTermination::DataColumnsByRoot => Protocol::DataColumnsByRoot, ResponseTermination::DataColumnsByRange => Protocol::DataColumnsByRange, + ResponseTermination::ExecutionProofsByRoot => Protocol::ExecutionProofsByRoot, ResponseTermination::LightClientUpdatesByRange => Protocol::LightClientUpdatesByRange, } } @@ -749,6 +813,7 @@ impl RpcSuccessResponse { RpcSuccessResponse::BlobsByRoot(_) => Protocol::BlobsByRoot, RpcSuccessResponse::DataColumnsByRoot(_) => Protocol::DataColumnsByRoot, RpcSuccessResponse::DataColumnsByRange(_) => Protocol::DataColumnsByRange, + RpcSuccessResponse::ExecutionProofsByRoot(_) => Protocol::ExecutionProofsByRoot, RpcSuccessResponse::Pong(_) => Protocol::Ping, RpcSuccessResponse::MetaData(_) => Protocol::MetaData, RpcSuccessResponse::LightClientBootstrap(_) => Protocol::LightClientBootstrap, @@ -773,7 +838,11 @@ impl RpcSuccessResponse { Self::LightClientFinalityUpdate(r) => Some(r.get_attested_header_slot()), Self::LightClientOptimisticUpdate(r) => Some(r.get_slot()), Self::LightClientUpdatesByRange(r) => Some(r.attested_header_slot()), - Self::MetaData(_) | Self::Status(_) | Self::Pong(_) => None, + // TODO(zkproofs): Change this when we add Slot to ExecutionProof + Self::ExecutionProofsByRoot(_) + | Self::MetaData(_) + | Self::Status(_) + | Self::Pong(_) => None, } } } @@ -833,6 +902,9 @@ impl std::fmt::Display for RpcSuccessResponse { sidecar.slot() ) } + RpcSuccessResponse::ExecutionProofsByRoot(proof) => { + write!(f, "ExecutionProofsByRoot: Block root: {}", proof.block_root) + } RpcSuccessResponse::Pong(ping) => write!(f, "Pong: {}", ping.data), RpcSuccessResponse::MetaData(metadata) => { write!(f, "Metadata: {}", metadata.seq_number()) @@ -943,3 +1015,15 @@ impl std::fmt::Display for DataColumnsByRootRequest { ) } } + +impl std::fmt::Display for ExecutionProofsByRootRequest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Request: ExecutionProofsByRoot: Block Root: {}, Already Have: {}, Count Needed: {}", + self.block_root, + self.already_have.len(), + self.count_needed + ) + } +} diff --git a/beacon_node/lighthouse_network/src/rpc/protocol.rs b/beacon_node/lighthouse_network/src/rpc/protocol.rs index 08085f3c271..0428f8787a3 100644 --- a/beacon_node/lighthouse_network/src/rpc/protocol.rs +++ b/beacon_node/lighthouse_network/src/rpc/protocol.rs @@ -17,10 +17,11 @@ use tokio_util::{ }; use types::{ BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BlobSidecar, ChainSpec, DataColumnSidecar, - EmptyBlock, Epoch, EthSpec, EthSpecId, ForkContext, ForkName, LightClientBootstrap, - LightClientBootstrapAltair, LightClientFinalityUpdate, LightClientFinalityUpdateAltair, - LightClientOptimisticUpdate, LightClientOptimisticUpdateAltair, LightClientUpdate, - MainnetEthSpec, MinimalEthSpec, Signature, SignedBeaconBlock, + EmptyBlock, Epoch, EthSpec, EthSpecId, ExecutionProof, ForkContext, ForkName, + LightClientBootstrap, LightClientBootstrapAltair, LightClientFinalityUpdate, + LightClientFinalityUpdateAltair, LightClientOptimisticUpdate, + LightClientOptimisticUpdateAltair, LightClientUpdate, MainnetEthSpec, MinimalEthSpec, + Signature, SignedBeaconBlock, }; // Note: Hardcoding the `EthSpec` type for `SignedBeaconBlock` as min/max values is @@ -249,6 +250,9 @@ pub enum Protocol { /// The `DataColumnSidecarsByRange` protocol name. 
#[strum(serialize = "data_column_sidecars_by_range")] DataColumnsByRange, + /// The `ExecutionProofsByRoot` protocol name. + #[strum(serialize = "execution_proofs_by_root")] + ExecutionProofsByRoot, /// The `Ping` protocol name. Ping, /// The `MetaData` protocol name. @@ -279,6 +283,7 @@ impl Protocol { Protocol::BlobsByRoot => Some(ResponseTermination::BlobsByRoot), Protocol::DataColumnsByRoot => Some(ResponseTermination::DataColumnsByRoot), Protocol::DataColumnsByRange => Some(ResponseTermination::DataColumnsByRange), + Protocol::ExecutionProofsByRoot => Some(ResponseTermination::ExecutionProofsByRoot), Protocol::Ping => None, Protocol::MetaData => None, Protocol::LightClientBootstrap => None, @@ -309,6 +314,7 @@ pub enum SupportedProtocol { BlobsByRootV1, DataColumnsByRootV1, DataColumnsByRangeV1, + ExecutionProofsByRootV1, PingV1, MetaDataV1, MetaDataV2, @@ -333,6 +339,7 @@ impl SupportedProtocol { SupportedProtocol::BlobsByRootV1 => "1", SupportedProtocol::DataColumnsByRootV1 => "1", SupportedProtocol::DataColumnsByRangeV1 => "1", + SupportedProtocol::ExecutionProofsByRootV1 => "1", SupportedProtocol::PingV1 => "1", SupportedProtocol::MetaDataV1 => "1", SupportedProtocol::MetaDataV2 => "2", @@ -357,6 +364,7 @@ impl SupportedProtocol { SupportedProtocol::BlobsByRootV1 => Protocol::BlobsByRoot, SupportedProtocol::DataColumnsByRootV1 => Protocol::DataColumnsByRoot, SupportedProtocol::DataColumnsByRangeV1 => Protocol::DataColumnsByRange, + SupportedProtocol::ExecutionProofsByRootV1 => Protocol::ExecutionProofsByRoot, SupportedProtocol::PingV1 => Protocol::Ping, SupportedProtocol::MetaDataV1 => Protocol::MetaData, SupportedProtocol::MetaDataV2 => Protocol::MetaData, @@ -407,6 +415,12 @@ impl SupportedProtocol { ProtocolId::new(SupportedProtocol::DataColumnsByRangeV1, Encoding::SSZSnappy), ]); } + if fork_context.spec.is_zkvm_enabled() { + supported.push(ProtocolId::new( + SupportedProtocol::ExecutionProofsByRootV1, + Encoding::SSZSnappy, + )); + } supported } } @@ -519,6 +533,7 @@ impl ProtocolId { DataColumnsByRangeRequest::ssz_min_len(), DataColumnsByRangeRequest::ssz_max_len::(), ), + Protocol::ExecutionProofsByRoot => RpcLimits::new(0, spec.max_blocks_by_root_request), Protocol::Ping => RpcLimits::new( ::ssz_fixed_len(), ::ssz_fixed_len(), @@ -555,6 +570,7 @@ impl ProtocolId { Protocol::DataColumnsByRange => { rpc_data_column_limits::(fork_context.current_fork_epoch(), &fork_context.spec) } + Protocol::ExecutionProofsByRoot => rpc_execution_proof_limits(), Protocol::Ping => RpcLimits::new( ::ssz_fixed_len(), ::ssz_fixed_len(), @@ -596,6 +612,7 @@ impl ProtocolId { | SupportedProtocol::StatusV2 | SupportedProtocol::BlocksByRootV1 | SupportedProtocol::BlocksByRangeV1 + | SupportedProtocol::ExecutionProofsByRootV1 | SupportedProtocol::PingV1 | SupportedProtocol::MetaDataV1 | SupportedProtocol::MetaDataV2 @@ -645,6 +662,11 @@ pub fn rpc_data_column_limits( ) } +pub fn rpc_execution_proof_limits() -> RpcLimits { + // TODO(zkproofs): Can max proof size change over hardforks? 
+ RpcLimits::new(ExecutionProof::min_size(), ExecutionProof::max_size()) +} + /* Inbound upgrade */ // The inbound protocol reads the request, decodes it and returns the stream to the protocol @@ -724,6 +746,7 @@ pub enum RequestType { BlobsByRoot(BlobsByRootRequest), DataColumnsByRoot(DataColumnsByRootRequest), DataColumnsByRange(DataColumnsByRangeRequest), + ExecutionProofsByRoot(ExecutionProofsByRootRequest), LightClientBootstrap(LightClientBootstrapRequest), LightClientOptimisticUpdate, LightClientFinalityUpdate, @@ -747,6 +770,7 @@ impl RequestType { RequestType::BlobsByRoot(req) => req.blob_ids.len() as u64, RequestType::DataColumnsByRoot(req) => req.max_requested() as u64, RequestType::DataColumnsByRange(req) => req.max_requested::(), + RequestType::ExecutionProofsByRoot(req) => req.max_requested() as u64, RequestType::Ping(_) => 1, RequestType::MetaData(_) => 1, RequestType::LightClientBootstrap(_) => 1, @@ -776,6 +800,7 @@ impl RequestType { RequestType::BlobsByRoot(_) => SupportedProtocol::BlobsByRootV1, RequestType::DataColumnsByRoot(_) => SupportedProtocol::DataColumnsByRootV1, RequestType::DataColumnsByRange(_) => SupportedProtocol::DataColumnsByRangeV1, + RequestType::ExecutionProofsByRoot(_) => SupportedProtocol::ExecutionProofsByRootV1, RequestType::Ping(_) => SupportedProtocol::PingV1, RequestType::MetaData(req) => match req { MetadataRequest::V1(_) => SupportedProtocol::MetaDataV1, @@ -807,6 +832,7 @@ impl RequestType { RequestType::BlobsByRoot(_) => ResponseTermination::BlobsByRoot, RequestType::DataColumnsByRoot(_) => ResponseTermination::DataColumnsByRoot, RequestType::DataColumnsByRange(_) => ResponseTermination::DataColumnsByRange, + RequestType::ExecutionProofsByRoot(_) => ResponseTermination::ExecutionProofsByRoot, RequestType::Status(_) => unreachable!(), RequestType::Goodbye(_) => unreachable!(), RequestType::Ping(_) => unreachable!(), @@ -853,6 +879,10 @@ impl RequestType { SupportedProtocol::DataColumnsByRangeV1, Encoding::SSZSnappy, )], + RequestType::ExecutionProofsByRoot(_) => vec![ProtocolId::new( + SupportedProtocol::ExecutionProofsByRootV1, + Encoding::SSZSnappy, + )], RequestType::Ping(_) => vec![ProtocolId::new( SupportedProtocol::PingV1, Encoding::SSZSnappy, @@ -891,6 +921,7 @@ impl RequestType { RequestType::BlobsByRoot(_) => false, RequestType::DataColumnsByRoot(_) => false, RequestType::DataColumnsByRange(_) => false, + RequestType::ExecutionProofsByRoot(_) => false, RequestType::Ping(_) => true, RequestType::MetaData(_) => true, RequestType::LightClientBootstrap(_) => true, @@ -1004,6 +1035,9 @@ impl std::fmt::Display for RequestType { RequestType::DataColumnsByRange(req) => { write!(f, "Data columns by range: {:?}", req) } + RequestType::ExecutionProofsByRoot(req) => { + write!(f, "Execution proofs by root: {:?}", req) + } RequestType::Ping(ping) => write!(f, "Ping: {}", ping.data), RequestType::MetaData(_) => write!(f, "MetaData request"), RequestType::LightClientBootstrap(bootstrap) => { diff --git a/beacon_node/lighthouse_network/src/rpc/rate_limiter.rs b/beacon_node/lighthouse_network/src/rpc/rate_limiter.rs index 8b364f506cc..f70b29cfe45 100644 --- a/beacon_node/lighthouse_network/src/rpc/rate_limiter.rs +++ b/beacon_node/lighthouse_network/src/rpc/rate_limiter.rs @@ -105,6 +105,8 @@ pub struct RPCRateLimiter { dcbroot_rl: Limiter, /// DataColumnsByRange rate limiter. dcbrange_rl: Limiter, + /// ExecutionProofsByRoot rate limiter. + execution_proofs_by_root_rl: Limiter, /// LightClientBootstrap rate limiter. 
lc_bootstrap_rl: Limiter, /// LightClientOptimisticUpdate rate limiter. @@ -148,6 +150,8 @@ pub struct RPCRateLimiterBuilder { dcbroot_quota: Option, /// Quota for the DataColumnsByRange protocol. dcbrange_quota: Option, + /// Quota for the ExecutionProofsByRoot protocol. + execution_proofs_by_root_quota: Option, /// Quota for the LightClientBootstrap protocol. lcbootstrap_quota: Option, /// Quota for the LightClientOptimisticUpdate protocol. @@ -173,6 +177,7 @@ impl RPCRateLimiterBuilder { Protocol::BlobsByRoot => self.blbroot_quota = q, Protocol::DataColumnsByRoot => self.dcbroot_quota = q, Protocol::DataColumnsByRange => self.dcbrange_quota = q, + Protocol::ExecutionProofsByRoot => self.execution_proofs_by_root_quota = q, Protocol::LightClientBootstrap => self.lcbootstrap_quota = q, Protocol::LightClientOptimisticUpdate => self.lc_optimistic_update_quota = q, Protocol::LightClientFinalityUpdate => self.lc_finality_update_quota = q, @@ -221,6 +226,10 @@ impl RPCRateLimiterBuilder { .dcbrange_quota .ok_or("DataColumnsByRange quota not specified")?; + let execution_proofs_by_root_quota = self + .execution_proofs_by_root_quota + .ok_or("ExecutionProofsByRoot quota not specified")?; + // create the rate limiters let ping_rl = Limiter::from_quota(ping_quota)?; let metadata_rl = Limiter::from_quota(metadata_quota)?; @@ -232,6 +241,7 @@ impl RPCRateLimiterBuilder { let blbroot_rl = Limiter::from_quota(blbroots_quota)?; let dcbroot_rl = Limiter::from_quota(dcbroot_quota)?; let dcbrange_rl = Limiter::from_quota(dcbrange_quota)?; + let execution_proofs_by_root_rl = Limiter::from_quota(execution_proofs_by_root_quota)?; let lc_bootstrap_rl = Limiter::from_quota(lc_bootstrap_quota)?; let lc_optimistic_update_rl = Limiter::from_quota(lc_optimistic_update_quota)?; let lc_finality_update_rl = Limiter::from_quota(lc_finality_update_quota)?; @@ -255,6 +265,7 @@ impl RPCRateLimiterBuilder { blbroot_rl, dcbroot_rl, dcbrange_rl, + execution_proofs_by_root_rl, lc_bootstrap_rl, lc_optimistic_update_rl, lc_finality_update_rl, @@ -308,6 +319,7 @@ impl RPCRateLimiter { blobs_by_root_quota, data_columns_by_root_quota, data_columns_by_range_quota, + execution_proofs_by_root_quota, light_client_bootstrap_quota, light_client_optimistic_update_quota, light_client_finality_update_quota, @@ -325,6 +337,10 @@ impl RPCRateLimiter { .set_quota(Protocol::BlobsByRoot, blobs_by_root_quota) .set_quota(Protocol::DataColumnsByRoot, data_columns_by_root_quota) .set_quota(Protocol::DataColumnsByRange, data_columns_by_range_quota) + .set_quota( + Protocol::ExecutionProofsByRoot, + execution_proofs_by_root_quota, + ) .set_quota(Protocol::LightClientBootstrap, light_client_bootstrap_quota) .set_quota( Protocol::LightClientOptimisticUpdate, @@ -372,6 +388,7 @@ impl RPCRateLimiter { Protocol::BlobsByRoot => &mut self.blbroot_rl, Protocol::DataColumnsByRoot => &mut self.dcbroot_rl, Protocol::DataColumnsByRange => &mut self.dcbrange_rl, + Protocol::ExecutionProofsByRoot => &mut self.execution_proofs_by_root_rl, Protocol::LightClientBootstrap => &mut self.lc_bootstrap_rl, Protocol::LightClientOptimisticUpdate => &mut self.lc_optimistic_update_rl, Protocol::LightClientFinalityUpdate => &mut self.lc_finality_update_rl, @@ -396,6 +413,7 @@ impl RPCRateLimiter { blbroot_rl, dcbroot_rl, dcbrange_rl, + execution_proofs_by_root_rl, lc_bootstrap_rl, lc_optimistic_update_rl, lc_finality_update_rl, @@ -413,6 +431,7 @@ impl RPCRateLimiter { blbroot_rl.prune(time_since_start); dcbrange_rl.prune(time_since_start); dcbroot_rl.prune(time_since_start); + 
execution_proofs_by_root_rl.prune(time_since_start); lc_bootstrap_rl.prune(time_since_start); lc_optimistic_update_rl.prune(time_since_start); lc_finality_update_rl.prune(time_since_start); diff --git a/beacon_node/lighthouse_network/src/service/api_types.rs b/beacon_node/lighthouse_network/src/service/api_types.rs index f1a4d87de76..d97506653b5 100644 --- a/beacon_node/lighthouse_network/src/service/api_types.rs +++ b/beacon_node/lighthouse_network/src/service/api_types.rs @@ -3,7 +3,7 @@ use libp2p::PeerId; use std::fmt::{Display, Formatter}; use std::sync::Arc; use types::{ - BlobSidecar, DataColumnSidecar, Epoch, EthSpec, LightClientBootstrap, + BlobSidecar, DataColumnSidecar, Epoch, EthSpec, ExecutionProof, LightClientBootstrap, LightClientFinalityUpdate, LightClientOptimisticUpdate, LightClientUpdate, SignedBeaconBlock, }; @@ -22,6 +22,8 @@ pub enum SyncRequestId { SingleBlock { id: SingleLookupReqId }, /// Request searching for a set of blobs given a hash. SingleBlob { id: SingleLookupReqId }, + /// Request searching for execution proofs given a block hash and proof IDs. + SingleExecutionProof { id: SingleLookupReqId }, /// Request searching for a set of data columns given a hash and list of column indices. DataColumnsByRoot(DataColumnsByRootRequestId), /// Blocks by range request @@ -164,6 +166,8 @@ pub enum Response { BlobsByRoot(Option>>), /// A response to a get DATA_COLUMN_SIDECARS_BY_ROOT request. DataColumnsByRoot(Option>>), + /// A response to a get EXECUTION_PROOFS_BY_ROOT request. + ExecutionProofsByRoot(Option>), /// A response to a LightClientUpdate request. LightClientBootstrap(Arc>), /// A response to a LightClientOptimisticUpdate request. @@ -201,6 +205,10 @@ impl std::convert::From> for RpcResponse { Some(d) => RpcResponse::Success(RpcSuccessResponse::DataColumnsByRange(d)), None => RpcResponse::StreamTermination(ResponseTermination::DataColumnsByRange), }, + Response::ExecutionProofsByRoot(r) => match r { + Some(p) => RpcResponse::Success(RpcSuccessResponse::ExecutionProofsByRoot(p)), + None => RpcResponse::StreamTermination(ResponseTermination::ExecutionProofsByRoot), + }, Response::Status(s) => RpcResponse::Success(RpcSuccessResponse::Status(s)), Response::LightClientBootstrap(b) => { RpcResponse::Success(RpcSuccessResponse::LightClientBootstrap(b)) diff --git a/beacon_node/lighthouse_network/src/service/gossip_cache.rs b/beacon_node/lighthouse_network/src/service/gossip_cache.rs index 120b9e6c245..227317f79ea 100644 --- a/beacon_node/lighthouse_network/src/service/gossip_cache.rs +++ b/beacon_node/lighthouse_network/src/service/gossip_cache.rs @@ -44,6 +44,8 @@ pub struct GossipCache { light_client_finality_update: Option, /// Timeout for light client optimistic updates. light_client_optimistic_update: Option, + /// Timeout for execution proofs. + execution_proof: Option, } #[derive(Default)] @@ -75,6 +77,8 @@ pub struct GossipCacheBuilder { light_client_finality_update: Option, /// Timeout for light client optimistic updates. light_client_optimistic_update: Option, + /// Timeout for execution proofs. + execution_proof: Option, } #[allow(dead_code)] @@ -151,6 +155,12 @@ impl GossipCacheBuilder { self } + /// Timeout for execution proof messages. 
+ pub fn execution_proof_timeout(mut self, timeout: Duration) -> Self { + self.execution_proof = Some(timeout); + self + } + pub fn build(self) -> GossipCache { let GossipCacheBuilder { default_timeout, @@ -167,6 +177,7 @@ impl GossipCacheBuilder { bls_to_execution_change, light_client_finality_update, light_client_optimistic_update, + execution_proof, } = self; GossipCache { expirations: DelayQueue::default(), @@ -184,6 +195,7 @@ impl GossipCacheBuilder { bls_to_execution_change: bls_to_execution_change.or(default_timeout), light_client_finality_update: light_client_finality_update.or(default_timeout), light_client_optimistic_update: light_client_optimistic_update.or(default_timeout), + execution_proof: execution_proof.or(default_timeout), } } } @@ -211,6 +223,7 @@ impl GossipCache { GossipKind::BlsToExecutionChange => self.bls_to_execution_change, GossipKind::LightClientFinalityUpdate => self.light_client_finality_update, GossipKind::LightClientOptimisticUpdate => self.light_client_optimistic_update, + GossipKind::ExecutionProof => self.execution_proof, }; let Some(expire_timeout) = expire_timeout else { return; diff --git a/beacon_node/lighthouse_network/src/service/mod.rs b/beacon_node/lighthouse_network/src/service/mod.rs index 1df17dffbaf..82a6bb1ab61 100644 --- a/beacon_node/lighthouse_network/src/service/mod.rs +++ b/beacon_node/lighthouse_network/src/service/mod.rs @@ -52,6 +52,10 @@ pub mod utils; /// The number of peers we target per subnet for discovery queries. pub const TARGET_SUBNET_PEERS: usize = 3; +/// The number of peers we target for execution proof peer discovery. +/// Set to 1 since we don't expect many nodes to run it +pub const TARGET_EXECUTION_PROOF_PEERS: usize = 1; + const MAX_IDENTIFY_ADDRESSES: usize = 10; /// The types of events than can be obtained from polling the behaviour. @@ -255,6 +259,7 @@ impl Network { // .signed_contribution_and_proof_timeout(timeout) // Do not retry // .sync_committee_message_timeout(timeout) // Do not retry .bls_to_execution_change_timeout(half_epoch * 2) + .execution_proof_timeout(slot_duration) .build() }; @@ -411,6 +416,7 @@ impl Network { quic_enabled: !config.disable_quic_support, metrics_enabled: config.metrics_enabled, target_peer_count: config.target_peers, + execution_proof_enabled: ctx.chain_spec.is_zkvm_enabled(), ..Default::default() }; PeerManager::new(peer_manager_cfg, network_globals.clone())? 
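The peer manager gets no CLI switch of its own for this feature; it reads zkVM support straight off the chain spec, so everything downstream hinges on the spec additions used above. A minimal sketch of that flag, using the field and helpers introduced in this change (the default value and the test placement are assumptions, not part of the diff):

    use types::ChainSpec;

    #[test]
    fn zkvm_flag_fans_out_from_the_spec() {
        // zkVM support is assumed to be off by default and switched on via the spec.
        let mut spec = ChainSpec::mainnet();
        assert!(!spec.is_zkvm_enabled());

        spec.zkvm_enabled = true;
        assert!(spec.is_zkvm_enabled());
        // From here the flag fans out: build_enr() adds the boolean `zkvm` key,
        // PeerManager::new() receives `execution_proof_enabled: true`, and
        // core_topics_to_subscribe() includes GossipKind::ExecutionProof.
    }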
@@ -1563,6 +1569,17 @@ impl Network { request_type, }) } + RequestType::ExecutionProofsByRoot(_) => { + metrics::inc_counter_vec( + &metrics::TOTAL_RPC_REQUESTS, + &["execution_proofs_by_root"], + ); + Some(NetworkEvent::RequestReceived { + peer_id, + inbound_request_id, + request_type, + }) + } RequestType::LightClientBootstrap(_) => { metrics::inc_counter_vec( &metrics::TOTAL_RPC_REQUESTS, @@ -1648,6 +1665,11 @@ impl Network { RpcSuccessResponse::DataColumnsByRange(resp) => { self.build_response(id, peer_id, Response::DataColumnsByRange(Some(resp))) } + RpcSuccessResponse::ExecutionProofsByRoot(resp) => self.build_response( + id, + peer_id, + Response::ExecutionProofsByRoot(Some(resp)), + ), // Should never be reached RpcSuccessResponse::LightClientBootstrap(bootstrap) => { self.build_response(id, peer_id, Response::LightClientBootstrap(bootstrap)) @@ -1677,6 +1699,9 @@ impl Network { ResponseTermination::BlobsByRoot => Response::BlobsByRoot(None), ResponseTermination::DataColumnsByRoot => Response::DataColumnsByRoot(None), ResponseTermination::DataColumnsByRange => Response::DataColumnsByRange(None), + ResponseTermination::ExecutionProofsByRoot => { + Response::ExecutionProofsByRoot(None) + } ResponseTermination::LightClientUpdatesByRange => { Response::LightClientUpdatesByRange(None) } diff --git a/beacon_node/lighthouse_network/src/service/utils.rs b/beacon_node/lighthouse_network/src/service/utils.rs index a0026837e37..8851fb39153 100644 --- a/beacon_node/lighthouse_network/src/service/utils.rs +++ b/beacon_node/lighthouse_network/src/service/utils.rs @@ -288,6 +288,8 @@ pub(crate) fn create_whitelist_filter( for id in 0..spec.data_column_sidecar_subnet_count { add(DataColumnSidecar(DataColumnSubnetId::new(id))); } + // Add ExecutionProof topic + add(ExecutionProof); } gossipsub::WhitelistSubscriptionFilter(possible_hashes) } diff --git a/beacon_node/lighthouse_network/src/types/pubsub.rs b/beacon_node/lighthouse_network/src/types/pubsub.rs index 72f2873def9..c0cb8ee0a49 100644 --- a/beacon_node/lighthouse_network/src/types/pubsub.rs +++ b/beacon_node/lighthouse_network/src/types/pubsub.rs @@ -8,7 +8,7 @@ use std::io::{Error, ErrorKind}; use std::sync::Arc; use types::{ AttesterSlashing, AttesterSlashingBase, AttesterSlashingElectra, BlobSidecar, - DataColumnSidecar, DataColumnSubnetId, EthSpec, ForkContext, ForkName, + DataColumnSidecar, DataColumnSubnetId, EthSpec, ExecutionProof, ForkContext, ForkName, LightClientFinalityUpdate, LightClientOptimisticUpdate, ProposerSlashing, SignedAggregateAndProof, SignedAggregateAndProofBase, SignedAggregateAndProofElectra, SignedBeaconBlock, SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix, @@ -26,6 +26,8 @@ pub enum PubsubMessage { BlobSidecar(Box<(u64, Arc>)>), /// Gossipsub message providing notification of a [`DataColumnSidecar`] along with the subnet id where it was received. DataColumnSidecar(Box<(DataColumnSubnetId, Arc>)>), + /// Gossipsub message providing notification of an [`ExecutionProof`]. + ExecutionProof(Arc), /// Gossipsub message providing notification of a Aggregate attestation and associated proof. AggregateAndProofAttestation(Box>), /// Gossipsub message providing notification of a `SingleAttestation` with its subnet id. 
@@ -135,6 +137,7 @@ impl PubsubMessage { PubsubMessage::DataColumnSidecar(column_sidecar_data) => { GossipKind::DataColumnSidecar(column_sidecar_data.0) } + PubsubMessage::ExecutionProof(_) => GossipKind::ExecutionProof, PubsubMessage::AggregateAndProofAttestation(_) => GossipKind::BeaconAggregateAndProof, PubsubMessage::Attestation(attestation_data) => { GossipKind::Attestation(attestation_data.0) @@ -290,6 +293,24 @@ impl PubsubMessage { )), } } + GossipKind::ExecutionProof => { + match fork_context.get_fork_from_context_bytes(gossip_topic.fork_digest) { + // TODO(ethproofs): Changed to Electra fork for demo. + // TODO(zkproofs): we don't have the ChainSpec here, so if we change this to + // be for gloas, then we should change it here too + Some(fork) if fork.electra_enabled() => { + let execution_proof = Arc::new( + ExecutionProof::from_ssz_bytes(data) + .map_err(|e| format!("{:?}", e))?, + ); + Ok(PubsubMessage::ExecutionProof(execution_proof)) + } + Some(_) | None => Err(format!( + "execution_proof topic invalid for given fork digest {:?}", + gossip_topic.fork_digest + )), + } + } GossipKind::VoluntaryExit => { let voluntary_exit = SignedVoluntaryExit::from_ssz_bytes(data) .map_err(|e| format!("{:?}", e))?; @@ -403,6 +424,7 @@ impl PubsubMessage { PubsubMessage::BeaconBlock(data) => data.as_ssz_bytes(), PubsubMessage::BlobSidecar(data) => data.1.as_ssz_bytes(), PubsubMessage::DataColumnSidecar(data) => data.1.as_ssz_bytes(), + PubsubMessage::ExecutionProof(data) => data.as_ssz_bytes(), PubsubMessage::AggregateAndProofAttestation(data) => data.as_ssz_bytes(), PubsubMessage::VoluntaryExit(data) => data.as_ssz_bytes(), PubsubMessage::ProposerSlashing(data) => data.as_ssz_bytes(), @@ -438,6 +460,12 @@ impl std::fmt::Display for PubsubMessage { data.1.slot(), data.1.index, ), + PubsubMessage::ExecutionProof(data) => write!( + f, + "ExecutionProof: block_root: {}, proof_id: {}", + data.block_root, + data.proof_id.as_u8(), + ), PubsubMessage::AggregateAndProofAttestation(att) => write!( f, "Aggregate and Proof: slot: {}, index: {:?}, aggregator_index: {}", diff --git a/beacon_node/lighthouse_network/src/types/subnet.rs b/beacon_node/lighthouse_network/src/types/subnet.rs index 1892dcc83af..2d5ca95bf50 100644 --- a/beacon_node/lighthouse_network/src/types/subnet.rs +++ b/beacon_node/lighthouse_network/src/types/subnet.rs @@ -14,6 +14,13 @@ pub enum Subnet { SyncCommittee(SyncSubnetId), /// Represents a gossipsub data column subnet. DataColumn(DataColumnSubnetId), + /// Represents execution proof support. + // + /// Note: ExecutionProof uses a single gossip topic (not multiple topics), + /// but we track it here for ENR-based peer discovery to find zkVM-enabled peers. + /// TODO(zkproofs): Is there a way to have peer discovery without adding the global topic + /// into Subnet? + ExecutionProof, } /// A subnet to discover peers on along with the instant after which it's no longer useful.
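Because ExecutionProof is a single global topic rather than an indexed subnet, exactly one gossip topic string exists per fork digest. Assuming the existing GossipTopic formatting is reused unchanged, the new kind defined below should appear on the wire as something like:

    /eth2/<fork_digest>/execution_proof/ssz_snappy

unlike blob_sidecar_<index> or data_column_sidecar_<index>, which fan out into one topic per subnet.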
diff --git a/beacon_node/lighthouse_network/src/types/topics.rs b/beacon_node/lighthouse_network/src/types/topics.rs index cfdee907b9a..cdb572ea7bf 100644 --- a/beacon_node/lighthouse_network/src/types/topics.rs +++ b/beacon_node/lighthouse_network/src/types/topics.rs @@ -16,6 +16,7 @@ pub const BEACON_AGGREGATE_AND_PROOF_TOPIC: &str = "beacon_aggregate_and_proof"; pub const BEACON_ATTESTATION_PREFIX: &str = "beacon_attestation_"; pub const BLOB_SIDECAR_PREFIX: &str = "blob_sidecar_"; pub const DATA_COLUMN_SIDECAR_PREFIX: &str = "data_column_sidecar_"; +pub const EXECUTION_PROOF_TOPIC: &str = "execution_proof"; pub const VOLUNTARY_EXIT_TOPIC: &str = "voluntary_exit"; pub const PROPOSER_SLASHING_TOPIC: &str = "proposer_slashing"; pub const ATTESTER_SLASHING_TOPIC: &str = "attester_slashing"; @@ -84,6 +85,14 @@ pub fn core_topics_to_subscribe( } } + // Subscribe to execution proof topic if zkVM mode is enabled for this fork. + // TODO(zkproofs): this looks different from the other checks because + // there is no official zkvm_fork and we enable this alongside a current fork + let zkvm_check = spec.is_zkvm_enabled_for_fork(fork_name); + if zkvm_check { + topics.push(GossipKind::ExecutionProof); + } + + topics } @@ -102,6 +111,7 @@ pub fn is_fork_non_core_topic(topic: &GossipTopic, _fork_name: ForkName) -> bool | GossipKind::BeaconAggregateAndProof | GossipKind::BlobSidecar(_) | GossipKind::DataColumnSidecar(_) + | GossipKind::ExecutionProof | GossipKind::VoluntaryExit | GossipKind::ProposerSlashing | GossipKind::AttesterSlashing @@ -148,6 +158,8 @@ pub enum GossipKind { BlobSidecar(u64), /// Topic for publishing DataColumnSidecars. DataColumnSidecar(DataColumnSubnetId), + /// Topic for publishing ExecutionProofs. + ExecutionProof, /// Topic for publishing raw attestations on a particular subnet.
#[strum(serialize = "beacon_attestation")] Attestation(SubnetId), @@ -248,6 +260,7 @@ impl GossipTopic { PROPOSER_SLASHING_TOPIC => GossipKind::ProposerSlashing, ATTESTER_SLASHING_TOPIC => GossipKind::AttesterSlashing, BLS_TO_EXECUTION_CHANGE_TOPIC => GossipKind::BlsToExecutionChange, + EXECUTION_PROOF_TOPIC => GossipKind::ExecutionProof, LIGHT_CLIENT_FINALITY_UPDATE => GossipKind::LightClientFinalityUpdate, LIGHT_CLIENT_OPTIMISTIC_UPDATE => GossipKind::LightClientOptimisticUpdate, topic => match subnet_topic_index(topic) { @@ -312,6 +325,7 @@ impl std::fmt::Display for GossipTopic { GossipKind::DataColumnSidecar(column_subnet_id) => { format!("{}{}", DATA_COLUMN_SIDECAR_PREFIX, *column_subnet_id) } + GossipKind::ExecutionProof => EXECUTION_PROOF_TOPIC.into(), GossipKind::BlsToExecutionChange => BLS_TO_EXECUTION_CHANGE_TOPIC.into(), GossipKind::LightClientFinalityUpdate => LIGHT_CLIENT_FINALITY_UPDATE.into(), GossipKind::LightClientOptimisticUpdate => LIGHT_CLIENT_OPTIMISTIC_UPDATE.into(), @@ -333,6 +347,7 @@ impl From for GossipKind { Subnet::Attestation(s) => GossipKind::Attestation(s), Subnet::SyncCommittee(s) => GossipKind::SyncCommitteeMessage(s), Subnet::DataColumn(s) => GossipKind::DataColumnSidecar(s), + Subnet::ExecutionProof => GossipKind::ExecutionProof, } } } diff --git a/beacon_node/lighthouse_network/tests/common.rs b/beacon_node/lighthouse_network/tests/common.rs index 8a3047692f3..50ecc7d7d99 100644 --- a/beacon_node/lighthouse_network/tests/common.rs +++ b/beacon_node/lighthouse_network/tests/common.rs @@ -29,6 +29,8 @@ pub fn spec_with_all_forks_enabled() -> ChainSpec { chain_spec.electra_fork_epoch = Some(Epoch::new(5)); chain_spec.fulu_fork_epoch = Some(Epoch::new(6)); chain_spec.gloas_fork_epoch = Some(Epoch::new(7)); + // Enable zkVM + chain_spec.zkvm_enabled = true; // check that we have all forks covered assert!(chain_spec.fork_epoch(ForkName::latest()).is_some()); diff --git a/beacon_node/lighthouse_network/tests/rpc_tests.rs b/beacon_node/lighthouse_network/tests/rpc_tests.rs index 81d08764a5f..0ff397ab0fa 100644 --- a/beacon_node/lighthouse_network/tests/rpc_tests.rs +++ b/beacon_node/lighthouse_network/tests/rpc_tests.rs @@ -17,8 +17,9 @@ use tracing::{Instrument, debug, error, info_span, warn}; use types::{ BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockHeader, BlobSidecar, ChainSpec, DataColumnSidecar, DataColumnsByRootIdentifier, EmptyBlock, Epoch, - EthSpec, FixedBytesExtended, ForkName, Hash256, KzgCommitment, KzgProof, MinimalEthSpec, - RuntimeVariableList, Signature, SignedBeaconBlock, SignedBeaconBlockHeader, Slot, + EthSpec, ExecutionBlockHash, ExecutionProof, ExecutionProofId, FixedBytesExtended, ForkName, + Hash256, KzgCommitment, KzgProof, MinimalEthSpec, RuntimeVariableList, Signature, + SignedBeaconBlock, SignedBeaconBlockHeader, Slot, }; type E = MinimalEthSpec; @@ -1738,3 +1739,370 @@ fn test_active_requests() { } }) } + +// Tests ExecutionProofsByRoot RPC - basic single proof request +#[test] +#[allow(clippy::single_match)] +fn test_tcp_execution_proofs_by_root_single() { + // Set up the logging. + let log_level = "debug"; + let enable_logging = true; + let _subscriber = build_tracing_subscriber(log_level, enable_logging); + + let spec = Arc::new(spec_with_all_forks_enabled()); + // TODO(ethproofs): Changed to Electra fork for demo. 
+ let current_fork_name = ForkName::Electra; + + let rt = Arc::new(Runtime::new().unwrap()); + rt.block_on(async { + let (mut sender, mut receiver) = common::build_node_pair( + Arc::downgrade(&rt), + current_fork_name, + spec.clone(), + Protocol::Tcp, + false, + None, + ) + .await; + + let block_root = Hash256::random(); + let block_hash = ExecutionBlockHash::from_root(Hash256::random()); + let subnet_id = ExecutionProofId::new(0).unwrap(); + + // ExecutionProofsByRoot Request + let rpc_request = RequestType::ExecutionProofsByRoot( + ExecutionProofsByRootRequest::new( + block_root, + vec![], // No proofs already have + 2, // Request 2 proofs + ) + .unwrap(), + ); + + // ExecutionProofsByRoot Response + let proof = Arc::new( + ExecutionProof::new( + subnet_id, + Slot::new(100), + block_hash, + block_root, + vec![1, 2, 3, 4], + ) + .unwrap(), + ); + let rpc_response = Response::ExecutionProofsByRoot(Some(proof.clone())); + + // Build the sender future + let sender_future = async { + loop { + match sender.next_event().await { + NetworkEvent::PeerConnectedOutgoing(peer_id) => { + debug!("Sending RPC"); + sender + .send_request(peer_id, AppRequestId::Router, rpc_request.clone()) + .unwrap(); + } + NetworkEvent::ResponseReceived { + peer_id: _, + app_request_id: AppRequestId::Router, + response, + } => match response { + Response::ExecutionProofsByRoot(Some(received_proof)) => { + debug!("Proof received"); + assert_eq!(received_proof.block_root, block_root); + assert_eq!(received_proof.block_hash, block_hash); + assert_eq!(received_proof.proof_id, subnet_id); + } + Response::ExecutionProofsByRoot(None) => { + debug!("Stream terminated"); + return; + } + _ => {} + }, + _ => {} + } + } + } + .instrument(info_span!("Sender")); + + // Build the receiver future + let receiver_future = async { + loop { + match receiver.next_event().await { + NetworkEvent::RequestReceived { + peer_id, + inbound_request_id, + request_type, + } => { + if request_type == rpc_request { + debug!("Receiver got request"); + // Send the proof + receiver.send_response( + peer_id, + inbound_request_id, + rpc_response.clone(), + ); + // Send stream termination + receiver.send_response( + peer_id, + inbound_request_id, + Response::ExecutionProofsByRoot(None), + ); + debug!("Sent proof and termination"); + } + } + _ => {} + } + } + } + .instrument(info_span!("Receiver")); + + tokio::select! { + _ = sender_future => {} + _ = receiver_future => {} + _ = sleep(Duration::from_secs(30)) => { + panic!("Future timed out"); + } + } + }) +} + +// Tests ExecutionProofsByRoot RPC - multiple proofs chunked response +#[test] +#[allow(clippy::single_match)] +fn test_tcp_execution_proofs_by_root_chunked() { + // Set up the logging. 
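+    // This test exercises chunked streaming: the responder sends one `Some(proof)` response per
+    // requested proof ID and finishes with a `None` terminator, while the requester counts the
+    // chunks it receives.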
+ let log_level = "debug"; + let enable_logging = true; + let _subscriber = build_tracing_subscriber(log_level, enable_logging); + + let spec = Arc::new(spec_with_all_forks_enabled()); + let current_fork_name = ForkName::Deneb; + + let messages_to_send = 3; + + let rt = Arc::new(Runtime::new().unwrap()); + rt.block_on(async { + let (mut sender, mut receiver) = common::build_node_pair( + Arc::downgrade(&rt), + current_fork_name, + spec.clone(), + Protocol::Tcp, + false, + None, + ) + .await; + + let block_root = Hash256::random(); + let block_hash = ExecutionBlockHash::from_root(Hash256::random()); + let proof_ids = [ + ExecutionProofId::new(0).unwrap(), + ExecutionProofId::new(1).unwrap(), + ExecutionProofId::new(2).unwrap(), + ]; + assert_eq!(proof_ids.len(), messages_to_send); + + // ExecutionProofsByRoot Request for multiple proofs + let rpc_request = RequestType::ExecutionProofsByRoot( + ExecutionProofsByRootRequest::new(block_root, vec![], proof_ids.len()).unwrap(), + ); + + // Create proofs for each proof ID + let proofs: Vec> = proof_ids + .iter() + .map(|subnet_id| { + Arc::new( + ExecutionProof::new( + *subnet_id, + Slot::new(100), + block_hash, + block_root, + vec![1, 2, 3, 4], + ) + .unwrap(), + ) + }) + .collect(); + + let mut messages_received = 0; + + // Build the sender future + let sender_future = async { + loop { + match sender.next_event().await { + NetworkEvent::PeerConnectedOutgoing(peer_id) => { + debug!("Sending RPC"); + sender + .send_request(peer_id, AppRequestId::Router, rpc_request.clone()) + .unwrap(); + } + NetworkEvent::ResponseReceived { + peer_id: _, + app_request_id: AppRequestId::Router, + response, + } => match response { + Response::ExecutionProofsByRoot(Some(received_proof)) => { + debug!("Chunk received"); + assert_eq!(received_proof.block_root, block_root); + assert_eq!(received_proof.block_hash, block_hash); + messages_received += 1; + } + Response::ExecutionProofsByRoot(None) => { + debug!("Stream terminated"); + assert_eq!(messages_received, messages_to_send); + return; + } + _ => {} + }, + _ => {} + } + } + } + .instrument(info_span!("Sender")); + + // Build the receiver future + let receiver_future = async { + loop { + match receiver.next_event().await { + NetworkEvent::RequestReceived { + peer_id, + inbound_request_id, + request_type, + } => { + if request_type == rpc_request { + debug!("Receiver got request"); + // Send all proofs + for proof in &proofs { + receiver.send_response( + peer_id, + inbound_request_id, + Response::ExecutionProofsByRoot(Some(proof.clone())), + ); + debug!("Sent proof chunk"); + } + // Send stream termination + receiver.send_response( + peer_id, + inbound_request_id, + Response::ExecutionProofsByRoot(None), + ); + debug!("Sent termination"); + } + } + _ => {} + } + } + } + .instrument(info_span!("Receiver")); + + tokio::select! { + _ = sender_future => {} + _ = receiver_future => {} + _ = sleep(Duration::from_secs(30)) => { + panic!("Future timed out"); + } + } + }) +} + +// Tests ExecutionProofsByRoot RPC - empty response (peer has no proofs) +#[test] +#[allow(clippy::single_match)] +fn test_tcp_execution_proofs_by_root_empty_response() { + // Set up the logging. + let log_level = "debug"; + let enable_logging = true; + let _subscriber = build_tracing_subscriber(log_level, enable_logging); + + let spec = Arc::new(spec_with_all_forks_enabled()); + // TODO(ethproofs): Changed to Electra fork for demo. 
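+    // A peer that has no proofs for the requested block root is expected to reply with only the
+    // stream terminator; the sender side of this test panics if any proof chunk arrives first.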
+ let current_fork_name = ForkName::Electra; + + let rt = Arc::new(Runtime::new().unwrap()); + rt.block_on(async { + let (mut sender, mut receiver) = common::build_node_pair( + Arc::downgrade(&rt), + current_fork_name, + spec.clone(), + Protocol::Tcp, + false, + None, + ) + .await; + + let block_root = Hash256::random(); + + let rpc_request = RequestType::ExecutionProofsByRoot( + ExecutionProofsByRootRequest::new(block_root, vec![], 2).unwrap(), + ); + + let mut received_termination = false; + + // Build the sender future + let sender_future = async { + loop { + match sender.next_event().await { + NetworkEvent::PeerConnectedOutgoing(peer_id) => { + debug!("Sending RPC"); + sender + .send_request(peer_id, AppRequestId::Router, rpc_request.clone()) + .unwrap(); + } + NetworkEvent::ResponseReceived { + peer_id: _, + app_request_id: AppRequestId::Router, + response, + } => match response { + Response::ExecutionProofsByRoot(Some(_)) => { + panic!("Should not receive any proofs in empty response test"); + } + Response::ExecutionProofsByRoot(None) => { + debug!("Stream terminated (empty response)"); + received_termination = true; + return; + } + _ => {} + }, + _ => {} + } + } + } + .instrument(info_span!("Sender")); + + // Build the receiver future + let receiver_future = async { + loop { + match receiver.next_event().await { + NetworkEvent::RequestReceived { + peer_id, + inbound_request_id, + request_type, + } => { + if request_type == rpc_request { + debug!("Receiver got request"); + // Send only stream termination (no proofs) + receiver.send_response( + peer_id, + inbound_request_id, + Response::ExecutionProofsByRoot(None), + ); + debug!("Sent empty response (termination only)"); + } + } + _ => {} + } + } + } + .instrument(info_span!("Receiver")); + + tokio::select! 
{ + _ = sender_future => { + assert!(received_termination, "Should have received stream termination"); + } + _ = receiver_future => {} + _ = sleep(Duration::from_secs(30)) => { + panic!("Future timed out"); + } + } + }) +} diff --git a/beacon_node/lighthouse_tracing/src/lib.rs b/beacon_node/lighthouse_tracing/src/lib.rs index 56dccadaa94..dd9e9f1ebb2 100644 --- a/beacon_node/lighthouse_tracing/src/lib.rs +++ b/beacon_node/lighthouse_tracing/src/lib.rs @@ -39,6 +39,8 @@ pub const SPAN_HANDLE_BLOBS_BY_RANGE_REQUEST: &str = "handle_blobs_by_range_requ pub const SPAN_HANDLE_DATA_COLUMNS_BY_RANGE_REQUEST: &str = "handle_data_columns_by_range_request"; pub const SPAN_HANDLE_BLOCKS_BY_ROOT_REQUEST: &str = "handle_blocks_by_root_request"; pub const SPAN_HANDLE_BLOBS_BY_ROOT_REQUEST: &str = "handle_blobs_by_root_request"; +pub const SPAN_HANDLE_EXECUTION_PROOFS_BY_ROOT_REQUEST: &str = + "handle_execution_proofs_by_root_request"; pub const SPAN_HANDLE_DATA_COLUMNS_BY_ROOT_REQUEST: &str = "handle_data_columns_by_root_request"; pub const SPAN_HANDLE_LIGHT_CLIENT_UPDATES_BY_RANGE: &str = "handle_light_client_updates_by_range"; pub const SPAN_HANDLE_LIGHT_CLIENT_BOOTSTRAP: &str = "handle_light_client_bootstrap"; @@ -70,6 +72,7 @@ pub const LH_BN_ROOT_SPAN_NAMES: &[&str] = &[ SPAN_HANDLE_DATA_COLUMNS_BY_RANGE_REQUEST, SPAN_HANDLE_BLOCKS_BY_ROOT_REQUEST, SPAN_HANDLE_BLOBS_BY_ROOT_REQUEST, + SPAN_HANDLE_EXECUTION_PROOFS_BY_ROOT_REQUEST, SPAN_HANDLE_DATA_COLUMNS_BY_ROOT_REQUEST, SPAN_HANDLE_LIGHT_CLIENT_UPDATES_BY_RANGE, SPAN_HANDLE_LIGHT_CLIENT_BOOTSTRAP, diff --git a/beacon_node/network/src/network_beacon_processor/gossip_methods.rs b/beacon_node/network/src/network_beacon_processor/gossip_methods.rs index eb70147c6ef..93d5cd343d4 100644 --- a/beacon_node/network/src/network_beacon_processor/gossip_methods.rs +++ b/beacon_node/network/src/network_beacon_processor/gossip_methods.rs @@ -7,6 +7,9 @@ use crate::{ use beacon_chain::blob_verification::{GossipBlobError, GossipVerifiedBlob}; use beacon_chain::block_verification_types::AsBlock; use beacon_chain::data_column_verification::{GossipDataColumnError, GossipVerifiedDataColumn}; +use beacon_chain::execution_proof_verification::{ + GossipExecutionProofError, GossipVerifiedExecutionProof, +}; use beacon_chain::store::Error; use beacon_chain::{ AvailabilityProcessingStatus, BeaconChainError, BeaconChainTypes, BlockError, ForkChoiceError, @@ -37,10 +40,11 @@ use store::hot_cold_store::HotColdDBError; use tracing::{Instrument, Span, debug, error, info, instrument, trace, warn}; use types::{ Attestation, AttestationData, AttestationRef, AttesterSlashing, BlobSidecar, DataColumnSidecar, - DataColumnSubnetId, EthSpec, Hash256, IndexedAttestation, LightClientFinalityUpdate, - LightClientOptimisticUpdate, ProposerSlashing, SignedAggregateAndProof, SignedBeaconBlock, - SignedBlsToExecutionChange, SignedContributionAndProof, SignedVoluntaryExit, SingleAttestation, - Slot, SubnetId, SyncCommitteeMessage, SyncSubnetId, beacon_block::BlockImportSource, + DataColumnSubnetId, EthSpec, ExecutionProof, Hash256, IndexedAttestation, + LightClientFinalityUpdate, LightClientOptimisticUpdate, ProposerSlashing, + SignedAggregateAndProof, SignedBeaconBlock, SignedBlsToExecutionChange, + SignedContributionAndProof, SignedVoluntaryExit, SingleAttestation, Slot, SubnetId, + SyncCommitteeMessage, SyncSubnetId, beacon_block::BlockImportSource, }; use beacon_processor::work_reprocessing_queue::QueuedColumnReconstruction; @@ -767,6 +771,231 @@ impl NetworkBeaconProcessor { } } + /// 
Process a gossip execution proof. + /// + /// Validates the execution proof according to the gossip spec and processes it + /// through the DataAvailabilityChecker if valid. + pub async fn process_gossip_execution_proof( + self: &Arc, + message_id: MessageId, + peer_id: PeerId, + execution_proof: Arc, + _seen_timestamp: Duration, + ) { + let block_root = execution_proof.block_root; + let proof_id = execution_proof.proof_id; + + debug!( + %peer_id, + %proof_id, + %block_root, + "Received execution proof via gossip" + ); + + // Verify the execution proof for gossip + match self + .chain + .verify_execution_proof_for_gossip(execution_proof.clone()) + { + Ok(gossip_verified_proof) => { + debug!( + %block_root, + subnet_id = %gossip_verified_proof.subnet_id(), + "Successfully verified gossip execution proof" + ); + + self.propagate_validation_result(message_id, peer_id, MessageAcceptance::Accept); + + let gossip_verified_proof_slot = gossip_verified_proof.slot(); + let gossip_verified_proof_subnet = gossip_verified_proof.subnet_id(); + + info!( + %block_root, + subnet_id = %gossip_verified_proof_subnet, + slot = %gossip_verified_proof_slot, + "[Ethproofs] Execution proof accepted and gossiped to peers" + ); + + // Process the verified proof through DA checker + self.process_gossip_verified_execution_proof( + peer_id, + gossip_verified_proof, + _seen_timestamp, + ) + .await + } + Err(err) => { + match err { + GossipExecutionProofError::PriorKnownUnpublished => { + debug!( + %block_root, + %proof_id, + "Gossip execution proof already processed via the EL. Accepting the proof without re-processing." + ); + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Accept, + ); + } + GossipExecutionProofError::PriorKnown { + block_root, + proof_id, + .. + } => { + // Proof already known via gossip. No penalty, gossip filter should + // filter duplicates. + debug!( + %block_root, + %proof_id, + "Received already known execution proof. Ignoring the proof" + ); + } + GossipExecutionProofError::ParentUnknown { parent_root } => { + debug!( + action = "requesting parent", + %block_root, + %parent_root, + "Unknown parent hash for execution proof" + ); + // TODO(zkproofs): Implement parent lookup for execution proofs + // This might require creating a new SyncMessage variant + // For now, we just ignore the proof + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Ignore, + ); + } + GossipExecutionProofError::BeaconChainError(_) => { + crit!( + error = ?err, + "Internal error when verifying execution proof" + ) + } + GossipExecutionProofError::ProofVerificationFailed(ref reason) => { + debug!( + error = ?err, + %block_root, + %proof_id, + %reason, + "Execution proof verification failed. Rejecting the proof" + ); + self.gossip_penalize_peer( + peer_id, + PeerAction::LowToleranceError, + "gossip_execution_proof_verification_failed", + ); + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Reject, + ); + } + GossipExecutionProofError::ProofTooLarge { size, max_size } => { + warn!( + error = ?err, + %block_root, + %proof_id, + %size, + %max_size, + "Execution proof exceeds maximum size. 
Rejecting the proof" + ); + self.gossip_penalize_peer( + peer_id, + PeerAction::LowToleranceError, + "gossip_execution_proof_too_large", + ); + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Reject, + ); + } + GossipExecutionProofError::BlockNotAvailable { block_root } => { + debug!( + error = ?err, + %block_root, + %proof_id, + "Block for execution proof not yet available. Ignoring the proof" + ); + // Block might arrive later, so don't penalize heavily + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Ignore, + ); + } + GossipExecutionProofError::NotFinalizedDescendant { block_parent_root } => { + debug!( + error = ?err, + %block_root, + %block_parent_root, + %proof_id, + "Execution proof conflicts with finality. Rejecting the proof" + ); + self.gossip_penalize_peer( + peer_id, + PeerAction::LowToleranceError, + "gossip_execution_proof_not_finalized_descendant", + ); + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Reject, + ); + } + GossipExecutionProofError::FutureSlot { + message_slot, + latest_permissible_slot, + } => { + debug!( + error = ?err, + %block_root, + %proof_id, + %message_slot, + %latest_permissible_slot, + "Execution proof from future slot. Ignoring the proof" + ); + self.gossip_penalize_peer( + peer_id, + PeerAction::HighToleranceError, + "gossip_execution_proof_future_slot", + ); + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Ignore, + ); + } + GossipExecutionProofError::PastFinalizedSlot { + proof_slot, + finalized_slot, + } => { + debug!( + error = ?err, + %block_root, + %proof_id, + %proof_slot, + %finalized_slot, + "Execution proof from past finalized slot. Ignoring the proof" + ); + self.gossip_penalize_peer( + peer_id, + PeerAction::LowToleranceError, + "gossip_execution_proof_past_finalized", + ); + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Ignore, + ); + } + } + } + } + } + #[allow(clippy::too_many_arguments)] #[instrument( name = SPAN_PROCESS_GOSSIP_BLOB, @@ -1126,6 +1355,83 @@ impl NetworkBeaconProcessor { } } + async fn process_gossip_verified_execution_proof( + self: &Arc, + peer_id: PeerId, + verified_proof: GossipVerifiedExecutionProof, + _seen_duration: Duration, + ) { + let processing_start_time = Instant::now(); + let block_root = verified_proof.block_root(); + let proof_slot = verified_proof.slot(); + let subnet_id = verified_proof.subnet_id(); + + let result = self + .chain + .process_gossip_execution_proof(verified_proof, || Ok(())) + .await; + register_process_result_metrics(&result, metrics::BlockSource::Gossip, "execution_proof"); + + match &result { + Ok(availability) => match availability { + AvailabilityProcessingStatus::Imported(block_root) => { + info!( + %block_root, + %subnet_id, + "[Ethproofs] Block fully available, imported with execution proofs" + ); + self.chain.recompute_head_at_current_slot().await; + + debug!( + processing_time_ms = processing_start_time.elapsed().as_millis(), + "Execution proof full verification complete" + ); + } + AvailabilityProcessingStatus::MissingComponents(slot, block_root) => { + trace!( + %slot, + %subnet_id, + "Execution proof cached, block still needs more components" + ); + debug!( + %block_root, + %proof_slot, + %subnet_id, + "Execution proof cached for pending block" + ); + } + }, + Err(BlockError::DuplicateFullyImported(_)) => { + debug!( + ?block_root, + %subnet_id, + "Ignoring gossip execution proof for already imported block" + 
); + } + Err(err) => { + debug!( + outcome = ?err, + ?block_root, + block_slot = %proof_slot, + %subnet_id, + "Invalid gossip execution proof" + ); + self.gossip_penalize_peer( + peer_id, + PeerAction::MidToleranceError, + "bad_gossip_execution_proof", + ); + } + } + + if matches!(result, Ok(AvailabilityProcessingStatus::Imported(_))) { + self.send_sync_message(SyncMessage::GossipBlockProcessResult { + block_root, + imported: true, + }); + } + } + /// Process the beacon block received from the gossip network and: /// /// - If it passes gossip propagation criteria, tell the network thread to forward it. diff --git a/beacon_node/network/src/network_beacon_processor/mod.rs b/beacon_node/network/src/network_beacon_processor/mod.rs index bebda36d71c..7db2790920e 100644 --- a/beacon_node/network/src/network_beacon_processor/mod.rs +++ b/beacon_node/network/src/network_beacon_processor/mod.rs @@ -14,7 +14,7 @@ use beacon_processor::{ use lighthouse_network::rpc::InboundRequestId; use lighthouse_network::rpc::methods::{ BlobsByRangeRequest, BlobsByRootRequest, DataColumnsByRangeRequest, DataColumnsByRootRequest, - LightClientUpdatesByRangeRequest, + ExecutionProofsByRootRequest, LightClientUpdatesByRangeRequest, }; use lighthouse_network::service::api_types::CustodyBackfillBatchId; use lighthouse_network::{ @@ -249,6 +249,32 @@ impl NetworkBeaconProcessor { }) } + /// Create a new `Work` event for some execution proof. + pub fn send_gossip_execution_proof( + self: &Arc, + message_id: MessageId, + peer_id: PeerId, + execution_proof: Arc, + seen_timestamp: Duration, + ) -> Result<(), Error> { + let processor = self.clone(); + let process_fn = async move { + processor + .process_gossip_execution_proof( + message_id, + peer_id, + execution_proof, + seen_timestamp, + ) + .await + }; + + self.try_send(BeaconWorkEvent { + drop_during_sync: false, + work: Work::GossipExecutionProof(Box::pin(process_fn)), + }) + } + /// Create a new `Work` event for some sync committee signature. pub fn send_gossip_sync_signature( self: &Arc, @@ -469,6 +495,30 @@ impl NetworkBeaconProcessor { }) } + /// Create a new `Work` event for some execution proofs. `process_rpc_execution_proofs` reports + /// the result back to sync. + pub fn send_rpc_execution_proofs( + self: &Arc, + block_root: Hash256, + proofs: Vec>, + seen_timestamp: Duration, + process_type: BlockProcessType, + ) -> Result<(), Error> { + if proofs.is_empty() { + return Ok(()); + } + let process_fn = self.clone().generate_rpc_execution_proofs_process_fn( + block_root, + proofs, + seen_timestamp, + process_type, + ); + self.try_send(BeaconWorkEvent { + drop_during_sync: false, + work: Work::RpcExecutionProofs { process_fn }, + }) + } + /// Create a new `Work` event for some custody columns. `process_rpc_custody_columns` reports /// the result back to sync. pub fn send_rpc_custody_columns( @@ -631,6 +681,24 @@ impl NetworkBeaconProcessor { }) } + /// Create a new work event to process `ExecutionProofsByRootRequest`s from the RPC network. 
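+    ///
+    /// The request is queued as `Work::ExecutionProofsByRootsRequest` and later served by
+    /// `handle_execution_proofs_by_root_request` on a beacon processor worker, which streams
+    /// back at most `count_needed` proofs before terminating the response stream.
+    ///
+    /// Illustrative sketch of the call site in the router (error handling elided):
+    /// ```ignore
+    /// // RequestType::ExecutionProofsByRoot(request) received from a peer:
+    /// network_beacon_processor
+    ///     .send_execution_proofs_by_roots_request(peer_id, inbound_request_id, request)?;
+    /// ```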
+ pub fn send_execution_proofs_by_roots_request( + self: &Arc, + peer_id: PeerId, + inbound_request_id: InboundRequestId, + request: ExecutionProofsByRootRequest, + ) -> Result<(), Error> { + let processor = self.clone(); + let process_fn = move || { + processor.handle_execution_proofs_by_root_request(peer_id, inbound_request_id, request) + }; + + self.try_send(BeaconWorkEvent { + drop_during_sync: false, + work: Work::ExecutionProofsByRootsRequest(Box::new(process_fn)), + }) + } + /// Create a new work event to process `DataColumnsByRootRequest`s from the RPC network. pub fn send_data_columns_by_roots_request( self: &Arc, diff --git a/beacon_node/network/src/network_beacon_processor/rpc_methods.rs b/beacon_node/network/src/network_beacon_processor/rpc_methods.rs index ac24b648e05..f063d7e8380 100644 --- a/beacon_node/network/src/network_beacon_processor/rpc_methods.rs +++ b/beacon_node/network/src/network_beacon_processor/rpc_methods.rs @@ -7,6 +7,7 @@ use beacon_chain::{BeaconChainError, BeaconChainTypes, BlockProcessStatus, WhenS use itertools::{Itertools, process_results}; use lighthouse_network::rpc::methods::{ BlobsByRangeRequest, BlobsByRootRequest, DataColumnsByRangeRequest, DataColumnsByRootRequest, + ExecutionProofsByRootRequest, }; use lighthouse_network::rpc::*; use lighthouse_network::{PeerId, ReportSource, Response, SyncInfo}; @@ -14,8 +15,9 @@ use lighthouse_tracing::{ SPAN_HANDLE_BLOBS_BY_RANGE_REQUEST, SPAN_HANDLE_BLOBS_BY_ROOT_REQUEST, SPAN_HANDLE_BLOCKS_BY_RANGE_REQUEST, SPAN_HANDLE_BLOCKS_BY_ROOT_REQUEST, SPAN_HANDLE_DATA_COLUMNS_BY_RANGE_REQUEST, SPAN_HANDLE_DATA_COLUMNS_BY_ROOT_REQUEST, - SPAN_HANDLE_LIGHT_CLIENT_BOOTSTRAP, SPAN_HANDLE_LIGHT_CLIENT_FINALITY_UPDATE, - SPAN_HANDLE_LIGHT_CLIENT_OPTIMISTIC_UPDATE, SPAN_HANDLE_LIGHT_CLIENT_UPDATES_BY_RANGE, + SPAN_HANDLE_EXECUTION_PROOFS_BY_ROOT_REQUEST, SPAN_HANDLE_LIGHT_CLIENT_BOOTSTRAP, + SPAN_HANDLE_LIGHT_CLIENT_FINALITY_UPDATE, SPAN_HANDLE_LIGHT_CLIENT_OPTIMISTIC_UPDATE, + SPAN_HANDLE_LIGHT_CLIENT_UPDATES_BY_RANGE, }; use methods::LightClientUpdatesByRangeRequest; use slot_clock::SlotClock; @@ -390,6 +392,100 @@ impl NetworkBeaconProcessor { Ok(()) } + /// Handle an `ExecutionProofsByRoot` request from the peer. + #[instrument( + name = SPAN_HANDLE_EXECUTION_PROOFS_BY_ROOT_REQUEST, + parent = None, + level = "debug", + skip_all, + fields( + peer_id = %peer_id, + client = tracing::field::Empty, + ) + )] + pub fn handle_execution_proofs_by_root_request( + self: Arc, + peer_id: PeerId, + inbound_request_id: InboundRequestId, + request: ExecutionProofsByRootRequest, + ) { + let client = self.network_globals.client(&peer_id); + Span::current().record("client", field::display(client.kind)); + + self.terminate_response_stream( + peer_id, + inbound_request_id, + self.handle_execution_proofs_by_root_request_inner( + peer_id, + inbound_request_id, + request, + ), + Response::ExecutionProofsByRoot, + ); + } + + /// Handle an `ExecutionProofsByRoot` request from the peer. 
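+    ///
+    /// Reads whatever proofs the data availability checker holds for `request.block_root`,
+    /// skips any IDs listed in `request.already_have`, and sends at most
+    /// `request.count_needed` proofs. Having no proofs for the root is not an error: the
+    /// function returns `Ok(())` and the caller still terminates the stream, which the peer
+    /// observes as an empty response.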
+ fn handle_execution_proofs_by_root_request_inner( + &self, + peer_id: PeerId, + inbound_request_id: InboundRequestId, + request: ExecutionProofsByRootRequest, + ) -> Result<(), (RpcErrorResponse, &'static str)> { + let block_root = request.block_root; + let already_have_set: std::collections::HashSet<_> = + request.already_have.iter().copied().collect(); + let count_needed = request.count_needed as usize; + + // Get all execution proofs we have for this block from the DA checker + let Some(available_proofs) = self + .chain + .data_availability_checker + .get_execution_proofs(&block_root) + else { + // No proofs available for this block + debug!( + %peer_id, + %block_root, + "No execution proofs available for peer" + ); + return Ok(()); + }; + + // Filter out proofs the peer already has and send up to count_needed + let mut sent_count = 0; + for proof in available_proofs { + // Skip proofs the peer already has + if already_have_set.contains(&proof.proof_id) { + continue; + } + + // Send the proof + self.send_response( + peer_id, + inbound_request_id, + Response::ExecutionProofsByRoot(Some(proof)), + ); + + sent_count += 1; + + // Stop when we've sent the requested count + if sent_count >= count_needed { + break; + } + } + + debug!( + %peer_id, + %block_root, + requested = count_needed, + already_have = already_have_set.len(), + sent = sent_count, + "ExecutionProofsByRoot outgoing response processed" + ); + + Ok(()) + } + /// Handle a `DataColumnsByRoot` request from the peer. #[instrument( name = SPAN_HANDLE_DATA_COLUMNS_BY_ROOT_REQUEST, diff --git a/beacon_node/network/src/network_beacon_processor/sync_methods.rs b/beacon_node/network/src/network_beacon_processor/sync_methods.rs index 41160fcfe45..97977ae4f04 100644 --- a/beacon_node/network/src/network_beacon_processor/sync_methods.rs +++ b/beacon_node/network/src/network_beacon_processor/sync_methods.rs @@ -259,6 +259,21 @@ impl NetworkBeaconProcessor { Box::pin(process_fn) } + pub fn generate_rpc_execution_proofs_process_fn( + self: Arc, + block_root: Hash256, + proofs: Vec>, + seen_timestamp: Duration, + process_type: BlockProcessType, + ) -> AsyncFn { + let process_fn = async move { + self.clone() + .process_rpc_execution_proofs(block_root, proofs, seen_timestamp, process_type) + .await; + }; + Box::pin(process_fn) + } + /// Attempt to process a list of blobs received from a direct RPC request. #[instrument( name = SPAN_PROCESS_RPC_BLOBS, @@ -987,4 +1002,79 @@ impl NetworkBeaconProcessor { } } } + + /// Process execution proofs received via RPC. + pub async fn process_rpc_execution_proofs( + self: Arc>, + block_root: Hash256, + proofs: Vec>, + _seen_timestamp: Duration, + process_type: BlockProcessType, + ) { + // Get slot directly from the first proof. All proofs should be for the same block. 
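+        // `send_rpc_execution_proofs` drops empty batches before queueing this work, so `proofs`
+        // should be non-empty here; the `None` arm below is a defensive early return.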
+ let slot = match proofs.first() { + Some(proof) => proof.slot, + None => { + debug!(?block_root, "No execution proofs to process"); + return; + } + }; + + let proof_ids: Vec<_> = proofs.iter().map(|p| p.proof_id).collect(); + + debug!( + ?proof_ids, + %block_root, + %slot, + proof_count = proofs.len(), + "RPC execution proofs received" + ); + + if let Ok(current_slot) = self.chain.slot() + && current_slot == slot + { + // let delay = get_slot_delay_ms(seen_timestamp, slot, &self.chain.slot_clock); + // TODO(zkproofs): Add dedicated metrics for execution proofs + } + + let result = self + .chain + .process_rpc_execution_proofs(slot, block_root, proofs) + .await; + + // TODO(zkproofs): Add dedicated metrics for execution proof processing + // register_process_result_metrics(&result, metrics::BlockSource::Rpc, "execution_proofs"); + + match &result { + Ok(AvailabilityProcessingStatus::Imported(hash)) => { + debug!( + result = "imported block with execution proofs", + %slot, + block_hash = %hash, + "Block components retrieved" + ); + self.chain.recompute_head_at_current_slot().await; + } + Ok(AvailabilityProcessingStatus::MissingComponents(_, _)) => { + debug!( + block_hash = %block_root, + %slot, + "Missing components over rpc (still need more proofs or other components)" + ); + } + Err(BlockError::DuplicateFullyImported(_)) => { + debug!( + block_hash = %block_root, + %slot, + "Execution proofs have already been imported" + ); + } + Err(_) => {} + } + + self.send_sync_message(SyncMessage::BlockComponentProcessed { + process_type, + result: result.into(), + }); + } } diff --git a/beacon_node/network/src/router.rs b/beacon_node/network/src/router.rs index 60fe094bb7c..eb02ddad921 100644 --- a/beacon_node/network/src/router.rs +++ b/beacon_node/network/src/router.rs @@ -24,7 +24,9 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH}; use tokio::sync::mpsc; use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::{debug, error, trace, warn}; -use types::{BlobSidecar, DataColumnSidecar, EthSpec, ForkContext, SignedBeaconBlock}; +use types::{ + BlobSidecar, DataColumnSidecar, EthSpec, ExecutionProof, ForkContext, SignedBeaconBlock, +}; /// Handles messages from the network and routes them to the appropriate service to be handled. pub struct Router { @@ -272,6 +274,15 @@ impl Router { request, ), ), + RequestType::ExecutionProofsByRoot(request) => self + .handle_beacon_processor_send_result( + self.network_beacon_processor + .send_execution_proofs_by_roots_request( + peer_id, + inbound_request_id, + request, + ), + ), _ => {} } } @@ -309,6 +320,9 @@ impl Router { Response::DataColumnsByRange(data_column) => { self.on_data_columns_by_range_response(peer_id, app_request_id, data_column); } + Response::ExecutionProofsByRoot(execution_proof) => { + self.on_execution_proofs_by_root_response(peer_id, app_request_id, execution_proof); + } // Light client responses should not be received Response::LightClientBootstrap(_) | Response::LightClientOptimisticUpdate(_) @@ -384,6 +398,15 @@ impl Router { ), ) } + PubsubMessage::ExecutionProof(execution_proof) => self + .handle_beacon_processor_send_result( + self.network_beacon_processor.send_gossip_execution_proof( + message_id, + peer_id, + execution_proof, + timestamp_now(), + ), + ), PubsubMessage::VoluntaryExit(exit) => { debug!(%peer_id, "Received a voluntary exit"); self.handle_beacon_processor_send_result( @@ -670,6 +693,40 @@ impl Router { }); } + /// Handle an `ExecutionProofsByRoot` response from the peer. 
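+    ///
+    /// `Some(proof)` is a single chunk of the response stream and `None` is the stream
+    /// terminator. Only sync-initiated `SingleExecutionProof` requests are expected here; each
+    /// event is forwarded to the sync manager as `SyncMessage::RpcExecutionProof`.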
+ pub fn on_execution_proofs_by_root_response( + &mut self, + peer_id: PeerId, + app_request_id: AppRequestId, + execution_proof: Option>, + ) { + let sync_request_id = match app_request_id { + AppRequestId::Sync(sync_id) => match sync_id { + id @ SyncRequestId::SingleExecutionProof { .. } => id, + other => { + crit!(request = ?other, "ExecutionProofsByRoot response on incorrect request"); + return; + } + }, + AppRequestId::Router => { + crit!(%peer_id, "All ExecutionProofsByRoot requests belong to sync"); + return; + } + AppRequestId::Internal => unreachable!("Handled internally"), + }; + + trace!( + %peer_id, + "Received ExecutionProofsByRoot Response" + ); + self.send_to_sync(SyncMessage::RpcExecutionProof { + sync_request_id, + peer_id, + execution_proof, + seen_timestamp: timestamp_now(), + }); + } + /// Handle a `DataColumnsByRoot` response from the peer. pub fn on_data_columns_by_root_response( &mut self, diff --git a/beacon_node/network/src/sync/block_lookups/common.rs b/beacon_node/network/src/sync/block_lookups/common.rs index c6b05190871..64da1ae61fc 100644 --- a/beacon_node/network/src/sync/block_lookups/common.rs +++ b/beacon_node/network/src/sync/block_lookups/common.rs @@ -2,7 +2,7 @@ use crate::sync::block_lookups::single_block_lookup::{ LookupRequestError, SingleBlockLookup, SingleLookupRequestState, }; use crate::sync::block_lookups::{ - BlobRequestState, BlockRequestState, CustodyRequestState, PeerId, + BlobRequestState, BlockRequestState, CustodyRequestState, PeerId, ProofRequestState, }; use crate::sync::manager::BlockProcessType; use crate::sync::network_context::{LookupRequestResult, SyncNetworkContext}; @@ -12,7 +12,7 @@ use parking_lot::RwLock; use std::collections::HashSet; use std::sync::Arc; use types::blob_sidecar::FixedBlobSidecarList; -use types::{DataColumnSidecarList, SignedBeaconBlock}; +use types::{DataColumnSidecarList, ExecutionProof, SignedBeaconBlock}; use super::SingleLookupId; use super::single_block_lookup::{ComponentRequests, DownloadResult}; @@ -22,6 +22,7 @@ pub enum ResponseType { Block, Blob, CustodyColumn, + ExecutionProof, } /// This trait unifies common single block lookup functionality across blocks and blobs. This @@ -215,3 +216,57 @@ impl RequestState for CustodyRequestState { &mut self.state } } + +impl RequestState for ProofRequestState { + type VerifiedResponseType = Vec>; + + fn make_request( + &self, + id: Id, + lookup_peers: Arc>>, + _min_proofs: usize, + cx: &mut SyncNetworkContext, + ) -> Result { + cx.execution_proof_lookup_request( + id, + lookup_peers, + self.block_root, + self.min_proofs_required, + ) + .map_err(LookupRequestError::SendFailedNetwork) + } + + fn send_for_processing( + id: Id, + download_result: DownloadResult, + cx: &SyncNetworkContext, + ) -> Result<(), LookupRequestError> { + let DownloadResult { + value, + block_root, + seen_timestamp, + .. 
+ } = download_result; + cx.send_execution_proofs_for_processing(id, block_root, value, seen_timestamp) + .map_err(LookupRequestError::SendFailedProcessor) + } + + fn response_type() -> ResponseType { + ResponseType::ExecutionProof + } + + fn request_state_mut(request: &mut SingleBlockLookup) -> Result<&mut Self, &'static str> { + request + .proof_request + .as_mut() + .ok_or("no active proof request") + } + + fn get_state(&self) -> &SingleLookupRequestState { + &self.state + } + + fn get_state_mut(&mut self) -> &mut SingleLookupRequestState { + &mut self.state + } +} diff --git a/beacon_node/network/src/sync/block_lookups/mod.rs b/beacon_node/network/src/sync/block_lookups/mod.rs index f8ffd298caf..6212c63a119 100644 --- a/beacon_node/network/src/sync/block_lookups/mod.rs +++ b/beacon_node/network/src/sync/block_lookups/mod.rs @@ -39,7 +39,9 @@ use fnv::FnvHashMap; use lighthouse_network::service::api_types::SingleLookupReqId; use lighthouse_network::{PeerAction, PeerId}; use lru_cache::LRUTimeCache; -pub use single_block_lookup::{BlobRequestState, BlockRequestState, CustodyRequestState}; +pub use single_block_lookup::{ + BlobRequestState, BlockRequestState, CustodyRequestState, ProofRequestState, +}; use std::collections::hash_map::Entry; use std::sync::Arc; use std::time::Duration; @@ -532,6 +534,9 @@ impl BlockLookups { BlockProcessType::SingleCustodyColumn(id) => { self.on_processing_result_inner::>(id, result, cx) } + BlockProcessType::SingleExecutionProof { id } => { + self.on_processing_result_inner::(id, result, cx) + } }; self.on_lookup_result(process_type.id(), lookup_result, "processing_result", cx); } @@ -673,6 +678,9 @@ impl BlockLookups { ResponseType::CustodyColumn => { "lookup_custody_column_processing_failure" } + ResponseType::ExecutionProof => { + "lookup_execution_proof_processing_failure" + } }, ); } diff --git a/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs b/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs index 46897b2283b..46a8deb0ab2 100644 --- a/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs +++ b/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs @@ -17,7 +17,7 @@ use store::Hash256; use strum::IntoStaticStr; use tracing::{Span, debug_span}; use types::blob_sidecar::FixedBlobSidecarList; -use types::{DataColumnSidecarList, EthSpec, SignedBeaconBlock, Slot}; +use types::{DataColumnSidecarList, EthSpec, ExecutionProof, SignedBeaconBlock, Slot}; // Dedicated enum for LookupResult to force its usage #[must_use = "LookupResult must be handled with on_lookup_result"] @@ -63,6 +63,7 @@ pub struct SingleBlockLookup { pub id: Id, pub block_request_state: BlockRequestState, pub component_requests: ComponentRequests, + pub proof_request: Option, /// Peers that claim to have imported this set of block components. This state is shared with /// the custody request to have an updated view of the peers that claim to have imported the /// block associated with this lookup. The peer set of a lookup can change rapidly, and faster @@ -102,6 +103,7 @@ impl SingleBlockLookup { id, block_request_state: BlockRequestState::new(requested_block_root), component_requests: ComponentRequests::WaitingForBlock, + proof_request: None, peers: Arc::new(RwLock::new(HashSet::from_iter(peers.iter().copied()))), block_root: requested_block_root, awaiting_parent, @@ -168,32 +170,51 @@ impl SingleBlockLookup { /// Returns true if the block has already been downloaded. 
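+    /// When an execution proof request is attached to the lookup, it must also have been
+    /// processed before the lookup counts as complete; lookups without a proof request treat
+    /// that component as trivially processed.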
pub fn all_components_processed(&self) -> bool { - self.block_request_state.state.is_processed() - && match &self.component_requests { - ComponentRequests::WaitingForBlock => false, - ComponentRequests::ActiveBlobRequest(request, _) => request.state.is_processed(), - ComponentRequests::ActiveCustodyRequest(request) => request.state.is_processed(), - ComponentRequests::NotNeeded { .. } => true, - } + let block_processed = self.block_request_state.state.is_processed(); + + let da_component_processed = match &self.component_requests { + ComponentRequests::WaitingForBlock => false, + ComponentRequests::ActiveBlobRequest(request, _) => request.state.is_processed(), + ComponentRequests::ActiveCustodyRequest(request) => request.state.is_processed(), + ComponentRequests::NotNeeded { .. } => true, + }; + + let proof_processed = self + .proof_request + .as_ref() + .map(|request| request.state.is_processed()) + .unwrap_or(true); // If no proof request, consider it processed + + block_processed && da_component_processed && proof_processed } /// Returns true if this request is expecting some event to make progress pub fn is_awaiting_event(&self) -> bool { - self.awaiting_parent.is_some() - || self.block_request_state.state.is_awaiting_event() - || match &self.component_requests { - // If components are waiting for the block request to complete, here we should - // check if the`block_request_state.state.is_awaiting_event(). However we already - // checked that above, so `WaitingForBlock => false` is equivalent. - ComponentRequests::WaitingForBlock => false, - ComponentRequests::ActiveBlobRequest(request, _) => { - request.state.is_awaiting_event() - } - ComponentRequests::ActiveCustodyRequest(request) => { - request.state.is_awaiting_event() - } - ComponentRequests::NotNeeded { .. } => false, - } + if self.awaiting_parent.is_some() { + return true; + } + + if self.block_request_state.state.is_awaiting_event() { + return true; + } + + let da_awaiting = match &self.component_requests { + // If components are waiting for the block request to complete, here we should + // check if the`block_request_state.state.is_awaiting_event(). However we already + // checked that above, so `WaitingForBlock => false` is equivalent. + ComponentRequests::WaitingForBlock => false, + ComponentRequests::ActiveBlobRequest(request, _) => request.state.is_awaiting_event(), + ComponentRequests::ActiveCustodyRequest(request) => request.state.is_awaiting_event(), + ComponentRequests::NotNeeded { .. } => false, + }; + + let proof_awaiting = self + .proof_request + .as_ref() + .map(|request| request.state.is_awaiting_event()) + .unwrap_or(false); + + da_awaiting || proof_awaiting } /// Makes progress on all requests of this lookup. Any error is not recoverable and must result @@ -239,6 +260,11 @@ impl SingleBlockLookup { } else { self.component_requests = ComponentRequests::NotNeeded("outside da window"); } + + if cx.chain.should_fetch_execution_proofs(block_epoch) { + self.proof_request = cx.chain.min_execution_proofs_required() + .map(|min_proofs| ProofRequestState::new(self.block_root, min_proofs)); + } } else { // Wait to download the block before downloading blobs. 
Then we can be sure that the // block has data, so there's no need to do "blind" requests for all possible blobs and @@ -253,6 +279,7 @@ impl SingleBlockLookup { } } + // Progress DA component requests match &self.component_requests { ComponentRequests::WaitingForBlock => {} // do nothing ComponentRequests::ActiveBlobRequest(_, expected_blobs) => { @@ -264,6 +291,11 @@ impl SingleBlockLookup { ComponentRequests::NotNeeded { .. } => {} // do nothing } + // Progress proof request (separate from DA components) + if let Some(request) = &self.proof_request { + self.continue_request::(cx, request.min_proofs_required)?; + } + // If all components of this lookup are already processed, there will be no future events // that can make progress so it must be dropped. Consider the lookup completed. // This case can happen if we receive the components from gossip during a retry. @@ -404,6 +436,26 @@ impl CustodyRequestState { } } +/// The state of the execution proof request component of a `SingleBlockLookup`. +#[derive(Educe)] +#[educe(Debug)] +pub struct ProofRequestState { + #[educe(Debug(ignore))] + pub block_root: Hash256, + pub state: SingleLookupRequestState>>, + pub min_proofs_required: usize, +} + +impl ProofRequestState { + pub fn new(block_root: Hash256, min_proofs_required: usize) -> Self { + Self { + block_root, + state: SingleLookupRequestState::new(), + min_proofs_required, + } + } +} + /// The state of the block request component of a `SingleBlockLookup`. #[derive(Educe)] #[educe(Debug)] diff --git a/beacon_node/network/src/sync/manager.rs b/beacon_node/network/src/sync/manager.rs index 338f21ce987..c0af69d7a40 100644 --- a/beacon_node/network/src/sync/manager.rs +++ b/beacon_node/network/src/sync/manager.rs @@ -45,6 +45,7 @@ use crate::service::NetworkMessage; use crate::status::ToStatusMessage; use crate::sync::block_lookups::{ BlobRequestState, BlockComponent, BlockRequestState, CustodyRequestState, DownloadResult, + ProofRequestState, }; use crate::sync::custody_backfill_sync::CustodyBackFillSync; use crate::sync::network_context::{PeerGroup, RpcResponseResult}; @@ -73,7 +74,8 @@ use std::time::Duration; use tokio::sync::mpsc; use tracing::{debug, error, info, trace}; use types::{ - BlobSidecar, DataColumnSidecar, EthSpec, ForkContext, Hash256, SignedBeaconBlock, Slot, + BlobSidecar, DataColumnSidecar, EthSpec, ExecutionProof, ForkContext, Hash256, + SignedBeaconBlock, Slot, }; /// The number of slots ahead of us that is allowed before requesting a long-range (batch) Sync @@ -132,6 +134,14 @@ pub enum SyncMessage { seen_timestamp: Duration, }, + /// An execution proof has been received from the RPC + RpcExecutionProof { + sync_request_id: SyncRequestId, + peer_id: PeerId, + execution_proof: Option>, + seen_timestamp: Duration, + }, + /// A block with an unknown parent has been received. 
UnknownParentBlock(PeerId, Arc>, Hash256), @@ -183,6 +193,7 @@ pub enum BlockProcessType { SingleBlock { id: Id }, SingleBlob { id: Id }, SingleCustodyColumn(Id), + SingleExecutionProof { id: Id }, } impl BlockProcessType { @@ -190,7 +201,8 @@ impl BlockProcessType { match self { BlockProcessType::SingleBlock { id } | BlockProcessType::SingleBlob { id } - | BlockProcessType::SingleCustodyColumn(id) => *id, + | BlockProcessType::SingleCustodyColumn(id) + | BlockProcessType::SingleExecutionProof { id } => *id, } } } @@ -491,6 +503,9 @@ impl SyncManager { SyncRequestId::SingleBlob { id } => { self.on_single_blob_response(id, peer_id, RpcEvent::RPCError(error)) } + SyncRequestId::SingleExecutionProof { id } => { + self.on_single_execution_proof_response(id, peer_id, RpcEvent::RPCError(error)) + } SyncRequestId::DataColumnsByRoot(req_id) => { self.on_data_columns_by_root_response(req_id, peer_id, RpcEvent::RPCError(error)) } @@ -833,6 +848,17 @@ impl SyncManager { } => { self.rpc_data_column_received(sync_request_id, peer_id, data_column, seen_timestamp) } + SyncMessage::RpcExecutionProof { + sync_request_id, + peer_id, + execution_proof, + seen_timestamp, + } => self.rpc_execution_proof_received( + sync_request_id, + peer_id, + execution_proof, + seen_timestamp, + ), SyncMessage::UnknownParentBlock(peer_id, block, block_root) => { let block_slot = block.slot(); let parent_root = block.parent_root(); @@ -1186,6 +1212,25 @@ impl SyncManager { } } + fn rpc_execution_proof_received( + &mut self, + sync_request_id: SyncRequestId, + peer_id: PeerId, + execution_proof: Option>, + seen_timestamp: Duration, + ) { + match sync_request_id { + SyncRequestId::SingleExecutionProof { id } => self.on_single_execution_proof_response( + id, + peer_id, + RpcEvent::from_chunk(execution_proof, seen_timestamp), + ), + _ => { + crit!(%peer_id, "bad request id for execution_proof"); + } + } + } + fn on_single_blob_response( &mut self, id: SingleLookupReqId, @@ -1204,6 +1249,27 @@ impl SyncManager { } } + fn on_single_execution_proof_response( + &mut self, + id: SingleLookupReqId, + peer_id: PeerId, + execution_proof: RpcEvent>, + ) { + if let Some(resp) = + self.network + .on_single_execution_proof_response(id, peer_id, execution_proof) + { + self.block_lookups + .on_download_response::( + id, + resp.map(|(value, seen_timestamp)| { + (value, PeerGroup::from_single(peer_id), seen_timestamp) + }), + &mut self.network, + ) + } + } + fn on_data_columns_by_root_response( &mut self, req_id: DataColumnsByRootRequestId, diff --git a/beacon_node/network/src/sync/network_context.rs b/beacon_node/network/src/sync/network_context.rs index 2e0c56db23f..73afabe60d2 100644 --- a/beacon_node/network/src/sync/network_context.rs +++ b/beacon_node/network/src/sync/network_context.rs @@ -37,6 +37,7 @@ pub use requests::LookupVerifyError; use requests::{ ActiveRequests, BlobsByRangeRequestItems, BlobsByRootRequestItems, BlocksByRangeRequestItems, BlocksByRootRequestItems, DataColumnsByRangeRequestItems, DataColumnsByRootRequestItems, + ExecutionProofsByRootRequestItems, ExecutionProofsByRootSingleBlockRequest, }; #[cfg(test)] use slot_clock::SlotClock; @@ -52,7 +53,7 @@ use tracing::{Span, debug, debug_span, error, warn}; use types::blob_sidecar::FixedBlobSidecarList; use types::{ BlobSidecar, BlockImportSource, ColumnIndex, DataColumnSidecar, DataColumnSidecarList, EthSpec, - ForkContext, Hash256, SignedBeaconBlock, Slot, + ExecutionProof, ForkContext, Hash256, SignedBeaconBlock, Slot, }; pub mod custody; @@ -204,6 +205,9 @@ pub struct 
SyncNetworkContext { /// A mapping of active DataColumnsByRoot requests data_columns_by_root_requests: ActiveRequests>, + /// A mapping of active ExecutionProofsByRoot requests + execution_proofs_by_root_requests: + ActiveRequests>, /// A mapping of active BlocksByRange requests blocks_by_range_requests: ActiveRequests>, @@ -295,6 +299,7 @@ impl SyncNetworkContext { blocks_by_root_requests: ActiveRequests::new("blocks_by_root"), blobs_by_root_requests: ActiveRequests::new("blobs_by_root"), data_columns_by_root_requests: ActiveRequests::new("data_columns_by_root"), + execution_proofs_by_root_requests: ActiveRequests::new("execution_proofs_by_root"), blocks_by_range_requests: ActiveRequests::new("blocks_by_range"), blobs_by_range_requests: ActiveRequests::new("blobs_by_range"), data_columns_by_range_requests: ActiveRequests::new("data_columns_by_range"), @@ -323,6 +328,7 @@ impl SyncNetworkContext { blocks_by_root_requests, blobs_by_root_requests, data_columns_by_root_requests, + execution_proofs_by_root_requests, blocks_by_range_requests, blobs_by_range_requests, data_columns_by_range_requests, @@ -349,6 +355,10 @@ impl SyncNetworkContext { .active_requests_of_peer(peer_id) .into_iter() .map(|req_id| SyncRequestId::DataColumnsByRoot(*req_id)); + let execution_proofs_by_root_ids = execution_proofs_by_root_requests + .active_requests_of_peer(peer_id) + .into_iter() + .map(|id| SyncRequestId::SingleExecutionProof { id: *id }); let blocks_by_range_ids = blocks_by_range_requests .active_requests_of_peer(peer_id) .into_iter() @@ -364,6 +374,7 @@ impl SyncNetworkContext { blocks_by_root_ids .chain(blobs_by_root_ids) .chain(data_column_by_root_ids) + .chain(execution_proofs_by_root_ids) .chain(blocks_by_range_ids) .chain(blobs_by_range_ids) .chain(data_column_by_range_ids) @@ -420,6 +431,7 @@ impl SyncNetworkContext { blocks_by_root_requests, blobs_by_root_requests, data_columns_by_root_requests, + execution_proofs_by_root_requests, blocks_by_range_requests, blobs_by_range_requests, data_columns_by_range_requests, @@ -442,6 +454,7 @@ impl SyncNetworkContext { .iter_request_peers() .chain(blobs_by_root_requests.iter_request_peers()) .chain(data_columns_by_root_requests.iter_request_peers()) + .chain(execution_proofs_by_root_requests.iter_request_peers()) .chain(blocks_by_range_requests.iter_request_peers()) .chain(blobs_by_range_requests.iter_request_peers()) .chain(data_columns_by_range_requests.iter_request_peers()) @@ -1026,6 +1039,100 @@ impl SyncNetworkContext { Ok(LookupRequestResult::RequestSent(id.req_id)) } + /// Request execution proofs for `block_root` + pub fn execution_proof_lookup_request( + &mut self, + lookup_id: SingleLookupId, + lookup_peers: Arc>>, + block_root: Hash256, + min_proofs_required: usize, + ) -> Result { + let active_request_count_by_peer = self.active_request_count_by_peer(); + let Some(peer_id) = lookup_peers + .read() + .iter() + .map(|peer| { + ( + // Prefer peers with less overall requests + active_request_count_by_peer.get(peer).copied().unwrap_or(0), + // Random factor to break ties, otherwise the PeerID breaks ties + rand::random::(), + peer, + ) + }) + .min() + .map(|(_, _, peer)| *peer) + else { + return Ok(LookupRequestResult::Pending("no peers")); + }; + + // Query DA checker for proofs we already have + let already_have = self + .chain + .data_availability_checker + .get_existing_proof_ids(&block_root) + .unwrap_or_default(); + + let current_count = already_have.len(); + + // Calculate how many more proofs we need + if current_count >= min_proofs_required 
{ + // Already have enough proofs, no request needed + return Ok(LookupRequestResult::NoRequestNeeded( + "already have minimum proofs", + )); + } + + let count_needed = min_proofs_required - current_count; + + let id = SingleLookupReqId { + lookup_id, + req_id: self.next_id(), + }; + + let request = ExecutionProofsByRootSingleBlockRequest { + block_root, + already_have: already_have.clone(), + count_needed, + }; + + let network_request = RequestType::ExecutionProofsByRoot( + request + .clone() + .into_request() + .map_err(RpcRequestSendError::InternalError)?, + ); + + self.network_send + .send(NetworkMessage::SendRequest { + peer_id, + request: network_request, + app_request_id: AppRequestId::Sync(SyncRequestId::SingleExecutionProof { id }), + }) + .map_err(|_| RpcRequestSendError::InternalError("network send error".to_owned()))?; + + debug!( + method = "ExecutionProofsByRoot", + ?block_root, + already_have_count = already_have.len(), + count_needed, + peer = %peer_id, + %id, + "Sync RPC request sent" + ); + + self.execution_proofs_by_root_requests.insert( + id, + peer_id, + // Don't expect max responses since peer might not have all the proofs we need + false, + ExecutionProofsByRootRequestItems::new(request), + Span::none(), + ); + + Ok(LookupRequestResult::RequestSent(id.req_id)) + } + /// Request to send a single `data_columns_by_root` request to the network. pub fn data_column_lookup_request( &mut self, @@ -1460,6 +1567,20 @@ impl SyncNetworkContext { self.on_rpc_response_result(id, "BlobsByRoot", resp, peer_id, |_| 1) } + pub(crate) fn on_single_execution_proof_response( + &mut self, + id: SingleLookupReqId, + peer_id: PeerId, + rpc_event: RpcEvent>, + ) -> Option>>> { + let resp = self + .execution_proofs_by_root_requests + .on_response(id, rpc_event); + self.on_rpc_response_result(id, "ExecutionProofsByRoot", resp, peer_id, |proofs| { + proofs.len() + }) + } + #[allow(clippy::type_complexity)] pub(crate) fn on_data_columns_by_root_response( &mut self, @@ -1657,6 +1778,36 @@ impl SyncNetworkContext { }) } + pub fn send_execution_proofs_for_processing( + &self, + id: Id, + block_root: Hash256, + proofs: Vec>, + seen_timestamp: Duration, + ) -> Result<(), SendErrorProcessor> { + let beacon_processor = self + .beacon_processor_if_enabled() + .ok_or(SendErrorProcessor::ProcessorNotAvailable)?; + + debug!(?block_root, ?id, "Sending execution proofs for processing"); + // Lookup sync event safety: If `beacon_processor.send_rpc_execution_proofs` returns Ok() sync + // must receive a single `SyncMessage::BlockComponentProcessed` event with this process type + beacon_processor + .send_rpc_execution_proofs( + block_root, + proofs, + seen_timestamp, + BlockProcessType::SingleExecutionProof { id }, + ) + .map_err(|e| { + error!( + error = ?e, + "Failed to send sync execution proofs to processor" + ); + SendErrorProcessor::SendError + }) + } + pub fn send_custody_columns_for_processing( &self, _id: Id, diff --git a/beacon_node/network/src/sync/network_context/requests.rs b/beacon_node/network/src/sync/network_context/requests.rs index 3183c06d762..63249ed2a4b 100644 --- a/beacon_node/network/src/sync/network_context/requests.rs +++ b/beacon_node/network/src/sync/network_context/requests.rs @@ -5,7 +5,7 @@ use fnv::FnvHashMap; use lighthouse_network::PeerId; use strum::IntoStaticStr; use tracing::Span; -use types::{Hash256, Slot}; +use types::{ExecutionProofId, Hash256, Slot}; pub use blobs_by_range::BlobsByRangeRequestItems; pub use blobs_by_root::{BlobsByRootRequestItems, 
BlobsByRootSingleBlockRequest}; @@ -15,6 +15,9 @@ pub use data_columns_by_range::DataColumnsByRangeRequestItems; pub use data_columns_by_root::{ DataColumnsByRootRequestItems, DataColumnsByRootSingleBlockRequest, }; +pub use execution_proofs_by_root::{ + ExecutionProofsByRootRequestItems, ExecutionProofsByRootSingleBlockRequest, +}; use crate::metrics; @@ -26,6 +29,7 @@ mod blocks_by_range; mod blocks_by_root; mod data_columns_by_range; mod data_columns_by_root; +mod execution_proofs_by_root; #[derive(Debug, PartialEq, Eq, IntoStaticStr)] pub enum LookupVerifyError { @@ -34,8 +38,10 @@ pub enum LookupVerifyError { UnrequestedBlockRoot(Hash256), UnrequestedIndex(u64), UnrequestedSlot(Slot), + UnrequestedProof(ExecutionProofId), InvalidInclusionProof, DuplicatedData(Slot, u64), + DuplicatedProofIDs(ExecutionProofId), InternalError(String), } diff --git a/beacon_node/network/src/sync/network_context/requests/execution_proofs_by_root.rs b/beacon_node/network/src/sync/network_context/requests/execution_proofs_by_root.rs new file mode 100644 index 00000000000..257d6e1a311 --- /dev/null +++ b/beacon_node/network/src/sync/network_context/requests/execution_proofs_by_root.rs @@ -0,0 +1,68 @@ +use lighthouse_network::rpc::methods::ExecutionProofsByRootRequest; +use std::sync::Arc; +use types::{EthSpec, ExecutionProof, ExecutionProofId, Hash256}; + +use super::{ActiveRequestItems, LookupVerifyError}; + +#[derive(Debug, Clone)] +pub struct ExecutionProofsByRootSingleBlockRequest { + pub block_root: Hash256, + pub already_have: Vec<ExecutionProofId>, + pub count_needed: usize, +} + +impl ExecutionProofsByRootSingleBlockRequest { + pub fn into_request(self) -> Result<ExecutionProofsByRootRequest, String> { + ExecutionProofsByRootRequest::new(self.block_root, self.already_have, self.count_needed) + .map_err(|e| e.to_string()) + } +} + +pub struct ExecutionProofsByRootRequestItems<E: EthSpec> { + request: ExecutionProofsByRootSingleBlockRequest, + items: Vec<Arc<ExecutionProof>>, + _phantom: std::marker::PhantomData<E>, +} + +impl<E: EthSpec> ExecutionProofsByRootRequestItems<E> { + pub fn new(request: ExecutionProofsByRootSingleBlockRequest) -> Self { + Self { + request, + items: vec![], + _phantom: std::marker::PhantomData, + } + } +} + +impl<E: EthSpec> ActiveRequestItems for ExecutionProofsByRootRequestItems<E> { + type Item = Arc<ExecutionProof>; + + /// Appends a proof to this multi-item request.
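+    /// Verifies that the proof matches the requested block root, is not an ID listed in
+    /// `already_have`, and has not been received before in this request. Returns `Ok(true)`
+    /// once `count_needed` proofs have been collected, signalling that the request is complete.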
+ /// Note: This is very similar to `DataColumnsByRootSingleBlockRequest` + fn add(&mut self, proof: Self::Item) -> Result { + let block_root = proof.block_root; + if self.request.block_root != block_root { + return Err(LookupVerifyError::UnrequestedBlockRoot(block_root)); + } + + // Verify proof is not in the already_have list + // We should not receive proofs we already have + if self.request.already_have.contains(&proof.proof_id) { + return Err(LookupVerifyError::UnrequestedProof(proof.proof_id)); + } + + // Check for duplicate proof IDs + if self.items.iter().any(|p| p.proof_id == proof.proof_id) { + return Err(LookupVerifyError::DuplicatedProofIDs(proof.proof_id)); + } + + self.items.push(proof); + + // We've received all requested proofs when we have count_needed proofs + Ok(self.items.len() >= self.request.count_needed) + } + + fn consume(&mut self) -> Vec { + std::mem::take(&mut self.items) + } +} diff --git a/beacon_node/network/src/sync/tests/execution_proof_tests.rs b/beacon_node/network/src/sync/tests/execution_proof_tests.rs new file mode 100644 index 00000000000..ce006172187 --- /dev/null +++ b/beacon_node/network/src/sync/tests/execution_proof_tests.rs @@ -0,0 +1,509 @@ +use super::*; +use crate::sync::block_lookups::common::ResponseType; +use lighthouse_network::rpc::{RPCError, RpcErrorResponse}; +use lighthouse_network::service::api_types::SyncRequestId; +use types::{ExecutionBlockHash, ExecutionProof, ExecutionProofId, Hash256, Slot}; + +/// Test successful execution proof fetch and verification +#[test] +fn test_proof_lookup_happy_path() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + + // Get execution payload hash from the block + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + + // Trigger the unknown block (which should trigger proof request) + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + + // Expect block request + let block_id = rig.expect_block_lookup_request(block_root); + + // Send the block + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + // Now expect proof request + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Send all requested proofs + // TODO(zkproofs): We should use min_required instead of hardcoding 2 proofs here + let proof_ids = vec![ + ExecutionProofId::new(0).unwrap(), + ExecutionProofId::new(1).unwrap(), + ]; + rig.complete_single_lookup_proof_download(proof_id, peer_id, block_root, block_hash, proof_ids); + + // Proofs should be processed + rig.expect_block_process(ResponseType::ExecutionProof); + + // Block should be imported + rig.proof_component_processed_imported(block_root); + rig.expect_empty_network(); + rig.expect_no_active_lookups(); +} + +/// Test that empty proof response results in peer penalization +#[test] +fn test_proof_lookup_empty_response() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, 
Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Peer sends stream terminator with no proofs + rig.single_lookup_proof_response(proof_id, peer_id, None); + + // Peer should be penalized for not providing proofs + rig.expect_penalty(peer_id, "NotEnoughResponsesReturned"); + + // Should retry with different peer + let _new_peer = rig.new_connected_peer(); + rig.expect_proof_lookup_request(block_root); +} + +/// Test partial proof response (peer doesn't have all requested proofs) +#[test] +fn test_proof_lookup_partial_response() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Requested 2 proofs but peer only sends 1 + let proof_0 = Arc::new( + ExecutionProof::new( + ExecutionProofId::new(0).unwrap(), + Slot::new(0), + block_hash, + block_root, + vec![1, 2, 3], + ) + .unwrap(), + ); + + rig.single_lookup_proof_response(proof_id, peer_id, Some(proof_0)); + rig.single_lookup_proof_response(proof_id, peer_id, None); // End stream early + + // Should penalize peer for not providing all requested proofs + rig.expect_penalty(peer_id, "NotEnoughResponsesReturned"); + + // Should retry with another peer + let new_peer = rig.new_connected_peer(); + let retry_proof_id = rig.expect_proof_lookup_request(block_root); + + // Complete with all proofs + rig.complete_single_lookup_proof_download( + retry_proof_id, + new_peer, + block_root, + block_hash, + vec![ + ExecutionProofId::new(0).unwrap(), + ExecutionProofId::new(1).unwrap(), + ], + ); + + rig.expect_block_process(ResponseType::ExecutionProof); + rig.proof_component_processed_imported(block_root); + rig.expect_no_active_lookups(); +} + +/// Test unrequested proof triggers penalization +#[test] +fn test_proof_lookup_unrequested_proof() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Requested proofs 0, 1 but peer sends proofs 5 (unrequested) + let unrequested_proof = Arc::new( + ExecutionProof::new( + ExecutionProofId::new(5).unwrap(), + Slot::new(0), + block_hash, + block_root, + vec![1, 2, 3], + ) + .unwrap(), + ); + + rig.single_lookup_proof_response(proof_id, peer_id, Some(unrequested_proof)); + + // Should penalize peer for sending 
unrequested data + rig.expect_penalty(peer_id, "UnrequestedProof"); + + // Should retry + let _new_peer = rig.new_connected_peer(); + rig.expect_proof_lookup_request(block_root); +} + +/// Test duplicate proofs triggers penalization +#[test] +fn test_proof_lookup_duplicate_proof() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Send proof 0 twice + let proof_0_a = Arc::new( + ExecutionProof::new( + ExecutionProofId::new(0).unwrap(), + Slot::new(0), + block_hash, + block_root, + vec![1, 2, 3], + ) + .unwrap(), + ); + // TODO(zkproofs): In this case we have the same proofID but different proof_data + // zkVMs should be deterministic, so if this happens there is likely an issue somewhere + let proof_0_b = Arc::new( + ExecutionProof::new( + ExecutionProofId::new(0).unwrap(), + Slot::new(0), + block_hash, + block_root, + vec![4, 5, 6], // Different data + ) + .unwrap(), + ); + + rig.single_lookup_proof_response(proof_id, peer_id, Some(proof_0_a)); + rig.single_lookup_proof_response(proof_id, peer_id, Some(proof_0_b)); + + // Should penalize peer for duplicate proof + rig.expect_penalty(peer_id, "DuplicatedProof"); + + // Should retry + let _new_peer = rig.new_connected_peer(); + rig.expect_proof_lookup_request(block_root); +} + +/// Test wrong block root in proof triggers penalization +#[test] +fn test_proof_lookup_wrong_block_root() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let wrong_root = Hash256::random(); + let peer_id = rig.new_connected_peer(); + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Send proof with wrong block_root + let wrong_proof = Arc::new( + ExecutionProof::new( + ExecutionProofId::new(0).unwrap(), + Slot::new(0), + block_hash, + wrong_root, + vec![1, 2, 3], + ) + .unwrap(), + ); + + rig.single_lookup_proof_response(proof_id, peer_id, Some(wrong_proof)); + + // Should penalize peer + rig.expect_penalty(peer_id, "UnrequestedBlockRoot"); + + // Should retry + let _new_peer = rig.new_connected_peer(); + rig.expect_proof_lookup_request(block_root); +} + +/// Test proof request timeout +#[test] +fn test_proof_lookup_timeout() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + + // Trigger lookup + 
rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Simulate timeout by sending error + rig.send_sync_message(SyncMessage::RpcError { + sync_request_id: SyncRequestId::SingleExecutionProof { id: proof_id }, + peer_id, + error: RPCError::ErrorResponse(RpcErrorResponse::ServerError, "timeout".to_string()), + }); + + // Should penalize peer for timeout + rig.expect_penalty(peer_id, "rpc_error"); + + // Should retry with different peer + let _new_peer = rig.new_connected_peer(); + rig.expect_proof_lookup_request(block_root); +} + +/// Test peer disconnection during proof request +#[test] +fn test_proof_lookup_peer_disconnected() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Peer disconnects + rig.send_sync_message(SyncMessage::RpcError { + sync_request_id: SyncRequestId::SingleExecutionProof { id: proof_id }, + peer_id, + error: RPCError::Disconnected, + }); + + // Should retry with different peer (no penalty for disconnect) + let _new_peer = rig.new_connected_peer(); + rig.expect_proof_lookup_request(block_root); +} + +/// Test multiple retries on failure +#[test] +fn test_proof_lookup_multiple_retries() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + + let peer_id = rig.new_connected_peer(); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + // First attempt - empty response + let proof_id_1 = rig.expect_proof_lookup_request(block_root); + rig.single_lookup_proof_response(proof_id_1, peer_id, None); + rig.expect_penalty(peer_id, "NotEnoughResponsesReturned"); + + // Second attempt - different peer, also fails + let peer_id_2 = rig.new_connected_peer(); + let proof_id_2 = rig.expect_proof_lookup_request(block_root); + rig.single_lookup_proof_response(proof_id_2, peer_id_2, None); + rig.expect_penalty(peer_id_2, "NotEnoughResponsesReturned"); + + // Third attempt - succeeds + let peer_id_3 = rig.new_connected_peer(); + let proof_id_3 = rig.expect_proof_lookup_request(block_root); + rig.complete_single_lookup_proof_download( + proof_id_3, + peer_id_3, + block_root, + block_hash, + vec![ + ExecutionProofId::new(0).unwrap(), + ExecutionProofId::new(1).unwrap(), + ], + ); + + rig.expect_block_process(ResponseType::ExecutionProof); + rig.proof_component_processed_imported(block_root); + rig.expect_no_active_lookups(); +} + +/// Test proof 
lookup with no peers available +#[test] +fn test_proof_lookup_no_peers() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let peer_id = rig.new_connected_peer(); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.into())); + rig.expect_block_process(ResponseType::Block); + + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Peer fails and disconnects + rig.send_sync_message(SyncMessage::RpcError { + sync_request_id: SyncRequestId::SingleExecutionProof { id: proof_id }, + peer_id, + error: RPCError::Disconnected, + }); + + // Disconnect the peer + rig.peer_disconnected(peer_id); + + // Should not be able to find another peer immediately + // The lookup should remain active waiting for peers + assert_eq!(rig.active_single_lookups_count(), 1); +} + +/// Test successful proof verification after block already has blobs +#[test] +fn test_proof_lookup_with_existing_blobs() { + let Some(mut rig) = TestRig::test_setup_after_fulu() else { + return; + }; + + let block = rig.rand_block(); + let block_root = block.canonical_root(); + let block_hash = block + .message() + .body() + .execution_payload() + .ok() + .map(|p| p.execution_payload_ref().block_hash()) + .unwrap_or_else(ExecutionBlockHash::zero); + let peer_id = rig.new_connected_peer(); + + // Trigger lookup + rig.trigger_unknown_block_from_attestation(block_root, peer_id); + + // Get block + let block_id = rig.expect_block_lookup_request(block_root); + rig.single_lookup_block_response(block_id, peer_id, Some(block.clone().into())); + rig.expect_block_process(ResponseType::Block); + + // Block might still be missing proofs even if blobs present + // Proofs are an additional requirement + let proof_id = rig.expect_proof_lookup_request(block_root); + + // Send proofs + rig.complete_single_lookup_proof_download( + proof_id, + peer_id, + block_root, + block_hash, + vec![ + ExecutionProofId::new(0).unwrap(), + ExecutionProofId::new(1).unwrap(), + ], + ); + + rig.expect_block_process(ResponseType::ExecutionProof); + rig.proof_component_processed_imported(block_root); + rig.expect_no_active_lookups(); +} diff --git a/beacon_node/network/src/sync/tests/lookups.rs b/beacon_node/network/src/sync/tests/lookups.rs index 63bcd176f52..0fdc505ab98 100644 --- a/beacon_node/network/src/sync/tests/lookups.rs +++ b/beacon_node/network/src/sync/tests/lookups.rs @@ -42,7 +42,8 @@ use tokio::sync::mpsc; use tracing::info; use types::{ BeaconState, BeaconStateBase, BlobSidecar, BlockImportSource, DataColumnSidecar, EthSpec, - ForkContext, ForkName, Hash256, MinimalEthSpec as E, SignedBeaconBlock, Slot, + ExecutionBlockHash, ExecutionProof, ExecutionProofId, ForkContext, ForkName, Hash256, + MinimalEthSpec as E, SignedBeaconBlock, Slot, data_column_sidecar::ColumnIndex, test_utils::{SeedableRng, TestRandom, XorShiftRng}, }; @@ -171,7 +172,11 @@ impl TestRig { self.send_sync_message(SyncMessage::UnknownParentBlob(peer_id, blob.into())); } - fn trigger_unknown_block_from_attestation(&mut self, block_root: Hash256, peer_id: PeerId) { + pub(super) fn trigger_unknown_block_from_attestation( + &mut self, + block_root: Hash256, + peer_id: PeerId, + ) { self.send_sync_message(SyncMessage::UnknownBlockHashFromAttestation( peer_id, block_root, )); @@ -184,7 +189,7 @@ impl TestRig { } } - fn 
rand_block(&mut self) -> SignedBeaconBlock { + pub(super) fn rand_block(&mut self) -> SignedBeaconBlock { self.rand_block_and_blobs(NumBlobs::None).0 } @@ -228,7 +233,7 @@ impl TestRig { self.sync_manager.active_single_lookups() } - fn active_single_lookups_count(&self) -> usize { + pub(super) fn active_single_lookups_count(&self) -> usize { self.sync_manager.active_single_lookups().len() } @@ -321,7 +326,7 @@ impl TestRig { } #[track_caller] - fn expect_no_active_lookups(&self) { + pub(super) fn expect_no_active_lookups(&self) { self.expect_no_active_single_lookups(); } @@ -445,7 +450,7 @@ impl TestRig { }); } - fn single_lookup_block_response( + pub(super) fn single_lookup_block_response( &mut self, id: SingleLookupReqId, peer_id: PeerId, @@ -527,6 +532,69 @@ impl TestRig { ); } + /// Send a single execution proof response + pub(super) fn single_lookup_proof_response( + &mut self, + id: SingleLookupReqId, + peer_id: PeerId, + proof: Option>, + ) { + self.send_sync_message(SyncMessage::RpcExecutionProof { + sync_request_id: SyncRequestId::SingleExecutionProof { id }, + peer_id, + execution_proof: proof, + seen_timestamp: D, + }); + } + + /// Complete execution proof download by sending all requested proofs + pub(super) fn complete_single_lookup_proof_download( + &mut self, + id: SingleLookupReqId, + peer_id: PeerId, + block_root: Hash256, + block_hash: ExecutionBlockHash, + subnet_ids: Vec, + ) { + for subnet_id in subnet_ids { + let proof = Arc::new( + ExecutionProof::new( + subnet_id, + types::Slot::new(0), + block_hash, + block_root, + vec![1, 2, 3, 4], + ) + .unwrap(), + ); + self.single_lookup_proof_response(id, peer_id, Some(proof)); + } + // Send stream terminator + self.single_lookup_proof_response(id, peer_id, None); + } + + /// Expect an execution proof request for a specific block + pub(super) fn expect_proof_lookup_request(&mut self, block_root: Hash256) -> SingleLookupReqId { + self.pop_received_network_event(|ev| match ev { + NetworkMessage::SendRequest { + request: RequestType::ExecutionProofsByRoot(req), + app_request_id: AppRequestId::Sync(SyncRequestId::SingleExecutionProof { id }), + .. 
+ } if req.block_root == block_root => Some(*id), + _ => None, + }) + .unwrap_or_else(|_| panic!("Expected proof request for {block_root}")) + } + + /// Send a processing result indicating proofs were processed and block imported + pub(super) fn proof_component_processed_imported(&mut self, block_root: Hash256) { + let id = self.find_single_lookup_for(block_root); + self.send_sync_message(SyncMessage::BlockComponentProcessed { + process_type: BlockProcessType::SingleBlock { id }, + result: BlockProcessingResult::Ok(AvailabilityProcessingStatus::Imported(block_root)), + }); + } + fn complete_lookup_block_download(&mut self, block: SignedBeaconBlock) { let block_root = block.canonical_root(); let id = self.expect_block_lookup_request(block_root); @@ -786,7 +854,7 @@ impl TestRig { } #[track_caller] - fn expect_block_lookup_request(&mut self, for_block: Hash256) -> SingleLookupReqId { + pub(super) fn expect_block_lookup_request(&mut self, for_block: Hash256) -> SingleLookupReqId { self.find_block_lookup_request(for_block) .unwrap_or_else(|e| panic!("Expected block request for {for_block:?}: {e}")) } @@ -910,7 +978,7 @@ impl TestRig { } #[track_caller] - fn expect_block_process(&mut self, response_type: ResponseType) { + pub(super) fn expect_block_process(&mut self, response_type: ResponseType) { match response_type { ResponseType::Block => self .pop_received_processor_event(|ev| { @@ -927,6 +995,11 @@ impl TestRig { (ev.work_type() == beacon_processor::WorkType::RpcCustodyColumn).then_some(()) }) .unwrap_or_else(|e| panic!("Expected column work event: {e}")), + ResponseType::ExecutionProof => self + .pop_received_processor_event(|ev| { + (ev.work_type() == beacon_processor::WorkType::RpcExecutionProofs).then_some(()) + }) + .unwrap_or_else(|e| panic!("Expected execution proofs work event: {e}")), } } diff --git a/beacon_node/network/src/sync/tests/mod.rs b/beacon_node/network/src/sync/tests/mod.rs index 23c14ff63ef..9b82f830bcb 100644 --- a/beacon_node/network/src/sync/tests/mod.rs +++ b/beacon_node/network/src/sync/tests/mod.rs @@ -18,6 +18,7 @@ use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::util::SubscriberInitExt; use types::{ChainSpec, ForkName, MinimalEthSpec as E}; +mod execution_proof_tests; mod lookups; mod range; diff --git a/beacon_node/proof_generation_service/Cargo.toml b/beacon_node/proof_generation_service/Cargo.toml new file mode 100644 index 00000000000..bbd043e0fdd --- /dev/null +++ b/beacon_node/proof_generation_service/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "proof_generation_service" +version = "0.1.0" +edition = "2021" + +[dependencies] +beacon_chain = { path = "../beacon_chain" } +lighthouse_network = { workspace = true } +network = { workspace = true } +types = { path = "../../consensus/types" } +zkvm_execution_layer = { path = "../../zkvm_execution_layer" } +tokio = { workspace = true } +logging = { workspace = true } +tracing = { workspace = true } + +[dev-dependencies] diff --git a/beacon_node/proof_generation_service/src/lib.rs b/beacon_node/proof_generation_service/src/lib.rs new file mode 100644 index 00000000000..80c2a83e98c --- /dev/null +++ b/beacon_node/proof_generation_service/src/lib.rs @@ -0,0 +1,385 @@ +use beacon_chain::{BeaconChain, BeaconChainTypes, ProofGenerationEvent}; +use lighthouse_network::PubsubMessage; +use network::NetworkMessage; +use std::sync::Arc; +use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender}; +use tracing::{debug, error, info}; +use types::{EthSpec, ExecPayload, ExecutionProofId, Hash256, 
SignedBeaconBlock, Slot};
+
+/// Service responsible for "altruistic" proof generation
+///
+/// This service receives notifications about newly imported blocks and generates
+/// execution proofs for blocks that don't have proofs yet. This allows any node
+/// (not just the block proposer) to generate and publish proofs.
+///
+/// Note: proofs are optional, and there is no separate path for the proposer to
+/// generate proofs for its own block. Because the proposer imports the block into
+/// its own chain, that import also triggers this service on the proposer's node.
+pub struct ProofGenerationService {
+    /// Reference to the beacon chain
+    chain: Arc>,
+    /// Receiver for proof generation events
+    event_rx: UnboundedReceiver>,
+    /// Sender to publish proofs to the network
+    network_tx: UnboundedSender>,
+}
+
+impl ProofGenerationService {
+    pub fn new(
+        chain: Arc>,
+        event_rx: UnboundedReceiver>,
+        network_tx: UnboundedSender>,
+    ) -> Self {
+        Self {
+            chain,
+            event_rx,
+            network_tx,
+        }
+    }
+
+    /// Run the service event loop
+    pub async fn run(mut self) {
+        info!("Proof generation service started");
+
+        while let Some(event) = self.event_rx.recv().await {
+            let (block_root, slot, block) = event;
+
+            debug!(
+                slot = ?slot,
+                block_root = ?block_root,
+                "Received block import notification"
+            );
+
+            // Handle the event
+            self.handle_block_import(block_root, slot, block).await;
+        }
+
+        info!("Proof generation service stopped");
+    }
+
+    /// Handle a block import event
+    async fn handle_block_import(
+        &self,
+        block_root: Hash256,
+        slot: Slot,
+        block: Arc>,
+    ) {
+        // Check if proofs are required for this epoch
+        // TODO(zkproofs): alternative is to only enable this when
+        // the zkvm fork is enabled. Check if this is possible
+        let block_epoch = slot.epoch(T::EthSpec::slots_per_epoch());
+        if !self
+            .chain
+            .data_availability_checker
+            .execution_proof_check_required_for_epoch(block_epoch)
+        {
+            debug!(
+                slot = ?slot,
+                epoch = ?block_epoch,
+                "Proofs not required for this epoch, skipping proof generation"
+            );
+            return;
+        }
+
+        // Check if we have a proof generator registry
+        let registry = match &self.chain.zkvm_generator_registry {
+            Some(registry) => registry.clone(),
+            None => {
+                debug!(
+                    slot = ?slot,
+                    "No generator registry configured, skipping proof generation"
+                );
+                return;
+            }
+        };
+
+        // Get the list of proof types we should generate
+        let proof_types = registry.proof_ids();
+
+        if proof_types.is_empty() {
+            debug!(
+                slot = ?slot,
+                "No proof generators registered"
+            );
+            return;
+        }
+
+        debug!(
+            slot = ?slot,
+            block_root = ?block_root,
+            proof_types = proof_types.len(),
+            "Checking for locally missing proofs"
+        );
+
+        // Check which proofs are missing, i.e. which we haven't received yet
+        for proof_id in proof_types {
+            // Check if we already have this proof
+            let has_proof = self.check_if_proof_exists(slot, block_root, proof_id);
+
+            if has_proof {
+                debug!(
+                    slot = ?slot,
+                    proof_id = ?proof_id,
+                    "Proof already exists, skipping"
+                );
+                continue;
+            }
+
+            self.spawn_proof_generation(
+                block_root,
+                slot,
+                block.clone(),
+                proof_id,
+                registry.clone(),
+                self.network_tx.clone(),
+            );
+        }
+    }
+
+    /// Check if a proof already exists for this block
+    fn check_if_proof_exists(
+        &self,
+        slot: Slot,
+        block_root: Hash256,
+        proof_id: ExecutionProofId,
+    ) -> bool {
+        let observed = self.chain.observed_execution_proofs.read();
+        observed
+            .is_known(slot, block_root, proof_id)
+            .unwrap_or(false)
+    }
+
+    /// Spawn a task to generate a proof
+    fn spawn_proof_generation(
+        &self,
+        block_root: Hash256,
+        slot: Slot,
+        block: Arc>,
+ proof_id: ExecutionProofId, + registry: Arc, + network_tx: UnboundedSender>, + ) { + let chain = self.chain.clone(); + + // Get the generator for this proof type + let Some(generator) = registry.get_generator(proof_id) else { + debug!( + slot = ?slot, + proof_id = ?proof_id, + "No generator found for proof type" + ); + return; + }; + + // Spawn the generation task (async because generator.generate() is async) + self.chain.task_executor.spawn( + async move { + info!( + slot = ?slot, + block_root = ?block_root, + proof_id = ?proof_id, + "Generating execution proof" + ); + + // Extract execution payload hash from the block + let block_hash = match block.message().execution_payload() { + Ok(payload) => payload.block_hash(), + Err(e) => { + debug!( + slot = ?slot, + block_root = ?block_root, + error = ?e, + "Block has no execution payload, skipping proof generation" + ); + return; + } + }; + + // Generate the proof using the generator + let proof_result = generator.generate(slot, &block_hash, &block_root).await; + + match proof_result { + Ok(proof) => { + info!( + slot = ?slot, + proof_id = ?proof_id, + "Successfully generated proof" + ); + + // Double-check that proof didn't arrive via gossip while we were generating + let observed = chain.observed_execution_proofs.read(); + if observed + .is_known(slot, block_root, proof_id) + .unwrap_or(false) + { + info!( + slot = ?slot, + proof_id = ?proof_id, + "Proof arrived via gossip while generating, discarding our copy" + ); + return; + } + drop(observed); + + // Note: We don't store the proof in the data availability checker because: + // 1. The block has already been imported and is no longer in the availability cache + // 2. This is altruistic proof generation - we're generating proofs for OTHER nodes + // 3. 
We already have the block, so we don't need the proof for ourselves + + // Publish the proof to the network + let pubsub_message = PubsubMessage::ExecutionProof(Arc::new(proof)); + + let network_message = NetworkMessage::Publish { + messages: vec![pubsub_message], + }; + + if let Err(e) = network_tx.send(network_message) { + error!( + slot = ?slot, + proof_id = ?proof_id, + error = ?e, + "Failed to send proof to network service" + ); + } else { + info!( + slot = ?slot, + proof_id = ?proof_id, + "Proof successfully published to network" + ); + + // Mark the proof as observed so we don't regenerate it + if let Err(e) = chain + .observed_execution_proofs + .write() + .observe_proof(slot, block_root, proof_id) + { + error!( + slot = ?slot, + proof_id = ?proof_id, + error = ?e, + "Failed to mark proof as observed" + ); + } + } + } + Err(e) => { + error!( + slot = ?slot, + proof_id = ?proof_id, + error = %e, + "Failed to generate proof" + ); + } + } + }, + "proof_generation", + ); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use beacon_chain::test_utils::{ + AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType, + }; + use tokio::sync::mpsc; + use types::MinimalEthSpec as E; + + type TestHarness = BeaconChainHarness>; + + /// Create a test harness with minimal setup + fn build_test_harness(validator_count: usize) -> TestHarness { + BeaconChainHarness::builder(E) + .default_spec() + .deterministic_keypairs(validator_count) + .fresh_ephemeral_store() + .build() + } + + #[tokio::test] + async fn test_check_if_proof_exists_returns_false_for_new_proof() { + let harness = build_test_harness(8); + let chain = harness.chain.clone(); + + let (_event_tx, event_rx) = mpsc::unbounded_channel(); + let (network_tx, _network_rx) = mpsc::unbounded_channel(); + + let service = ProofGenerationService::new(chain, event_rx, network_tx); + + let block_root = Hash256::random(); + let slot = types::Slot::new(1); + let proof_id = ExecutionProofId::new(0).unwrap(); + + // Should return false for a proof that hasn't been observed + assert!( + !service.check_if_proof_exists(slot, block_root, proof_id) + ); + } + + #[tokio::test] + async fn test_check_if_proof_exists_returns_true_after_observation() { + let harness = build_test_harness(8); + let chain = harness.chain.clone(); + + let (_event_tx, event_rx) = mpsc::unbounded_channel(); + let (network_tx, _network_rx) = mpsc::unbounded_channel(); + + let service = ProofGenerationService::new(chain.clone(), event_rx, network_tx); + + let block_root = Hash256::random(); + let slot = types::Slot::new(1); + let proof_id = ExecutionProofId::new(0).unwrap(); + + // Mark the proof as observed + chain + .observed_execution_proofs + .write() + .observe_proof(slot, block_root, proof_id) + .unwrap(); + + // Should return true for an observed proof + assert!( + service.check_if_proof_exists(slot, block_root, proof_id) + ); + } + + #[tokio::test] + async fn test_handle_block_import_skips_when_epoch_not_required() { + let harness = build_test_harness(8); + let chain = harness.chain.clone(); + + // Note: zkVM is NOT enabled in this harness + // TODO(zkproofs): can we make a harness with zkVM enabled to test this functionality in a unit test + + let (_event_tx, event_rx) = mpsc::unbounded_channel(); + let (network_tx, mut network_rx) = mpsc::unbounded_channel(); + + let service = ProofGenerationService::new(chain.clone(), event_rx, network_tx); + + harness.advance_slot(); + + harness + .extend_chain( + 1, + BlockStrategy::OnCanonicalHead, + 
AttestationStrategy::AllValidators, + ) + .await; + + let block = harness.chain.head_snapshot().beacon_block.clone(); + let block_root = block.canonical_root(); + let slot = block.slot(); + + service.handle_block_import(block_root, slot, block).await; + + // Give async tasks time to complete + tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; + + // Should not have published any proofs because epoch doesn't require them + assert!( + network_rx.try_recv().is_err(), + "Should not publish proofs when epoch doesn't require them" + ); + } +} diff --git a/beacon_node/src/cli.rs b/beacon_node/src/cli.rs index e4c7c6ff1fe..7ec18ef4350 100644 --- a/beacon_node/src/cli.rs +++ b/beacon_node/src/cli.rs @@ -818,7 +818,6 @@ pub fn cli_app() -> Command { .help("Server endpoint for an execution layer JWT-authenticated HTTP \ JSON-RPC connection. Uses the same endpoint to populate the \ deposit cache.") - .required(true) .action(ArgAction::Set) .display_order(0) ) @@ -829,7 +828,7 @@ pub fn cli_app() -> Command { .alias("jwt-secrets") .help("File path which contains the hex-encoded JWT secret for the \ execution endpoint provided in the --execution-endpoint flag.") - .requires("execution-endpoint") + .requires("execution-source") .action(ArgAction::Set) .display_order(0) ) @@ -840,7 +839,7 @@ pub fn cli_app() -> Command { .alias("jwt-secret-key") .help("Hex-encoded JWT secret for the \ execution endpoint provided in the --execution-endpoint flag.") - .requires("execution-endpoint") + .requires("execution-source") .conflicts_with("execution-jwt") .action(ArgAction::Set) .display_order(0) @@ -876,7 +875,7 @@ pub fn cli_app() -> Command { .help("Emergency fallback fee recipient for use in case the validator client does \ not have one configured. You should set this flag on the validator \ client instead of (or in addition to) setting it here.") - .requires("execution-endpoint") + .requires("execution-source") .action(ArgAction::Set) .display_order(0) ) @@ -886,7 +885,7 @@ pub fn cli_app() -> Command { .alias("payload-builder") .alias("payload-builders") .help("The URL of a service compatible with the MEV-boost API.") - .requires("execution-endpoint") + .requires("execution-source") .action(ArgAction::Set) .display_order(0) ) @@ -933,6 +932,29 @@ pub fn cli_app() -> Command { .action(ArgAction::Set) .display_order(0) ) + /* Execution Proofs settings */ + .arg( + Arg::new("execution-proofs") + .long("execution-proofs") + .help("Activates execution proof mode. Enables the node to subscribe to the \ + execution_proof gossip topic, receive and verify execution proofs from peers, \ + and advertise execution proof support in its ENR for peer discovery. \ + Use --execution-proof-types to specify which proof types this node \ + should generate (optional - nodes can verify without generating).") + .action(ArgAction::SetTrue) + .display_order(0) + ) + .arg( + Arg::new("execution-proof-types") + .long("execution-proof-types") + .value_name("PROOF_TYPE_IDS") + .help("Comma-separated list of proof type IDs to generate \ + (e.g., '0,1' where 0=SP1+Reth, 1=Risc0+Geth). 
\ + Optional - nodes can verify proofs without generating them.") + .requires("execution-proofs") + .action(ArgAction::Set) + .display_order(0) + ) /* Deneb settings */ .arg( Arg::new("trusted-setup-file-override") @@ -1633,5 +1655,9 @@ pub fn cli_app() -> Command { .action(ArgAction::Set) .hide(true) ) + .group(ArgGroup::new("execution-source") + .args(&["execution-endpoint", "execution-proofs"]) + .required(true) + .multiple(true)) .group(ArgGroup::new("enable_http").args(["http", "gui", "staking"]).multiple(true)) } diff --git a/beacon_node/src/config.rs b/beacon_node/src/config.rs index 0f169ffaad6..00a777f6e95 100644 --- a/beacon_node/src/config.rs +++ b/beacon_node/src/config.rs @@ -29,7 +29,8 @@ use std::str::FromStr; use std::time::Duration; use tracing::{error, info, warn}; use types::graffiti::GraffitiString; -use types::{Checkpoint, Epoch, EthSpec, Hash256, PublicKeyBytes}; +use types::{Checkpoint, Epoch, EthSpec, ExecutionProofId, Hash256, PublicKeyBytes}; +use zkvm_execution_layer::ZKVMExecutionLayerConfig; const PURGE_DB_CONFIRMATION: &str = "confirm"; @@ -268,73 +269,154 @@ pub fn get_config( client_config.http_metrics.allocator_metrics_enabled = false; } - // `--execution-endpoint` is required now. - let endpoints: String = clap_utils::parse_required(cli_args, "execution-endpoint")?; - let mut el_config = execution_layer::Config::default(); - - // Parse a single execution endpoint, logging warnings if multiple endpoints are supplied. - let execution_endpoint = parse_only_one_value( - endpoints.as_str(), - SensitiveUrl::parse, - "--execution-endpoint", - )?; - - // JWTs are required if `--execution-endpoint` is supplied. They can be either passed via - // file_path or directly as string. - let secret_file: PathBuf; - // Parse a single JWT secret from a given file_path, logging warnings if multiple are supplied. - if let Some(secret_files) = cli_args.get_one::("execution-jwt") { - secret_file = parse_only_one_value(secret_files, PathBuf::from_str, "--execution-jwt")?; - // Check if the JWT secret key is passed directly via cli flag and persist it to the default - // file location. - } else if let Some(jwt_secret_key) = cli_args.get_one::("execution-jwt-secret-key") { - use std::fs::File; - use std::io::Write; - secret_file = client_config.data_dir().join(DEFAULT_JWT_FILE); - let mut jwt_secret_key_file = File::create(secret_file.clone()) - .map_err(|e| format!("Error while creating jwt_secret_key file: {:?}", e))?; - jwt_secret_key_file - .write_all(jwt_secret_key.as_bytes()) - .map_err(|e| { - format!( - "Error occurred while writing to jwt_secret_key file: {:?}", - e - ) - })?; + // Auto-enable in-process dummy execution layer if --execution-proofs is set without + // --execution-proof-types and no explicit --execution-endpoint is provided. + let use_dummy_el = cli_args.get_flag("execution-proofs") + && cli_args + .get_one::("execution-proof-types") + .is_none() + && cli_args.get_one::("execution-endpoint").is_none(); + + client_config.use_dummy_el = use_dummy_el; + + // Configure execution layer: either use provided endpoint or dummy EL (auto-enabled with --execution-proofs) + if !use_dummy_el { + let endpoints: Option = clap_utils::parse_optional(cli_args, "execution-endpoint")?; + let endpoints = endpoints + .ok_or("Error! Either --execution-endpoint or --execution-proofs must be provided")?; + + let mut el_config = execution_layer::Config::default(); + + // Parse a single execution endpoint, logging warnings if multiple endpoints are supplied. 
+ let execution_endpoint = parse_only_one_value( + endpoints.as_str(), + SensitiveUrl::parse, + "--execution-endpoint", + )?; + + // JWTs are required if `--execution-endpoint` is supplied. They can be either passed via + // file_path or directly as string. + let secret_file: PathBuf; + // Parse a single JWT secret from a given file_path, logging warnings if multiple are supplied. + if let Some(secret_files) = cli_args.get_one::("execution-jwt") { + secret_file = parse_only_one_value(secret_files, PathBuf::from_str, "--execution-jwt")?; + // Check if the JWT secret key is passed directly via cli flag and persist it to the default + // file location. + } else if let Some(jwt_secret_key) = cli_args.get_one::("execution-jwt-secret-key") + { + use std::fs::File; + use std::io::Write; + secret_file = client_config.data_dir().join(DEFAULT_JWT_FILE); + let mut jwt_secret_key_file = File::create(secret_file.clone()) + .map_err(|e| format!("Error while creating jwt_secret_key file: {:?}", e))?; + jwt_secret_key_file + .write_all(jwt_secret_key.as_bytes()) + .map_err(|e| { + format!( + "Error occurred while writing to jwt_secret_key file: {:?}", + e + ) + })?; + } else { + return Err("Error! Please set either --execution-jwt file_path or --execution-jwt-secret-key directly via cli when using --execution-endpoint".to_string()); + } + + // Parse and set the payload builder, if any. + if let Some(endpoint) = cli_args.get_one::("builder") { + let payload_builder = parse_only_one_value(endpoint, SensitiveUrl::parse, "--builder")?; + el_config.builder_url = Some(payload_builder); + + el_config.builder_user_agent = + clap_utils::parse_optional(cli_args, "builder-user-agent")?; + + el_config.builder_header_timeout = + clap_utils::parse_optional(cli_args, "builder-header-timeout")? + .map(Duration::from_millis); + + el_config.disable_builder_ssz_requests = cli_args.get_flag("builder-disable-ssz"); + } + + // Set config values from parse values. + el_config.secret_file = Some(secret_file.clone()); + el_config.execution_endpoint = Some(execution_endpoint.clone()); + el_config.suggested_fee_recipient = + clap_utils::parse_optional(cli_args, "suggested-fee-recipient")?; + el_config.jwt_id = clap_utils::parse_optional(cli_args, "execution-jwt-id")?; + el_config.jwt_version = clap_utils::parse_optional(cli_args, "execution-jwt-version")?; + el_config + .default_datadir + .clone_from(client_config.data_dir()); + let execution_timeout_multiplier = + clap_utils::parse_required(cli_args, "execution-timeout-multiplier")?; + el_config.execution_timeout_multiplier = Some(execution_timeout_multiplier); + + // Store the EL config in the client config. + client_config.execution_layer = Some(el_config); } else { - return Err("Error! Please set either --execution-jwt file_path or --execution-jwt-secret-key directly via cli when using --execution-endpoint".to_string()); - } + // Create an execution_layer config pointing to localhost + info!("Using in-process dummy execution layer (--execution-proofs)"); - // Parse and set the payload builder, if any. 
- if let Some(endpoint) = cli_args.get_one::("builder") { - let payload_builder = parse_only_one_value(endpoint, SensitiveUrl::parse, "--builder")?; - el_config.builder_url = Some(payload_builder); + let mut el_config = execution_layer::Config::default(); - el_config.builder_user_agent = clap_utils::parse_optional(cli_args, "builder-user-agent")?; + // Point to the local dummy EL running on the default engine port + el_config.execution_endpoint = Some( + SensitiveUrl::parse("http://127.0.0.1:8551") + .map_err(|e| format!("Failed to parse dummy EL endpoint: {:?}", e))?, + ); - el_config.builder_header_timeout = - clap_utils::parse_optional(cli_args, "builder-header-timeout")? - .map(Duration::from_millis); + // For dummy EL, let ExecutionLayer handle JWT as usual + // Dummy EL will not validate JWT (no need for local testing) + el_config + .default_datadir + .clone_from(client_config.data_dir()); - el_config.disable_builder_ssz_requests = cli_args.get_flag("builder-disable-ssz"); + client_config.execution_layer = Some(el_config); } - // Set config values from parse values. - el_config.secret_file = Some(secret_file.clone()); - el_config.execution_endpoint = Some(execution_endpoint.clone()); - el_config.suggested_fee_recipient = - clap_utils::parse_optional(cli_args, "suggested-fee-recipient")?; - el_config.jwt_id = clap_utils::parse_optional(cli_args, "execution-jwt-id")?; - el_config.jwt_version = clap_utils::parse_optional(cli_args, "execution-jwt-version")?; - el_config - .default_datadir - .clone_from(client_config.data_dir()); - let execution_timeout_multiplier = - clap_utils::parse_required(cli_args, "execution-timeout-multiplier")?; - el_config.execution_timeout_multiplier = Some(execution_timeout_multiplier); + // Parse execution proofs config if provided + if cli_args.get_flag("execution-proofs") { + let generation_proof_types = if let Some(gen_types_str) = + clap_utils::parse_optional::(cli_args, "execution-proof-types")? + { + gen_types_str + .split(',') + .map(|s| s.trim().parse::()) + .collect::, _>>() + .map_err(|e| { + format!( + "Invalid proof type ID in --execution-proof-types: {}", + e + ) + })? + .into_iter() + .map(ExecutionProofId::new) + .collect::, _>>() + .map_err(|e| format!("Invalid subnet ID: {}", e))? + } else { + // No generation proof types provided - running in verification-only mode + if client_config.use_dummy_el { + info!("--execution-proofs: no EL needed for proof verification"); + } + HashSet::new() + }; - // Store the EL config in the client config. - client_config.execution_layer = Some(el_config); + // Build and validate the config + let zkvm_config = ZKVMExecutionLayerConfig::builder() + .generation_proof_types(generation_proof_types) + .build() + .map_err(|e| format!("Invalid ZK-VM configuration: {}", e))?; + + client_config.zkvm_execution_layer = Some(zkvm_config); + + info!( + "ZKVM mode activated with generation_proof_types={:?}", + client_config + .zkvm_execution_layer + .as_ref() + .unwrap() + .generation_proof_types + ); + } // Override default trusted setup file if required if let Some(trusted_setup_file_path) = cli_args.get_one::("trusted-setup-file-override") diff --git a/consensus/types/src/chain_spec.rs b/consensus/types/src/chain_spec.rs index a66080ada6f..e177c02d0e9 100644 --- a/consensus/types/src/chain_spec.rs +++ b/consensus/types/src/chain_spec.rs @@ -221,6 +221,18 @@ pub struct ChainSpec { /// The Gloas fork epoch is optional, with `None` representing "Gloas never happens". 
pub gloas_fork_epoch: Option, + /* + * Execution proof params + */ + /// Whether execution proofs are enabled via CLI flag --execution-proofs. + /// When true, the node will subscribe to execution proof gossip, verify proofs, + /// TODO(ethproofs): Changed to Electra fork for demo. + /// and optionally generate proofs. zkVM activates at the Fulu fork. + /// Unlike other forks, this is not a network-wide activation but a per-node opt-in. + pub zkvm_enabled: bool, + /// Minimum number of execution proofs required from different subnets. + /// Only used when zkvm_enabled is true. + pub zkvm_min_proofs_required: usize, /* * Networking */ @@ -263,6 +275,11 @@ pub struct ChainSpec { pub(crate) blob_schedule: BlobSchedule, pub min_epochs_for_data_column_sidecars_requests: u64, + /* + * Networking zkvm + */ + pub min_epochs_for_execution_proof_requests: u64, + /* * Networking Gloas */ @@ -479,6 +496,48 @@ impl ChainSpec { .is_some_and(|gloas_fork_epoch| gloas_fork_epoch != self.far_future_epoch) } + /// Returns true if zkVM mode is enabled via CLI flag. + /// Unlike other forks, this is set via CLI and indicates per-node opt-in. + pub fn is_zkvm_enabled(&self) -> bool { + self.zkvm_enabled + } + + /// TODO(ethproofs): Changed to Electra fork epoch for demo. + /// + /// Returns the epoch at which zkVM activates. + /// Currently uses Fulu fork epoch. + /// Returns None if zkVM is disabled or Fulu is not scheduled. + pub fn zkvm_fork_epoch(&self) -> Option { + if self.zkvm_enabled { + self.electra_fork_epoch + } else { + None + } + } + + /// Returns true if zkVM mode is enabled for the given epoch. + pub fn is_zkvm_enabled_for_epoch(&self, epoch: Epoch) -> bool { + self.zkvm_fork_epoch() + .is_some_and(|zkvm_fork_epoch| epoch >= zkvm_fork_epoch) + } + + /// TODO(ethproofs): Changed to Electra fork for demo. + /// + /// Returns true if zkVM mode can be used at the given fork. + pub fn is_zkvm_enabled_for_fork(&self, fork_name: ForkName) -> bool { + self.is_zkvm_enabled() && fork_name.electra_enabled() + } + + /// Returns the minimum number of execution proofs required. + /// Only meaningful when zkVM is enabled. + pub fn zkvm_min_proofs_required(&self) -> Option { + if self.is_zkvm_enabled() { + Some(self.zkvm_min_proofs_required) + } else { + None + } + } + /// Returns a full `Fork` struct for a given epoch. 
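The zkVM helpers added above are per-node opt-in checks rather than network-wide fork logic. As a rough illustration only (not part of this patch; the function name is hypothetical), a caller deciding whether execution proofs must be checked for a block's epoch might combine them like this:

```rust
use types::{ChainSpec, Epoch};

/// Hypothetical helper: how many distinct execution proofs are needed for `epoch`,
/// or `None` when the node has not opted in via --execution-proofs or the (demo)
/// activation epoch has not been reached.
fn required_proof_count(spec: &ChainSpec, epoch: Epoch) -> Option<usize> {
    if spec.is_zkvm_enabled_for_epoch(epoch) {
        // Returns Some(..) whenever zkVM mode is enabled on this node.
        spec.zkvm_min_proofs_required()
    } else {
        None
    }
}
```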
pub fn fork_at_epoch(&self, epoch: Epoch) -> Fork { let current_fork_name = self.fork_name_at_epoch(epoch); @@ -1124,6 +1183,12 @@ impl ChainSpec { gloas_fork_version: [0x07, 0x00, 0x00, 0x00], gloas_fork_epoch: None, + /* + * zkVM execution proof params + */ + zkvm_enabled: false, + zkvm_min_proofs_required: default_zkvm_min_proofs_required(), + /* * Network specific */ @@ -1184,6 +1249,12 @@ impl ChainSpec { default_min_epochs_for_data_column_sidecars_requests(), max_data_columns_by_root_request: default_data_columns_by_root_request(), + /* + * Networking zkvm specific + */ + min_epochs_for_execution_proof_requests: + default_min_epochs_for_execution_proof_requests(), + /* * Application specific */ @@ -1259,6 +1330,10 @@ impl ChainSpec { // Gloas gloas_fork_version: [0x07, 0x00, 0x00, 0x00], gloas_fork_epoch: None, + // zkVM + zkvm_enabled: false, + zkvm_min_proofs_required: 0, + min_epochs_for_execution_proof_requests: 2, // Other network_id: 2, // lighthouse testnet network id deposit_chain_id: 5, @@ -1484,6 +1559,12 @@ impl ChainSpec { gloas_fork_version: [0x07, 0x00, 0x00, 0x64], gloas_fork_epoch: None, + /* + * zkVM execution proof params + */ + zkvm_enabled: false, + zkvm_min_proofs_required: default_zkvm_min_proofs_required(), + /* * Network specific */ @@ -1535,6 +1616,12 @@ impl ChainSpec { default_min_epochs_for_data_column_sidecars_requests(), max_data_columns_by_root_request: default_data_columns_by_root_request(), + /* + * Networking zkvm specific + */ + min_epochs_for_execution_proof_requests: + default_min_epochs_for_execution_proof_requests(), + /* * Application specific */ @@ -1995,6 +2082,11 @@ const fn default_min_epochs_for_blob_sidecars_requests() -> u64 { 4096 } +const fn default_min_epochs_for_execution_proof_requests() -> u64 { + // TODO(zkproofs): add into consensus-specs with rational + 2 +} + const fn default_blob_sidecar_subnet_count() -> u64 { 6 } @@ -2025,6 +2117,12 @@ const fn default_max_blobs_per_block_electra() -> u64 { 9 } +/// Minimum number of execution proofs required from different subnets +/// before marking an execution payload as available in ZK-VM mode. +pub const fn default_zkvm_min_proofs_required() -> usize { + crate::execution_proof::DEFAULT_MIN_PROOFS_REQUIRED +} + const fn default_attestation_propagation_slot_range() -> u64 { 32 } diff --git a/consensus/types/src/execution_proof.rs b/consensus/types/src/execution_proof.rs new file mode 100644 index 00000000000..25e6ceade04 --- /dev/null +++ b/consensus/types/src/execution_proof.rs @@ -0,0 +1,179 @@ +use crate::{ExecutionBlockHash, Hash256, Slot, VariableList}; +use serde::{Deserialize, Serialize}; +use ssz::Encode; +use ssz_derive::{Decode, Encode as DeriveEncode}; +use ssz_types::typenum; +use std::fmt::{self, Debug}; +use tree_hash_derive::TreeHash; + +use super::ExecutionProofId; + +/// TODO(ethproofs): Set to 2MB for the demo. +/// +/// Maximum size of proof data in bytes +/// +/// Note: Most proofs will fit within 300KB. Some zkVMs have 1MB proofs (currently) +/// and so this number was set to accommodate for the most zkVMs. +pub const MAX_PROOF_DATA_BYTES: usize = 2_097_152; + +/// Minimum number of execution proofs required from different proof types +/// before marking an execution payload as available in ZK-VM mode. +/// +/// This provides client diversity - nodes wait for proofs from K different +/// zkVM+EL combinations before considering an execution payload available. 
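As a minimal sketch of the rule the comment above describes (an illustration, not code from this diff; the helper name is hypothetical): availability is reached once proofs from enough distinct proof types have been seen, so duplicates of the same `proof_id` do not count twice.

```rust
use std::collections::HashSet;
use types::{ExecutionProof, ExecutionProofId};

/// Hypothetical check: true once proofs from at least `min_required` distinct
/// proof types (zkVM+EL combinations) have been received for a payload.
fn has_enough_distinct_proofs(proofs: &[ExecutionProof], min_required: usize) -> bool {
    // Collapse duplicates: only distinct proof IDs contribute to availability.
    let distinct: HashSet<ExecutionProofId> = proofs.iter().map(|p| p.proof_id).collect();
    distinct.len() >= min_required
}
```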
+pub const DEFAULT_MIN_PROOFS_REQUIRED: usize = 2; + +/// Maximum number of execution proofs that can be requested or stored. +/// This corresponds to the maximum number of proof types (zkVM+EL combinations) +/// that can be supported, which is currently 8 (ExecutionProofId is 0-7). +pub const MAX_PROOFS: usize = 8; + +type ProofData = VariableList; + +/// ExecutionProof represents a cryptographic `proof of execution` that +/// an execution payload is valid. +/// +/// In short, it is proof that if we were to run a particular execution layer client +/// with the given execution payload, they would return the output values that are attached +/// to the proof. +/// +/// Each proof is associated with a specific proof_id, which identifies the +/// zkVM and EL combination used to generate it. Multiple proofs from different +/// proof IDs can exist for the same execution payload, providing both zkVM and EL diversity. +#[derive(Clone, Serialize, Deserialize, DeriveEncode, Decode, TreeHash, PartialEq, Eq)] +pub struct ExecutionProof { + /// Which proof type (zkVM+EL combination) this proof belongs to + /// Examples: 0=SP1+Reth, 1=Risc0+Geth, 2=SP1+Geth, etc. + pub proof_id: ExecutionProofId, + + /// The slot of the beacon block this proof validates + pub slot: Slot, + + /// The block hash of the execution payload this proof validates + pub block_hash: ExecutionBlockHash, + + /// The beacon block root corresponding to the beacon block + /// with the execution payload, that this proof attests to. + pub block_root: Hash256, + + /// The actual proof data + pub proof_data: ProofData, +} + +impl ExecutionProof { + pub fn new( + proof_id: ExecutionProofId, + slot: Slot, + block_hash: ExecutionBlockHash, + block_root: Hash256, + proof_data: Vec, + ) -> Result { + let proof_data = ProofData::new(proof_data) + .map_err(|e| format!("Failed to create proof data: {:?}", e))?; + + Ok(Self { + proof_id, + slot, + block_hash, + block_root, + proof_data, + }) + } + + /// Returns the size of the proof data in bytes + pub fn proof_data_size(&self) -> usize { + self.proof_data.len() + } + + /// Get a reference to the proof data as a slice + pub fn proof_data_slice(&self) -> &[u8] { + &self.proof_data + } + + /// Check if this proof is for a specific execution block hash + pub fn is_for_block(&self, block_hash: &ExecutionBlockHash) -> bool { + &self.block_hash == block_hash + } + + /// Check if this proof is from a specific proof type + pub fn is_from_proof_type(&self, proof_id: ExecutionProofId) -> bool { + self.proof_id == proof_id + } + + /// Get the proof type ID + pub fn proof_id(&self) -> ExecutionProofId { + self.proof_id + } + + /// Minimum size of an ExecutionProof in SSZ bytes (with empty proof_data) + /// TODO(zkproofs): If the proof_data is empty, then that is an invalid proof + pub fn min_size() -> usize { + use bls::FixedBytesExtended; + Self { + proof_id: ExecutionProofId::new(0).unwrap(), + slot: Slot::new(0), + block_hash: ExecutionBlockHash::zero(), + block_root: Hash256::zero(), + proof_data: ProofData::new(vec![]).unwrap(), + } + .as_ssz_bytes() + .len() + } + + /// Maximum size of an ExecutionProof in SSZ bytes (with max proof_data) + pub fn max_size() -> usize { + use bls::FixedBytesExtended; + Self { + proof_id: ExecutionProofId::new(0).unwrap(), + slot: Slot::new(0), + block_hash: ExecutionBlockHash::zero(), + block_root: Hash256::zero(), + proof_data: ProofData::new(vec![0u8; MAX_PROOF_DATA_BYTES]).unwrap(), + } + .as_ssz_bytes() + .len() + } +} + +impl Debug for ExecutionProof { + fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ExecutionProof") + .field("proof_id", &self.proof_id) + .field("slot", &self.slot) + .field("block_hash", &self.block_hash) + .field("block_root", &self.block_root) + .field("proof_data_size", &self.proof_data.len()) + .finish() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use bls::FixedBytesExtended; + + #[test] + fn test_execution_proof_too_large() { + let subnet_id = ExecutionProofId::new(0).unwrap(); + let slot = Slot::new(100); + let block_hash = ExecutionBlockHash::zero(); + let block_root = Hash256::zero(); + let proof_data = vec![0u8; MAX_PROOF_DATA_BYTES + 1]; + + let result = ExecutionProof::new(subnet_id, slot, block_hash, block_root, proof_data); + assert!(result.is_err()); + assert!(result.unwrap_err().contains("Failed to create proof data")); + } + + #[test] + fn test_execution_proof_max_size() { + let subnet_id = ExecutionProofId::new(0).unwrap(); + let slot = Slot::new(100); + let block_hash = ExecutionBlockHash::zero(); + let block_root = Hash256::zero(); + let proof_data = vec![0u8; MAX_PROOF_DATA_BYTES]; + + let result = ExecutionProof::new(subnet_id, slot, block_hash, block_root, proof_data); + assert!(result.is_ok()); + } +} diff --git a/consensus/types/src/execution_proof_id.rs b/consensus/types/src/execution_proof_id.rs new file mode 100644 index 00000000000..4d85d02a18d --- /dev/null +++ b/consensus/types/src/execution_proof_id.rs @@ -0,0 +1,146 @@ +use serde::{Deserialize, Serialize}; +use ssz::{Decode, DecodeError, Encode}; +use std::fmt::{self, Display}; +use tree_hash::TreeHash; + +/// Number of execution proofs +/// Each proof represents a different zkVM+EL combination +/// +/// TODO(zkproofs): The number 8 is a parameter that we will want to configure in the future +pub const EXECUTION_PROOF_TYPE_COUNT: u8 = 8; + +/// ExecutionProofId identifies which zkVM/proof system a proof belongs to. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)] +pub struct ExecutionProofId(u8); + +impl Encode for ExecutionProofId { + fn is_ssz_fixed_len() -> bool { + ::is_ssz_fixed_len() + } + + fn ssz_fixed_len() -> usize { + ::ssz_fixed_len() + } + + fn ssz_bytes_len(&self) -> usize { + self.0.ssz_bytes_len() + } + + fn ssz_append(&self, buf: &mut Vec) { + self.0.ssz_append(buf) + } + + fn as_ssz_bytes(&self) -> Vec { + self.0.as_ssz_bytes() + } +} + +impl Decode for ExecutionProofId { + fn is_ssz_fixed_len() -> bool { + ::is_ssz_fixed_len() + } + + fn ssz_fixed_len() -> usize { + ::ssz_fixed_len() + } + + fn from_ssz_bytes(bytes: &[u8]) -> Result { + let value = u8::from_ssz_bytes(bytes)?; + Self::new(value).map_err(DecodeError::BytesInvalid) + } +} + +impl TreeHash for ExecutionProofId { + fn tree_hash_type() -> tree_hash::TreeHashType { + ::tree_hash_type() + } + + fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding { + self.0.tree_hash_packed_encoding() + } + + fn tree_hash_packing_factor() -> usize { + ::tree_hash_packing_factor() + } + + fn tree_hash_root(&self) -> tree_hash::Hash256 { + self.0.tree_hash_root() + } +} + +impl ExecutionProofId { + /// Creates a new ExecutionProofId if the value is valid + /// + /// Valid IDs are in the range [0, EXECUTION_PROOF_TYPE_COUNT). 
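For context, this validation is what the CLI path relies on when `--execution-proof-types` is parsed. A rough sketch of turning a comma-separated list such as "0,1" into proof IDs (helper name hypothetical; it mirrors, but does not copy, the beacon_node config code in this diff):

```rust
use types::ExecutionProofId;

/// Hypothetical parser for a CLI-style list like "0,1".
fn parse_proof_types(s: &str) -> Result<Vec<ExecutionProofId>, String> {
    s.split(',')
        .map(|part| {
            // Each entry must be a u8 in [0, EXECUTION_PROOF_TYPE_COUNT).
            let id: u8 = part
                .trim()
                .parse()
                .map_err(|e| format!("invalid proof type number: {e}"))?;
            ExecutionProofId::new(id)
        })
        .collect()
}
```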
+ pub fn new(id: u8) -> Result { + if id < EXECUTION_PROOF_TYPE_COUNT { + Ok(Self(id)) + } else { + Err(format!( + "Invalid ExecutionProofId: {}, must be < {}", + id, EXECUTION_PROOF_TYPE_COUNT + )) + } + } + + /// Returns the inner u8 value + pub fn as_u8(&self) -> u8 { + self.0 + } + + /// Returns the proof ID as a usize + pub fn as_usize(&self) -> usize { + self.0 as usize + } + + /// Returns all valid proof IDs + pub fn all() -> Vec { + (0..EXECUTION_PROOF_TYPE_COUNT).map(Self).collect() + } +} + +impl Display for ExecutionProofId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From for u8 { + fn from(subnet_id: ExecutionProofId) -> u8 { + subnet_id.0 + } +} + +impl TryFrom for ExecutionProofId { + type Error = String; + + fn try_from(value: u8) -> Result { + Self::new(value) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_valid_proof_ids() { + for id in 0..EXECUTION_PROOF_TYPE_COUNT { + assert!(ExecutionProofId::new(id).is_ok()); + } + } + + #[test] + fn test_invalid_proof_ids() { + assert!(ExecutionProofId::new(EXECUTION_PROOF_TYPE_COUNT).is_err()); + } + + #[test] + fn test_all_proof_ids() { + let all = ExecutionProofId::all(); + assert_eq!(all.len(), EXECUTION_PROOF_TYPE_COUNT as usize); + for (idx, proof_id) in all.iter().enumerate() { + assert_eq!(proof_id.as_usize(), idx); + } + } +} diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index 8e83fed1d9a..5aba30246fa 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -42,6 +42,8 @@ pub mod eth_spec; pub mod execution_block_hash; pub mod execution_payload; pub mod execution_payload_header; +pub mod execution_proof; +pub mod execution_proof_id; pub mod fork; pub mod fork_data; pub mod fork_name; @@ -177,6 +179,8 @@ pub use crate::execution_payload_header::{ ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, }; +pub use crate::execution_proof::{ExecutionProof, MAX_PROOF_DATA_BYTES}; +pub use crate::execution_proof_id::{EXECUTION_PROOF_TYPE_COUNT, ExecutionProofId}; pub use crate::execution_requests::{ExecutionRequests, RequestType}; pub use crate::fork::Fork; pub use crate::fork_context::ForkContext; diff --git a/dummy_el/Cargo.toml b/dummy_el/Cargo.toml new file mode 100644 index 00000000000..ba05daadaca --- /dev/null +++ b/dummy_el/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "dummy_el" +version = "0.1.0" +edition = "2021" + +[lib] +name = "dummy_el" +path = "src/lib.rs" + +[[bin]] +name = "dummy_el" +path = "src/main.rs" + +[dependencies] +axum = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +tokio = { workspace = true } +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter", "json"] } +clap = { workspace = true } +anyhow = { workspace = true } +jsonwebtoken = "9" +hex = { workspace = true } diff --git a/dummy_el/Dockerfile b/dummy_el/Dockerfile new file mode 100644 index 00000000000..1ece25c7225 --- /dev/null +++ b/dummy_el/Dockerfile @@ -0,0 +1,32 @@ +# Multi-stage build for dummy_el +FROM rust:1.88.0-bullseye AS builder + +WORKDIR /build + +# Copy the entire workspace (needed for workspace structure) +COPY . . 
+
+# Build only dummy_el in release mode
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    --mount=type=cache,target=/build/target \
+    cargo build --release -p dummy_el && \
+    cp target/release/dummy_el /dummy_el
+
+# Runtime stage with minimal Ubuntu image
+FROM ubuntu:22.04
+
+RUN apt-get update && apt-get -y upgrade && apt-get install -y --no-install-recommends \
+    ca-certificates \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy the binary from builder
+COPY --from=builder /dummy_el /usr/local/bin/dummy_el
+
+# Create a fake 'geth' binary that runs dummy_el instead
+# Kurtosis will call "geth init ..." and "geth --..." but we'll run dummy_el
+COPY --from=builder /build/dummy_el/geth-wrapper.sh /usr/local/bin/geth
+RUN chmod +x /usr/local/bin/geth
+
+# Expose default Engine API port
+EXPOSE 8551
diff --git a/dummy_el/README.md b/dummy_el/README.md
new file mode 100644
index 00000000000..0c3361a4a94
--- /dev/null
+++ b/dummy_el/README.md
@@ -0,0 +1,24 @@
+# Using Dummy EL
+
+This is a dummy EL for use with proof-verification nodes. Such nodes do not need a real EL, since they validate execution by verifying proofs instead.
+
+## Quick Start
+
+### 1. Build the Docker Image
+
+From the lighthouse repository root:
+
+```bash
+docker build -f dummy_el/Dockerfile -t dummy_el:local .
+```
+
+### 2. Add to Kurtosis
+
+In your Kurtosis network params, add the following:
+
+```yaml
+  - el_type: geth
+    el_image: dummy_el:local
+```
+
+Note that `el_type` must be set to `geth` because Kurtosis looks for a binary named `geth`. The image ships a wrapper script in place of the Geth binary, so those calls are handled by our dummy_el instead.
\ No newline at end of file
diff --git a/dummy_el/geth-wrapper.sh b/dummy_el/geth-wrapper.sh
new file mode 100755
index 00000000000..8112bb44e9c
--- /dev/null
+++ b/dummy_el/geth-wrapper.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+set -e
+
+# This is a wrapper that pretends to be geth but actually runs dummy_el
+# Kurtosis calls: geth init ... && geth --authrpc.port=8551 ...
+# We ignore the init invocation and start dummy_el for any other invocation
+
+echo "[dummy_el geth-wrapper] Called with: $@"
+
+# Check if this is the "geth init" command and ignore it
+if echo "$@" | grep -q "init"; then
+    echo "[dummy_el geth-wrapper] Ignoring 'geth init' command"
+    exit 0
+fi
+
+# If we're here, it's the actual geth run command
+# Kurtosis mounts JWT secret at /jwt/jwtsecret
+JWT_PATH="/jwt/jwtsecret"
+
+echo "[dummy_el geth-wrapper] Starting dummy_el instead of geth"
+
+# Run dummy_el with JWT if available, otherwise without
+if [ -f "$JWT_PATH" ]; then
+    echo "[dummy_el geth-wrapper] Using JWT from $JWT_PATH"
+    exec /usr/local/bin/dummy_el --host 0.0.0.0 --port 8551 --jwt-secret "$JWT_PATH"
+else
+    echo "[dummy_el geth-wrapper] WARNING: No JWT file found at $JWT_PATH"
+    exec /usr/local/bin/dummy_el --host 0.0.0.0 --port 8551
+fi
diff --git a/dummy_el/src/lib.rs b/dummy_el/src/lib.rs
new file mode 100644
index 00000000000..ea386612c8f
--- /dev/null
+++ b/dummy_el/src/lib.rs
@@ -0,0 +1,525 @@
+//! Dummy Execution Layer for zkproofs demo
+//!
+//! This module provides an in-process execution layer that returns success
+//! for all Engine API calls. It's designed to be used with zkproofs to validate
+//! blocks without needing a full execution layer.
+//!
+//! TODO(ethproofs): Changed to debug logs for demo purposes.
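+//!
+//! Illustrative usage sketch (added for clarity, not taken from the original
+//! sources): the library can be embedded by building a `DummyElConfig` and
+//! calling `start_dummy_el`, mirroring what the `dummy_el` binary does with
+//! its default ports.
+//!
+//! ```ignore
+//! use dummy_el::{start_dummy_el, DummyElConfig};
+//!
+//! #[tokio::main]
+//! async fn main() -> anyhow::Result<()> {
+//!     let config = DummyElConfig {
+//!         host: "127.0.0.1".to_string(),
+//!         engine_port: 8551,
+//!         rpc_port: 8545,
+//!         ws_port: 8546,
+//!         metrics_port: 9001,
+//!         p2p_port: 30303,
+//!         // No JWT secret: Engine API auth is disabled (testing only).
+//!         jwt_secret_path: None,
+//!     };
+//!     // Binds all ports and serves stub responses until shut down.
+//!     start_dummy_el(config).await
+//! }
+//! ```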
+ +use axum::{ + extract::State, + http::{Request, StatusCode}, + middleware::{self, Next}, + response::Response, + routing::post, + Json, Router, +}; +use jsonwebtoken::{Algorithm, DecodingKey, Validation}; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value as JsonValue}; +use std::net::SocketAddr; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::oneshot; +use tracing::{debug, error, warn}; + +const JSONRPC_VERSION: &str = "2.0"; +const JWT_SECRET_LENGTH: usize = 32; + +#[derive(Debug, Clone)] +pub struct DummyElConfig { + pub host: String, + pub engine_port: u16, + pub rpc_port: u16, + pub ws_port: u16, + pub metrics_port: u16, + pub p2p_port: u16, + pub jwt_secret_path: Option, +} + +/// Represents a prepared dummy execution layer ready to run +pub struct PreparedDummyEl { + engine_listener: tokio::net::TcpListener, + engine_app: Router, + rpc_listener: tokio::net::TcpListener, + rpc_app: Router, + ws_listener: tokio::net::TcpListener, + ws_app: Router, + metrics_listener: tokio::net::TcpListener, + metrics_app: Router, + p2p_tcp_task: tokio::task::JoinHandle<()>, + p2p_udp_task: tokio::task::JoinHandle<()>, +} + +#[derive(Debug, Clone)] +struct AppState { + jwt_secret: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +struct JwtClaims { + iat: u64, + #[serde(skip_serializing_if = "Option::is_none")] + id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + clv: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct JsonRpcRequest { + jsonrpc: String, + method: String, + params: JsonValue, + id: JsonValue, +} + +#[derive(Debug, Serialize, Deserialize)] +struct JsonRpcResponse { + jsonrpc: String, + #[serde(skip_serializing_if = "Option::is_none")] + result: Option, + #[serde(skip_serializing_if = "Option::is_none")] + error: Option, + id: JsonValue, +} + +#[derive(Debug, Serialize, Deserialize)] +struct JsonRpcError { + code: i64, + message: String, +} + +async fn auth_middleware( + State(state): State>, + request: Request, + next: Next, +) -> Result { + // If no JWT secret is configured, skip auth + if state.jwt_secret.is_none() { + return Ok(next.run(request).await); + } + + let jwt_secret = state.jwt_secret.as_ref().unwrap(); + + // Check for Authorization header + let auth_header = request + .headers() + .get("Authorization") + .and_then(|h| h.to_str().ok()); + + match auth_header { + Some(auth) if auth.starts_with("Bearer ") => { + let token = &auth[7..]; // Skip "Bearer " + + // Validate JWT token + let mut validation = Validation::new(Algorithm::HS256); + validation.validate_exp = false; + validation.required_spec_claims.remove("exp"); + + match jsonwebtoken::decode::( + token, + &DecodingKey::from_secret(jwt_secret), + &validation, + ) { + Ok(_) => { + debug!("JWT authentication successful"); + Ok(next.run(request).await) + } + Err(e) => { + warn!("JWT validation failed: {:?}", e); + Err((StatusCode::UNAUTHORIZED, "Invalid JWT token".to_string())) + } + } + } + Some(_) => { + warn!("Authorization header present but not in Bearer format"); + Err(( + StatusCode::UNAUTHORIZED, + "Authorization header must be in format: Bearer ".to_string(), + )) + } + None => { + warn!("Missing Authorization header"); + Err(( + StatusCode::UNAUTHORIZED, + "Missing Authorization header".to_string(), + )) + } + } +} + +async fn handle_rpc( + State(_state): State>, + Json(request): Json, +) -> (StatusCode, Json) { + debug!( + method = %request.method, + params = ?request.params, + "Received RPC request" + ); + + let result = match 
request.method.as_str() { + "eth_syncing" => { + debug!("eth_syncing: returning false (not syncing)"); + Ok(json!(false)) + } + "eth_getBlockByNumber" => { + debug!("eth_getBlockByNumber: returning null"); + Ok(json!(null)) + } + "eth_getBlockByHash" => { + debug!("eth_getBlockByHash: returning null"); + Ok(json!(null)) + } + "engine_newPayloadV1" + | "engine_newPayloadV2" + | "engine_newPayloadV3" + | "engine_newPayloadV4" => { + debug!("{}: returning VALID status", request.method); + // Extract blockHash from the ExecutionPayload (params[0]) + let block_hash = request + .params + .get(0) + .and_then(|payload| payload.get("blockHash")) + .and_then(|hash| hash.as_str()) + .unwrap_or("0x0000000000000000000000000000000000000000000000000000000000000000"); + + Ok(json!({ + "status": "VALID", + "latestValidHash": block_hash, + "validationError": null + })) + } + "engine_forkchoiceUpdatedV1" + | "engine_forkchoiceUpdatedV2" + | "engine_forkchoiceUpdatedV3" => { + debug!("{}: returning VALID status", request.method); + // Extract headBlockHash from the ForkchoiceState (params[0]) + let head_block_hash = request + .params + .get(0) + .and_then(|state| state.get("headBlockHash")) + .and_then(|hash| hash.as_str()) + .unwrap_or("0x0000000000000000000000000000000000000000000000000000000000000000"); + + Ok(json!({ + "payloadStatus": { + "status": "VALID", + "latestValidHash": head_block_hash, + "validationError": null + }, + "payloadId": null + })) + } + "engine_getPayloadV1" + | "engine_getPayloadV2" + | "engine_getPayloadV3" + | "engine_getPayloadV4" + | "engine_getPayloadV5" => { + debug!( + "{}: returning error (payload not available)", + request.method + ); + Err(JsonRpcError { + code: -38001, + message: "Unknown payload".to_string(), + }) + } + "engine_getPayloadBodiesByHashV1" => { + debug!("engine_getPayloadBodiesByHashV1: returning empty array"); + Ok(json!([])) + } + "engine_getPayloadBodiesByRangeV1" => { + debug!("engine_getPayloadBodiesByRangeV1: returning empty array"); + Ok(json!([])) + } + "engine_exchangeCapabilities" => { + let capabilities = vec![ + "engine_newPayloadV1", + "engine_newPayloadV2", + "engine_newPayloadV3", + "engine_newPayloadV4", + "engine_getPayloadV1", + "engine_getPayloadV2", + "engine_getPayloadV3", + "engine_getPayloadV4", + "engine_getPayloadV5", + "engine_forkchoiceUpdatedV1", + "engine_forkchoiceUpdatedV2", + "engine_forkchoiceUpdatedV3", + "engine_getPayloadBodiesByHashV1", + "engine_getPayloadBodiesByRangeV1", + "engine_getClientVersionV1", + "engine_getBlobsV1", + "engine_getBlobsV2", + ]; + debug!( + "engine_exchangeCapabilities: returning {} capabilities", + capabilities.len() + ); + Ok(json!(capabilities)) + } + "engine_getClientVersionV1" => { + debug!("engine_getClientVersionV1: returning client info"); + Ok(json!([{ + "code": "DM", + "name": "Dummy-EL", + "version": "v0.1.0", + "commit": "00000000" + }])) + } + "engine_getBlobsV1" | "engine_getBlobsV2" => { + debug!("{}: returning empty array", request.method); + Ok(json!([])) + } + _ => { + debug!(method = %request.method, "Method not found"); + Err(JsonRpcError { + code: -32601, + message: format!("Method not found: {}", request.method), + }) + } + }; + + let response = match result { + Ok(result) => JsonRpcResponse { + jsonrpc: JSONRPC_VERSION.to_string(), + result: Some(result), + error: None, + id: request.id, + }, + Err(error) => JsonRpcResponse { + jsonrpc: JSONRPC_VERSION.to_string(), + result: None, + error: Some(error), + id: request.id, + }, + }; + + debug!(method = %request.method, success 
= response.error.is_none(), "RPC response sent"); + (StatusCode::OK, Json(response)) +} + +async fn handle_simple_rpc( + Json(request): Json, +) -> (StatusCode, Json) { + debug!(method = %request.method, "Received simple RPC request"); + + let result: Result = match request.method.as_str() { + "admin_nodeInfo" => Ok(json!({ + "id": "0ecd4a2c5f7c2a304e3acbec67efea275510d31c304fe47f4e626a2ebd5fb101", + "name": "Dummy-EL/v0.1.0", + "enode": "enode://dummy@127.0.0.1:30303", + "enr": "enr:-Iq4QDummy0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001", + "ip": "127.0.0.1", + "ports": { + "discovery": 30303, + "listener": 30303 + } + })), + _ => { + // For any other method, just return a success response + Ok(json!(null)) + } + }; + + let response = JsonRpcResponse { + jsonrpc: JSONRPC_VERSION.to_string(), + result: Some(result.unwrap_or(json!(null))), + error: None, + id: request.id, + }; + + (StatusCode::OK, Json(response)) +} + +fn strip_prefix(s: &str) -> &str { + s.strip_prefix("0x").unwrap_or(s) +} + +fn read_jwt_secret(path: &PathBuf) -> anyhow::Result> { + let contents = std::fs::read_to_string(path)?; + let hex_str = strip_prefix(contents.trim()); + let bytes = hex::decode(hex_str)?; + + if bytes.len() != JWT_SECRET_LENGTH { + anyhow::bail!( + "Invalid JWT secret length. Expected {} bytes, got {}", + JWT_SECRET_LENGTH, + bytes.len() + ); + } + + Ok(bytes) +} + +/// Prepare the dummy execution layer for startup +/// +/// This function binds all necessary ports and prepares the servers, +/// then signals readiness via the oneshot channel before running the servers. +/// The function does not return until the servers are shut down. +pub async fn prepare_and_start_dummy_el( + config: DummyElConfig, + ready_tx: oneshot::Sender<()>, +) -> anyhow::Result<()> { + let prepared = prepare_dummy_el(config).await?; + + // Signal that we're ready + let _ = ready_tx.send(()); + + // Now run the servers + prepared.run().await +} + +/// Prepare the dummy execution layer server without starting it +/// +/// This binds all ports and prepares the servers but does not start accepting connections. +/// Returns a `PreparedDummyEl` that can be run with the `run()` method. 
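+///
+/// Minimal sketch of the intended two-step flow (an illustration added here;
+/// it assumes a Tokio runtime is already running and uses only items defined
+/// in this module):
+///
+/// ```ignore
+/// # use dummy_el::{prepare_dummy_el, DummyElConfig};
+/// # async fn run_el(config: DummyElConfig) -> anyhow::Result<()> {
+/// // Bind all listeners first so port conflicts surface before serving.
+/// let prepared = prepare_dummy_el(config).await?;
+/// // Serve the Engine API / RPC / metrics endpoints until shutdown or error.
+/// prepared.run().await
+/// # }
+/// ```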
+pub async fn prepare_dummy_el(config: DummyElConfig) -> anyhow::Result { + // Read JWT secret if provided + let jwt_secret = match &config.jwt_secret_path { + Some(path) => match read_jwt_secret(path) { + Ok(secret) => { + debug!("JWT secret loaded from {:?}", path); + Some(secret) + } + Err(e) => { + error!("Failed to read JWT secret from {:?}: {}", path, e); + return Err(e); + } + }, + None => { + warn!("No JWT secret provided - authentication disabled!"); + warn!("This is insecure and should only be used for testing"); + None + } + }; + + debug!( + host = %config.host, + engine_port = config.engine_port, + rpc_port = config.rpc_port, + ws_port = config.ws_port, + metrics_port = config.metrics_port, + p2p_port = config.p2p_port, + jwt_auth = jwt_secret.is_some(), + "Starting Dummy Execution Layer" + ); + + let state = Arc::new(AppState { jwt_secret }); + + // Engine API server (port 8551) with JWT auth + let engine_app = Router::new() + .route("/", post(handle_rpc)) + .layer(middleware::from_fn_with_state( + state.clone(), + auth_middleware, + )) + .with_state(state.clone()); + + let engine_addr = format!("{}:{}", config.host, config.engine_port) + .parse::() + .expect("Invalid engine address"); + + debug!("Engine API listening on http://{}", engine_addr); + + // Simple RPC server for HTTP RPC (port 8545) - no JWT auth + let rpc_app = Router::new().route("/", post(handle_simple_rpc)); + let rpc_addr = format!("{}:{}", config.host, config.rpc_port) + .parse::() + .expect("Invalid RPC address"); + debug!("HTTP RPC listening on http://{}", rpc_addr); + + // Simple RPC server for WebSocket (port 8546) - no JWT auth + let ws_app = Router::new().route("/", post(handle_simple_rpc)); + let ws_addr = format!("{}:{}", config.host, config.ws_port) + .parse::() + .expect("Invalid WebSocket address"); + debug!("WebSocket RPC listening on http://{}", ws_addr); + + // Simple server for metrics (port 9001) + let metrics_app = Router::new().route("/", post(handle_simple_rpc)); + let metrics_addr = format!("{}:{}", config.host, config.metrics_port) + .parse::() + .expect("Invalid metrics address"); + debug!("Metrics listening on http://{}", metrics_addr); + + // Bind P2P discovery ports (TCP and UDP) - just to satisfy Kurtosis port checks + let p2p_tcp_addr = format!("{}:{}", config.host, config.p2p_port) + .parse::() + .expect("Invalid P2P TCP address"); + let p2p_udp_addr = format!("{}:{}", config.host, config.p2p_port) + .parse::() + .expect("Invalid P2P UDP address"); + + // Spawn P2P TCP listener in a task to keep it alive + let p2p_tcp_listener = tokio::net::TcpListener::bind(p2p_tcp_addr).await?; + debug!("P2P TCP listening on {}", p2p_tcp_addr); + let p2p_tcp_task = tokio::spawn(async move { + loop { + // Accept connections but do nothing with them + if let Ok((_socket, _addr)) = p2p_tcp_listener.accept().await { + // Connection accepted, just drop it + } + } + }); + + // Spawn P2P UDP listener in a task to keep it alive + let p2p_udp_socket = tokio::net::UdpSocket::bind(p2p_udp_addr).await?; + debug!("P2P UDP listening on {}", p2p_udp_addr); + let p2p_udp_task = tokio::spawn(async move { + let mut buf = [0u8; 1024]; + loop { + // Receive packets but do nothing with them + let _ = p2p_udp_socket.recv(&mut buf).await; + } + }); + + // Bind all servers without starting them + let engine_listener = tokio::net::TcpListener::bind(engine_addr).await?; + let rpc_listener = tokio::net::TcpListener::bind(rpc_addr).await?; + let ws_listener = tokio::net::TcpListener::bind(ws_addr).await?; + let 
metrics_listener = tokio::net::TcpListener::bind(metrics_addr).await?; + + debug!("All listeners bound and ready"); + + Ok(PreparedDummyEl { + engine_listener, + engine_app, + rpc_listener, + rpc_app, + ws_listener, + ws_app, + metrics_listener, + metrics_app, + p2p_tcp_task, + p2p_udp_task, + }) +} + +impl PreparedDummyEl { + /// Run the prepared dummy execution layer servers + pub async fn run(self) -> anyhow::Result<()> { + debug!("Running dummy execution layer servers"); + + tokio::select! { + result = axum::serve(self.engine_listener, self.engine_app) => result?, + result = axum::serve(self.rpc_listener, self.rpc_app) => result?, + result = axum::serve(self.ws_listener, self.ws_app) => result?, + result = axum::serve(self.metrics_listener, self.metrics_app) => result?, + _ = self.p2p_tcp_task => {}, + _ = self.p2p_udp_task => {}, + } + + Ok(()) + } +} + +/// Start the dummy execution layer server (legacy function) +/// +/// This is a convenience function that prepares and starts the dummy EL. +/// For more control, use `prepare_dummy_el()` and `prepare_and_start_dummy_el()`. +pub async fn start_dummy_el(config: DummyElConfig) -> anyhow::Result<()> { + let prepared = prepare_dummy_el(config).await?; + prepared.run().await +} diff --git a/dummy_el/src/main.rs b/dummy_el/src/main.rs new file mode 100644 index 00000000000..22cd04dfe57 --- /dev/null +++ b/dummy_el/src/main.rs @@ -0,0 +1,53 @@ +use clap::Parser; +use dummy_el::DummyElConfig; +use std::path::PathBuf; +use tracing_subscriber; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + #[arg(long, default_value = "8551", help = "Engine API port")] + port: u16, + + #[arg(long, default_value = "127.0.0.1")] + host: String, + + #[arg(long, help = "Path to JWT secret file (hex encoded)")] + jwt_secret: Option, + + #[arg(long, default_value = "8545", help = "HTTP RPC port")] + rpc_port: u16, + + #[arg(long, default_value = "8546", help = "WebSocket port")] + ws_port: u16, + + #[arg(long, default_value = "9001", help = "Metrics port")] + metrics_port: u16, + + #[arg(long, default_value = "30303", help = "P2P discovery port (TCP/UDP)")] + p2p_port: u16, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + tracing_subscriber::fmt() + .with_env_filter( + tracing_subscriber::EnvFilter::try_from_default_env() + .unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info")), + ) + .init(); + + let args = Args::parse(); + + let config = DummyElConfig { + host: args.host, + engine_port: args.port, + rpc_port: args.rpc_port, + ws_port: args.ws_port, + metrics_port: args.metrics_port, + p2p_port: args.p2p_port, + jwt_secret_path: args.jwt_secret, + }; + + dummy_el::start_dummy_el(config).await +} diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 00000000000..e7905dad012 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,3 @@ +[toolchain] +channel = "nightly-2025-08-19" +components = ["clippy", "rustfmt", "rust-analyzer"] diff --git a/scripts/local_testnet/network_params_mixed_proof_gen_verify.yaml b/scripts/local_testnet/network_params_mixed_proof_gen_verify.yaml new file mode 100644 index 00000000000..85534a968e1 --- /dev/null +++ b/scripts/local_testnet/network_params_mixed_proof_gen_verify.yaml @@ -0,0 +1,32 @@ +# 3 nodes generate proofs, 1 node only verifies +participants: + # Proof generating nodes (nodes 1-3) + - el_type: geth + el_image: ethereum/client-go:latest + cl_type: lighthouse + cl_image: lighthouse:local + cl_extra_params: + - 
--execution-proofs + - --execution-proof-types=0,1 + - --target-peers=3 + count: 3 + # Proof verifying only node (node 4) + # TODO(zkproofs): Currently there is no way to add no client here + # We likely want to use our dummy zkvm EL here + - el_type: geth + el_image: dummy_el:local + cl_type: lighthouse + cl_image: lighthouse:local + cl_extra_params: + - --execution-proofs + - --target-peers=3 + count: 1 +network_params: + electra_fork_epoch: 0 + fulu_fork_epoch: 1 + seconds_per_slot: 2 +global_log_level: debug +snooper_enabled: false +additional_services: + - dora + - prometheus_grafana diff --git a/scripts/local_testnet/network_params_proof_gen_only.sh b/scripts/local_testnet/network_params_proof_gen_only.sh new file mode 100755 index 00000000000..70c2c8f5c69 --- /dev/null +++ b/scripts/local_testnet/network_params_proof_gen_only.sh @@ -0,0 +1,155 @@ +#!/bin/bash + +# Helper script for monitoring execution proof generation and gossip +# Usage: ./network_params_proof_gen_only.sh [command] +# ENCLAVE=my-testnet ./network_params_proof_gen_only.sh [command] +# +# Set ENCLAVE environment variable to use a different testnet. +# Default: local-testnet + +ENCLAVE="${ENCLAVE:-local-testnet}" + +# Color output +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +case "${1:-help}" in + generation) + echo -e "${GREEN}=== Proof Generation and Publishing ===${NC}" + for i in 1 2 3 4; do + echo -e "\n${YELLOW}--- Node $i ---${NC}" + kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep -E "(Generating execution proof|Proof successfully published)" | tail -5 + done + ;; + + gossip-subscribe) + echo -e "${GREEN}=== ExecutionProof Topic Subscriptions ===${NC}" + for i in 1 2 3 4; do + echo -e "\n${YELLOW}--- Node $i ---${NC}" + kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Subscribed to topic.*execution_proof" + done + ;; + + gossip-receive) + echo -e "${GREEN}=== Received Execution Proofs via Gossip ===${NC}" + for i in 1 2 3 4; do + count=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Received execution proof via gossip" | wc -l) + echo -e "${YELLOW}Node $i:${NC} $count proofs received" + done + ;; + + gossip-verified) + echo -e "${GREEN}=== Verified Execution Proofs ===${NC}" + for i in 1 2 3 4; do + count=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Successfully verified gossip execution proof" | wc -l) + echo -e "${YELLOW}Node $i:${NC} $count proofs verified" + done + ;; + + errors) + echo -e "${GREEN}=== Checking for Errors ===${NC}" + for i in 1 2 3 4; do + echo -e "\n${YELLOW}--- Node $i ---${NC}" + no_peers=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "NoPeersSubscribedToTopic.*execution_proof" | wc -l) + failed_sub=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Failed to subscribe.*execution_proof" | wc -l) + + if [ "$no_peers" -gt 0 ]; then + echo -e "${RED}NoPeersSubscribedToTopic errors: $no_peers${NC}" + else + echo -e "${GREEN}NoPeersSubscribedToTopic errors: 0${NC}" + fi + + if [ "$failed_sub" -gt 0 ]; then + echo -e "${RED}Failed subscription errors: $failed_sub${NC}" + else + echo -e "${GREEN}Failed subscription errors: 0${NC}" + fi + done + ;; + + zkvm-logs) + echo -e "${GREEN}=== ZKVM Debug Logs ===${NC}" + for i in 1 2 3 4; do + echo -e "\n${YELLOW}--- Node $i ---${NC}" + kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "ZKVM:" | head -5 + done + ;; + + fork-transition) + echo -e "${GREEN}=== Fork 
Transition Logs ===${NC}" + for i in 1 2 3 4; do + echo -e "\n${YELLOW}--- Node $i ---${NC}" + kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep -E "(Subscribing to new fork|subscribe_new_fork_topics called)" + done + ;; + + stats) + echo -e "${GREEN}=== Execution Proof Statistics ===${NC}" + for i in 1 2 3 4; do + generated=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Generating execution proof" | wc -l) + published=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Proof successfully published" | wc -l) + received=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Received execution proof via gossip" | wc -l) + verified=$(kurtosis service logs $ENCLAVE cl-$i-lighthouse-geth -a 2>&1 | grep "Successfully verified gossip execution proof" | wc -l) + + echo -e "${YELLOW}Node $i:${NC}" + echo -e " Generated: $generated" + echo -e " Published: $published" + echo -e " Received: $received" + echo -e " Verified: $verified" + done + ;; + + follow) + NODE="${2:-1}" + echo -e "${GREEN}=== Following Execution Proof Logs for Node $NODE ===${NC}" + echo -e "${YELLOW}Press Ctrl+C to stop${NC}" + kurtosis service logs $ENCLAVE cl-$NODE-lighthouse-geth -f | grep --line-buffered -E "(Generating execution proof|Proof successfully published|Received execution proof via gossip|Successfully verified gossip execution proof)" + ;; + + all) + echo -e "${GREEN}=== Complete Execution Proof Report ===${NC}\n" + $0 zkvm-logs + echo -e "\n" + $0 fork-transition + echo -e "\n" + $0 gossip-subscribe + echo -e "\n" + $0 stats + echo -e "\n" + $0 errors + ;; + + help|*) + echo "Helper script for monitoring execution proof generation and gossip" + echo "" + echo "Usage: $0 [command]" + echo " ENCLAVE=name $0 [command]" + echo "" + echo "Environment Variables:" + echo " ENCLAVE - Testnet enclave name (default: local-testnet)" + echo "" + echo "Commands:" + echo " generation - Show proof generation and publishing logs" + echo " gossip-subscribe - Show ExecutionProof topic subscriptions" + echo " gossip-receive - Count received proofs on each node" + echo " gossip-verified - Count verified proofs on each node" + echo " errors - Check for gossip errors" + echo " zkvm-logs - Show ZKVM debug logs" + echo " fork-transition - Show fork transition logs" + echo " stats - Show proof statistics for all nodes" + echo " follow [node] - Follow proof logs in real-time (default: node 1)" + echo " all - Show complete report" + echo " help - Show this help message" + echo "" + echo "Examples:" + echo " # Use default testnet (local-testnet)" + echo " $0 stats" + echo " $0 follow 2" + echo " $0 all" + echo "" + echo " # Use custom testnet enclave" + echo " ENCLAVE=my-testnet $0 stats" + ;; +esac diff --git a/scripts/local_testnet/network_params_proof_gen_only.yaml b/scripts/local_testnet/network_params_proof_gen_only.yaml new file mode 100644 index 00000000000..2919485a17f --- /dev/null +++ b/scripts/local_testnet/network_params_proof_gen_only.yaml @@ -0,0 +1,21 @@ +# Network configuration for testing execution proof generation +# All nodes have execution layers and are configured to generate proofs +participants: + - el_type: geth + el_image: ethereum/client-go:latest + cl_type: lighthouse + cl_image: lighthouse:local + cl_extra_params: + - --execution-proofs + - --execution-proof-types=0,1 + - --target-peers=3 + count: 4 +network_params: + electra_fork_epoch: 0 + fulu_fork_epoch: 1 + seconds_per_slot: 2 +global_log_level: debug +snooper_enabled: false 
+additional_services: + - dora + - prometheus_grafana \ No newline at end of file diff --git a/zkvm_execution_layer/Cargo.toml b/zkvm_execution_layer/Cargo.toml new file mode 100644 index 00000000000..e76aa22b504 --- /dev/null +++ b/zkvm_execution_layer/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "zkvm_execution_layer" +version = "0.1.0" +edition = "2021" + +[dependencies] +async-trait = "0.1" #TODO(zkproofs): Remove +lru = "0.12" +hashbrown = "0.15" +serde_json = "1.0" +tokio = { version = "1", features = ["full"] } +thiserror = "2" +types = { path = "../consensus/types" } +execution_layer = { path = "../beacon_node/execution_layer" } +pico-prism-vm = { git = "https://github.com/ethproofs/pico.git", package = "pico-vm", branch = "ethproofs-pico-prism" } +proofman-verifier = { git = "https://github.com/0xPolygonHermez/pil2-proofman.git", tag = "v0.12.0" } +sp1-verifier = { git = "https://github.com/succinctlabs/hypercube-verifier.git", branch = "ethproofs_demo" } +verify-stark = { git = "https://github.com/openvm-org/openvm.git", branch = "feat/v1-verify-stark", package = "verify-stark" } +bitcode = { version = "0.6.5", default-features = false, features = ["serde"] } +serde = { version = "1.0.201", default-features = false, features = ["derive"] } +bincode = { workspace = true } +tempfile = { workspace = true } +reqwest = { workspace = true } +uuid = { workspace = true } +rand = { workspace = true } +once_cell = { workspace = true } +tracing = { workspace = true } + +[dev-dependencies] +mockall = "0.12" diff --git a/zkvm_execution_layer/src/config.rs b/zkvm_execution_layer/src/config.rs new file mode 100644 index 00000000000..7ed6f6125c1 --- /dev/null +++ b/zkvm_execution_layer/src/config.rs @@ -0,0 +1,176 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashSet; +use types::{execution_proof::DEFAULT_MIN_PROOFS_REQUIRED, ExecutionProofId}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ZKVMExecutionLayerConfig { + /// Minimum number of proofs required from _different_ proof types (proof_ids) + /// in order for the node to mark an execution payload as VALID. + /// + /// Note: All nodes receive ALL proof types via the single execution_proof gossip topic. + pub min_proofs_required: usize, + + /// Which proof types to generate (empty if not generating proofs) + /// The proof ID identifies the zkVM+EL combination (e.g., 0=SP1+Reth, 1=Risc0+Geth) + pub generation_proof_types: HashSet, + + /// Proof cache size (number of execution block hashes to cache proofs for) + /// TODO(zkproofs): remove since we use da_checker for proof caches + pub proof_cache_size: usize, +} + +impl Default for ZKVMExecutionLayerConfig { + fn default() -> Self { + Self { + min_proofs_required: DEFAULT_MIN_PROOFS_REQUIRED, + generation_proof_types: HashSet::new(), + // TODO(zkproofs): This is somewhat arbitrary. The number was computed + // by NUMBER_OF_BLOCKS_BEFORE_FINALIZATION * NUM_PROOFS_PER_BLOCK = 64 * 8 + // We can change it to be more rigorous/scientific + proof_cache_size: 64 * 8, + } + } +} + +impl ZKVMExecutionLayerConfig { + pub fn validate(&self) -> Result<(), String> { + if self.min_proofs_required == 0 { + return Err("min_proofs_required must be at least 1".to_string()); + } + + if self.proof_cache_size == 0 { + return Err("proof_cache_size must be at least 1".to_string()); + } + + // Note: We do NOT validate that generation_proof_types.len() >= min_proofs_required + // because proof-generating nodes validate via their execution layer, not via proofs. 
+ // Only lightweight verifier nodes (without EL) need to wait for min_proofs_required. + + Ok(()) + } + + /// Create a builder for the config + /// TODO(zkproofs): I think we can remove this + pub fn builder() -> ZKVMExecutionLayerConfigBuilder { + ZKVMExecutionLayerConfigBuilder::default() + } +} + +#[derive(Default)] +pub struct ZKVMExecutionLayerConfigBuilder { + min_proofs_required: Option, + generation_proof_types: HashSet, + proof_cache_size: Option, +} + +impl ZKVMExecutionLayerConfigBuilder { + pub fn min_proofs_required(mut self, min: usize) -> Self { + self.min_proofs_required = Some(min); + self + } + + pub fn generation_proof_types(mut self, proof_types: HashSet) -> Self { + self.generation_proof_types = proof_types; + self + } + + pub fn add_generation_proof_type(mut self, proof_type: ExecutionProofId) -> Self { + self.generation_proof_types.insert(proof_type); + self + } + + pub fn proof_cache_size(mut self, size: usize) -> Self { + self.proof_cache_size = Some(size); + self + } + + /// Build the configuration + pub fn build(self) -> Result { + let config = ZKVMExecutionLayerConfig { + min_proofs_required: self + .min_proofs_required + .unwrap_or(DEFAULT_MIN_PROOFS_REQUIRED), + generation_proof_types: self.generation_proof_types, + proof_cache_size: self.proof_cache_size.unwrap_or(1024), + }; + + config.validate()?; + Ok(config) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_valid_config() { + let proof_type_0 = ExecutionProofId::new(0).unwrap(); + let proof_type_1 = ExecutionProofId::new(1).unwrap(); + + let config = ZKVMExecutionLayerConfig::builder() + .add_generation_proof_type(proof_type_0) + .add_generation_proof_type(proof_type_1) + .min_proofs_required(2) + .build(); + + assert!(config.is_ok()); + } + + #[test] + fn test_valid_config_with_generation() { + let proof_type_0 = ExecutionProofId::new(0).unwrap(); + let proof_type_1 = ExecutionProofId::new(1).unwrap(); + + let config = ZKVMExecutionLayerConfig::builder() + .add_generation_proof_type(proof_type_0) + .add_generation_proof_type(proof_type_1) + .min_proofs_required(1) + .proof_cache_size(512) + .build(); + + assert!(config.is_ok()); + let config = config.unwrap(); + assert_eq!(config.generation_proof_types.len(), 2); + assert_eq!(config.min_proofs_required, 1); + assert_eq!(config.proof_cache_size, 512); + } + + #[test] + fn test_min_proofs_required_zero() { + let config = ZKVMExecutionLayerConfig::builder() + .min_proofs_required(0) // Invalid: must be > 0 + .build(); + + assert!(config.is_err()); + } + + #[test] + fn test_no_generation_proof_types() { + // Node can receive and verify proofs without generating any + let config = ZKVMExecutionLayerConfig::builder() + .min_proofs_required(2) + .build(); + + assert!(config.is_ok()); + let config = config.unwrap(); + assert!(config.generation_proof_types.is_empty()); + } + + #[test] + fn test_generation_proof_types_less_than_min() { + // Proof-generating nodes validate via EL, not proofs + // They can generate any number of proof types regardless of min_proofs_required + let proof_type_0 = ExecutionProofId::new(0).unwrap(); + + let config = ZKVMExecutionLayerConfig::builder() + .add_generation_proof_type(proof_type_0) + .min_proofs_required(2) + .build(); + + assert!(config.is_ok()); + let config = config.unwrap(); + assert_eq!(config.generation_proof_types.len(), 1); + assert_eq!(config.min_proofs_required, 2); + } +} diff --git a/zkvm_execution_layer/src/dummy_proof_gen.rs b/zkvm_execution_layer/src/dummy_proof_gen.rs new file mode 
100644 index 00000000000..10cf7ed2c93 --- /dev/null +++ b/zkvm_execution_layer/src/dummy_proof_gen.rs @@ -0,0 +1,232 @@ +use crate::ethproofs_demo::{download_proof_binary, fetch_proof_from_ethproofs, VERIFIER_STORE}; +use crate::proof_generation::{ProofGenerationError, ProofGenerationResult, ProofGenerator}; +use async_trait::async_trait; +use std::time::Duration; +use tokio::time::sleep; +use tracing::{debug, info}; +use types::{ExecutionBlockHash, ExecutionProof, ExecutionProofId, Hash256, Slot}; + +/// TODO(ethproofs): Implementation of proof generation for demo. +/// +/// Dummy proof generator for testing +/// +/// This generator simulates the proof generation process with a configurable delay +/// and creates dummy proofs. +pub struct DummyProofGenerator { + proof_id: ExecutionProofId, + generation_delay: Duration, +} + +impl DummyProofGenerator { + /// Create a new dummy generator for the specified proof ID + pub fn new(proof_id: ExecutionProofId) -> Self { + Self { + proof_id, + generation_delay: Duration::from_millis(0), + } + } + + /// Create a new dummy generator with custom generation delay + pub fn with_delay(proof_id: ExecutionProofId, delay: Duration) -> Self { + Self { + proof_id, + generation_delay: delay, + } + } + + /// TODO(ethproofs): Fallback when the Ethproofs API fails or test verification fails. + /// + /// Create a fallback dummy proof using proof_id 0, which maps to the FallbackVerifier. + /// The FallbackVerifier skips cryptographic verification and accepts all proofs. + fn create_dummy_proof( + &self, + slot: Slot, + payload_hash: &ExecutionBlockHash, + block_root: &Hash256, + ) -> ProofGenerationResult { + let dummy_data = format!( + "ethproofs_fallback_proof_id_0_slot_{}_hash_{}", + slot.as_u64(), + payload_hash + ) + .into_bytes(); + + // Use proof_id 0 (Fallback verifier) to mark this as a dummy proof + let fallback_proof_id = ExecutionProofId::new(0).expect("proof_id 0 is always valid"); + ExecutionProof::new( + fallback_proof_id, + slot, + *payload_hash, + *block_root, + dummy_data, + ) + .map_err(ProofGenerationError::ProofGenerationFailed) + } +} + +#[async_trait] +impl ProofGenerator for DummyProofGenerator { + async fn generate( + &self, + slot: Slot, + payload_hash: &ExecutionBlockHash, + block_root: &Hash256, + ) -> ProofGenerationResult { + // Simulate proof generation work + if !self.generation_delay.is_zero() { + sleep(self.generation_delay).await; + } + + // Get the Ethproofs prover UUID corresponding to this proof_id + let prover_uuid = match VERIFIER_STORE.get_prover_uuid_for_proof_id(self.proof_id) { + Some(uuid) => uuid, + None => { + debug!( + proof_id = %self.proof_id, + "[Ethproofs] No prover UUID mapping found, cannot query API" + ); + return self.create_dummy_proof(slot, payload_hash, block_root); + } + }; + + let cluster = prover_uuid.to_string(); + + info!( + proof_id = %self.proof_id, + slot = %slot, + "[Ethproofs] Starting proof generation" + ); + + // Fetch proof from Ethproofs API for this proof_id's cluster + match fetch_proof_from_ethproofs(*payload_hash, cluster).await { + Ok(proofs) => { + // Try to download and verify the proof + if let Some(proof_entry) = proofs.first() { + // Download the proof binary + match download_proof_binary(proof_entry.proof_id).await { + Ok(proof_binary) => { + // Create proof for verification + match ExecutionProof::new( + self.proof_id, + slot, + *payload_hash, + *block_root, + proof_binary, + ) { + Ok(proof) => { + return Ok(proof); + } + Err(e) => { + debug!( + proof_id = %self.proof_id, + error 
= %e, + "[Ethproofs] Failed to create ExecutionProof" + ); + // Proof structure creation failed, will fallback below + } + } + } + Err(e) => { + debug!( + proof_id = proof_entry.proof_id, + error = %e, + "[Ethproofs] Failed to download proof" + ); + } + } + } else { + debug!( + proof_id = %self.proof_id, + "[Ethproofs] No proofs returned from API" + ); + } + + debug!( + proof_id = %self.proof_id, + block_hash = %payload_hash, + "[Ethproofs] API proof generation failed, using fallback" + ); + self.create_dummy_proof(slot, payload_hash, block_root) + } + Err(e) => { + debug!( + proof_id = %self.proof_id, + block_hash = %payload_hash, + error = %e, + "[Ethproofs] Failed to fetch proofs, using fallback" + ); + self.create_dummy_proof(slot, payload_hash, block_root) + } + } + } + + fn proof_id(&self) -> ExecutionProofId { + self.proof_id + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_dummy_generator_success() { + let subnet = ExecutionProofId::new(0).unwrap(); + let generator = DummyProofGenerator::new(subnet); + let slot = Slot::new(100); + let block_hash = ExecutionBlockHash::repeat_byte(1); + let block_root = Hash256::repeat_byte(2); + + let result = generator.generate(slot, &block_hash, &block_root).await; + assert!(result.is_ok()); + + let proof = result.unwrap(); + + // Should create a fallback proof (proof_id = 0) + assert_eq!(proof.proof_id.as_u8(), 0); + assert_eq!(proof.slot, slot); + assert_eq!(proof.block_hash, block_hash); + assert_eq!(proof.block_root, block_root); + assert!(proof.proof_data_size() > 0); + } + + #[tokio::test] + async fn test_dummy_generator_deterministic() { + let subnet = ExecutionProofId::new(1).unwrap(); + let generator = DummyProofGenerator::new(subnet); + let slot = Slot::new(200); + let block_hash = ExecutionBlockHash::repeat_byte(42); + let block_root = Hash256::repeat_byte(99); + + // Generate twice + let proof1 = generator + // TODO(ethproofs): Changed so we don't make API calls here. + .create_dummy_proof(slot, &block_hash, &block_root) + .unwrap(); + let proof2 = generator + // TODO(ethproofs): Changed so we don't make API calls here. + .create_dummy_proof(slot, &block_hash, &block_root) + .unwrap(); + + // Should be identical + assert_eq!(proof1.proof_data_slice(), proof2.proof_data_slice()); + } + + #[tokio::test] + async fn test_dummy_generator_custom_delay() { + // TODO(zkproofs): Maybe remove, mainly need it as a temp check + let subnet = ExecutionProofId::new(0).unwrap(); + let delay = Duration::from_millis(1); + let generator = DummyProofGenerator::with_delay(subnet, delay); + let slot = Slot::new(100); + let block_hash = ExecutionBlockHash::repeat_byte(1); + let block_root = Hash256::repeat_byte(2); + + let start = tokio::time::Instant::now(); + let result = generator.generate(slot, &block_hash, &block_root).await; + let elapsed = start.elapsed(); + + assert!(result.is_ok()); + assert!(elapsed >= delay); + } +} diff --git a/zkvm_execution_layer/src/dummy_proof_verifier.rs b/zkvm_execution_layer/src/dummy_proof_verifier.rs new file mode 100644 index 00000000000..1fae4a8e066 --- /dev/null +++ b/zkvm_execution_layer/src/dummy_proof_verifier.rs @@ -0,0 +1,139 @@ +use crate::ethproofs_demo::{EthproofsValidator, ProofValidator}; +use crate::proof_verification::{ProofVerificationResult, ProofVerifier, VerificationError}; +use std::sync::Arc; +use std::time::Duration; +use tracing::debug; +use types::{ExecutionProof, ExecutionProofId}; + +/// TODO(ethproofs): Ethproofs demo implementation of proof verification. 
+/// +/// Dummy proof verifier for testing +/// +/// This verifier simulates the verification process with a configurable delay. +pub struct DummyVerifier { + proof_id: ExecutionProofId, + verification_delay: Duration, + validator: Arc, +} + +impl DummyVerifier { + /// Create a new dummy verifier for the specified proof ID + pub fn new(proof_id: ExecutionProofId) -> Self { + Self { + proof_id, + verification_delay: Duration::from_millis(0), + validator: Arc::new(EthproofsValidator), + } + } + + /// Create a new dummy verifier with custom verification delay + pub fn with_delay(proof_id: ExecutionProofId, delay: Duration) -> Self { + Self { + proof_id, + verification_delay: delay, + validator: Arc::new(EthproofsValidator), + } + } + + /// Create a new dummy verifier with a custom validator (for testing) + #[cfg(test)] + fn with_validator(proof_id: ExecutionProofId, validator: Arc) -> Self { + Self { + proof_id, + verification_delay: Duration::from_millis(0), + validator, + } + } +} + +impl ProofVerifier for DummyVerifier { + fn verify(&self, proof: &ExecutionProof) -> ProofVerificationResult { + // Check that the proof is for the correct subnet + if proof.proof_id != self.proof_id { + return Err(VerificationError::UnsupportedProofID(proof.proof_id)); + } + + // Simulate verification work + if !self.verification_delay.is_zero() { + std::thread::sleep(self.verification_delay); + } + + debug!( + proof_id = %self.proof_id, + block_hash = %proof.block_hash, + "[Ethproofs] Verifying proof" + ); + + // Perform cryptographic verification using the injected validator + Ok(self.validator.validate(proof)) + } + + fn proof_id(&self) -> ExecutionProofId { + self.proof_id + } +} + +#[cfg(test)] +mod tests { + use super::*; + use mockall::mock; + use types::{ExecutionBlockHash, FixedBytesExtended, Hash256, Slot}; + + mock! 
{ + TestValidator {} + impl ProofValidator for TestValidator { + fn validate(&self, proof: &ExecutionProof) -> bool; + } + } + + fn create_test_proof( + subnet_id: ExecutionProofId, + block_hash: types::ExecutionBlockHash, + ) -> ExecutionProof { + ExecutionProof::new( + subnet_id, + Slot::new(100), + block_hash, + Hash256::zero(), + vec![1, 2, 3, 4], + ) + .unwrap() + } + + #[tokio::test] + async fn test_dummy_verifier_success() { + let subnet = ExecutionProofId::new(0).unwrap(); + let block_hash = ExecutionBlockHash::zero(); + let proof = create_test_proof(subnet, block_hash); + + let mut mock_validator = MockTestValidator::new(); + mock_validator + .expect_validate() + .withf(move |p| p.proof_id == subnet) + .returning(|_| true); + + let verifier = DummyVerifier::with_validator(subnet, Arc::new(mock_validator)); + + let result = verifier.verify(&proof); + assert!(result.is_ok()); + assert!(result.unwrap()); + } + + #[tokio::test] + async fn test_dummy_verifier_wrong_subnet() { + let subnet_0 = ExecutionProofId::new(0).unwrap(); + let subnet_1 = ExecutionProofId::new(1).unwrap(); + let block_hash = ExecutionBlockHash::zero(); + let proof = create_test_proof(subnet_1, block_hash); + + let mock_validator = MockTestValidator::new(); + let verifier = DummyVerifier::with_validator(subnet_0, Arc::new(mock_validator)); + + let result = verifier.verify(&proof); + assert!(result.is_err()); + assert!(matches!( + result.unwrap_err(), + VerificationError::UnsupportedProofID(_) + )); + } +} diff --git a/zkvm_execution_layer/src/engine_api.rs b/zkvm_execution_layer/src/engine_api.rs new file mode 100644 index 00000000000..c0f7c4ebde2 --- /dev/null +++ b/zkvm_execution_layer/src/engine_api.rs @@ -0,0 +1,50 @@ +use execution_layer::{BlockProposalContentsType, Error as ExecutionLayerError, PayloadStatus}; +use types::{EthSpec, ExecPayload, ExecutionBlockHash}; + +type PayloadId = [u8; 8]; + +pub struct ZKVMEngineApi { + _phantom: std::marker::PhantomData, +} + +impl Default for ZKVMEngineApi { + fn default() -> Self { + Self::new() + } +} + +impl ZKVMEngineApi { + pub fn new() -> Self { + Self { + _phantom: std::marker::PhantomData, + } + } + + /// Verify a new execution payload using ZK proof + pub async fn new_payload( + &self, + _execution_payload: &impl ExecPayload, + ) -> Result { + // TODO(zkproofs): There are some engine_api checks that should be made, but these should be + // done when we have the proof, check the EL newPayload method to see what these are + Ok(PayloadStatus::Syncing) + } + + /// Update fork choice state + pub async fn forkchoice_updated( + &self, + _head_block_hash: ExecutionBlockHash, + ) -> Result { + // For now, just return Valid status + Ok(PayloadStatus::Valid) + } + + /// Get a payload for block production + pub async fn get_payload( + &self, + _payload_id: PayloadId, + ) -> Result, ExecutionLayerError> { + // TODO(zkproofs): use mev-boost + Err(ExecutionLayerError::CannotProduceHeader) + } +} diff --git a/zkvm_execution_layer/src/ethproofs_demo.rs b/zkvm_execution_layer/src/ethproofs_demo.rs new file mode 100644 index 00000000000..28e37c62c48 --- /dev/null +++ b/zkvm_execution_layer/src/ethproofs_demo.rs @@ -0,0 +1,272 @@ +use crate::verification_keys::VerificationKeyStore; +use crate::verifiers::VerifierStore; +use once_cell::sync::Lazy; +use reqwest::StatusCode; +use serde::{Deserialize, Serialize}; +use std::time::{Duration, Instant}; +use tracing::{debug, info}; +use types::ExecutionProof; + +/// Trait for validating proofs +pub trait ProofValidator: Send + Sync { + 
/// Validate a proof using the verifier store + fn validate(&self, proof: &ExecutionProof) -> bool; +} + +/// Default implementation using the Ethproofs verifier store +pub struct EthproofsValidator; + +impl ProofValidator for EthproofsValidator { + fn validate(&self, proof: &ExecutionProof) -> bool { + validate_proof(proof) + } +} + +/// Global verification key store, loaded once on first access +pub static VERIFICATION_KEY_STORE: Lazy> = + Lazy::new(|| match VerificationKeyStore::load_embedded() { + Ok(store) => Some(store), + Err(e) => { + debug!(error = %e, "[Ethproofs] Failed to load verification keys"); + None + } + }); + +/// Global verifier store, initialized with default verifiers +pub static VERIFIER_STORE: Lazy = Lazy::new(VerifierStore::with_defaults); + +/// Represents a proof from the Ethproofs proofs list endpoint +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Ethproof { + /// The proof ID from Ethproofs + pub proof_id: u64, + /// The cluster ID that generated this proof (matches against available prover_ids) + pub cluster_id: String, +} + +/// Represents the response from the Ethproofs proofs list endpoint +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProofsListResponse { + pub proofs: Vec, +} + +/// Fetch a proof for a block from Ethproofs API. +/// +/// Polls the endpoint for a single cluster until a proof is found or a timeout is reached, +/// using exponential backoff. This accepts the block hash and a single cluster ID to query. +/// +/// Returns the proof for the requested cluster, or an error if not found within the timeout window. +pub async fn fetch_proof_from_ethproofs( + block_hash: types::ExecutionBlockHash, + cluster: String, +) -> Result, String> { + const MAX_WAIT_TIME_SECS: u64 = 60; + const INITIAL_DELAY_MS: u64 = 100; + const MAX_DELAY_MS: u64 = 5000; + + let client = reqwest::Client::new(); + let url = format!( + "https://ethproofs.org/api/v0/proofs?block={}&clusters={}", + block_hash, cluster + ); + + let start = Instant::now(); + let mut delay_ms = INITIAL_DELAY_MS; + + loop { + // Check if we've exceeded max wait time + if start.elapsed() > Duration::from_secs(MAX_WAIT_TIME_SECS) { + info!( + block_hash = %block_hash, + cluster = %cluster, + "[Ethproofs] Timeout waiting for proof" + ); + return Err(format!( + "No proof found for block {} in cluster {} within {} seconds", + block_hash, cluster, MAX_WAIT_TIME_SECS + )); + } + + let mut request = client.get(&url); + + // Add API key header if environment variable is set + if let Ok(api_key) = std::env::var("ETHPROOFS_API_KEY") { + request = request.header("Authorization", format!("Bearer {}", api_key)); + } + + let response = request + .send() + .await + .map_err(|e| format!("Request failed: {}", e))?; + + match response.status() { + StatusCode::OK => { + let response_data: ProofsListResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse response: {}", e))?; + + // Return the first proof found for this cluster + if !response_data.proofs.is_empty() { + return Ok(response_data.proofs); + } + } + StatusCode::NOT_FOUND => { + // Proof not ready yet, will retry with exponential backoff + } + status => { + return Err(format!( + "Request failed with status: {} for block {}", + status, block_hash + )); + } + } + + // Wait before retrying + tokio::time::sleep(Duration::from_millis(delay_ms)).await; + + // Exponential backoff: double the delay, up to MAX_DELAY_MS + delay_ms = (delay_ms * 2).min(MAX_DELAY_MS); + } +} + +/// Download a proof binary directly from 
Ethproofs using the proof_id. +/// +/// Returns the binary proof data. +pub async fn download_proof_binary(proof_id: u64) -> Result, String> { + let client = reqwest::Client::new(); + let url = format!("https://ethproofs.org/api/v0/proofs/download/{}", proof_id); + + info!(proof_id, "[Ethproofs] Downloading proof binary"); + + let mut request = client.get(&url); + + // Add API key header if environment variable is set + if let Ok(api_key) = std::env::var("ETHPROOFS_API_KEY") { + request = request.header("Authorization", format!("Bearer {}", api_key)); + } + + let response = request + .send() + .await + .map_err(|e| format!("Request failed: {}", e))?; + + match response.status() { + StatusCode::OK => { + let proof_data = response + .bytes() + .await + .map_err(|e| format!("Failed to read response: {}", e))?; + Ok(proof_data.to_vec()) + } + StatusCode::NOT_FOUND => Err(format!("Proof {} not found", proof_id)), + status => Err(format!( + "Request failed with status: {} for proof {}", + status, proof_id + )), + } +} + +/// Validate a proof using the verifier store +/// +/// This function performs cryptographic verification of a proof by: +/// 1. Looking up the verifier for the proof's proof_id +/// 2. Running the cryptographic verification function +/// 3. Returning whether the proof is valid +/// +/// Note: Fallback proofs (proof_id = 0) bypass this pipeline and are accepted immediately. +pub fn validate_proof(proof: &ExecutionProof) -> bool { + // Fallback proofs (proof_id 0) are accepted without verification + if proof.proof_id.as_u8() == 0 { + debug!( + slot = %proof.slot, + block_hash = %proof.block_hash, + "[Ethproofs] Fallback proof accepted" + ); + return true; + } + + // Get the prover UUID for this proof_id from the hardcoded mapping + let prover_uuid = match VERIFIER_STORE.get_prover_uuid_for_proof_id(proof.proof_id) { + Some(uuid) => uuid, + None => { + debug!( + proof_id = %proof.proof_id, + "[Ethproofs] No prover UUID mapping found for this proof_id" + ); + return false; + } + }; + + match &*VERIFICATION_KEY_STORE { + Some(store) => { + match store.get(&prover_uuid) { + Some(vk) => { + debug!( + slot = %proof.slot, + block_hash = %proof.block_hash, + prover_id = %prover_uuid, + vk_size = vk.size(), + proof_size = proof.proof_data.len(), + "[Ethproofs] Found vk for prover" + ); + + // Look up the verifier for this prover + match VERIFIER_STORE.get(&prover_uuid) { + Some(verifier_entry) => { + info!( + "[Ethproofs] Verification started: verifier={} slot={}", + verifier_entry.name, proof.slot + ); + + // Run the actual cryptographic verification + match (verifier_entry.verify_fn)(&proof.proof_data, &vk.vk) { + Ok(result) => { + info!( + "[Ethproofs] Verification completed: verifier={} slot={} result={}", + verifier_entry.name, + proof.slot, + result + ); + result + } + Err(e) => { + debug!( + slot = %proof.slot, + block_hash = %proof.block_hash, + verifier = verifier_entry.name, + error = %e, + "[Ethproofs] Verification failed" + ); + false + } + } + } + None => { + debug!( + slot = %proof.slot, + block_hash = %proof.block_hash, + prover_id = %prover_uuid, + "[Ethproofs] No registered verifier" + ); + false + } + } + } + None => { + debug!( + slot = %proof.slot, + block_hash = %proof.block_hash, + prover_id = %prover_uuid, + "[Ethproofs] No verification key found" + ); + false + } + } + } + None => { + debug!("[Ethproofs] Verification key store not initialized"); + false + } + } +} diff --git a/zkvm_execution_layer/src/lib.rs b/zkvm_execution_layer/src/lib.rs new file mode 100644 
index 00000000000..354f2cf018c --- /dev/null +++ b/zkvm_execution_layer/src/lib.rs @@ -0,0 +1,23 @@ +pub mod config; + +pub mod proof_generation; +pub mod proof_verification; + +pub mod registry_proof_gen; +pub mod registry_proof_verification; + +pub mod dummy_proof_gen; +pub mod dummy_proof_verifier; + +/// Engine API implementation for ZK-VM execution +pub mod engine_api; + +pub use config::ZKVMExecutionLayerConfig; +/// Re-export the main ZK-VM engine API and config +pub use engine_api::ZKVMEngineApi; +pub use registry_proof_gen::GeneratorRegistry; + +/// TODO(ethproofs): Used for Ethproofs demo testing. +pub mod ethproofs_demo; +pub mod verification_keys; +pub mod verifiers; diff --git a/zkvm_execution_layer/src/proof_generation.rs b/zkvm_execution_layer/src/proof_generation.rs new file mode 100644 index 00000000000..9254d5fe560 --- /dev/null +++ b/zkvm_execution_layer/src/proof_generation.rs @@ -0,0 +1,51 @@ +use async_trait::async_trait; +use std::sync::Arc; +use thiserror::Error; +use types::{ExecutionProof, ExecutionProofId}; + +/// Result type for proof generation operations +pub type ProofGenerationResult = Result; + +/// Errors that can occur during proof generation +#[derive(Debug, Error)] +pub enum ProofGenerationError { + #[error("Proof generation failed: {0}")] + ProofGenerationFailed(String), + + #[error("Missing execution witness data: {0}")] + MissingWitnessData(String), + + #[error("Invalid execution witness: {0}")] + InvalidWitness(String), + + #[error("Proof generation timeout")] + Timeout, + + #[error("Insufficient resources: {0}")] + InsufficientResources(String), + + #[error("Internal error: {0}")] + Internal(String), +} + +/// Trait for proof generation (one implementation per zkVM+EL combo) +/// +/// Each proof system (RISC Zero, SP1, etc.) + zkVM combination implements this trait +/// to generate proofs for execution payloads from their subnet. +#[async_trait] +pub trait ProofGenerator: Send + Sync { + /// Generate a proof for the given execution payload + async fn generate( + &self, + slot: types::Slot, + payload_hash: &types::ExecutionBlockHash, + block_root: &types::Hash256, + ) -> ProofGenerationResult; + + /// Get the proof ID this generator produces proofs for + fn proof_id(&self) -> ExecutionProofId; +} + +/// Type-erased proof generator mainly for convenience +/// TODO(zkproofs): Check if we can remove this +pub type DynProofGenerator = Arc; diff --git a/zkvm_execution_layer/src/proof_verification.rs b/zkvm_execution_layer/src/proof_verification.rs new file mode 100644 index 00000000000..164f56bd1ef --- /dev/null +++ b/zkvm_execution_layer/src/proof_verification.rs @@ -0,0 +1,43 @@ +use std::sync::Arc; +use thiserror::Error; +use types::{ExecutionProof, ExecutionProofId}; + +/// Result type for proof verification operations +pub type ProofVerificationResult = Result; + +/// Errors that can occur during proof verification +#[derive(Debug, Error)] +pub enum VerificationError { + #[error("Proof verification failed: {0}")] + VerificationFailed(String), + + #[error("Invalid proof format: {0}")] + InvalidProofFormat(String), + + #[error("Unsupported proof ID: {0}")] + UnsupportedProofID(ExecutionProofId), + + #[error("Proof size mismatch: expected {expected}, got {actual}")] + ProofSizeMismatch { expected: usize, actual: usize }, + + #[error("Internal error: {0}")] + Internal(String), +} + +/// Trait for proof verification (one implementation per zkVM+EL combination) +pub trait ProofVerifier: Send + Sync { + /// Verify that the proof is valid. 
diff --git a/zkvm_execution_layer/src/proof_verification.rs b/zkvm_execution_layer/src/proof_verification.rs new file mode 100644 index 00000000000..164f56bd1ef --- /dev/null +++ b/zkvm_execution_layer/src/proof_verification.rs @@ -0,0 +1,43 @@ +use std::sync::Arc; +use thiserror::Error; +use types::{ExecutionProof, ExecutionProofId}; + +/// Result type for proof verification operations +pub type ProofVerificationResult = Result<bool, VerificationError>; + +/// Errors that can occur during proof verification +#[derive(Debug, Error)] +pub enum VerificationError { + #[error("Proof verification failed: {0}")] + VerificationFailed(String), + + #[error("Invalid proof format: {0}")] + InvalidProofFormat(String), + + #[error("Unsupported proof ID: {0}")] + UnsupportedProofID(ExecutionProofId), + + #[error("Proof size mismatch: expected {expected}, got {actual}")] + ProofSizeMismatch { expected: usize, actual: usize }, + + #[error("Internal error: {0}")] + Internal(String), +} + +/// Trait for proof verification (one implementation per zkVM+EL combination) +pub trait ProofVerifier: Send + Sync { + /// Verify that the proof is valid. + /// + /// TODO(zkproofs): we can probably collapse Ok(false) and Err or make Ok(false) an enum variant + /// + /// Returns: + /// - Ok(true) if valid, + /// - Ok(false) if invalid (but well-formed) + /// - Err if the proof is malformed or verification cannot be performed. + fn verify(&self, proof: &ExecutionProof) -> ProofVerificationResult; + + fn proof_id(&self) -> ExecutionProofId; +} + +/// Type-erased proof verifier +pub type DynProofVerifier = Arc<dyn ProofVerifier>; diff --git a/zkvm_execution_layer/src/registry_proof_gen.rs b/zkvm_execution_layer/src/registry_proof_gen.rs new file mode 100644 index 00000000000..01ded0af454 --- /dev/null +++ b/zkvm_execution_layer/src/registry_proof_gen.rs @@ -0,0 +1,132 @@ +use crate::dummy_proof_gen::DummyProofGenerator; +use crate::proof_generation::DynProofGenerator; +use hashbrown::HashMap; +use std::collections::HashSet; +use std::sync::Arc; +use types::ExecutionProofId; + +/// Registry mapping proof IDs to proof generators +/// +/// Each proof ID represents a different zkVM/proof system, and this registry +/// maintains the mapping from proof ID to the appropriate generator implementation. +#[derive(Clone)] +pub struct GeneratorRegistry { + generators: HashMap<ExecutionProofId, DynProofGenerator>, +} + +impl GeneratorRegistry { + /// Create a new empty generator registry + pub fn new() -> Self { + Self { + generators: HashMap::new(), + } + } + + /// Create a registry with dummy generators for the specified proof IDs + pub fn new_with_dummy_generators(enabled_subnets: HashSet<ExecutionProofId>) -> Self { + let mut generators = HashMap::new(); + + for subnet_id in enabled_subnets { + generators.insert( + subnet_id, + Arc::new(DummyProofGenerator::new(subnet_id)) as DynProofGenerator, + ); + } + + Self { generators } + } + + pub fn register_generator(&mut self, generator: DynProofGenerator) { + let proof_id = generator.proof_id(); + self.generators.insert(proof_id, generator); + } + + pub fn get_generator(&self, proof_id: ExecutionProofId) -> Option<DynProofGenerator> { + self.generators.get(&proof_id).cloned() + } + + /// Check if a generator is registered for a proof ID + pub fn has_generator(&self, proof_id: ExecutionProofId) -> bool { + self.generators.contains_key(&proof_id) + } + + /// Get the number of registered generators + pub fn len(&self) -> usize { + self.generators.len() + } + + /// Check if the registry is empty + pub fn is_empty(&self) -> bool { + self.generators.is_empty() + } + + pub fn proof_ids(&self) -> Vec<ExecutionProofId> { + self.generators.keys().copied().collect() + } +} + +impl Default for GeneratorRegistry { + fn default() -> Self { + Self::new() + } +}
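A short usage sketch for the registry (hypothetical wiring; it assumes `GeneratorRegistry` and the `types` items above are in scope, and that proof id 0 is a valid id, as the tests below also assume):

```rust
use std::collections::HashSet;
use types::{ExecutionBlockHash, ExecutionProofId, Hash256, Slot};

async fn generate_for_subnet_zero(
    slot: Slot,
    payload_hash: &ExecutionBlockHash,
    block_root: &Hash256,
) {
    let mut enabled = HashSet::new();
    enabled.insert(ExecutionProofId::new(0).unwrap());

    // Dummy generators stand in for real zkVM provers during Phase 1 testing.
    let registry = GeneratorRegistry::new_with_dummy_generators(enabled);

    if let Some(generator) = registry.get_generator(ExecutionProofId::new(0).unwrap()) {
        match generator.generate(slot, payload_hash, block_root).await {
            Ok(proof) => tracing::info!(slot = %proof.slot, "generated proof"),
            Err(e) => tracing::warn!(error = %e, "proof generation failed"),
        }
    }
}
```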
+ +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_dummy_generators_registry() { + let mut enabled_subnets = HashSet::new(); + enabled_subnets.insert(ExecutionProofId::new(0).unwrap()); + enabled_subnets.insert(ExecutionProofId::new(1).unwrap()); + + let registry = GeneratorRegistry::new_with_dummy_generators(enabled_subnets); + assert!(!registry.is_empty()); + assert_eq!(registry.len(), 2); + + assert!(registry.has_generator(ExecutionProofId::new(0).unwrap())); + assert!(registry.has_generator(ExecutionProofId::new(1).unwrap())); + assert!(!registry.has_generator(ExecutionProofId::new(2).unwrap())); + } + + #[test] + fn test_register_generator() { + let mut registry = GeneratorRegistry::new(); + let subnet_id = ExecutionProofId::new(0).unwrap(); + let generator = Arc::new(DummyProofGenerator::new(subnet_id)); + + registry.register_generator(generator); + + assert_eq!(registry.len(), 1); + assert!(registry.has_generator(subnet_id)); + } + + #[test] + fn test_get_generator() { + let mut enabled_subnets = HashSet::new(); + enabled_subnets.insert(ExecutionProofId::new(3).unwrap()); + + let registry = GeneratorRegistry::new_with_dummy_generators(enabled_subnets); + let subnet_id = ExecutionProofId::new(3).unwrap(); + + let generator = registry.get_generator(subnet_id); + assert!(generator.is_some()); + assert_eq!(generator.unwrap().proof_id(), subnet_id); + } + + #[test] + fn test_subnet_ids() { + let mut enabled_subnets = HashSet::new(); + enabled_subnets.insert(ExecutionProofId::new(0).unwrap()); + enabled_subnets.insert(ExecutionProofId::new(5).unwrap()); + + let registry = GeneratorRegistry::new_with_dummy_generators(enabled_subnets.clone()); + let subnet_ids = registry.proof_ids(); + + assert_eq!(subnet_ids.len(), 2); + for subnet_id in enabled_subnets { + assert!(subnet_ids.contains(&subnet_id)); + } + } +} diff --git a/zkvm_execution_layer/src/registry_proof_verification.rs b/zkvm_execution_layer/src/registry_proof_verification.rs new file mode 100644 index 00000000000..e2f914e1965 --- /dev/null +++ b/zkvm_execution_layer/src/registry_proof_verification.rs @@ -0,0 +1,138 @@ +use crate::dummy_proof_verifier::DummyVerifier; +use crate::proof_verification::DynProofVerifier; +use hashbrown::HashMap; +use std::sync::Arc; +use types::ExecutionProofId; + +/// Registry mapping subnet IDs to proof verifiers +/// +/// Each subnet can have a different zkVM/proof system, and this registry +/// maintains the mapping from subnet ID to the appropriate verifier implementation. +#[derive(Clone)] +pub struct VerifierRegistry { + verifiers: HashMap<ExecutionProofId, DynProofVerifier>, +} + +impl VerifierRegistry { + /// Create a new empty verifier registry + pub fn new() -> Self { + Self { + verifiers: HashMap::new(), + } + } + + /// Create a registry with dummy verifiers for all subnets + /// This is useful for Phase 1 testing + pub fn new_with_dummy_verifiers() -> Self { + let mut verifiers = HashMap::new(); + + // Register dummy verifiers for all 8 subnets + for id in 0..types::EXECUTION_PROOF_TYPE_COUNT { + if let Ok(proof_id) = ExecutionProofId::new(id) { + verifiers.insert( + proof_id, + Arc::new(DummyVerifier::new(proof_id)) as DynProofVerifier, + ); + } + } + + Self { verifiers } + } + + /// Register a verifier for a specific subnet + pub fn register_verifier(&mut self, verifier: DynProofVerifier) { + let subnet_id = verifier.proof_id(); + self.verifiers.insert(subnet_id, verifier); + } + + /// Get a verifier for a specific proof ID + pub fn get_verifier(&self, proof_id: ExecutionProofId) -> Option<DynProofVerifier> { + self.verifiers.get(&proof_id).cloned() + } + + /// Check if a verifier is registered for a proof ID + pub fn has_verifier(&self, proof_id: ExecutionProofId) -> bool { + self.verifiers.contains_key(&proof_id) + } + + /// Get the number of registered verifiers + pub fn len(&self) -> usize { + self.verifiers.len() + } + + /// Check if the registry is empty + pub fn is_empty(&self) -> bool { + self.verifiers.is_empty() + } + + /// Get all registered proof IDs + pub fn proof_ids(&self) -> Vec<ExecutionProofId> { + self.verifiers.keys().copied().collect() + } +} + +impl Default for VerifierRegistry { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_empty_registry() { + let registry = VerifierRegistry::new(); + assert!(registry.is_empty()); + assert_eq!(registry.len(), 0); + } + + #[test] + fn test_dummy_verifiers_registry() { + let registry = VerifierRegistry::new_with_dummy_verifiers(); + assert!(!registry.is_empty()); + 
assert_eq!(registry.len(), 8); // All 8 subnets + + // Check all proof IDs are registered + for id in 0..8 { + let proof_id = ExecutionProofId::new(id).unwrap(); + assert!(registry.has_verifier(proof_id)); + assert!(registry.get_verifier(proof_id).is_some()); + } + } + + #[test] + fn test_register_verifier() { + let mut registry = VerifierRegistry::new(); + let proof_id = ExecutionProofId::new(0).unwrap(); + let verifier = Arc::new(DummyVerifier::new(proof_id)); + + registry.register_verifier(verifier); + + assert_eq!(registry.len(), 1); + assert!(registry.has_verifier(proof_id)); + } + + #[test] + fn test_get_verifier() { + let registry = VerifierRegistry::new_with_dummy_verifiers(); + let proof_id = ExecutionProofId::new(3).unwrap(); + + let verifier = registry.get_verifier(proof_id); + assert!(verifier.is_some()); + assert_eq!(verifier.unwrap().proof_id(), proof_id); + } + + #[test] + fn test_proof_ids() { + let registry = VerifierRegistry::new_with_dummy_verifiers(); + let proof_ids = registry.proof_ids(); + + assert_eq!(proof_ids.len(), 8); + for id in 0..8 { + let proof_id = ExecutionProofId::new(id).unwrap(); + assert!(proof_ids.contains(&proof_id)); + } + } +} diff --git a/zkvm_execution_layer/src/test_proofs/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin b/zkvm_execution_layer/src/test_proofs/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin new file mode 100644 index 00000000000..e4508b33483 Binary files /dev/null and b/zkvm_execution_layer/src/test_proofs/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin differ diff --git a/zkvm_execution_layer/src/test_proofs/pico_f404c187-88d6-4927-963c-61760a639900.bin b/zkvm_execution_layer/src/test_proofs/pico_f404c187-88d6-4927-963c-61760a639900.bin new file mode 100644 index 00000000000..238c610207e Binary files /dev/null and b/zkvm_execution_layer/src/test_proofs/pico_f404c187-88d6-4927-963c-61760a639900.bin differ diff --git a/zkvm_execution_layer/src/test_proofs/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin b/zkvm_execution_layer/src/test_proofs/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin new file mode 100644 index 00000000000..cdf69b938b9 Binary files /dev/null and b/zkvm_execution_layer/src/test_proofs/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin differ diff --git a/zkvm_execution_layer/src/test_proofs/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin b/zkvm_execution_layer/src/test_proofs/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin new file mode 100644 index 00000000000..0ac658bde5e Binary files /dev/null and b/zkvm_execution_layer/src/test_proofs/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin differ diff --git a/zkvm_execution_layer/src/test_proofs/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin b/zkvm_execution_layer/src/test_proofs/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin new file mode 100644 index 00000000000..d9ddc9685d9 Binary files /dev/null and b/zkvm_execution_layer/src/test_proofs/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin differ diff --git a/zkvm_execution_layer/src/test_proofs/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin b/zkvm_execution_layer/src/test_proofs/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin new file mode 100644 index 00000000000..9873f58b11d Binary files /dev/null and b/zkvm_execution_layer/src/test_proofs/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin differ diff --git a/zkvm_execution_layer/src/verification_keys/mod.rs b/zkvm_execution_layer/src/verification_keys/mod.rs new file mode 100644 index 00000000000..be7ff1c91f4 --- /dev/null +++ b/zkvm_execution_layer/src/verification_keys/mod.rs @@ 
-0,0 +1,262 @@ +//! Execution proof verification key management +//! +//! This module handles loading and managing verification keys for execution proofs. +//! Verification keys are stored as .bin files in the verification_keys directory, +//! with each file named by its prover UUID. + +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use tracing::{debug, warn}; +use uuid::Uuid; + +/// Represents a verification key for validating execution proofs +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExecutionProofVerificationKey { + /// Unique identifier for the prover that generated this key + pub prover_id: Uuid, + /// The binary verification key data + pub vk: Vec<u8>, +} + +impl ExecutionProofVerificationKey { + /// Create a new verification key + pub fn new(prover_id: Uuid, vk: Vec<u8>) -> Self { + Self { prover_id, vk } + } + + /// Get the size of the verification key in bytes + pub fn size(&self) -> usize { + self.vk.len() + } +} + +/// Manager for loading and accessing verification keys +#[derive(Debug, Default)] +pub struct VerificationKeyStore { + /// Map of prover_id to verification key + keys: HashMap<Uuid, ExecutionProofVerificationKey>, +} + +impl VerificationKeyStore { + /// Create a new empty verification key store + pub fn new() -> Self { + Self { + keys: HashMap::new(), + } + } + + /// Load all verification keys from a directory + /// + /// Expected file format: `{prover}_{uuid}.bin`, + /// where `{uuid}` is a valid UUID string identifying the prover + /// + /// # Example + /// ```ignore + /// let store = VerificationKeyStore::load_from_directory("./verification_keys")?; + /// ``` + pub fn load_from_directory<P: AsRef<Path>>(dir: P) -> Result<Self, String> { + let dir_path = dir.as_ref(); + + if !dir_path.exists() { + return Err(format!("Directory does not exist: {:?}", dir_path)); + } + + if !dir_path.is_dir() { + return Err(format!("Path is not a directory: {:?}", dir_path)); + } + + let mut store = Self::new(); + let entries = std::fs::read_dir(dir_path) + .map_err(|e| format!("Failed to read directory {:?}: {}", dir_path, e))?; + + for entry in entries { + let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?; + let path = entry.path(); + + // Only process .bin files + if path.extension().and_then(|s| s.to_str()) != Some("bin") { + continue; + } + + // Extract prover_id from filename + // Expected format: {prover}_{uuid}.bin + // We split on '_' and take index [1] for the UUID + let file_stem = match path.file_stem().and_then(|s| s.to_str()) { + Some(stem) => stem, + None => { + warn!("Skipping file with invalid name: {:?}", path); + continue; + } + }; + + // Split filename on '_' and extract UUID from second part + let parts: Vec<&str> = file_stem.split('_').collect(); + let uuid_str = if parts.len() >= 2 { + parts[1] + } else { + warn!( + "Skipping file {:?}: filename does not match pattern {{prover}}_{{uuid}}", + path + ); + continue; + }; + + let prover_id = match Uuid::parse_str(uuid_str) { + Ok(uuid) => uuid, + Err(e) => { + warn!( + "Skipping file {:?}: '{}' is not a valid UUID: {}", + path, uuid_str, e + ); + continue; + } + }; + + // Read the verification key binary data + let vk_data = std::fs::read(&path) + .map_err(|e| format!("Failed to read file {:?}: {}", path, e))?; + + debug!( + prover_id = %prover_id, + size_bytes = vk_data.len(), + path = ?path, + "Loaded verification key" + ); + + let vk = ExecutionProofVerificationKey::new(prover_id, vk_data); + store.add_key(vk); + } + + debug!( + key_count = store.keys.len(), + "Loaded verification keys from directory" + ); + + Ok(store) + }
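Aside: a small usage sketch of the loader (the directory path is hypothetical; the UUID is the SP1-Hypercube prover id used elsewhere in this PR; `VerificationKeyStore` is assumed in scope):

```rust
use uuid::Uuid;

fn lookup_sp1_key() -> Result<(), String> {
    // The directory would contain files such as `sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin`.
    let store = VerificationKeyStore::load_from_directory("./verification_keys")?;

    let prover_id = Uuid::parse_str("fbef2553-8cd0-4f45-b328-570b5c8688b2")
        .map_err(|e| e.to_string())?;

    if let Some(vk) = store.get(&prover_id) {
        println!("vk for {} is {} bytes", vk.prover_id, vk.size());
    }
    Ok(())
}
```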
+ + /// Add a verification key to the store + pub fn add_key(&mut self, key: ExecutionProofVerificationKey) { + self.keys.insert(key.prover_id, key); + } + + /// Get a verification key by prover ID + pub fn get(&self, prover_id: &Uuid) -> Option<&ExecutionProofVerificationKey> { + self.keys.get(prover_id) + } + + /// Check if a verification key exists for a prover + pub fn contains(&self, prover_id: &Uuid) -> bool { + self.keys.contains_key(prover_id) + } + + /// Get the number of verification keys in the store + pub fn len(&self) -> usize { + self.keys.len() + } + + /// Check if the store is empty + pub fn is_empty(&self) -> bool { + self.keys.is_empty() + } + + /// Get all prover IDs + pub fn prover_ids(&self) -> Vec<Uuid> { + self.keys.keys().copied().collect() + } + + /// Load verification keys from the embedded directory + /// + /// This looks for .bin files in this crate's src/verification_keys directory + pub fn load_embedded() -> Result<Self, String> { + let vk_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src") + .join("verification_keys"); + + Self::load_from_directory(vk_dir) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn test_verification_key_creation() { + let prover_id = Uuid::new_v4(); + let vk_data = vec![1, 2, 3, 4, 5]; + let vk = ExecutionProofVerificationKey::new(prover_id, vk_data.clone()); + + assert_eq!(vk.prover_id, prover_id); + assert_eq!(vk.vk, vk_data); + assert_eq!(vk.size(), 5); + } + + #[test] + fn test_verification_key_store() { + let mut store = VerificationKeyStore::new(); + assert!(store.is_empty()); + assert_eq!(store.len(), 0); + + let prover_id = Uuid::new_v4(); + let vk = ExecutionProofVerificationKey::new(prover_id, vec![1, 2, 3]); + + store.add_key(vk.clone()); + assert_eq!(store.len(), 1); + assert!(store.contains(&prover_id)); + + let retrieved = store.get(&prover_id).unwrap(); + assert_eq!(retrieved.prover_id, prover_id); + assert_eq!(retrieved.vk, vec![1, 2, 3]); + } + + #[test] + fn test_load_from_directory() { + let temp_dir = TempDir::new().unwrap(); + let vk_dir = temp_dir.path(); + + // Create test verification key files with format: {prover}_{uuid}.bin + let prover_id1 = Uuid::new_v4(); + let prover_id2 = Uuid::new_v4(); + + let vk1_path = vk_dir.join(format!("pico_{}.bin", prover_id1)); + let vk2_path = vk_dir.join(format!("openvm_{}.bin", prover_id2)); + + fs::write(&vk1_path, vec![1, 2, 3, 4]).unwrap(); + fs::write(&vk2_path, vec![5, 6, 7, 8, 9]).unwrap(); + + // Also create a non-.bin file that should be ignored + fs::write(vk_dir.join("ignored.txt"), "ignore me").unwrap(); + + // Load the verification keys + let store = VerificationKeyStore::load_from_directory(vk_dir).unwrap(); + + assert_eq!(store.len(), 2); + assert!(store.contains(&prover_id1)); + assert!(store.contains(&prover_id2)); + + let vk1 = store.get(&prover_id1).unwrap(); + assert_eq!(vk1.vk, vec![1, 2, 3, 4]); + + let vk2 = store.get(&prover_id2).unwrap(); + assert_eq!(vk2.vk, vec![5, 6, 7, 8, 9]); + } + + #[test] + fn test_invalid_filename_ignored() { + let temp_dir = TempDir::new().unwrap(); + let vk_dir = temp_dir.path(); + + // Create file with invalid format (no underscore) + fs::write(vk_dir.join("not-valid-format.bin"), vec![1, 2, 3]).unwrap(); + + // Create file with invalid UUID after underscore + fs::write(vk_dir.join("prover_not-a-uuid.bin"), vec![1, 2, 3]).unwrap(); + + let store = VerificationKeyStore::load_from_directory(vk_dir).unwrap(); + assert_eq!(store.len(), 0); // Should be 
empty since filenames are invalid + } +} diff --git a/zkvm_execution_layer/src/verification_keys/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin b/zkvm_execution_layer/src/verification_keys/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin new file mode 100644 index 00000000000..a8bc5332735 Binary files /dev/null and b/zkvm_execution_layer/src/verification_keys/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin differ diff --git a/zkvm_execution_layer/src/verification_keys/pico_f404c187-88d6-4927-963c-61760a639900.bin b/zkvm_execution_layer/src/verification_keys/pico_f404c187-88d6-4927-963c-61760a639900.bin new file mode 100644 index 00000000000..83c0fbbe57d Binary files /dev/null and b/zkvm_execution_layer/src/verification_keys/pico_f404c187-88d6-4927-963c-61760a639900.bin differ diff --git a/zkvm_execution_layer/src/verification_keys/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin b/zkvm_execution_layer/src/verification_keys/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin new file mode 100644 index 00000000000..ede50251a23 Binary files /dev/null and b/zkvm_execution_layer/src/verification_keys/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin differ diff --git a/zkvm_execution_layer/src/verification_keys/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin b/zkvm_execution_layer/src/verification_keys/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin new file mode 100644 index 00000000000..b0eb233c4c1 --- /dev/null +++ b/zkvm_execution_layer/src/verification_keys/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin @@ -0,0 +1 @@ +XkY rAZ䦎 * \ No newline at end of file diff --git a/zkvm_execution_layer/src/verification_keys/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin b/zkvm_execution_layer/src/verification_keys/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin new file mode 100644 index 00000000000..b0eb233c4c1 --- /dev/null +++ b/zkvm_execution_layer/src/verification_keys/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin @@ -0,0 +1 @@ +XkY rAZ䦎 * \ No newline at end of file diff --git a/zkvm_execution_layer/src/verification_keys/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin b/zkvm_execution_layer/src/verification_keys/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin new file mode 100644 index 00000000000..6681002e055 --- /dev/null +++ b/zkvm_execution_layer/src/verification_keys/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin @@ -0,0 +1 @@ +T*v[l[qo)_'qy?$,@FC \ No newline at end of file diff --git a/zkvm_execution_layer/src/verifiers/airbender.rs b/zkvm_execution_layer/src/verifiers/airbender.rs new file mode 100644 index 00000000000..ae4394ddf1e --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/airbender.rs @@ -0,0 +1,65 @@ +//! Airbender STARK proof verifier +//! +//! This module implements proof verification for Airbender zkVM. +//! Currently a placeholder implementation - full verification logic will be implemented separately. + +use super::{panic_safe, ProofVerifier, VerificationResult}; +use tracing::debug; + +/// Airbender verifier +/// +/// Placeholder implementation for Airbender STARK proof verification. +/// Returns true for any valid proof data. 
+pub struct AirbenderVerifier; + +impl ProofVerifier for AirbenderVerifier { + fn verify(proof_data: &[u8], _vk_data: &[u8]) -> VerificationResult { + panic_safe::safe_verify(|| { + debug!( + proof_size = proof_data.len(), + "Starting Airbender verification (placeholder)" + ); + + // Validate proof data is not empty + if proof_data.is_empty() { + debug!("Invalid input: proof data is empty"); + return Ok(false); + } + + // Placeholder: always return true for valid proof data + // TODO(ethproofs): Implement full verification logic with execution_utils + debug!("Airbender verification placeholder - returning true"); + Ok(true) + }) + } + + fn name() -> &'static str { + "airbender" + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_airbender_verifier_name() { + assert_eq!(AirbenderVerifier::name(), "airbender"); + } + + #[test] + fn test_airbender_empty_proof() { + // Empty proof data should return false + let result = AirbenderVerifier::verify(&[], &[]); + assert!(result.is_ok()); + assert!(!result.unwrap()); + } + + #[test] + fn test_airbender_valid_proof() { + // Valid proof data should return true + let result = AirbenderVerifier::verify(&[1u8; 32], &[]); + assert!(result.is_ok()); + assert!(result.unwrap()); + } +} diff --git a/zkvm_execution_layer/src/verifiers/fallback.rs b/zkvm_execution_layer/src/verifiers/fallback.rs new file mode 100644 index 00000000000..9d11ee7323d --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/fallback.rs @@ -0,0 +1,53 @@ +//! Fallback proof verifier +//! +//! This module implements a pass-through verifier for fallback proofs. +//! Fallback proofs are created when the Ethproofs API fails or times out, +//! and they are used to allow blocks to progress without cryptographic verification. + +use super::{ProofVerifier, VerificationResult}; +use tracing::debug; + +/// Fallback verifier that accepts all proofs without cryptographic verification. +/// +/// This verifier is used for fallback proofs created when: +/// - The Ethproofs API times out +/// - The Ethproofs API returns an error +/// - Other proof generation systems are unavailable +/// +/// Since fallback proofs are created locally and represent a "best effort" state, +/// they bypass the full cryptographic verification pipeline. +pub struct FallbackVerifier; + +impl ProofVerifier for FallbackVerifier { + fn verify(_proof_data: &[u8], _vk_data: &[u8]) -> VerificationResult { + debug!("Fallback verifier: accepting proof without cryptographic verification"); + Ok(true) + } + + fn name() -> &'static str { + "fallback" + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fallback_verifier_name() { + assert_eq!(FallbackVerifier::name(), "fallback"); + } + + #[test] + fn test_fallback_verifier_always_accepts() { + // Fallback verifier should accept any input without verification + let result = FallbackVerifier::verify(&[], &[]); + assert_eq!(result, Ok(true)); + + // Test with non-empty data + let proof_data = vec![1, 2, 3, 4, 5]; + let vk_data = vec![6, 7, 8, 9, 10]; + let result = FallbackVerifier::verify(&proof_data, &vk_data); + assert_eq!(result, Ok(true)); + } +} diff --git a/zkvm_execution_layer/src/verifiers/mod.rs b/zkvm_execution_layer/src/verifiers/mod.rs new file mode 100644 index 00000000000..f1c91c984e7 --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/mod.rs @@ -0,0 +1,262 @@ +//! Execution proof verifiers +//! +//! This module manages different proof verification systems based on prover type. +//! 
Each verifier implements cryptographic proof verification for a specific zkVM or proof system. + +pub mod airbender; +pub mod fallback; +pub mod openvm; +pub mod panic_safe; +pub mod pico; +pub mod sp1_hypercube; +pub mod zisk; +pub mod zkcloud; + +use std::collections::HashMap; +use types::ExecutionProofId; +use uuid::Uuid; + +/// Result type for proof verification +pub type VerificationResult = Result<bool, String>; + +/// Ethproofs demo prover UUIDs - hardcoded mapping for demo testing +/// These constants define the relationship between internal proof_ids (0, 1, 2, etc.) +/// and Ethproofs prover UUIDs. +/// +/// Proof ID Mapping (alphabetical order, Fallback first, Airbender reserved): +/// - proof_id 0 → Fallback verifier (used when Ethproofs API fails/times out) +/// - proof_id 1 → Airbender verifier (reserved for future use) +/// - proof_id 2 → OpenVM verifier +/// - proof_id 3 → Pico Prism verifier +/// - proof_id 4 → SP1-Hypercube verifier +/// - proof_id 5 → ZisK 1 (Girona) verifier +/// - proof_id 6 → ZisK 2 (Sevilla) verifier +/// - proof_id 7 → ZisK-ZkCloud verifier +pub mod ethproofs_ids { + use uuid::Uuid; + + /// Fallback verifier UUID (proof_id = 0) + /// Used for dummy proofs when Ethproofs API fails or times out + pub const FALLBACK_UUID: &str = "00000000-0000-0000-0000-000000000000"; + + /// Airbender verifier UUID (proof_id = 1) + pub const AIRBENDER_UUID: &str = "b18507c4-50f3-4638-854a-ed625c7e685a"; + + /// OpenVM verifier UUID (proof_id = 2) + pub const OPENVM_UUID: &str = "9b6768c0-831d-488c-ba72-05f93975a3be"; + + /// Pico Prism verifier UUID (proof_id = 3) + pub const PICO_UUID: &str = "f404c187-88d6-4927-963c-61760a639900"; + + /// SP1-Hypercube verifier UUID (proof_id = 4) + pub const SP1_HYPERCUBE_UUID: &str = "fbef2553-8cd0-4f45-b328-570b5c8688b2"; + + /// ZisK 1 (Girona) verifier UUID (proof_id = 5) + pub const ZISK_1_GIRONA_UUID: &str = "817bbf03-07b4-466d-879b-e476322bd080"; + + /// ZisK 2 (Sevilla) verifier UUID (proof_id = 6) + pub const ZISK_2_SEVILLA_UUID: &str = "534e6cf4-3dfe-47de-bba2-a0b11d544557"; + + /// ZisK-ZkCloud verifier UUID (proof_id = 7) + pub const ZISK_ZKCLOUD_UUID: &str = "884fcc21-d522-4b4a-b535-7cfde199485c"; + + /// Parse the Fallback UUID + pub fn fallback() -> Uuid { + Uuid::parse_str(FALLBACK_UUID).expect("Valid UUID") + } + + /// Parse the Airbender UUID + pub fn airbender() -> Uuid { + Uuid::parse_str(AIRBENDER_UUID).expect("Valid UUID") + } + + /// Parse the OpenVM UUID + pub fn openvm() -> Uuid { + Uuid::parse_str(OPENVM_UUID).expect("Valid UUID") + } + + /// Parse the Pico UUID + pub fn pico() -> Uuid { + Uuid::parse_str(PICO_UUID).expect("Valid UUID") + } + + /// Parse the SP1-Hypercube UUID + pub fn sp1_hypercube() -> Uuid { + Uuid::parse_str(SP1_HYPERCUBE_UUID).expect("Valid UUID") + } + + /// Parse the ZisK 1 (Girona) UUID + pub fn zisk_1_girona() -> Uuid { + Uuid::parse_str(ZISK_1_GIRONA_UUID).expect("Valid UUID") + } + + /// Parse the ZisK 2 (Sevilla) UUID + pub fn zisk_2_sevilla() -> Uuid { + Uuid::parse_str(ZISK_2_SEVILLA_UUID).expect("Valid UUID") + } + + /// Parse the ZisK-ZkCloud UUID + pub fn zisk_zkcloud() -> Uuid { + Uuid::parse_str(ZISK_ZKCLOUD_UUID).expect("Valid UUID") + } +}
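Aside: a sketch of how this mapping is meant to be consumed, using the `VerifierStore` and `VerificationResult` defined in this module (proof id 2, i.e. OpenVM, is just an example value):

```rust
use types::ExecutionProofId;

fn verify_via_mapping(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult {
    let store = VerifierStore::with_defaults();

    // proof_id 2 maps to the OpenVM prover UUID per the table above.
    let proof_id = ExecutionProofId::new(2).map_err(|_| "invalid proof id".to_string())?;
    let prover_uuid = store
        .get_prover_uuid_for_proof_id(proof_id)
        .ok_or_else(|| "no prover UUID for this proof id".to_string())?;
    let entry = store
        .get(&prover_uuid)
        .ok_or_else(|| "no verifier registered for this prover".to_string())?;

    (entry.verify_fn)(proof_data, vk_data)
}
```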
+ +/// Trait for proof verifiers +pub trait ProofVerifier: Send + Sync { + /// Verify a proof given the proof data and verification key + fn verify(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult + where + Self: Sized; + + /// Get the name of this verifier + fn name() -> &'static str + where + Self: Sized; +} + +/// Type for verifier function +pub type VerifierFn = fn(&[u8], &[u8]) -> VerificationResult; + +/// Verifier entry with name and verification function +pub struct VerifierEntry { + pub name: &'static str, + pub verify_fn: VerifierFn, +} + +/// Manager for multiple proof verifiers, keyed by prover UUID +#[derive(Default)] +pub struct VerifierStore { + /// Map of prover_id to verifier function + verifiers: HashMap<Uuid, VerifierEntry>, +} + +impl VerifierStore { + /// Create a new empty verifier store + pub fn new() -> Self { + Self { + verifiers: HashMap::new(), + } + } + + /// Register a verifier for a specific prover UUID + pub fn register(&mut self, prover_id: Uuid, name: &'static str, verify_fn: VerifierFn) { + self.verifiers + .insert(prover_id, VerifierEntry { name, verify_fn }); + } + + /// Get a verifier entry for a specific prover UUID + pub fn get(&self, prover_id: &Uuid) -> Option<&VerifierEntry> { + self.verifiers.get(prover_id) + } + + /// Check if a verifier exists for a prover + pub fn contains(&self, prover_id: &Uuid) -> bool { + self.verifiers.contains_key(prover_id) + } + + /// Get the number of registered verifiers + pub fn len(&self) -> usize { + self.verifiers.len() + } + + /// Check if the store is empty + pub fn is_empty(&self) -> bool { + self.verifiers.is_empty() + } + + /// Get all registered prover IDs + pub fn prover_ids(&self) -> Vec<Uuid> { + self.verifiers.keys().copied().collect() + } + + /// Get the prover UUID corresponding to a proof_id (Ethproofs demo mapping) + /// + /// For Ethproofs demo testing, this provides a hardcoded mapping of proof_ids to prover UUIDs: + /// - proof_id 0 → fallback + /// - proof_id 1 → airbender + /// - proof_id 2 → openvm + /// - proof_id 3 → pico + /// - proof_id 4 → sp1_hypercube + /// - proof_id 5 → zisk_1_girona + /// - proof_id 6 → zisk_2_sevilla + /// - proof_id 7 → zisk_zkcloud + pub fn get_prover_uuid_for_proof_id(&self, proof_id: ExecutionProofId) -> Option<Uuid> { + let id = proof_id.as_u8() as u32; + match id { + 0 => Some(ethproofs_ids::fallback()), + 1 => Some(ethproofs_ids::airbender()), + 2 => Some(ethproofs_ids::openvm()), + 3 => Some(ethproofs_ids::pico()), + 4 => Some(ethproofs_ids::sp1_hypercube()), + 5 => Some(ethproofs_ids::zisk_1_girona()), + 6 => Some(ethproofs_ids::zisk_2_sevilla()), + 7 => Some(ethproofs_ids::zisk_zkcloud()), + _ => None, + } + } + + /// Create a store with default verifiers registered + /// + /// This registers verifiers for known Ethproofs prover UUIDs + pub fn with_defaults() -> Self { + let mut store = Self::new(); + + // Register Fallback verifier (proof_id 0) + store.register( + ethproofs_ids::fallback(), + fallback::FallbackVerifier::name(), + fallback::FallbackVerifier::verify, + ); + + // Register Airbender verifier (proof_id 1) + store.register( + ethproofs_ids::airbender(), + airbender::AirbenderVerifier::name(), + airbender::AirbenderVerifier::verify, + ); + + // Register OpenVM verifier (proof_id 2) + store.register( + ethproofs_ids::openvm(), + openvm::OpenVmVerifier::name(), + openvm::OpenVmVerifier::verify, + ); + + // Register Pico verifier (proof_id 3) + store.register( + ethproofs_ids::pico(), + pico::PicoVerifier::name(), + pico::PicoVerifier::verify, + ); + + // Register SP1-Hypercube verifier (proof_id 4) + store.register( + ethproofs_ids::sp1_hypercube(), + sp1_hypercube::Sp1HypercubeVerifier::name(), + sp1_hypercube::Sp1HypercubeVerifier::verify, + ); + + // Register ZisK 1 (Girona) verifier (proof_id 5) + store.register( + ethproofs_ids::zisk_1_girona(), + zisk::ZiskVerifier::name(), + zisk::ZiskVerifier::verify, + ); + + // Register 
ZisK 2 (Sevilla) verifier (proof_id 6) + store.register( + ethproofs_ids::zisk_2_sevilla(), + zisk::ZiskVerifier::name(), + zisk::ZiskVerifier::verify, + ); + + // Register ZisK-ZkCloud verifier (proof_id 7) + store.register( + ethproofs_ids::zisk_zkcloud(), + zkcloud::ZkCloudVerifier::name(), + zkcloud::ZkCloudVerifier::verify, + ); + + store + } +} diff --git a/zkvm_execution_layer/src/verifiers/openvm.rs b/zkvm_execution_layer/src/verifiers/openvm.rs new file mode 100644 index 00000000000..a24e39d8efb --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/openvm.rs @@ -0,0 +1,77 @@ +//! OpenVM STARK proof verifier +//! +//! This module implements proof verification for OpenVM using the OpenVM verify-stark library. + +use super::{panic_safe, ProofVerifier, VerificationResult}; +use tracing::debug; +use verify_stark::{verify_vm_stark_proof, vk::VmStarkVerifyingKey}; + +/// OpenVM verifier +pub struct OpenVmVerifier; + +impl ProofVerifier for OpenVmVerifier { + fn verify(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult { + panic_safe::safe_verify(|| { + debug!( + proof_size = proof_data.len(), + vk_size = vk_data.len(), + "Starting OpenVM verification" + ); + + // Deserialize the verification key from bitcode bytes + let vk: VmStarkVerifyingKey = match bitcode::deserialize(vk_data) { + Ok(vk) => vk, + Err(e) => { + debug!(error = ?e, "Failed to deserialize OpenVM verification key"); + return Ok(false); + } + }; + + // Verify the proof using the OpenVM verify-stark library + match verify_vm_stark_proof(&vk, proof_data) { + Ok(()) => { + debug!("OpenVM verification succeeded"); + Ok(true) + } + Err(e) => { + debug!(error = ?e, "OpenVM verification failed"); + Ok(false) + } + } + }) + } + + fn name() -> &'static str { + "openvm" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_openvm_verifier_name() { + assert_eq!(OpenVmVerifier::name(), "openvm"); + } + + #[test] + fn test_openvm_verification() { + // Load test proof and verification key + let test_proof_path = + PathBuf::from("src/test_proofs/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin"); + let vk_path = + PathBuf::from("src/verification_keys/openvm_9b6768c0-831d-488c-ba72-05f93975a3be.bin"); + + let proof_data = std::fs::read(&test_proof_path).expect("Failed to read test proof file"); + let vk_data = std::fs::read(&vk_path).expect("Failed to read verification key file"); + + // Verify the proof + let result = OpenVmVerifier::verify(&proof_data, &vk_data); + + // The test should succeed + assert!(result.is_ok(), "OpenVM verification failed: {:?}", result); + assert!(result.unwrap(), "OpenVM proof verification returned false"); + } +} diff --git a/zkvm_execution_layer/src/verifiers/panic_safe.rs b/zkvm_execution_layer/src/verifiers/panic_safe.rs new file mode 100644 index 00000000000..a1b3a7afdd3 --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/panic_safe.rs @@ -0,0 +1,79 @@ +//! Panic-safe verification wrapper +//! +//! This module provides a wrapper function that catches panics from verifier operations +//! and converts them to `Ok(false)` so that verification failures don't crash the client. + +use super::VerificationResult; +use std::panic::{catch_unwind, AssertUnwindSafe}; +use tracing::error; + +/// Safely calls a verifier function and catches any panics, returning false instead +/// +/// This wrapper ensures that if a verifier panics for any reason, the panic is caught +/// and logged, and the function returns `Ok(false)` instead of crashing the client. 
+pub fn safe_verify<F>(verify_fn: F) -> VerificationResult +where + F: FnOnce() -> VerificationResult, +{ + match catch_unwind(AssertUnwindSafe(verify_fn)) { + Ok(result) => result, + Err(panic_info) => { + let panic_msg = if let Some(s) = panic_info.downcast_ref::<String>() { + s.clone() + } else if let Some(s) = panic_info.downcast_ref::<&str>() { + s.to_string() + } else { + "Unknown panic occurred during verification".to_string() + }; + error!(panic_message = %panic_msg, "Verifier panicked, returning false"); + Ok(false) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_safe_verify_with_panic() { + // Test that panics are caught and converted to Ok(false) + let result = safe_verify(|| { + panic!("Test panic in verifier"); + }); + + assert!(result.is_ok(), "safe_verify should return Ok, not Err"); + assert_eq!( + result.unwrap(), + false, + "safe_verify should return false when panic occurs" + ); + } + + #[test] + fn test_safe_verify_with_ok_result() { + // Test that normal Ok results pass through + let result = safe_verify(|| Ok(true)); + + assert!(result.is_ok()); + assert_eq!(result.unwrap(), true); + } + + #[test] + fn test_safe_verify_with_err_result() { + // Test that Err results pass through + let result = safe_verify(|| Err("Verification failed".to_string())); + + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "Verification failed".to_string()); + } + + #[test] + fn test_safe_verify_with_false_result() { + // Test that Ok(false) results pass through + let result = safe_verify(|| Ok(false)); + + assert!(result.is_ok()); + assert_eq!(result.unwrap(), false); + } +}
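The intended pattern for new verifier modules is to wrap the whole verification body in `safe_verify`, so that a panicking third-party verifier library degrades to `Ok(false)` instead of taking the node down; a hedged sketch (the backend call in the comment is imaginary):

```rust
use super::{panic_safe, VerificationResult};

fn verify_with_some_backend(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult {
    panic_safe::safe_verify(|| {
        if proof_data.is_empty() || vk_data.is_empty() {
            return Ok(false);
        }
        // e.g. some_backend::verify(proof_data, vk_data) might panic on malformed input;
        // any such panic is caught by safe_verify and reported as Ok(false).
        Ok(true)
    })
}
```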
diff --git a/zkvm_execution_layer/src/verifiers/pico.rs b/zkvm_execution_layer/src/verifiers/pico.rs new file mode 100644 index 00000000000..58f63a95f58 --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/pico.rs @@ -0,0 +1,137 @@ +//! Pico Prism zkVM STARK proof verifier +//! +//! This module implements proof verification for Pico Prism zkVM using KoalaBear field arithmetic. + +use super::{panic_safe, ProofVerifier, VerificationResult}; +use pico_prism_vm::{ + configs::{ + config::{StarkGenericConfig, Val}, + stark_config::KoalaBearPoseidon2, + }, + instances::{ + chiptype::recursion_chiptype::RecursionChipType, machine::combine::CombineMachine, + }, + machine::{ + keys::BaseVerifyingKey, + machine::MachineBehavior, + proof::{BaseProof, MetaProof}, + }, + primitives::consts::RECURSION_NUM_PVS, +}; +use serde::{Deserialize, Serialize}; +use tracing::debug; + +// Serializable wrappers for MetaProof +#[derive(Serialize, Deserialize)] +struct SerializableKoalaBearMetaProof { + proofs: Vec<BaseProof<KoalaBearPoseidon2>>, + vks: Vec<BaseVerifyingKey<KoalaBearPoseidon2>>, + pv_stream: Option<Vec<u8>>, +} + +impl SerializableKoalaBearMetaProof { + fn to_meta_proof(self) -> MetaProof<KoalaBearPoseidon2> { + MetaProof::new(self.proofs.into(), self.vks.into(), self.pv_stream) + } +} + +struct KoalaBearCombineVerifier { + machine: CombineMachine<KoalaBearPoseidon2, RecursionChipType<Val<KoalaBearPoseidon2>>>, +} + +impl KoalaBearCombineVerifier { + fn new() -> Self { + let machine = CombineMachine::new( + KoalaBearPoseidon2::new(), + RecursionChipType::combine_chips(), + RECURSION_NUM_PVS, + ); + Self { machine } + } + + fn verify( + &self, + proof: &MetaProof<KoalaBearPoseidon2>, + riscv_vk: &BaseVerifyingKey<KoalaBearPoseidon2>, + ) -> bool { + self.machine.verify(proof, riscv_vk).is_ok() + } +} + +/// Pico Prism verifier using KoalaBear field +pub struct PicoVerifier; + +impl ProofVerifier for PicoVerifier { + fn verify(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult { + panic_safe::safe_verify(|| { + debug!( + proof_size = proof_data.len(), + vk_size = vk_data.len(), + "Starting Pico verification" + ); + + // Deserialize the KoalaBear proof + let serializable_proof: SerializableKoalaBearMetaProof = + bincode::deserialize(proof_data) + .map_err(|e| format!("Failed to deserialize proof: {}", e))?; + let proof = serializable_proof.to_meta_proof(); + + // Deserialize the KoalaBear verification key + let riscv_vk: BaseVerifyingKey<KoalaBearPoseidon2> = bincode::deserialize(vk_data) + .map_err(|e| format!("Failed to deserialize verification key: {}", e))?; + + // Create and run the verifier + let verifier = KoalaBearCombineVerifier::new(); + let result = verifier.verify(&proof, &riscv_vk); + + debug!(verification_result = result, "Completed Pico verification"); + + Ok(result) + }) + } + + fn name() -> &'static str { + "pico" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_pico_verifier_name() { + assert_eq!(PicoVerifier::name(), "pico"); + } + + #[test] + fn test_pico_verifier_with_real_proof() { + // Path to the test proof file + let proof_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src/test_proofs/pico_f404c187-88d6-4927-963c-61760a639900.bin"); + + // Path to the verification key file + let vk_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src/verification_keys/pico_f404c187-88d6-4927-963c-61760a639900.bin"); + + // Read the proof and verification key + let proof_data = std::fs::read(&proof_path).expect("Failed to read test proof file"); + let vk_data = std::fs::read(&vk_path).expect("Failed to read test verification key file"); + + // Verify the proof + let result = PicoVerifier::verify(&proof_data, &vk_data); + + // Log the result for debugging + eprintln!("Proof size: {} bytes", proof_data.len()); + eprintln!("VK size: {} bytes", vk_data.len()); + eprintln!("Verification result: {:?}", result); + + // The result should be Ok with a boolean (true if valid, false if invalid) + assert!( + result.is_ok(), + "Verification should not error: {:?}", + result.err() + ); + } +} diff --git
a/zkvm_execution_layer/src/verifiers/sp1_hypercube.rs b/zkvm_execution_layer/src/verifiers/sp1_hypercube.rs new file mode 100644 index 00000000000..cb80d8576e0 --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/sp1_hypercube.rs @@ -0,0 +1,77 @@ +//! SP1-Hypercube proof verifier +//! +//! This module implements proof verification for SP1-Hypercube zkVM using the sp1-verifier. + +use super::{panic_safe, ProofVerifier, VerificationResult}; +use sp1_verifier::compressed::SP1CompressedVerifierRaw; +use tracing::debug; + +/// SP1-Hypercube verifier +pub struct Sp1HypercubeVerifier; + +impl ProofVerifier for Sp1HypercubeVerifier { + fn verify(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult { + panic_safe::safe_verify(|| { + debug!( + proof_size = proof_data.len(), + vk_size = vk_data.len(), + "Starting SP1-Hypercube verification" + ); + + // Call the sp1-verifier verify function via SP1CompressedVerifierRaw + // vk_data should be the serialized vkey hash (bincode serialized [SP1Field; 8]) + // Returns Result<(), CompressedError> where Ok(()) means verification succeeded + match SP1CompressedVerifierRaw::verify(proof_data, vk_data) { + Ok(()) => { + debug!("SP1-Hypercube verification succeeded"); + Ok(true) + } + Err(e) => { + debug!(error = ?e, "SP1-Hypercube verification failed"); + Ok(false) + } + } + }) + } + + fn name() -> &'static str { + "sp1-hypercube" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_sp1_hypercube_verifier_name() { + assert_eq!(Sp1HypercubeVerifier::name(), "sp1-hypercube"); + } + + #[test] + fn test_sp1_hypercube_verification() { + // Load test proof and verification key + let test_proof_path = + PathBuf::from("src/test_proofs/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin"); + let vk_path = + PathBuf::from("src/verification_keys/sp1_fbef2553-8cd0-4f45-b328-570b5c8688b2.bin"); + + let proof_data = std::fs::read(&test_proof_path).expect("Failed to read test proof file"); + let vk_data = std::fs::read(&vk_path).expect("Failed to read verification key file"); + + // Verify the proof + let result = Sp1HypercubeVerifier::verify(&proof_data, &vk_data); + + // The test should succeed + assert!( + result.is_ok(), + "SP1-Hypercube verification failed: {:?}", + result + ); + assert!( + result.unwrap(), + "SP1-Hypercube proof verification returned false" + ); + } +} diff --git a/zkvm_execution_layer/src/verifiers/zisk.rs b/zkvm_execution_layer/src/verifiers/zisk.rs new file mode 100644 index 00000000000..6acdb8f7710 --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/zisk.rs @@ -0,0 +1,95 @@ +//! ZisK zkVM STARK proof verifier +//! +//! This module implements proof verification for ZisK zkVM using the proofman-verifier. 
+ +use super::{panic_safe, ProofVerifier, VerificationResult}; +use tracing::debug; + +/// ZisK verifier +pub struct ZiskVerifier; + +impl ProofVerifier for ZiskVerifier { + fn verify(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult { + panic_safe::safe_verify(|| { + debug!( + proof_size = proof_data.len(), + vk_size = vk_data.len(), + "Starting ZisK verification" + ); + + // Call the proofman-verifier verify function + let result = proofman_verifier::verify(proof_data, vk_data); + + debug!(verification_result = result, "Completed ZisK verification"); + + Ok(result) + }) + } + + fn name() -> &'static str { + "zisk" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_zisk_verifier_name() { + assert_eq!(ZiskVerifier::name(), "zisk"); + } + + #[test] + fn test_zisk_1_girona_verification() { + // Load test proof and verification key for ZisK 1 (Girona) + let test_proof_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src/test_proofs/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin"); + let vk_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src/verification_keys/zisk_817bbf03-07b4-466d-879b-e476322bd080.bin"); + + let proof_data = std::fs::read(&test_proof_path).expect("Failed to read test proof file"); + let vk_data = std::fs::read(&vk_path).expect("Failed to read verification key file"); + + // Verify the proof + let result = ZiskVerifier::verify(&proof_data, &vk_data); + + // The test should succeed + assert!( + result.is_ok(), + "ZisK 1 (Girona) verification failed: {:?}", + result + ); + assert!( + result.unwrap(), + "ZisK 1 (Girona) proof verification returned false" + ); + } + + #[test] + fn test_zisk_2_sevilla_verification() { + // Load test proof and verification key for ZisK 2 (Sevilla) + let test_proof_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src/test_proofs/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin"); + let vk_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("src/verification_keys/zisk_534e6cf4-3dfe-47de-bba2-a0b11d544557.bin"); + + let proof_data = std::fs::read(&test_proof_path).expect("Failed to read test proof file"); + let vk_data = std::fs::read(&vk_path).expect("Failed to read verification key file"); + + // Verify the proof + let result = ZiskVerifier::verify(&proof_data, &vk_data); + + // The test should succeed + assert!( + result.is_ok(), + "ZisK 2 (Sevilla) verification failed: {:?}", + result + ); + assert!( + result.unwrap(), + "ZisK 2 (Sevilla) proof verification returned false" + ); + } +} diff --git a/zkvm_execution_layer/src/verifiers/zkcloud.rs b/zkvm_execution_layer/src/verifiers/zkcloud.rs new file mode 100644 index 00000000000..921ec554968 --- /dev/null +++ b/zkvm_execution_layer/src/verifiers/zkcloud.rs @@ -0,0 +1,72 @@ +//! ZisK-ZkCloud proof verifier +//! +//! This module implements proof verification for ZisK-ZkCloud using ZisK. 
+ +use super::{panic_safe, ProofVerifier, VerificationResult}; +use tracing::debug; + +/// ZisK-ZkCloud verifier (uses ZisK) +pub struct ZkCloudVerifier; + +impl ProofVerifier for ZkCloudVerifier { + fn verify(proof_data: &[u8], vk_data: &[u8]) -> VerificationResult { + panic_safe::safe_verify(|| { + debug!( + proof_size = proof_data.len(), + vk_size = vk_data.len(), + "Starting ZisK-ZkCloud verification" + ); + + // Delegate to ZisK verifier implementation + let result = super::zisk::ZiskVerifier::verify(proof_data, vk_data)?; + + debug!( + verification_result = result, + "Completed ZisK-ZkCloud verification" + ); + + Ok(result) + }) + } + + fn name() -> &'static str { + "zisk-zkcloud" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_zisk_zkcloud_verifier_name() { + assert_eq!(ZkCloudVerifier::name(), "zisk-zkcloud"); + } + + #[test] + fn test_zisk_zkcloud_verification() { + // Load test proof and verification key + let test_proof_path = + PathBuf::from("src/test_proofs/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin"); + let vk_path = + PathBuf::from("src/verification_keys/zkcloud_884fcc21-d522-4b4a-b535-7cfde199485c.bin"); + + let proof_data = std::fs::read(&test_proof_path).expect("Failed to read test proof file"); + let vk_data = std::fs::read(&vk_path).expect("Failed to read verification key file"); + + // Verify the proof + let result = ZkCloudVerifier::verify(&proof_data, &vk_data); + + // The test should succeed + assert!( + result.is_ok(), + "ZisK-ZkCloud verification failed: {:?}", + result + ); + assert!( + result.unwrap(), + "ZisK-ZkCloud proof verification returned false" + ); + } +}
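Taken together, adding support for a new zkVM in this scheme amounts to implementing `ProofVerifier` and registering it against a prover UUID; a hedged sketch (the `ExampleVmVerifier` type and the UUID below are purely illustrative, not part of this PR):

```rust
use super::{ProofVerifier, VerificationResult, VerifierStore};
use uuid::Uuid;

/// Hypothetical verifier for a new zkVM backend.
pub struct ExampleVmVerifier;

impl ProofVerifier for ExampleVmVerifier {
    fn verify(proof_data: &[u8], _vk_data: &[u8]) -> VerificationResult {
        // A real backend would call into its verification library here.
        Ok(!proof_data.is_empty())
    }

    fn name() -> &'static str {
        "examplevm"
    }
}

fn register_example(store: &mut VerifierStore) {
    // Illustrative UUID only; real provers are identified by their Ethproofs UUID.
    let prover_id = Uuid::parse_str("11111111-2222-3333-4444-555555555555").expect("valid UUID");
    store.register(prover_id, ExampleVmVerifier::name(), ExampleVmVerifier::verify);
}
```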