diff --git a/.cargo/audit.toml b/.cargo/audit.toml new file mode 100644 index 00000000..38e462f5 --- /dev/null +++ b/.cargo/audit.toml @@ -0,0 +1,27 @@ +# RUSTSEC-2026-0049: CRL revocation checking bug in rustls-webpki 0.101.7. +# +# Background: CRL (Certificate Revocation List) checking is an optional TLS +# feature where a client fetches a list of revoked certificates from URLs +# embedded in the cert itself, to confirm it hasn't been invalidated since +# issuance. This is distinct from normal certificate validation. +# +# The bug: when a cert lists multiple CRL distribution point URLs, only the +# first URL is checked; the rest are silently ignored. This matters only when +# CRL checking is enabled AND the UnknownStatusPolicy is set to Allow (meaning +# "if I can't determine revocation status, accept the cert anyway"). With that +# combination, a revoked certificate from a compromised CA could be accepted. +# +# Why this does not affect Commit-Boost: the vulnerable code path is never +# reached because no code in this codebase enables CRL checking at all. +# TLS is used in four places: (1) relay communication via reqwest with +# rustls-tls uses default CA validation with no CRL configured; (2) the signer +# server presents a TLS certificate but does not check client revocation; +# (3) the signer client pins a single self-signed certificate via +# add_root_certificate — CRL is irrelevant for self-signed certs; (4) the Dirk +# remote signer uses mTLS with a custom CA but again no CRL. In all cases the +# buggy CRL code in rustls-webpki is never invoked. +# +# Blocked on sigp/lighthouse upgrading past v8.0.1 without a compilation +# regression (SseEventSource missing cfg guard in eth2 error.rs). 
+[advisories] +ignore = ["RUSTSEC-2026-0049"] \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8a9b42ae..381d4e12 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ jobs: - name: Install Rust toolchain uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2025-06-26 + toolchain: nightly-2026-01-01 components: clippy, rustfmt - name: Install protoc diff --git a/.gitignore b/.gitignore index 739e111a..22136e88 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ targets.json .idea/ logs .vscode/ +certs/ # Nix .direnv/ diff --git a/Cargo.lock b/Cargo.lock index a1ba2329..2de971f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 4 [[package]] name = "addr2line" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", ] @@ -25,7 +25,7 @@ checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" dependencies = [ "cfg-if", "cipher 0.3.0", - "cpufeatures", + "cpufeatures 0.2.17", "ctr 0.8.0", "opaque-debug", ] @@ -38,7 +38,7 @@ checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", "cipher 0.4.4", - "cpufeatures", + "cpufeatures 0.2.17", ] [[package]] @@ -55,9 +55,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] @@ -70,9 +70,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy" -version = "1.0.35" +version = 
"1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d8f4cc1a6f6e5d3adf05f93123932bfd5168078a556d90dd9897bc0a75dee24" +checksum = "4973038846323e4e69a433916522195dce2947770076c03078fc21c80ea0f1c4" dependencies = [ "alloy-consensus", "alloy-contract", @@ -96,28 +96,29 @@ dependencies = [ [[package]] name = "alloy-chains" -version = "0.2.12" +version = "0.2.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3008b4f680adca5a81fad5f6cdbb561cca0cee7e97050756c2c1f3e41d2103c" +checksum = "9247f0a399ef71aeb68f497b2b8fb348014f742b50d3b83b1e00dfe1b7d64b3d" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "num_enum", "strum", ] [[package]] name = "alloy-consensus" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bf3c28aa7a5765042739f964e335408e434819b96fdda97f12eb1beb46dead0" +checksum = "b0c0dc44157867da82c469c13186015b86abef209bf0e41625e4b68bac61d728" dependencies = [ "alloy-eips", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "alloy-serde", "alloy-trie", "alloy-tx-macros", "auto_impl", + "borsh", "c-kzg", "derive_more", "either", @@ -128,18 +129,18 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-consensus-any" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfda7b14f1664b6c23d7f38bca2b73c460f2497cf93dd1589753890cb0da158" +checksum = "ba4cdb42df3871cd6b346d6a938ec2ba69a9a0f49d1f82714bc5c48349268434" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "alloy-serde", "serde", @@ -147,16 +148,16 @@ dependencies = [ [[package]] name = "alloy-contract" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6cb079f711129dd32d6c3a0581013c927eb30d32e929d606cd8c0fe1022ec041" +checksum = "ca63b7125a981415898ffe2a2a696c83696c9c6bdb1671c8a912946bbd8e49e7" dependencies = [ "alloy-consensus", "alloy-dyn-abi", "alloy-json-abi", "alloy-network", "alloy-network-primitives", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-provider", "alloy-pubsub", "alloy-rpc-types-eth", @@ -165,36 +166,36 @@ dependencies = [ "futures", "futures-util", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-core" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfe6c56d58fbfa9f0f6299376e8ce33091fc6494239466814c3f54b55743cb09" +checksum = "23e8604b0c092fabc80d075ede181c9b9e596249c70b99253082d7e689836529" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "alloy-sol-types", ] [[package]] name = "alloy-dyn-abi" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f56873f3cac7a2c63d8e98a4314b8311aa96adb1a0f82ae923eb2119809d2c" +checksum = "cc2db5c583aaef0255aa63a4fe827f826090142528bba48d1bf4119b62780cad" dependencies = [ "alloy-json-abi", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-sol-type-parser", "alloy-sol-types", "itoa", "serde", "serde_json", - "winnow", + "winnow 0.7.15", ] [[package]] @@ -203,50 +204,66 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "crc", "serde", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-eip2930" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b82752a889170df67bbb36d42ca63c531eb16274f0d7299ae2a680facba17bd" 
+checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", + "borsh", "serde", ] [[package]] name = "alloy-eip7702" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d4769c6ffddca380b0070d71c8b7f30bed375543fe76bb2f74ec0acf4b7cd16" +checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", + "borsh", "k256", "serde", - "thiserror 2.0.16", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-eip7928" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8222b1d88f9a6d03be84b0f5e76bb60cd83991b43ad8ab6477f0e4a7809b98d" +dependencies = [ + "alloy-primitives 1.5.7", + "alloy-rlp", + "borsh", + "serde", ] [[package]] name = "alloy-eips" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e57928382e5c7890ef90ded9f814d85a1c3db79ceb4a3c5079f1be4cadeeb4" +checksum = "b9f7ef09f21bd1e9cb8a686f168cb4a206646804567f0889eadb8dcc4c9288c8" dependencies = [ "alloy-eip2124", "alloy-eip2930", "alloy-eip7702", - "alloy-primitives 1.3.1", + "alloy-eip7928", + "alloy-primitives 1.5.7", "alloy-rlp", "alloy-serde", "auto_impl", + "borsh", "c-kzg", "derive_more", "either", @@ -255,30 +272,31 @@ dependencies = [ "serde", "serde_with", "sha2 0.10.9", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-genesis" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3419410cdd67fb7d5d016d9d16cf3ea8cc365fcbcf15d086afdd02eaef17e4" +checksum = "7c9cf3b99f46615fbf7dc1add0c96553abb7bf88fc9ec70dfbe7ad0b47ba7fe8" dependencies = [ "alloy-eips", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-serde", "alloy-trie", + 
"borsh", "serde", "serde_with", ] [[package]] name = "alloy-json-abi" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "125a1c373261b252e53e04d6e92c37d881833afc1315fceab53fd46045695640" +checksum = "e9dbe713da0c737d9e5e387b0ba790eb98b14dd207fe53eef50e19a5a8ec3dac" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-sol-type-parser", "serde", "serde_json", @@ -286,31 +304,31 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17248e392e79658b1faca7946bfe59825b891c3f6e382044499d99c57ba36a89" +checksum = "ff42cd777eea61f370c0b10f2648a1c81e0b783066cd7269228aa993afd487f7" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-sol-types", - "http 1.3.1", + "http 1.4.0", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.18", "tracing", ] [[package]] name = "alloy-network" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe43d21867dc0dcf71aacffc891ae75fd587154f0d907ceb7340fc5f0271276d" +checksum = "8cbca04f9b410fdc51aaaf88433cbac761213905a65fe832058bcf6690585762" dependencies = [ "alloy-consensus", "alloy-consensus-any", "alloy-eips", "alloy-json-rpc", "alloy-network-primitives", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rpc-types-any", "alloy-rpc-types-eth", "alloy-serde", @@ -322,36 +340,36 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-network-primitives" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67f3b37447082a47289f26e26c0686ac6407710fdd4e818043d9b6d37f2ab55c" +checksum = "42d6d15e069a8b11f56bef2eccbad2a873c6dd4d4c81d04dda29710f5ea52f04" dependencies = [ "alloy-consensus", 
"alloy-eips", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-serde", "serde", ] [[package]] name = "alloy-primitives" -version = "0.8.25" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c77490fe91a0ce933a1f219029521f20fc28c2c0ca95d53fa4da9c00b8d9d4e" +checksum = "777d58b30eb9a4db0e5f59bc30e8c2caef877fee7dc8734cf242a51a60f22e05" dependencies = [ "alloy-rlp", "bytes", "cfg-if", "const-hex", "derive_more", - "foldhash", + "foldhash 0.1.5", "hashbrown 0.15.5", - "indexmap 2.11.4", + "indexmap 2.13.0", "itoa", "k256", "keccak-asm", @@ -367,37 +385,37 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc9485c56de23438127a731a6b4c87803d49faf1a7068dcd1d8768aca3a9edb9" +checksum = "de3b431b4e72cd8bd0ec7a50b4be18e73dab74de0dba180eef171055e5d5926e" dependencies = [ "alloy-rlp", "bytes", "cfg-if", "const-hex", "derive_more", - "foldhash", - "getrandom 0.3.3", - "hashbrown 0.15.5", - "indexmap 2.11.4", + "foldhash 0.2.0", + "getrandom 0.4.2", + "hashbrown 0.16.1", + "indexmap 2.13.0", "itoa", "k256", "keccak-asm", "paste", "proptest", "rand 0.9.2", + "rapidhash", "ruint", "rustc-hash", "serde", "sha3", - "tiny-keccak", ] [[package]] name = "alloy-provider" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6377212f3e659173b939e8d3ec3292e246cb532eafd5a4f91e57fdb104b43c" +checksum = "d181c8cc7cf4805d7e589bf4074d56d55064fa1a979f005a45a62b047616d870" dependencies = [ "alloy-chains", "alloy-consensus", @@ -405,7 +423,7 @@ dependencies = [ "alloy-json-rpc", "alloy-network", "alloy-network-primitives", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-pubsub", "alloy-rpc-client", "alloy-rpc-types-anvil", @@ -429,10 +447,10 @@ dependencies = [ "lru", "parking_lot", "pin-project", - "reqwest 0.12.23", + "reqwest 
0.12.28", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.18", "tokio", "tracing", "url", @@ -441,12 +459,12 @@ dependencies = [ [[package]] name = "alloy-pubsub" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d27b4f1ac3a0388065f933f957f80e03d06c47ce6a4389ac8cb9f72c30d8d823" +checksum = "e8bd82953194dec221aa4cbbbb0b1e2df46066fe9d0333ac25b43a311e122d13" dependencies = [ "alloy-json-rpc", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-transport", "auto_impl", "bimap", @@ -456,16 +474,16 @@ dependencies = [ "serde_json", "tokio", "tokio-stream", - "tower 0.5.2", + "tower 0.5.3", "tracing", "wasmtimer", ] [[package]] name = "alloy-rlp" -version = "0.3.12" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" +checksum = "e93e50f64a77ad9c5470bf2ad0ca02f228da70c792a8f06634801e202579f35e" dependencies = [ "alloy-rlp-derive", "arrayvec", @@ -474,23 +492,23 @@ dependencies = [ [[package]] name = "alloy-rlp-derive" -version = "0.3.12" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" +checksum = "ce8849c74c9ca0f5a03da1c865e3eb6f768df816e67dd3721a398a8a7e398011" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "alloy-rpc-client" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b80c8cafc1735ce6776bccc25f0c3b7583074897b8ec4f3a129e4d25e09d65c" +checksum = "f2792758a93ae32a32e9047c843d536e1448044f78422d71bf7d7c05149e103f" dependencies = [ "alloy-json-rpc", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-pubsub", "alloy-transport", "alloy-transport-http", @@ -498,12 +516,12 @@ dependencies = [ "alloy-transport-ws", "futures", 
"pin-project", - "reqwest 0.12.23", + "reqwest 0.12.28", "serde", "serde_json", "tokio", "tokio-stream", - "tower 0.5.2", + "tower 0.5.3", "tracing", "url", "wasmtimer", @@ -511,11 +529,11 @@ dependencies = [ [[package]] name = "alloy-rpc-types" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bc0818982bb868acc877f2623ad1fc8f2a4b244074919212bfe476fcadca6d3" +checksum = "7bdcbf9dfd5eea8bfeb078b1d906da8cd3a39c4d4dbe7a628025648e323611f6" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rpc-types-anvil", "alloy-rpc-types-beacon", "alloy-rpc-types-debug", @@ -529,11 +547,11 @@ dependencies = [ [[package]] name = "alloy-rpc-types-anvil" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "410403528db87ab4618e7f517b0f54e493c8a17bb61102cbccbb7a35e8719b5b" +checksum = "e0a3100b76987c1b1dc81f3abe592b7edc29e92b1242067a69d65e0030b35cf9" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -541,9 +559,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-any" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af8448a1eb2c81115fc8d9d50da24156c9ce8fca78a19a997184dcd81f99c229" +checksum = "dd720b63f82b457610f2eaaf1f32edf44efffe03ae25d537632e7d23e7929e1a" dependencies = [ "alloy-consensus-any", "alloy-rpc-types-eth", @@ -552,30 +570,31 @@ dependencies = [ [[package]] name = "alloy-rpc-types-beacon" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c20f653a4c1ab8289470e8eed55fe4f11354865b730685bb70b69a375524b27" +checksum = "4a22e13215866f5dfd5d3278f4c41f1fad9410dc68ce39022f58593c873c26f8" dependencies = [ "alloy-eips", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rpc-types-engine", + "derive_more", 
"ethereum_ssz", "ethereum_ssz_derive", "serde", "serde_json", "serde_with", - "thiserror 2.0.16", + "thiserror 2.0.18", "tree_hash", "tree_hash_derive 0.10.0", ] [[package]] name = "alloy-rpc-types-debug" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb22d465e02c015648138bc0d46951d267827551fc85922b60f58caa6a0e9c9" +checksum = "e1b21e1ad18ff1b31ff1030e046462ab8168cf8894e6778cd805c8bdfe2bd649" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "derive_more", "serde", "serde_with", @@ -583,13 +602,13 @@ dependencies = [ [[package]] name = "alloy-rpc-types-engine" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b968beee2ada53ef150fd90fbd2b7a3e5bcb66650e4d01757ff769c8af3d5ee" +checksum = "e4ac61f03f1edabccde1c687b5b25fff28f183afee64eaa2e767def3929e4457" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "alloy-serde", "derive_more", @@ -602,15 +621,15 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd7c1bc07b6c9222c4ad822da3cea0fbbfcbe2876cf5d4780e147a0da6fe2862" +checksum = "9b2dc411f13092f237d2bf6918caf80977fc2f51485f9b90cb2a2f956912c8c9" dependencies = [ "alloy-consensus", "alloy-consensus-any", "alloy-eips", "alloy-network-primitives", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "alloy-serde", "alloy-sol-types", @@ -618,30 +637,30 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-rpc-types-trace" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e54b3f616d9f30e11bc73e685f71da6f1682da5a3c2ca5206ec47f1d3bc96c7" +checksum = 
"1ad79f1e27e161943b5a4f99fe5534ef0849876214be411e0032c12f38e94daa" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rpc-types-eth", "alloy-serde", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-rpc-types-txpool" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15fc6b7b9465393a5b3fd38aba979f44438f172d9d0e6de732243c17d4246060" +checksum = "d459f902a2313737bc66d18ed094c25d2aeb268b74d98c26bbbda2aa44182ab0" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -649,84 +668,84 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8603b89af4ba0acb94465319e506b8c0b40a5daf563046bedd58d26c98dbd62c" +checksum = "e2ce1e0dbf7720eee747700e300c99aac01b1a95bb93f493a01e78ee28bb1a37" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "serde", "serde_json", ] [[package]] name = "alloy-signer" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddbea0531837cc7784ae6669b4a66918e6fb34c2daa2a7a888549dd565151c" +checksum = "2425c6f314522c78e8198979c8cbf6769362be4da381d4152ea8eefce383535d" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "async-trait", "auto_impl", "either", "elliptic-curve", "k256", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-signer-local" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3497f79c8a818f736d8de1c157a1ec66c0ce1da3fbb2f54c005097798282e59b" +checksum = "c3ecb71ee53d8d9c3fa7bac17542c8116ebc7a9726c91b1bf333ec3d04f5a789" dependencies = [ "alloy-consensus", "alloy-network", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", 
"alloy-signer", "async-trait", "k256", "rand 0.8.5", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "alloy-sol-macro" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d20d867dcf42019d4779519a1ceb55eba8d7f3d0e4f0a89bcba82b8f9eb01e48" +checksum = "ab81bab693da9bb79f7a95b64b394718259fdd7e41dceeced4cad57cb71c4f6a" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "alloy-sol-macro-expander" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74e91b0b553c115d14bd0ed41898309356dc85d0e3d4b9014c4e7715e48c8ad" +checksum = "489f1620bb7e2483fb5819ed01ab6edc1d2f93939dce35a5695085a1afd1d699" dependencies = [ "alloy-json-abi", "alloy-sol-macro-input", "const-hex", "heck", - "indexmap 2.11.4", + "indexmap 2.13.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.106", + "sha3", + "syn 2.0.117", "syn-solidity", - "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84194d31220803f5f62d0a00f583fd3a062b36382e2bea446f1af96727754565" +checksum = "56cef806ad22d4392c5fc83cf8f2089f988eb99c7067b4e0c6f1971fc1cca318" dependencies = [ "alloy-json-abi", "const-hex", @@ -736,40 +755,39 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.106", + "syn 2.0.117", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe8c27b3cf6b2bb8361904732f955bc7c05e00be5f469cec7e2280b6167f3ff0" +checksum = "a6df77fea9d6a2a75c0ef8d2acbdfd92286cc599983d3175ccdc170d3433d249" dependencies = [ "serde", - "winnow", + "winnow 0.7.15", ] [[package]] name = "alloy-sol-types" -version = 
"1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5383d34ea00079e6dd89c652bcbdb764db160cef84e6250926961a0b2295d04" +checksum = "64612d29379782a5dde6f4b6570d9c756d734d760c0c94c254d361e678a6591f" dependencies = [ "alloy-json-abi", - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-sol-macro", "serde", ] [[package]] name = "alloy-transport" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d259738315db0a2460581e22a1ca73ff02ef44687b43c0dad0834999090b3e7e" +checksum = "fa186e560d523d196580c48bf00f1bf62e63041f28ecf276acc22f8b27bb9f53" dependencies = [ "alloy-json-rpc", - "alloy-primitives 1.3.1", "auto_impl", "base64 0.22.1", "derive_more", @@ -778,9 +796,9 @@ dependencies = [ "parking_lot", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.18", "tokio", - "tower 0.5.2", + "tower 0.5.3", "tracing", "url", "wasmtimer", @@ -788,24 +806,25 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6332f6d470e465bf00f9306743ff172f54b83e7e31edfe28f1444c085ccb0e4" +checksum = "aa501ad58dd20acddbfebc65b52e60f05ebf97c52fa40d1b35e91f5e2da0ad0e" dependencies = [ "alloy-json-rpc", "alloy-transport", - "reqwest 0.12.23", + "itertools 0.14.0", + "reqwest 0.12.28", "serde_json", - "tower 0.5.2", + "tower 0.5.3", "tracing", "url", ] [[package]] name = "alloy-transport-ipc" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "865c13b9ce32b1a5227ac0f796faa9c08416aa4ea4e22b3a61a21ef110bda5ad" +checksum = "c2ef85688e5ac2da72afc804e0a1f153a1f309f05a864b1998bbbed7804dbaab" dependencies = [ "alloy-json-rpc", "alloy-pubsub", @@ -823,15 +842,14 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "1.0.35" +version = "1.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "da655a5099cc037cad636425cec389320a694b6ec0302472a74f71b3637d842d" +checksum = "b9f00445db69d63298e2b00a0ea1d859f00e6424a3144ffc5eba9c31da995e16" dependencies = [ "alloy-pubsub", "alloy-transport", "futures", - "http 1.3.1", - "rustls 0.23.32", + "http 1.4.0", "serde_json", "tokio", "tokio-tungstenite", @@ -841,31 +859,30 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.9.1" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3412d52bb97c6c6cc27ccc28d4e6e8cf605469101193b50b0bd5813b1f990b5" +checksum = "3f14b5d9b2c2173980202c6ff470d96e7c5e202c65a9f67884ad565226df7fbb" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", - "arrayvec", "derive_more", "nybbles", "serde", "smallvec", + "thiserror 2.0.18", "tracing", ] [[package]] name = "alloy-tx-macros" -version = "1.0.35" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2765badc6f621e1fc26aa70c520315866f0db6b8bd6bf3c560920d4fb33b08de" +checksum = "6fa0c53e8c1e1ef4d01066b01c737fb62fc9397ab52c6e7bb5669f97d281b9bc" dependencies = [ - "alloy-primitives 1.3.1", "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -885,9 +902,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.20" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" +checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d" dependencies = [ "anstyle", "anstyle-parse", @@ -900,44 +917,44 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" [[package]] name = "anstyle-parse" -version = "0.2.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.10" +version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "arbitrary" @@ -948,6 +965,15 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "arc-swap" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6" +dependencies = [ + "rustversion", +] + [[package]] name = "archery" version = "0.4.0" @@ -995,6 +1021,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ark-ff" +version = 
"0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" +dependencies = [ + "ark-ff-asm 0.5.0", + "ark-ff-macros 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "educe", + "itertools 0.13.0", + "num-bigint", + "num-traits", + "paste", + "zeroize", +] + [[package]] name = "ark-ff-asm" version = "0.3.0" @@ -1015,6 +1061,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ark-ff-asm" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" +dependencies = [ + "quote", + "syn 2.0.117", +] + [[package]] name = "ark-ff-macros" version = "0.3.0" @@ -1040,6 +1096,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ark-ff-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "ark-serialize" version = "0.3.0" @@ -1061,6 +1130,18 @@ dependencies = [ "num-bigint", ] +[[package]] +name = "ark-serialize" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" +dependencies = [ + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "num-bigint", +] + [[package]] name = "ark-std" version = "0.3.0" @@ -1081,6 +1162,16 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "ark-std" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + [[package]] name = "arrayref" version = "0.3.9" @@ -1092,15 +1183,12 @@ name = "arrayvec" 
version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] [[package]] name = "assert_cmd" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514" +checksum = "9a686bbee5efb88a82df0621b236e74d925f470e5445d3220a5648b892ec99c9" dependencies = [ "anstyle", "bstr", @@ -1130,7 +1218,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1141,7 +1229,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1169,7 +1257,7 @@ checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1178,6 +1266,28 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "aws-lc-rs" +version = "1.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a054912289d18629dc78375ba2c3726a3afe3ff71b4edba9dedfca0e3446d1fc" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa7e52a4c5c547c741610a2c6f123f3881e409b714cd27e6798ef020c514f0a" +dependencies = [ + "cc", + "cmake", + "dunce", + "fs_extra", +] + [[package]] name = "axum" version = "0.7.9" @@ -1188,7 +1298,7 @@ dependencies = [ "axum-core 0.4.5", "bytes", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "http-body-util", "itoa", @@ 
-1200,26 +1310,26 @@ dependencies = [ "rustversion", "serde", "sync_wrapper 1.0.2", - "tower 0.5.2", + "tower 0.5.3", "tower-layer", "tower-service", ] [[package]] name = "axum" -version = "0.8.4" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" +checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8" dependencies = [ - "axum-core 0.5.2", + "axum-core 0.5.6", "axum-macros", "bytes", "form_urlencoded", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-util", "itoa", "matchit 0.8.4", @@ -1227,14 +1337,13 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rustversion", - "serde", + "serde_core", "serde_json", "serde_path_to_error", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", - "tower 0.5.2", + "tower 0.5.3", "tower-layer", "tower-service", "tracing", @@ -1249,7 +1358,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "http-body-util", "mime", @@ -1262,18 +1371,17 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.5.2" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" dependencies = [ "bytes", "futures-core", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", - "rustversion", "sync_wrapper 1.0.2", "tower-layer", "tower-service", @@ -1282,25 +1390,25 @@ dependencies = [ [[package]] name = "axum-extra" -version = "0.10.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45bf463831f5131b7d3c756525b305d40f1185b688565648a92e1392ca35713d" +checksum = 
"9963ff19f40c6102c76756ef0a46004c0d58957d87259fc9208ff8441c12ab96" dependencies = [ - "axum 0.8.4", - "axum-core 0.5.2", + "axum 0.8.8", + "axum-core 0.5.6", "bytes", "futures-util", "headers", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "rustversion", - "serde", - "tower 0.5.2", + "serde_core", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -1311,14 +1419,36 @@ checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", +] + +[[package]] +name = "axum-server" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ab4a3ec9ea8a657c72d99a03a824af695bd0fb5ec639ccbd9cd3543b41a5f9" +dependencies = [ + "arc-swap", + "bytes", + "fs-err", + "http 1.4.0", + "http-body 1.0.1", + "hyper 1.8.1", + "hyper-util", + "pin-project-lite", + "rustls 0.23.37", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.4", + "tower-service", ] [[package]] name = "backtrace" -version = "0.3.75" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", @@ -1326,7 +1456,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -1365,9 +1495,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.8.0" +version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" [[package]] name = "bimap" @@ -1395,15 +1525,15 @@ checksum = 
"5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" [[package]] name = "bitcoin-io" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b47c4ab7a93edb0c7198c5535ed9b52b63095f4e9b45279c6736cec4b856baf" +checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953" [[package]] name = "bitcoin_hashes" -version = "0.14.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb18c03d0db0247e147a21a6faafd5a7eb851c743db062de72018b6b7e8e4d16" +checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b" dependencies = [ "bitcoin-io", "hex-conservative", @@ -1417,9 +1547,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.4" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" [[package]] name = "bitvec" @@ -1454,9 +1584,9 @@ dependencies = [ [[package]] name = "bls" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "arbitrary", "blst", "ethereum_hashing", @@ -1543,6 +1673,30 @@ dependencies = [ "zeroize", ] +[[package]] +name = "borsh" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd1e3f8955a5d7de9fab72fc8373fade9fb8a703968cb200ae3dc6cf08e185a" +dependencies = [ + "borsh-derive", + "bytes", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bfcfdc083699101d5a7965e49925975f2f55060f94f9a05e7187be95d530ca59" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "bs58" version = "0.5.1" @@ -1565,9 +1719,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "byte-slice-cast" @@ -1583,18 +1737,18 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" dependencies = [ "serde", ] [[package]] name = "c-kzg" -version = "2.1.4" +version = "2.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "137a2a2878ed823ef1bd73e5441e245602aae5360022113b8ad259ca4b5b8727" +checksum = "6648ed1e4ea8e8a1a4a2c78e1cda29a3fd500bc622899c340d8525ea9a76b24a" dependencies = [ "blst", "cc", @@ -1616,7 +1770,7 @@ name = "cb-bench-micro" version = "0.9.3" dependencies = [ "alloy", - "axum 0.8.4", + "axum 0.8.8", "cb-common", "cb-pbs", "cb-tests", @@ -1634,7 +1788,7 @@ dependencies = [ "comfy-table", "histogram", "rand 0.9.2", - "reqwest 0.12.23", + "reqwest 0.12.28", "serde", "serde_json", "tokio", @@ -1649,7 +1803,7 @@ dependencies = [ "clap", "docker-compose-types", "eyre", - "indexmap 2.11.4", + "indexmap 2.13.0", "serde_yaml", "tempfile", "toml", @@ -1662,11 +1816,12 @@ dependencies = [ "aes 0.8.4", "alloy", "async-trait", - "axum 0.8.4", + "axum 0.8.8", "base64 0.22.1", "bimap", "bytes", "cipher 0.4.4", + "const_format", "ctr 0.9.2", "derive_more", 
"docker-image", @@ -1677,19 +1832,23 @@ dependencies = [ "ethereum_ssz_derive", "eyre", "futures", + "headers-accept", "jsonwebtoken", "lazy_static", + "mediatype 0.20.0", "notify", "pbkdf2 0.12.2", "rand 0.9.2", "rayon", - "reqwest 0.12.23", + "reqwest 0.12.28", + "reqwest-eventsource", "serde", "serde_json", "serde_yaml", "sha2 0.10.9", "ssz_types", - "thiserror 2.0.16", + "tempfile", + "thiserror 2.0.18", "tokio", "toml", "tonic", @@ -1707,11 +1866,11 @@ dependencies = [ name = "cb-metrics" version = "0.9.3" dependencies = [ - "axum 0.8.4", + "axum 0.8.8", "cb-common", "eyre", "prometheus", - "thiserror 2.0.16", + "thiserror 2.0.18", "tokio", "tracing", ] @@ -1722,25 +1881,29 @@ version = "0.9.3" dependencies = [ "alloy", "async-trait", - "axum 0.8.4", + "axum 0.8.8", "axum-extra", "cb-common", "cb-metrics", + "ethereum_serde_utils 0.7.0", + "ethereum_ssz", "eyre", "futures", + "headers", "lazy_static", "notify", "parking_lot", "prometheus", - "reqwest 0.12.23", + "reqwest 0.12.28", "serde", "serde_json", "tokio", "tower-http", "tracing", "tree_hash", + "types", "url", - "uuid 1.18.1", + "uuid 1.22.0", ] [[package]] @@ -1748,8 +1911,9 @@ name = "cb-signer" version = "0.9.3" dependencies = [ "alloy", - "axum 0.8.4", + "axum 0.8.8", "axum-extra", + "axum-server", "bimap", "blsful", "cb-common", @@ -1763,13 +1927,14 @@ dependencies = [ "prometheus", "prost", "rand 0.9.2", - "thiserror 2.0.16", + "rustls 0.23.37", + "thiserror 2.0.18", "tokio", "tonic", "tonic-build", "tracing", "tree_hash", - "uuid 1.18.1", + "uuid 1.22.0", ] [[package]] @@ -1777,12 +1942,15 @@ name = "cb-tests" version = "0.9.3" dependencies = [ "alloy", - "axum 0.8.4", + "axum 0.8.8", "cb-common", "cb-pbs", "cb-signer", + "ethereum_ssz", "eyre", - "reqwest 0.12.23", + "jsonwebtoken", + "rcgen", + "reqwest 0.12.28", "serde", "serde_json", "tempfile", @@ -1798,19 +1966,21 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.38" +version = "1.2.57" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "80f41ae168f955c12fb8960b057d70d0ca153fb83182b57d86380443527be7e9" +checksum = "7a0dd1ca384932ff3641c8718a02769f1698e7563dc6974ffd03346116310423" dependencies = [ "find-msvc-tools", + "jobserver", + "libc", "shlex", ] [[package]] name = "cfg-if" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "cfg_aliases" @@ -1818,16 +1988,27 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" +[[package]] +name = "chacha20" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "rand_core 0.10.0", +] + [[package]] name = "chrono" -version = "0.4.42" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" dependencies = [ "iana-time-zone", "num-traits", "serde", - "windows-link 0.2.0", + "windows-link", ] [[package]] @@ -1878,9 +2059,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.48" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" +checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" dependencies = [ "clap_builder", "clap_derive", @@ -1888,33 +2069,42 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.48" +version = "4.6.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.1", + "strsim", ] [[package]] name = "clap_derive" -version = "4.5.47" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" +checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "clap_lex" -version = "0.7.5" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" + +[[package]] +name = "cmake" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +dependencies = [ + "cc", +] [[package]] name = "color-eyre" @@ -1945,15 +2135,15 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570" [[package]] name = "comfy-table" -version = "7.2.1" +version = "7.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03b7db8e0b4b2fdad6c551e634134e99ec000e5c8c3b6856c65e8bbaded7a3b" +checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" dependencies = [ "crossterm", "unicode-segmentation", @@ -1985,7 +2175,7 @@ dependencies = [ [[package]] name = "compare_fields" version = 
"0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "itertools 0.10.5", ] @@ -1993,7 +2183,7 @@ dependencies = [ [[package]] name = "compare_fields_derive" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "quote", "syn 1.0.109", @@ -2001,12 +2191,12 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.16.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6407bff74dea37e0fa3dc1c1c974e5d46405f0c987bf9997a0762adce71eda6" +checksum = "531185e432bb31db1ecda541e9e7ab21468d4d844ad7505e0546a49b4945d49b" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "proptest", "serde_core", ] @@ -2025,9 +2215,9 @@ checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" [[package]] name = "const_format" -version = "0.2.34" +version = "0.2.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" +checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" dependencies = [ "const_format_proc_macros", ] @@ -2046,7 +2236,7 @@ dependencies = [ [[package]] name = "context_deserialize" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "context_deserialize_derive", "milhouse", @@ -2057,12 +2247,21 @@ dependencies = [ [[package]] name = "context_deserialize_derive" version = "0.1.0" -source = 
"git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "quote", "syn 1.0.109", ] +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation" version = "0.9.4" @@ -2073,6 +2272,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -2097,11 +2306,20 @@ dependencies = [ "libc", ] +[[package]] +name = "cpufeatures" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" +dependencies = [ + "libc", +] + [[package]] name = "crc" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" dependencies = [ "crc-catalog", ] @@ -2188,7 +2406,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", "crossterm_winapi", "document-features", "parking_lot", @@ -2225,9 +2443,9 @@ dependencies = [ [[package]] name = "crypto-common" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array 0.14.7", "typenum", @@ -2268,7 +2486,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "curve25519-dalek-derive", "digest 0.10.7", "fiat-crypto", @@ -2285,7 +2503,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2304,16 +2522,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "darling" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" -dependencies = [ - "darling_core 0.13.4", - "darling_macro 0.13.4", -] - [[package]] name = "darling" version = "0.20.11" @@ -2335,17 +2543,13 @@ dependencies = [ ] [[package]] -name = "darling_core" -version = "0.13.4" +name = "darling" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.10.0", - "syn 1.0.109", + "darling_core 0.23.0", + "darling_macro 0.23.0", ] [[package]] @@ -2358,8 +2562,8 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim 0.11.1", - "syn 2.0.106", + "strsim", + "syn 2.0.117", ] [[package]] @@ -2373,19 +2577,21 @@ dependencies = [ "proc-macro2", "quote", "serde", - "strsim 0.11.1", - "syn 2.0.106", + "strsim", + "syn 2.0.117", ] [[package]] -name = "darling_macro" -version = "0.13.4" +name = "darling_core" +version = "0.23.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" dependencies = [ - "darling_core 0.13.4", + "ident_case", + "proc-macro2", "quote", - "syn 1.0.109", + "strsim", + "syn 2.0.117", ] [[package]] @@ -2396,7 +2602,7 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core 0.20.11", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2407,7 +2613,18 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core 0.21.3", "quote", - "syn 2.0.106", + "syn 2.0.117", +] + +[[package]] +name = "darling_macro" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" +dependencies = [ + "darling_core 0.23.0", + "quote", + "syn 2.0.117", ] [[package]] @@ -2426,15 +2643,15 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" [[package]] name = "data-encoding-macro" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47ce6c96ea0102f01122a185683611bd5ac8d99e62bc59dd12e6bda344ee673d" +checksum = "8142a83c17aa9461d637e649271eae18bf2edd00e91f2e105df36c3c16355bdb" dependencies = [ "data-encoding", "data-encoding-macro-internal", @@ -2442,12 +2659,12 @@ dependencies = [ [[package]] name = "data-encoding-macro-internal" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" +checksum = "7ab67060fc6b8ef687992d439ca0fa36e7ed17e9a0b16b25b601e8757df720de" dependencies = [ "data-encoding", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2462,12 +2679,12 @@ dependencies = [ [[package]] name = "deranged" -version = "0.5.3" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d630bccd429a5bb5a64b5e94f693bfc48c9f8566418fda4c494cc94f911f87cc" +checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] @@ -2489,7 +2706,7 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2510,7 +2727,7 @@ dependencies = [ "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2520,27 +2737,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "derive_more" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ "derive_more-impl", ] [[package]] name = "derive_more-impl" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case", "proc-macro2", "quote", - "syn 2.0.106", + "rustc_version 0.4.1", + "syn 2.0.117", "unicode-xid", ] @@ -2579,7 +2798,7 @@ checksum = 
"97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2589,7 +2808,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f5e899a3da7a90647ef302f7e3050b00ed7f3f02c7b32683a04f3fbd9052541" dependencies = [ "derive_builder", - "indexmap 2.11.4", + "indexmap 2.13.0", "serde", "serde_yaml", ] @@ -2606,15 +2825,15 @@ dependencies = [ [[package]] name = "doctest-file" -version = "1.0.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" +checksum = "c2db04e74f0a9a93103b50e90b96024c9b2bdca8bce6a632ec71b88736d3d359" [[package]] name = "document-features" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" +checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61" dependencies = [ "litrs", ] @@ -2680,14 +2899,14 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "eip4844" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa86cda6af15a9a5e4cf680850addaee8cd427be95be3ec9d022b9d7b98a66c0" +checksum = "82ab45fc63db6bbe5c3eb7c79303b2aff7ee529c991b2111c46879d1ea38407e" dependencies = [ "ekzg-bls12-381", "ekzg-maybe-rayon", @@ -2702,6 +2921,16 @@ dependencies = [ "sha2 0.10.9", ] +[[package]] +name = "eip_3076" +version = "0.1.0" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" +dependencies = [ + "ethereum_serde_utils 0.8.0", + "serde", + "types", +] + [[package]] name = "either" version = "1.15.0" @@ -2713,9 +2942,9 @@ dependencies = [ [[package]] name = "ekzg-bls12-381" -version = "0.9.0" +version = 
"0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f0e00a7689af7f4f17e85ae07f5a92b568a47297a165f685b828edfd82e02b" +checksum = "05c599a59deba6188afd9f783507e4d89efc997f0fa340a758f0d0992b322416" dependencies = [ "blst", "blstrs", @@ -2727,9 +2956,9 @@ dependencies = [ [[package]] name = "ekzg-erasure-codes" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bfc7ab684a7bb0c5ee37fd6a73da7425858cdd28f4a285c70361f001d6d0efc" +checksum = "8474a41a30ddd2b651798b1aa9ce92011207c3667186fe9044184683250109e7" dependencies = [ "ekzg-bls12-381", "ekzg-polynomial", @@ -2737,15 +2966,15 @@ dependencies = [ [[package]] name = "ekzg-maybe-rayon" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0a4876a612b9317be470768e134b671b8e645e412a82eb12fdd9b1958fa6f9" +checksum = "9cf94d1385185c1f7caef4973be49702c7d9ffdeaf832d126dbb9ed6efe09d40" [[package]] name = "ekzg-multi-open" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7964754aa0921aaa89b1589100e4cae9b31f87f137eeb0af5403fdfca68bfc" +checksum = "e6d37456a32cf79bdbddd6685a2adec73210e2d60332370bc0e9a502b6d93beb" dependencies = [ "ekzg-bls12-381", "ekzg-maybe-rayon", @@ -2755,9 +2984,9 @@ dependencies = [ [[package]] name = "ekzg-polynomial" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed36d2ddf86661c9d18e9d5dfc47dce6c9b6e44db385e2da71952b10ba32df1" +checksum = "704751bac85af4754bb8a14457ef24d820738062d0b6f3763534d0980b1a1e81" dependencies = [ "ekzg-bls12-381", "ekzg-maybe-rayon", @@ -2765,9 +2994,9 @@ dependencies = [ [[package]] name = "ekzg-serialization" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1c83402d591ac3534d1ae654feb8f56ee64cc2bacfe80bece7977c24ca5e72e2" +checksum = "3cb983d9f75b2804c00246def8d52c01cf05f70c22593b8d314fbcf0cf89042b" dependencies = [ "ekzg-bls12-381", "hex", @@ -2775,9 +3004,9 @@ dependencies = [ [[package]] name = "ekzg-single-open" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05e1dbb13023ccebbb24593e4753c87f77b7fb78254a20aef1a028e979145092" +checksum = "799d5806d51e1453fa0f528d6acf4127e2a89e98312c826151ebc24ee3448ec3" dependencies = [ "ekzg-bls12-381", "ekzg-polynomial", @@ -2786,9 +3015,9 @@ dependencies = [ [[package]] name = "ekzg-trusted-setup" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff1cb3e907b27fa51f35def95eeabe47e97765e2b6bac7e55967500937f94282" +checksum = "85314d56718dc2c6dd77c3b3630f1839defcb6f47d9c20195608a0f7976095ab" dependencies = [ "ekzg-bls12-381", "ekzg-serialization", @@ -2848,22 +3077,22 @@ dependencies = [ [[package]] name = "enum-ordinalize" -version = "4.3.0" +version = "4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea0dcfa4e54eeb516fe454635a95753ddd39acda650ce703031c6973e315dd5" +checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" dependencies = [ "enum-ordinalize-derive", ] [[package]] name = "enum-ordinalize-derive" -version = "4.3.1" +version = "4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" +checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2879,15 +3108,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.0", + "windows-sys 0.61.2", ] 
[[package]] name = "eth2" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "derivative", + "eip_3076", "either", "enr", "eth2_keystore", @@ -2897,7 +3127,7 @@ dependencies = [ "futures", "futures-util", "libp2p-identity", - "mediatype", + "mediatype 0.19.20", "multiaddr", "pretty_reqwest_error", "proto_array", @@ -2907,7 +3137,6 @@ dependencies = [ "sensitive_url", "serde", "serde_json", - "slashing_protection", "ssz_types", "test_random_derive", "types", @@ -2917,7 +3146,7 @@ dependencies = [ [[package]] name = "eth2_interop_keypairs" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "bls", "ethereum_hashing", @@ -2930,7 +3159,7 @@ dependencies = [ [[package]] name = "eth2_key_derivation" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "bls", "num-bigint-dig", @@ -2942,7 +3171,7 @@ dependencies = [ [[package]] name = "eth2_keystore" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "aes 0.7.5", "bls", @@ -2967,7 +3196,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" dependencies = [ - "cpufeatures", + "cpufeatures 0.2.17", "ring", "sha2 0.10.9", ] @@ -2978,7 +3207,7 @@ version 
= "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70cbccfccf81d67bff0ab36e591fa536c8a935b078a7b0e58c1d00d418332fc9" dependencies = [ - "alloy-primitives 0.8.25", + "alloy-primitives 0.8.26", "hex", "serde", "serde_derive", @@ -2991,7 +3220,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "hex", "serde", "serde_derive", @@ -3000,11 +3229,11 @@ dependencies = [ [[package]] name = "ethereum_ssz" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca8ba45b63c389c6e115b095ca16381534fdcc03cf58176a3f8554db2dbe19b" +checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "ethereum_serde_utils 0.8.0", "itertools 0.13.0", "serde", @@ -3015,14 +3244,14 @@ dependencies = [ [[package]] name = "ethereum_ssz_derive" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd55d08012b4e0dfcc92b8d6081234df65f2986ad34cc76eeed69c5e2ce7506" +checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" dependencies = [ "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -3103,30 +3332,11 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" -[[package]] -name = "field-offset" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f" -dependencies = [ - "memoffset", - "rustc_version 0.4.1", -] - -[[package]] -name = "filesystem" -version = "0.1.0" -source = 
"git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" -dependencies = [ - "winapi", - "windows-acl", -] - [[package]] name = "find-msvc-tools" -version = "0.1.2" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "fixed-hash" @@ -3143,9 +3353,9 @@ dependencies = [ [[package]] name = "fixed_bytes" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "safe_arith", ] @@ -3176,6 +3386,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "foreign-types" version = "0.3.2" @@ -3200,6 +3416,22 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs-err" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73fde052dbfc920003cfd2c8e2c6e6d4cc7c1091538c3a24226cec0665ab08c0" +dependencies = [ + "autocfg", + "tokio", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "fsevent-sys" version = "4.1.0" @@ -3217,9 +3449,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = 
"0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -3232,9 +3464,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -3242,15 +3474,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -3259,32 +3491,32 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = 
"e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-timer" @@ -3294,9 +3526,9 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -3306,7 +3538,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - "pin-utils", "slab", ] @@ -3329,45 +3560,60 @@ dependencies = [ [[package]] name = "generic-array" -version = "1.2.0" +version = "1.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c8444bc9d71b935156cc0ccab7f622180808af7867b1daae6547d773591703" +checksum = "eaf57c49a95fd1fe24b90b3033bee6dc7e8f1288d51494cb44e627c295e38542" dependencies = [ + "rustversion", "typenum", ] [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +checksum = 
"ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.1+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "js-sys", "libc", - "r-efi", - "wasi 0.14.7+wasi-0.2.4", + "r-efi 5.3.0", + "wasip2", "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "rand_core 0.10.0", + "wasip2", + "wasip3", +] + [[package]] name = "gimli" -version = "0.31.1" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" [[package]] name = "glob" @@ -3400,7 +3646,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.11.4", + "indexmap 2.13.0", "slab", "tokio", "tokio-util", @@ -3409,17 +3655,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.3.1", - "indexmap 2.11.4", + "http 1.4.0", + "indexmap 2.13.0", "slab", "tokio", "tokio-util", @@ -3459,17 +3705,22 @@ version = "0.15.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ - "allocator-api2", - "equivalent", - "foldhash", + "foldhash 0.1.5", "serde", ] [[package]] name = "hashbrown" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", + "serde", + "serde_core", +] [[package]] name = "hashlink" @@ -3489,19 +3740,30 @@ dependencies = [ "base64 0.22.1", "bytes", "headers-core", - "http 1.3.1", + "http 1.4.0", "httpdate", "mime", "sha1", ] +[[package]] +name = "headers-accept" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479bcb872e714e11f72fcc6a71afadbc86d0dbe887bc44252b04cfbc63272897" +dependencies = [ + "headers-core", + "http 1.4.0", + "mediatype 0.20.0", +] + [[package]] name = "headers-core" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" dependencies = [ - "http 1.3.1", + "http 1.4.0", ] [[package]] @@ -3524,20 +3786,20 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hex-conservative" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5313b072ce3c597065a808dbf612c4c8e8590bdbf8b579508bf7a762c5eae6cd" +checksum = "fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f" dependencies = [ "arrayvec", ] [[package]] name = "histogram" -version = "0.11.3" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95aebe0dec9a429e3207e5e34d97f2a7d1064d5ee6d8ed13ce0a26456de000ae" 
+checksum = "099d45a031296a7a40e01137b56c0c552f2a545568ef6058e47d674046def0db" dependencies = [ - "thiserror 1.0.69", + "thiserror 2.0.18", ] [[package]] @@ -3581,12 +3843,11 @@ dependencies = [ [[package]] name = "http" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", - "fnv", "itoa", ] @@ -3608,7 +3869,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.3.1", + "http 1.4.0", ] [[package]] @@ -3619,7 +3880,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "pin-project-lite", ] @@ -3662,16 +3923,16 @@ dependencies = [ [[package]] name = "hyper" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", "bytes", "futures-channel", "futures-core", - "h2 0.4.12", - "http 1.3.1", + "h2 0.4.13", + "http 1.4.0", "http-body 1.0.1", "httparse", "httpdate", @@ -3703,15 +3964,15 @@ version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "http 1.3.1", - "hyper 1.7.0", + "http 1.4.0", + "hyper 1.8.1", "hyper-util", - "rustls 0.23.32", + "rustls 0.23.37", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.3", + "tokio-rustls 0.26.4", "tower-service", - "webpki-roots 1.0.2", + "webpki-roots 1.0.6", ] [[package]] @@ -3720,7 +3981,7 @@ 
version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.7.0", + "hyper 1.8.1", "hyper-util", "pin-project-lite", "tokio", @@ -3748,7 +4009,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-util", "native-tls", "tokio", @@ -3758,24 +4019,23 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.17" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", - "futures-core", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", - "hyper 1.7.0", + "hyper 1.8.1", "ipnet", "libc", "percent-encoding", "pin-project-lite", - "socket2 0.6.0", - "system-configuration 0.6.1", + "socket2 0.6.3", + "system-configuration 0.7.0", "tokio", "tower-service", "tracing", @@ -3784,9 +4044,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.64" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3808,9 +4068,9 @@ dependencies = [ [[package]] name = "icu_collections" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", "potential_utf", @@ -3821,9 +4081,9 @@ 
dependencies = [ [[package]] name = "icu_locale_core" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -3834,11 +4094,10 @@ dependencies = [ [[package]] name = "icu_normalizer" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", @@ -3849,42 +4108,38 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "2.0.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ - "displaydoc", "icu_collections", "icu_locale_core", "icu_properties_data", "icu_provider", - "potential_utf", "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "2.0.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] name = "icu_provider" -version = "2.0.0" +version = "2.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", "icu_locale_core", - "stable_deref_trait", - "tinystr", "writeable", "yoke", "zerofrom", @@ -3892,6 +4147,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "ident_case" version = "1.0.1" @@ -3936,7 +4197,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -3958,23 +4219,23 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.11.4" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", - "hashbrown 0.16.0", + "hashbrown 0.16.1", "serde", "serde_core", ] [[package]] name = "inotify" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +checksum = "bd5b3eaf1a28b758ac0faa5a4254e8ab2705605496f1b1f3fbbc3988ad73d199" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", "inotify-sys", "libc", ] @@ -4000,48 +4261,37 @@ dependencies = [ [[package]] name = "int_to_bytes" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "bytes", ] [[package]] name = "interprocess" 
-version = "2.2.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d941b405bd2322993887859a8ee6ac9134945a24ec5ec763a8a962fc64dfec2d" +checksum = "6be5e5c847dbdb44564bd85294740d031f4f8aeb3464e5375ef7141f7538db69" dependencies = [ "doctest-file", "futures-core", "libc", "recvmsg", "tokio", - "widestring 1.2.0", + "widestring", "windows-sys 0.52.0", ] -[[package]] -name = "io-uring" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" -dependencies = [ - "bitflags 2.9.4", - "cfg-if", - "libc", -] - [[package]] name = "ipnet" -version = "2.11.0" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" [[package]] name = "iri-string" -version = "0.7.8" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" dependencies = [ "memchr", "serde", @@ -4055,14 +4305,14 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.61.0", + "windows-sys 0.61.2", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "itertools" @@ -4093,15 +4343,25 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.15" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] [[package]] name = "js-sys" -version = "0.3.80" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" dependencies = [ "once_cell", "wasm-bindgen", @@ -4137,18 +4397,18 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" dependencies = [ - "cpufeatures", + "cpufeatures 0.2.17", ] [[package]] name = "keccak-asm" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" +checksum = "b646a74e746cd25045aa0fd42f4f7f78aa6d119380182c7e63a5593c4ab8df6f" dependencies = [ "digest 0.10.7", "sha3-asm", @@ -4177,7 +4437,7 @@ dependencies = [ [[package]] name = "kzg" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "arbitrary", "c-kzg", @@ -4204,30 +4464,35 @@ dependencies = [ "spin", ] +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "libc" -version = "0.2.176" +version = "0.2.183" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" [[package]] name = "libm" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" [[package]] name = "libp2p-identity" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3104e13b51e4711ff5738caa1fb54467c8604c2e94d607e27745bcf709068774" +checksum = "f0c7892c221730ba55f7196e98b0b8ba5e04b4155651736036628e9f73ed6fc3" dependencies = [ "bs58", "hkdf", "multihash", - "quick-protobuf", "sha2 0.10.9", - "thiserror 2.0.16", + "thiserror 2.0.18", "tracing", ] @@ -4244,45 +4509,44 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "litemap" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "litrs" -version = "0.4.2" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" +checksum = 
"11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "lock_api" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.28" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "lru" -version = "0.13.0" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "227748d55f2f0ab4735d87fd623798cb6b664512fe979705f829c9f81c934465" +checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" dependencies = [ - "hashbrown 0.15.5", + "hashbrown 0.16.1", ] [[package]] @@ -4299,7 +4563,7 @@ checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4310,13 +4574,13 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "match-lookup" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1265724d8cb29dbbc2b0f06fffb8bf1a8c0cf73a78eede9ba73a4a66c52a981e" +checksum = "757aee279b8bdbb9f9e676796fd459e4207a1f986e87886700abf589f5abf771" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.117", ] [[package]] @@ -4347,26 +4611,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33746aadcb41349ec291e7f2f0a3aa6834d1d7c58066fb4b01f68efc4c4b7631" [[package]] -name = "memchr" -version = "2.7.5" +name = "mediatype" +version = "0.20.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +checksum = "f490ea2ae935dd8ac89c472d4df28c7f6b87cc20767e1b21fd5ed6a16e7f61e4" [[package]] -name = "memoffset" -version = "0.9.1" +name = "memchr" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "merkle_proof" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "ethereum_hashing", "fixed_bytes", "safe_arith", @@ -4386,25 +4647,25 @@ dependencies = [ [[package]] name = "metastruct" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d74f54f231f9a18d77393ecc5cc7ab96709b2a61ee326c2b2b291009b0cc5a07" +checksum = "969a1be9bd80794bdf93b23ab552c2ec6f3e83b33164824553fd996cdad513b8" dependencies = [ "metastruct_macro", ] [[package]] name = "metastruct_macro" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "985e7225f3a4dfbec47a0c6a730a874185fda840d365d7bbd6ba199dd81796d5" +checksum = "de9164f767d73a507c19205868c84da411dc7795f4bdabf497d3dd93cfef9930" dependencies = [ - "darling 0.13.4", - "itertools 0.10.5", + "darling 0.23.0", + "itertools 0.14.0", "proc-macro2", "quote", "smallvec", - "syn 1.0.109", + "syn 2.0.117", ] [[package]] @@ -4413,7 +4674,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bdb104e38d3a8c5ffb7e9d2c43c522e6bcc34070edbadba565e722f0dee56c7" dependencies = [ 
- "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "educe", "ethereum_hashing", "ethereum_ssz", @@ -4452,14 +4713,14 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.4" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", "log", - "wasi 0.11.1+wasi-snapshot-preview1", - "windows-sys 0.59.0", + "wasi", + "windows-sys 0.61.2", ] [[package]] @@ -4511,9 +4772,9 @@ checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" [[package]] name = "native-tls" -version = "0.2.14" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" dependencies = [ "libc", "log", @@ -4548,7 +4809,7 @@ version = "8.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", "fsevent-sys", "inotify", "kqueue", @@ -4562,17 +4823,20 @@ dependencies = [ [[package]] name = "notify-types" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" +checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a" +dependencies = [ + "bitflags 2.11.0", +] [[package]] name = "nu-ansi-term" -version = "0.50.1" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.52.0", + 
"windows-sys 0.61.2", ] [[package]] @@ -4587,11 +4851,10 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ - "byteorder", "lazy_static", "libm", "num-integer", @@ -4605,9 +4868,9 @@ dependencies = [ [[package]] name = "num-conv" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" [[package]] name = "num-integer" @@ -4651,9 +4914,9 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a973b4e44ce6cad84ce69d797acf9a044532e4184c4f267913d1b546a0727b7a" +checksum = "5d0bca838442ec211fa11de3a8b0e0e8f3a4522575b5c4c06ed722e005036f26" dependencies = [ "num_enum_derive", "rustversion", @@ -4661,20 +4924,20 @@ dependencies = [ [[package]] name = "num_enum_derive" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d" +checksum = "680998035259dcfcafe653688bf2aa6d3e2dc05e98be6ab46afb089dc84f1df8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "nybbles" -version = "0.4.5" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa11e84403164a9f12982ab728f3c67c6fd4ab5b5f0254ffc217bdbd3b28ab0" +checksum = "0d49ff0c0d00d4a502b39df9af3a525e1efeb14b9dabb5bb83335284c1309210" dependencies = [ "alloy-rlp", "cfg-if", @@ -4686,24 +4949,24 @@ dependencies = [ [[package]] name = 
"object" -version = "0.36.7" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.21.3" +version = "1.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" [[package]] name = "once_cell_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" [[package]] name = "oorandom" @@ -4719,11 +4982,11 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.73" +version = "0.10.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +checksum = "951c002c75e16ea2c65b8c7e4d3d51d5530d8dfa7d060b4776828c88cfb18ecf" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", "cfg-if", "foreign-types", "libc", @@ -4740,29 +5003,29 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "openssl-probe" -version = "0.1.6" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" [[package]] name = "openssl-src" -version = "300.5.3+3.5.4" +version = "300.5.5+3.5.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc6bad8cd0233b63971e232cc9c5e83039375b8586d2312f31fda85db8f888c2" +checksum = "3f1787d533e03597a7934fd0a765f0d28e94ecc5fb7789f8053b1e699a56f709" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.109" +version = "0.9.112" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +checksum = "57d55af3b3e226502be1526dfdba67ab0e9c96fc293004e79576b2b9edb0dbdb" dependencies = [ "cc", "libc", @@ -4773,9 +5036,9 @@ dependencies = [ [[package]] name = "owo-colors" -version = "4.2.2" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" +checksum = "d211803b9b6b570f68772237e415a029d5a50c65d382910b879fb19d3271f94d" [[package]] name = "pairing" @@ -4811,14 +5074,14 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "parking_lot" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", @@ -4826,15 +5089,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.11" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -4862,6 +5125,16 @@ dependencies = [ "hmac 0.12.1", ] +[[package]] +name = "pem" +version = "3.0.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" +dependencies = [ + "base64 0.22.1", + "serde_core", +] + [[package]] name = "percent-encoding" version = "2.3.2" @@ -4870,12 +5143,11 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.2" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21e0a3a33733faeaf8651dfee72dd0f388f0c8e5ad496a3478fa5a922f49cfa8" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" dependencies = [ "memchr", - "thiserror 2.0.16", "ucd-trie", ] @@ -4886,7 +5158,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" dependencies = [ "fixedbitset", - "indexmap 2.11.4", + "indexmap 2.13.0", ] [[package]] @@ -4901,29 +5173,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = 
"a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "pin-utils" @@ -4977,9 +5249,9 @@ dependencies = [ [[package]] name = "potential_utf" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" dependencies = [ "zerovec", ] @@ -5032,7 +5304,7 @@ dependencies = [ [[package]] name = "pretty_reqwest_error" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "reqwest 0.11.27", "sensitive_url", @@ -5045,7 +5317,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5061,11 +5333,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" dependencies = [ - "toml_edit 0.23.6", + "toml_edit 0.25.5+spec-1.1.0", ] [[package]] @@ -5087,14 +5359,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "proc-macro2" -version = "1.0.101" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ "unicode-ident", ] @@ -5111,19 +5383,18 @@ dependencies = [ 
"memchr", "parking_lot", "protobuf", - "thiserror 2.0.16", + "thiserror 2.0.18", ] [[package]] name = "proptest" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bb0be07becd10686a0bb407298fb425360a5c44a663774406340c59a22de4ce" +checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.9.4", - "lazy_static", + "bitflags 2.11.0", "num-traits", "rand 0.9.2", "rand_chacha 0.9.0", @@ -5160,7 +5431,7 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.106", + "syn 2.0.117", "tempfile", ] @@ -5174,7 +5445,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5189,7 +5460,7 @@ dependencies = [ [[package]] name = "proto_array" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "ethereum_ssz", "ethereum_ssz_derive", @@ -5226,15 +5497,6 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -[[package]] -name = "quick-protobuf" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" -dependencies = [ - "byteorder", -] - [[package]] name = "quinn" version = "0.11.9" @@ -5247,9 +5509,9 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls 0.23.32", - "socket2 0.6.0", - "thiserror 2.0.16", + "rustls 0.23.37", + "socket2 0.6.3", + "thiserror 2.0.18", "tokio", "tracing", "web-time", @@ -5257,20 +5519,20 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.13" +version = "0.11.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" dependencies = [ "bytes", - "getrandom 0.3.3", + "getrandom 0.3.4", "lru-slab", "rand 0.9.2", "ring", "rustc-hash", - "rustls 0.23.32", + "rustls 0.23.37", "rustls-pki-types", "slab", - "thiserror 2.0.16", + "thiserror 2.0.18", "tinyvec", "tracing", "web-time", @@ -5285,16 +5547,16 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2 0.6.0", + "socket2 0.6.3", "tracing", "windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.40" +version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" dependencies = [ "proc-macro2", ] @@ -5306,25 +5568,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] -name = "r2d2" -version = "0.8.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" -dependencies = [ - "log", - "parking_lot", - "scheduled-thread-pool", -] - -[[package]] -name = "r2d2_sqlite" -version = "0.21.0" +name = "r-efi" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f5d0337e99cd5cacd91ffc326c6cc9d8078def459df560c4f9bf9ba4a51034" -dependencies = [ - "r2d2", - "rusqlite", -] +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" [[package]] name = "radium" @@ -5351,10 +5598,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha 0.9.0", - "rand_core 0.9.3", + 
"rand_core 0.9.5", "serde", ] +[[package]] +name = "rand" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8" +dependencies = [ + "chacha20", + "getrandom 0.4.2", + "rand_core 0.10.0", +] + [[package]] name = "rand_chacha" version = "0.3.1" @@ -5372,7 +5630,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core 0.9.3", + "rand_core 0.9.5", ] [[package]] @@ -5381,19 +5639,25 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", ] [[package]] name = "rand_core" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" dependencies = [ - "getrandom 0.3.3", + "getrandom 0.3.4", "serde", ] +[[package]] +name = "rand_core" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba" + [[package]] name = "rand_xorshift" version = "0.3.0" @@ -5409,7 +5673,16 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" dependencies = [ - "rand_core 0.9.3", + "rand_core 0.9.5", +] + +[[package]] +name = "rapidhash" +version = "4.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e48930979c155e2f33aa36ab3119b5ee81332beb6482199a8ecd6029b80b59" +dependencies = [ + "rustversion", ] [[package]] @@ -5432,6 +5705,19 @@ dependencies 
= [ "crossbeam-utils", ] +[[package]] +name = "rcgen" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75e669e5202259b5314d1ea5397316ad400819437857b90861765f24c4cf80a2" +dependencies = [ + "pem", + "ring", + "rustls-pki-types", + "time", + "yasna", +] + [[package]] name = "recvmsg" version = "1.0.0" @@ -5440,38 +5726,38 @@ checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" [[package]] name = "redox_syscall" -version = "0.5.17" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", ] [[package]] name = "ref-cast" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "regex" -version = "1.11.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -5481,9 +5767,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.10" +version = "0.4.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" dependencies = [ "aho-corasick", "memchr", @@ -5492,9 +5778,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.6" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "reqwest" @@ -5544,20 +5830,20 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.23" +version = "0.12.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ "base64 0.22.1", "bytes", "encoding_rs", "futures-core", "futures-util", - "h2 0.4.12", - "http 1.3.1", + "h2 0.4.13", + "http 1.4.0", "http-body 1.0.1", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-rustls 0.27.7", "hyper-tls 0.6.0", "hyper-util", @@ -5568,7 +5854,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.32", + "rustls 0.23.37", "rustls-pki-types", "serde", "serde_json", @@ -5576,9 +5862,9 @@ dependencies = [ "sync_wrapper 1.0.2", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.3", + "tokio-rustls 0.26.4", "tokio-util", - "tower 0.5.2", + "tower 0.5.3", "tower-http", "tower-service", "url", @@ -5586,7 +5872,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 1.0.2", + "webpki-roots 1.0.6", ] [[package]] @@ -5623,7 +5909,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.16", + "getrandom 0.2.17", "libc", "untrusted", "windows-sys 
0.52.0", @@ -5650,13 +5936,14 @@ dependencies = [ [[package]] name = "ruint" -version = "1.16.0" +version = "1.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ecb38f82477f20c5c3d62ef52d7c4e536e38ea9b73fb570a20c5cae0e14bcf6" +checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" dependencies = [ "alloy-rlp", "ark-ff 0.3.0", "ark-ff 0.4.2", + "ark-ff 0.5.0", "bytes", "fastrlp 0.3.1", "fastrlp 0.4.0", @@ -5670,7 +5957,7 @@ dependencies = [ "rand 0.9.2", "rlp", "ruint-macro", - "serde", + "serde_core", "valuable", "zeroize", ] @@ -5697,9 +5984,9 @@ dependencies = [ [[package]] name = "rust_eth_kzg" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc46814bb8e72bff20fe117db43b7455112e6fafdae7466f8f24d451ad773c0" +checksum = "1522b7a740cd7f5bc52ea49863618511c8de138dcdf3f8a80b15b3f764942a5b" dependencies = [ "eip4844", "ekzg-bls12-381", @@ -5714,9 +6001,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" +checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d" [[package]] name = "rustc-hash" @@ -5750,15 +6037,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.2" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.61.0", + "windows-sys 0.61.2", ] [[package]] @@ -5775,15 +6062,16 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.32" +version = "0.23.37" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40" +checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" dependencies = [ + "aws-lc-rs", "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.6", + "rustls-webpki 0.103.10", "subtle", "zeroize", ] @@ -5808,9 +6096,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.12.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" dependencies = [ "web-time", "zeroize", @@ -5828,10 +6116,11 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.6" +version = "0.103.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8572f3c2cb9934231157b45499fc41e1f58c589fdfb81a844ba873265e80f8eb" +checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -5845,9 +6134,9 @@ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "rusty-fork" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" dependencies = [ "fnv", "quick-error", @@ -5857,14 +6146,15 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.20" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" [[package]] name = "safe_arith" version = "0.1.0" -source = 
"git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b147bb6111014916d3ef9d4c85173124a8e12193a67f6176d67244afd558d6c1" [[package]] name = "salsa20" @@ -5886,20 +6176,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" -dependencies = [ - "windows-sys 0.61.0", -] - -[[package]] -name = "scheduled-thread-pool" -version = "0.2.7" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939" dependencies = [ - "parking_lot", + "windows-sys 0.61.2", ] [[package]] @@ -5916,9 +6197,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.0.4" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" dependencies = [ "dyn-clone", "ref-cast", @@ -5992,12 +6273,12 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.11.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" dependencies = [ - "bitflags 2.9.4", - "core-foundation", + "bitflags 2.11.0", + "core-foundation 0.10.1", "core-foundation-sys", "libc", "security-framework-sys", @@ -6005,9 +6286,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.15.0" +version = "2.17.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" dependencies = [ "core-foundation-sys", "libc", @@ -6046,7 +6327,7 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "sensitive_url" version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "serde", "url", @@ -6054,9 +6335,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.226" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ "serde_core", "serde_derive", @@ -6073,35 +6354,35 @@ dependencies = [ [[package]] name = "serde_core" -version = "1.0.226" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.226" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", "serde_core", + "zmij", ] [[package]] @@ -6123,7 +6404,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6149,19 +6430,18 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.14.1" +version = "3.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c522100790450cf78eeac1507263d0a350d4d5b30df0c8e1fe051a10c22b376e" +checksum = "dd5414fad8e6907dbdd5bc441a50ae8d6e26151a03b1de04d89a5576de61d01f" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.11.4", + "indexmap 2.13.0", "schemars 0.9.0", - "schemars 1.0.4", - "serde", - "serde_derive", + "schemars 1.2.1", + "serde_core", "serde_json", "serde_with_macros", "time", @@ -6169,14 +6449,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.14.1" +version = "3.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327ada00f7d64abaac1e55a6911e90cf665aa051b9a561c7006c157f4633135e" +checksum = "d3db8978e608f1fe7357e211969fd9abdcae80bac1ba7a3369bb7eb6b404eb65" dependencies = [ - "darling 0.21.3", + "darling 0.23.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6185,7 +6465,7 @@ version = "0.9.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9" dependencies = [ - "indexmap 2.11.4", + "indexmap 2.13.0", "itoa", "ryu", "serde", @@ -6209,7 +6489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "digest 
0.10.7", ] @@ -6221,7 +6501,7 @@ checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "digest 0.9.0", "opaque-debug", ] @@ -6233,7 +6513,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "digest 0.10.7", ] @@ -6249,9 +6529,9 @@ dependencies = [ [[package]] name = "sha3-asm" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" +checksum = "b31139435f327c93c6038ed350ae4588e2c70a13d50599509fee6349967ba35a" dependencies = [ "cc", "cfg-if", @@ -6274,10 +6554,11 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.6" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] @@ -6293,27 +6574,9 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "slashing_protection" -version = "0.1.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" -dependencies = [ - "arbitrary", - "ethereum_serde_utils 0.8.0", - "filesystem", - "r2d2", - "r2d2_sqlite", - "rusqlite", - "serde", - "serde_json", - "tempfile", - "tracing", - "types", -] +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] 
name = "smallvec" @@ -6337,12 +6600,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.6.0" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -6379,9 +6642,9 @@ dependencies = [ [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "static_assertions" @@ -6394,24 +6657,18 @@ name = "status_api" version = "0.9.3" dependencies = [ "async-trait", - "axum 0.8.4", + "axum 0.8.8", "color-eyre", "commit-boost", "eyre", "lazy_static", "prometheus", - "reqwest 0.12.23", + "reqwest 0.12.28", "serde", "tokio", "tracing", ] -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "strsim" version = "0.11.1" @@ -6436,7 +6693,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6447,24 +6704,24 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "superstruct" -version = "0.8.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf0f31f730ad9e579364950e10d6172b4a9bd04b447edf5988b066a860cc340e" +checksum = "bae4a9ccd7882533c1f210e400763ec6ee64c390fc12248c238276281863719e" dependencies = [ - "darling 0.13.4", - "itertools 0.10.5", + "darling 0.23.0", + "itertools 0.14.0", "proc-macro2", "quote", "smallvec", - "syn 1.0.109", + 
"syn 2.0.117", ] [[package]] name = "swap_or_not_shuffle" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "ethereum_hashing", "fixed_bytes", ] @@ -6482,9 +6739,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -6493,14 +6750,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "1.3.1" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b198d366dbec045acfcd97295eb653a7a2b40e4dc764ef1e79aafcad439d3c" +checksum = "53f425ae0b12e2f5ae65542e00898d500d4d318b4baf09f40fd0d410454e9947" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6526,7 +6783,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6536,18 +6793,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys 0.5.0", ] [[package]] name = "system-configuration" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" dependencies = [ - 
"bitflags 2.9.4", - "core-foundation", + "bitflags 2.11.0", + "core-foundation 0.9.4", "system-configuration-sys 0.6.0", ] @@ -6579,15 +6836,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.23.0" +version = "3.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" dependencies = [ "fastrand", - "getrandom 0.3.3", + "getrandom 0.4.2", "once_cell", "rustix", - "windows-sys 0.61.0", + "windows-sys 0.61.2", ] [[package]] @@ -6599,7 +6856,7 @@ checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "test_random_derive" version = "0.2.0" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ "quote", "syn 1.0.109", @@ -6616,11 +6873,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.16" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl 2.0.16", + "thiserror-impl 2.0.18", ] [[package]] @@ -6631,18 +6888,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "thiserror-impl" -version = "2.0.16" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6685,30 +6942,30 @@ dependencies = [ [[package]] name = "time" -version = "0.3.44" +version = "0.3.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" dependencies = [ "deranged", "itoa", "num-conv", "powerfmt", - "serde", + "serde_core", "time-core", "time-macros", ] [[package]] name = "time-core" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" [[package]] name = "time-macros" -version = "0.2.24" +version = "0.2.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" dependencies = [ "num-conv", "time-core", @@ -6725,9 +6982,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", @@ -6745,9 +7002,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +checksum = "3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3" dependencies = [ "tinyvec_macros", ] @@ -6760,33 +7017,30 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" 
[[package]] name = "tokio" -version = "1.47.1" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" dependencies = [ - "backtrace", "bytes", - "io-uring", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", - "slab", - "socket2 0.6.0", + "socket2 0.6.3", "tokio-macros", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6811,19 +7065,19 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.3" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f63835928ca123f1bef57abbcd23bb2ba0ac9ae1235f1e65bda0d06e7786bd" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ - "rustls 0.23.32", + "rustls 0.23.37", "tokio", ] [[package]] name = "tokio-stream" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" dependencies = [ "futures-core", "pin-project-lite", @@ -6839,19 +7093,19 @@ checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" dependencies = [ "futures-util", "log", - "rustls 0.23.32", + "rustls 0.23.37", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.3", + "tokio-rustls 0.26.4", "tungstenite", "webpki-roots 0.26.11", ] [[package]] name = 
"tokio-util" -version = "0.7.16" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", @@ -6883,9 +7137,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.2" +version = "1.0.1+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1" +checksum = "9b320e741db58cac564e26c607d3cc1fdc4a88fd36c879568c07856ed83ff3e9" dependencies = [ "serde_core", ] @@ -6896,33 +7150,33 @@ version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.11.4", + "indexmap 2.13.0", "serde", "serde_spanned", "toml_datetime 0.6.11", "toml_write", - "winnow", + "winnow 0.7.15", ] [[package]] name = "toml_edit" -version = "0.23.6" +version = "0.25.5+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3effe7c0e86fdff4f69cdd2ccc1b96f933e24811c5441d44904e8683e27184b" +checksum = "8ca1a40644a28bce036923f6a431df0b34236949d111cc07cb6dca830c9ef2e1" dependencies = [ - "indexmap 2.11.4", - "toml_datetime 0.7.2", + "indexmap 2.13.0", + "toml_datetime 1.0.1+spec-1.1.0", "toml_parser", - "winnow", + "winnow 1.0.0", ] [[package]] name = "toml_parser" -version = "1.0.3" +version = "1.0.10+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627" +checksum = "7df25b4befd31c4816df190124375d5a20c6b6921e2cad937316de3fccd63420" dependencies = [ - "winnow", + "winnow 1.0.0", ] [[package]] @@ -6942,11 +7196,11 @@ dependencies = [ "axum 0.7.9", "base64 0.22.1", "bytes", - "h2 0.4.12", - "http 1.3.1", + "h2 
0.4.13", + "http 1.4.0", "http-body 1.0.1", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-timeout", "hyper-util", "percent-encoding", @@ -6955,7 +7209,7 @@ dependencies = [ "rustls-pemfile 2.2.0", "socket2 0.5.10", "tokio", - "tokio-rustls 0.26.3", + "tokio-rustls 0.26.4", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -6974,7 +7228,7 @@ dependencies = [ "prost-build", "prost-types", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6999,9 +7253,9 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" dependencies = [ "futures-core", "futures-util", @@ -7015,18 +7269,18 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.11.0", "bytes", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "iri-string", "pin-project-lite", - "tower 0.5.2", + "tower 0.5.3", "tower-layer", "tower-service", "tracing", @@ -7046,9 +7300,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "log", "pin-project-lite", @@ -7058,32 +7312,32 @@ dependencies = [ [[package]] name = "tracing-appender" -version = "0.2.3" +version = "0.2.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" dependencies = [ "crossbeam-channel", - "thiserror 1.0.69", + "thiserror 2.0.18", "time", "tracing-subscriber", ] [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", "valuable", @@ -7122,9 +7376,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.20" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" dependencies = [ "matchers", "nu-ansi-term", @@ -7143,9 +7397,9 @@ dependencies = [ [[package]] name = "tracing-test" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68" +checksum = "19a4c448db514d4f24c5ddb9f73f2ee71bfb24c526cf0c570ba142d1119e0051" dependencies = [ "tracing-core", "tracing-subscriber", @@ -7154,12 +7408,12 @@ dependencies = [ [[package]] name = "tracing-test-macro" -version = "0.2.5" +version = "0.2.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" +checksum = "ad06847b7afb65c7866a36664b75c40b895e318cea4f71299f013fb22965329d" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -7168,7 +7422,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee44f4cef85f88b4dea21c0b1f58320bdf35715cf56d840969487cff00613321" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "ethereum_hashing", "ethereum_ssz", "smallvec", @@ -7184,7 +7438,7 @@ dependencies = [ "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -7196,14 +7450,14 @@ dependencies = [ "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "triomphe" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" +checksum = "dd69c5aa8f924c7519d6372789a74eac5b94fb0f8fcf0d4a97eb0bfc3e785f39" dependencies = [ "serde", "stable_deref_trait", @@ -7223,29 +7477,29 @@ checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" dependencies = [ "bytes", "data-encoding", - "http 1.3.1", + "http 1.4.0", "httparse", "log", "rand 0.9.2", - "rustls 0.23.32", + "rustls 0.23.37", "rustls-pki-types", "sha1", - "thiserror 2.0.16", + "thiserror 2.0.18", "utf-8", ] [[package]] name = "typenum" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "types" version = "0.2.1" -source = "git+https://github.com/sigp/lighthouse?tag=v8.0.0-rc.0#e5b4983d6baf85770fe4539a565d8a2dd462bc53" +source = 
"git+https://github.com/sigp/lighthouse?tag=v8.0.0#e3ee7febce64c1b5a85c3ab0be0619571ee92d58" dependencies = [ - "alloy-primitives 1.3.1", + "alloy-primitives 1.5.7", "alloy-rlp", "bls", "compare_fields", @@ -7323,15 +7577,15 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.19" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" [[package]] name = "unicode-normalization" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" dependencies = [ "tinyvec", ] @@ -7344,9 +7598,9 @@ checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" [[package]] name = "unicode-xid" @@ -7374,14 +7628,15 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.7" +version = "2.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", + "serde_derive", ] [[package]] @@ -7408,20 +7663,20 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", "serde", ] [[package]] name = "uuid" -version = "1.18.1" +version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" dependencies = [ - "getrandom 0.3.3", + "getrandom 0.4.2", "js-sys", - "rand 0.9.2", - "serde", + "rand 0.10.0", + "serde_core", "wasm-bindgen", ] @@ -7457,7 +7712,7 @@ checksum = "8fabeca519a296f0b39428cfe496b600c0179c9498687986449d61fa40e60806" dependencies = [ "crypto-bigint", "elliptic-curve", - "generic-array 1.2.0", + "generic-array 1.3.5", "rand_core 0.6.4", "serde", "sha3", @@ -7500,28 +7755,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.14.7+wasi-0.2.4" +name = "wasip2" +version = "1.0.2+wasi-0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" dependencies = [ - "wasip2", + "wit-bindgen", ] [[package]] -name = "wasip2" -version = "1.0.1+wasi-0.2.4" +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" dependencies = [ "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.103" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819" +checksum = 
"6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" dependencies = [ "cfg-if", "once_cell", @@ -7530,27 +7785,14 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.103" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.106", - "wasm-bindgen-shared", -] - [[package]] name = "wasm-bindgen-futures" -version = "0.4.53" +version = "0.4.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b221ff421256839509adbb55998214a70d829d3a28c69b4a6672e9d2a42f67" +checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" dependencies = [ "cfg-if", + "futures-util", "js-sys", "once_cell", "wasm-bindgen", @@ -7559,9 +7801,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.103" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7569,26 +7811,48 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.103" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn 2.0.106", - "wasm-bindgen-backend", + "syn 2.0.117", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.103" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + "wasmparser", +] + [[package]] name = "wasm-streams" version = "0.4.2" @@ -7602,6 +7866,18 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver 1.0.27", +] + [[package]] name = "wasmtimer" version = "0.4.3" @@ -7618,9 +7894,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.80" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc" +checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" dependencies = [ "js-sys", "wasm-bindgen", @@ -7648,29 +7924,23 @@ version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" dependencies = [ - "webpki-roots 1.0.2", + "webpki-roots 1.0.6", ] [[package]] name = "webpki-roots" -version = "1.0.2" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" dependencies = [ "rustls-pki-types", ] [[package]] name = "widestring" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c" - -[[package]] -name = "widestring" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" [[package]] name = "winapi" @@ -7694,7 +7964,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.0", + "windows-sys 0.61.2", ] [[package]] @@ -7703,110 +7973,74 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-acl" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177b1723986bcb4c606058e77f6e8614b51c7f9ad2face6f6fd63dd5c8b3cec3" -dependencies = [ - "field-offset", - "libc", - "widestring 0.4.3", - "winapi", -] - [[package]] name = "windows-core" -version = "0.62.0" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57fe7168f7de578d2d8a05b07fd61870d2e73b4020e9f49aa00da8471723497c" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", - "windows-link 0.2.0", - "windows-result 0.4.0", - "windows-strings 0.5.0", + "windows-link", + "windows-result", + "windows-strings", ] [[package]] name = "windows-implement" -version = "0.60.0" 
+version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "windows-interface" -version = "0.59.1" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "windows-link" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - -[[package]] -name = "windows-link" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-registry" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" -dependencies = [ - "windows-link 0.1.3", - "windows-result 0.3.4", - "windows-strings 0.4.2", -] - -[[package]] -name = "windows-result" -version = "0.3.4" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ - "windows-link 0.1.3", + "windows-link", + "windows-result", + "windows-strings", ] [[package]] name = "windows-result" -version = "0.4.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" -dependencies = [ - "windows-link 0.2.0", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link 0.1.3", + "windows-link", ] [[package]] name = "windows-strings" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link 0.2.0", + "windows-link", ] [[package]] @@ -7827,31 +8061,22 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.53.3", + "windows-targets 0.53.5", ] [[package]] name = "windows-sys" -version = "0.61.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link 0.2.0", + "windows-link", ] [[package]] @@ -7887,19 +8112,19 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.3" +version = "0.53.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link 0.1.3", - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", ] [[package]] @@ -7916,9 +8141,9 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" @@ -7934,9 +8159,9 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" @@ -7952,9 +8177,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -7964,9 +8189,9 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" @@ -7982,9 +8207,9 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" @@ -8000,9 +8225,9 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" @@ -8018,9 +8243,9 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" @@ -8036,15 +8261,24 @@ 
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945" +dependencies = [ + "memchr", +] [[package]] name = "winnow" -version = "0.7.13" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" +checksum = "a90e88e4667264a994d34e6d1ab2d26d398dcdca8b7f52bec8668957517fc7d8" dependencies = [ "memchr", ] @@ -8061,15 +8295,97 @@ dependencies = [ [[package]] name = "wit-bindgen" -version = "0.46.0" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap 2.13.0", + "prettyplease", + "syn 2.0.117", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" 
+version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.117", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver 1.0.27", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "ws_stream_wasm" @@ -8084,7 +8400,7 @@ dependencies = [ "pharos", "rustc_version 0.4.1", "send_wrapper", - "thiserror 2.0.16", + "thiserror 2.0.18", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -8099,13 +8415,21 @@ dependencies = [ "tap", ] +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" +dependencies = [ + "time", +] + [[package]] name = "yoke" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -8113,34 +8437,34 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] [[package]] name = "zerocopy" -version = "0.8.27" +version = "0.8.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" +checksum = "efbb2a062be311f2ba113ce66f697a4dc589f85e78a4aea276200804cea0ed87" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.27" +version = "0.8.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" +checksum = "0e8bc7269b54418e7aeeef514aa68f8690b8c0489a06b0136e5f57c4c5ccab89" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8160,15 +8484,15 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" dependencies = [ "serde", "zeroize_derive", @@ -8176,20 +8500,20 @@ dependencies = [ [[package]] name = "zeroize_derive" -version = 
"1.4.2" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "zerotrie" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" dependencies = [ "displaydoc", "yoke", @@ -8198,9 +8522,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -8209,11 +8533,17 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml index 2a7e8255..83a27e93 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2024" -rust-version = "1.89" +rust-version = "1.91" version = "0.9.3" [workspace.dependencies] @@ -23,6 +23,7 @@ assert_cmd = "2.1.2" async-trait = "0.1.80" axum = { version = "0.8.1", features = 
["macros"] } axum-extra = { version = "0.10.0", features = ["typed-header"] } +axum-server = { version = "0.7.2", features = ["tls-rustls"] } base64 = "0.22.1" bimap = { version = "0.6.3", features = ["serde"] } blsful = "^2.5" @@ -37,6 +38,7 @@ cb-signer = { path = "crates/signer" } cipher = "0.4" clap = { version = "4.5.48", features = ["derive", "env"] } color-eyre = "0.6.3" +const_format = "0.2.34" ctr = "0.9.2" derive_more = { version = "2.0.1", features = ["deref", "display", "from", "into"] } docker-compose-types = "0.16.0" @@ -47,12 +49,14 @@ ethereum_ssz_derive = "0.9" eyre = "0.6.12" futures = "0.3.30" headers = "0.4.0" +headers-accept = "0.2.1" indexmap = "2.2.6" jsonwebtoken = { version = "9.3.1", default-features = false } lazy_static = "1.5.0" -lh_eth2 = { package = "eth2", git = "https://github.com/sigp/lighthouse", tag = "v8.0.0-rc.0" } -lh_eth2_keystore = { package = "eth2_keystore", git = "https://github.com/sigp/lighthouse", tag = "v8.0.0-rc.0" } -lh_types = { package = "types", git = "https://github.com/sigp/lighthouse", tag = "v8.0.0-rc.0" } +mediatype = "0.20.0" +lh_eth2 = { package = "eth2", git = "https://github.com/sigp/lighthouse", tag = "v8.0.0" } +lh_eth2_keystore = { package = "eth2_keystore", git = "https://github.com/sigp/lighthouse", tag = "v8.0.0" } +lh_types = { package = "types", git = "https://github.com/sigp/lighthouse", tag = "v8.0.0" } notify = "8.2.0" parking_lot = "0.12.3" pbkdf2 = "0.12.2" @@ -61,7 +65,9 @@ prometheus = "0.14.0" prost = "0.13.4" rand = { version = "0.9", features = ["os_rng"] } rayon = "1.10.0" -reqwest = { version = "0.12.4", features = ["json", "stream"] } +rcgen = "0.13.2" +reqwest = { version = "0.12.4", features = ["json", "rustls-tls", "stream"] } +rustls = "0.23.23" serde = { version = "1.0.202", features = ["derive"] } serde_json = "1.0.117" serde_yaml = "0.9.33" @@ -87,4 +93,4 @@ url = { version = "2.5.0", features = ["serde"] } uuid = { version = "1.8.0", features = ["fast-rng", "serde", "v4"] } 
[patch.crates-io] -blstrs_plus = { git = "https://github.com/Commit-Boost/blstrs" } +blstrs_plus = { git = "https://github.com/Commit-Boost/blstrs" } \ No newline at end of file diff --git a/api/signer-api.yml b/api/signer-api.yml index c876a3a2..95897ecd 100644 --- a/api/signer-api.yml +++ b/api/signer-api.yml @@ -1,7 +1,7 @@ -openapi: "3.0.2" +openapi: "3.1.1" info: title: Signer API - version: "0.1.0" + version: "0.2.0" description: API that allows commit modules to request generic signatures from validators tags: - name: Signer @@ -10,6 +10,13 @@ paths: /signer/v1/get_pubkeys: get: summary: Get a list of public keys for which signatures may be requested + description: > + This endpoint requires a valid JWT Bearer token. + + The token **must include** the following claims: + - `exp` (integer): Expiration timestamp + - `route` (string): The route being requested (must be `/signer/v1/get_pubkeys` for this endpoint). + - `module` (string): The ID of the module making the request, which must match a module ID in the Commit-Boost configuration file. tags: - Signer security: @@ -58,9 +65,17 @@ paths: type: string example: "Internal error" - /signer/v1/request_signature: + /signer/v1/request_signature/bls: post: - summary: Send a signature request + summary: Request a signature for a 32-byte blob of data (typically a hash), signed by the BLS private key for the requested public key. + description: > + This endpoint requires a valid JWT Bearer token. + + The token **must include** the following claims: + - `exp` (integer): Expiration timestamp + - `module` (string): The ID of the module making the request, which must match a module ID in the Commit-Boost configuration file. + - `route` (string): The route being requested (must be `/signer/v1/request_signature/bls` for this endpoint). + - `payload_hash` (string): The Keccak-256 hash of the JSON-encoded request body, with optional `0x` prefix. This is required to prevent JWT replay attacks. 
tags: - Signer security: @@ -71,63 +86,218 @@ paths: application/json: schema: type: object - required: [type, object_root] - oneOf: - - required: [pubkey] - - required: [proxy] + required: [pubkey, object_root] properties: - type: - description: Type of the sign request - type: string - enum: [consensus, proxy_bls, proxy_ecdsa] pubkey: - description: Public key of the validator for consensus signatures + description: The 48-byte BLS public key, with optional `0x` prefix, of the proposer key that you want to request a signature from. $ref: "#/components/schemas/BlsPubkey" + object_root: + description: The 32-byte data you want to sign, with optional `0x` prefix. + $ref: "#/components/schemas/B256" + nonce: + $ref: "#/components/schemas/Nonce" + example: + pubkey: "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989" + object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" + responses: + "200": + description: A successful signature response. + content: + application/json: + schema: + $ref: "#/components/schemas/BlsSignatureResponse" + example: + pubkey: "0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + object_root: "0x0123456789012345678901234567890123456789012345678901234567890123" + module_signing_id: "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" + signature: "0xa43e623f009e615faa3987368f64d6286a4103de70e9a81d82562c50c91eae2d5d6fb9db9fe943aa8ee42fd92d8210c1149f25ed6aa72a557d74a0ed5646fdd0e8255ec58e3e2931695fe913863ba0cdf90d29f651bce0a34169a6f6ce5b3115" + "400": + description: | + This can occur in several scenarios: + - The Commit-Boost configuration file does not contain a signing ID for the module that made the request. + - You requested an operation while using the Dirk signer mode instead of locally-managed signer mode, but Dirk doesn't support that operation. 
+ - Something went wrong while preparing your request; the error text will provide more information. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 400 + message: + type: string + example: "Bad request: Invalid pubkey format" + "401": + description: The requesting module did not provide a JWT string in the request's authorization header, or the JWT string was not configured in the signer service's configuration file as belonging to the module. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 401 + message: + type: string + example: "Unauthorized" + + "404": + description: You either requested a route that doesn't exist, or you requested a signature from a key that does not exist. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 404 + message: + type: string + example: "Unknown pubkey" + "429": + description: Your module attempted and failed JWT authentication too many times recently, and is currently timed out. It cannot make any more requests until the timeout ends. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 429 + message: + type: string + example: "Too many requests" + "500": + description: Your request was valid, but something went wrong internally that prevented it from being fulfilled. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 500 + message: + type: string + example: "Internal error" + "502": + description: The signer service is running in Dirk signer mode, but Dirk could not be reached. 
+ content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 502 + message: + type: string + example: "Bad gateway: Dirk signer service is unreachable" + + /signer/v1/request_signature/proxy-bls: + post: + summary: Request a signature for a 32-byte blob of data (typically a hash), signed by the BLS private key for the requested proxy public key. + description: > + This endpoint requires a valid JWT Bearer token. + + The token **must include** the following claims: + - `exp` (integer): Expiration timestamp + - `module` (string): The ID of the module making the request, which must match a module ID in the Commit-Boost configuration file. + - `route` (string): The route being requested (must be `/signer/v1/request_signature/proxy-bls` for this endpoint). + - `payload_hash` (string): The Keccak-256 hash of the JSON-encoded request body, with optional `0x` prefix. This is required to prevent JWT replay attacks. + tags: + - Signer + security: + - BearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + required: [proxy, object_root] + properties: proxy: - description: BLS proxy pubkey or ECDSA address for proxy signatures - oneOf: - - $ref: "#/components/schemas/BlsPubkey" - - $ref: "#/components/schemas/EcdsaAddress" + description: The 48-byte BLS public key, with optional `0x` prefix, of the proxy key that you want to request a signature from.
+ $ref: "#/components/schemas/BlsPubkey" object_root: - description: The root of the object to be signed - type: string - format: hex - pattern: "^0x[a-fA-F0-9]{64}$" - example: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" - examples: - Consensus: - value: - type: "consensus" - pubkey: "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989" - object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" - ProxyBls: - value: - type: "proxy_bls" - proxy: "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989" - object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" - ProxyEcdsa: - value: - type: "proxy_ecdsa" - proxy: "0x71f65e9f6336770e22d148bd5e89b391a1c3b0bb" - object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" + description: The 32-byte data you want to sign, with optional `0x` prefix. + $ref: "#/components/schemas/B256" + nonce: + $ref: "#/components/schemas/Nonce" + example: + proxy: "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989" + object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" responses: "200": - description: Success + description: A successful signature response.
content: application/json: schema: - oneOf: - - $ref: "#/components/schemas/BlsSignature" - - $ref: "#/components/schemas/EcdsaSignature" - examples: - Consensus: - value: "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989a3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989" - ProxyBls: - value: "0xa3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989a3ffa9241f78279f1af04644cb8c79c2d8f02bcf0e28e2f186f6dcccac0a869c2be441fda50f0dea895cfce2e53f0989" - ProxyEcdsa: - value: "0x985b495f49d1b96db3bba3f6c5dd1810950317c10d4c2042bd316f338cdbe74359072e209b85e56ac492092d7860063dd096ca31b4e164ef27e3f8d508e656801c" + $ref: "#/components/schemas/BlsSignatureResponse" + example: + pubkey: "0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + object_root: "0x0123456789012345678901234567890123456789012345678901234567890123" + module_signing_id: "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" + signature: "0xa43e623f009e615faa3987368f64d6286a4103de70e9a81d82562c50c91eae2d5d6fb9db9fe943aa8ee42fd92d8210c1149f25ed6aa72a557d74a0ed5646fdd0e8255ec58e3e2931695fe913863ba0cdf90d29f651bce0a34169a6f6ce5b3115" + "400": + description: | + This can occur in several scenarios: + - The Commit-Boost configuration file does not contain a signing ID for the module that made the request. + - You requested an operation while using the Dirk signer mode instead of locally-managed signer mode, but Dirk doesn't support that operation. + - Something went wrong while preparing your request; the error text will provide more information. 
+ content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 400 + message: + type: string + example: "Bad request: Invalid pubkey format" + "401": + description: The requesting module did not provide a JWT string in the request's authorization header, or the JWT string was not configured in the signer service's configuration file as belonging to the module. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 401 + message: + type: string + example: "Unauthorized" + "404": - description: Unknown value (pubkey, etc.) + description: You either requested a route that doesn't exist, or you requested a signature from a key that does not exist. content: application/json: schema: @@ -142,8 +312,172 @@ paths: message: type: string example: "Unknown pubkey" + "429": + description: Your module attempted and failed JWT authentication too many times recently, and is currently timed out. It cannot make any more requests until the timeout ends. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 429 + message: + type: string + example: "Too many requests" "500": - description: Internal error + description: Your request was valid, but something went wrong internally that prevented it from being fulfilled. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 500 + message: + type: string + example: "Internal error" + "502": + description: The signer service is running in Dirk signer mode, but Dirk could not be reached. 
+ content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 502 + message: + type: string + example: "Bad gateway: Dirk signer service is unreachable" + + /signer/v1/request_signature/proxy-ecdsa: + post: + summary: Request a signature for a 32-byte blob of data (typically a hash), signed by the ECDSA private key for the requested proxy Ethereum address. + description: > + This endpoint requires a valid JWT Bearer token. + + The token **must include** the following claims: + - `exp` (integer): Expiration timestamp + - `module` (string): The ID of the module making the request, which must match a module ID in the Commit-Boost configuration file. + - `route` (string): The route being requested (must be `/signer/v1/request_signature/proxy-ecdsa` for this endpoint). + - `payload_hash` (string): The Keccak-256 hash of the JSON-encoded request body, with optional `0x` prefix. This is required to prevent JWT replay attacks. + tags: + - Signer + security: + - BearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + type: object + required: [proxy, object_root] + properties: + proxy: + description: The 20-byte Ethereum address, with optional `0x` prefix, of the proxy key that you want to request a signature from. + $ref: "#/components/schemas/EcdsaAddress" + object_root: + description: The 32-byte data you want to sign, with optional `0x` prefix. + $ref: "#/components/schemas/B256" + nonce: + $ref: "#/components/schemas/Nonce" + example: + proxy: "0x71f65e9f6336770e22d148bd5e89b391a1c3b0bb" + object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" + responses: + "200": + description: A successful signature response. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/EcdsaSignatureResponse" + example: + address: "0x71f65e9f6336770e22d148bd5e89b391a1c3b0bb" + object_root: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" + module_signing_id: "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" + signature: "0x985b495f49d1b96db3bba3f6c5dd1810950317c10d4c2042bd316f338cdbe74359072e209b85e56ac492092d7860063dd096ca31b4e164ef27e3f8d508e656801c" + "400": + description: | + This can occur in several scenarios: + - The Commit-Boost configuration file does not contain a signing ID for the module that made the request. + - You requested an operation while using the Dirk signer mode instead of locally-managed signer mode, but Dirk doesn't support that operation. + - Something went wrong while preparing your request; the error text will provide more information. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 400 + message: + type: string + example: "Bad request: Invalid pubkey format" + "401": + description: The requesting module did not provide a JWT string in the request's authorization header, or the JWT string was not configured in the signer service's configuration file as belonging to the module. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 401 + message: + type: string + example: "Unauthorized" + + "404": + description: You either requested a route that doesn't exist, or you requested a signature from a key that does not exist. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 404 + message: + type: string + example: "Unknown pubkey" + "429": + description: Your module attempted and failed JWT authentication too many times recently, and is currently timed out. 
It cannot make any more requests until the timeout ends. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 429 + message: + type: string + example: "Too many requests" + "500": + description: Your request was valid, but something went wrong internally that prevented it from being fulfilled. content: application/json: schema: @@ -158,10 +492,34 @@ paths: message: type: string example: "Internal error" + "502": + description: The signer service is running in Dirk signer mode, but Dirk could not be reached. + content: + application/json: + schema: + type: object + required: + - code + - message + properties: + code: + type: number + example: 502 + message: + type: string + example: "Bad gateway: Dirk signer service is unreachable" /signer/v1/generate_proxy_key: post: summary: Request a proxy key be generated for a specific consensus pubkey + description: > + This endpoint requires a valid JWT Bearer token. + + The token **must include** the following claims: + - `exp` (integer): Expiration timestamp + - `module` (string): The ID of the module making the request, which must match a module ID in the Commit-Boost configuration file. + - `route` (string): The route being requested (must be `/signer/v1/generate_proxy_key` for this endpoint). + - `payload_hash` (string): The Keccak-256 hash of the JSON-encoded request body, with optional `0x` prefix. This is required to prevent JWT replay attacks. 
tags: - Signer security: @@ -261,20 +619,6 @@ paths: type: string example: "Internal error" - /status: - get: - summary: Get the status of the Signer API module - tags: - - Management - responses: - "200": - description: Success - content: - text/plain: - schema: - type: string - example: "OK" - components: securitySchemes: BearerAuth: @@ -282,6 +626,11 @@ components: scheme: bearer bearerFormat: JWT schemas: + B256: + type: string + format: hex + pattern: "^0x[a-fA-F0-9]{64}$" + example: "0x3e9f4a78b5c21d64f0b8e3d9a7f5c02b4d1e67a3c8f29b5d6e4a3b1c8f72e6d9" BlsPubkey: type: string format: hex @@ -302,3 +651,51 @@ components: format: hex pattern: "^0x[a-fA-F0-9]{130}$" example: "0x985b495f49d1b96db3bba3f6c5dd1810950317c10d4c2042bd316f338cdbe74359072e209b85e56ac492092d7860063dd096ca31b4e164ef27e3f8d508e656801c" + BlsSignatureResponse: + type: object + properties: + pubkey: + description: The BLS public key corresponding to the private key that was used to sign the request + $ref: "#/components/schemas/BlsPubkey" + object_root: + description: The 32-byte data that was signed, with `0x` prefix + $ref: "#/components/schemas/B256" + module_signing_id: + description: The signing ID of the module that requested the signature, as specified in the Commit-Boost configuration + $ref: "#/components/schemas/B256" + nonce: + $ref: "#/components/schemas/Nonce" + chain_id: + description: The chain ID that the signature is valid for, as specified in the Commit-Boost configuration + type: integer + example: 1 + signature: + description: The BLS signature of the Merkle root hash of the provided `object_root` field and the requesting module's Signing ID. For details on this signature, see the [signature structure documentation](https://commit-boost.github.io/commit-boost-client/developing/prop-commit-signing.md#structure-of-a-signature). 
+ $ref: "#/components/schemas/BlsSignature" + EcdsaSignatureResponse: + type: object + properties: + address: + description: The ECDSA address corresponding to the private key that was used to sign the request + $ref: "#/components/schemas/EcdsaAddress" + object_root: + description: The 32-byte data that was signed, with `0x` prefix + $ref: "#/components/schemas/B256" + module_signing_id: + description: The signing ID of the module that requested the signature, as specified in the Commit-Boost configuration + $ref: "#/components/schemas/B256" + nonce: + $ref: "#/components/schemas/Nonce" + chain_id: + description: The chain ID that the signature is valid for, as specified in the Commit-Boost configuration + type: integer + example: 1 + signature: + description: The ECDSA signature (in Ethereum RSV format) of the Merkle root hash of the provided `object_root` field and the requesting module's Signing ID. For details on this signature, see the [signature structure documentation](https://commit-boost.github.io/commit-boost-client/developing/prop-commit-signing.md#structure-of-a-signature). + $ref: "#/components/schemas/EcdsaSignature" + Nonce: + type: integer + description: If your module tracks nonces per signature (e.g., to prevent replay attacks), this is the unique nonce to use for the signature. It should be an unsigned 64-bit integer in big-endian format. It must be between 0 and 2^64-2, inclusive. If your module doesn't use nonces, we suggest setting this to 2^64-1 instead of 0 because 0 is a legal nonce and will cause complications with your module if you ever want to use a nonce in the future. 
+ minimum: 0 + maximum: 18446744073709551614 # 2^64-2 + example: 1 diff --git a/bin/src/lib.rs b/bin/src/lib.rs index 487a46ef..0897aa34 100644 --- a/bin/src/lib.rs +++ b/bin/src/lib.rs @@ -9,6 +9,9 @@ pub mod prelude { LogsSettings, PBS_SERVICE_NAME, StartCommitModuleConfig, load_builder_module_config, load_commit_module_config, load_pbs_config, load_pbs_custom_config, }, + signature::{ + verify_proposer_commitment_signature_bls, verify_proposer_commitment_signature_ecdsa, + }, signer::EcdsaSignature, types::{BlsPublicKey, BlsSignature, Chain}, utils::{initialize_tracing_log, utcnow_ms, utcnow_ns, utcnow_sec, utcnow_us}, diff --git a/bin/tests/binary.rs b/bin/tests/binary.rs index c000ed11..6352589e 100644 --- a/bin/tests/binary.rs +++ b/bin/tests/binary.rs @@ -23,6 +23,7 @@ key_path = "/keys/keys.json" id = "DA_COMMIT" type = "commit" docker_image = "test_da_commit" +signing_id = "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" "#; // --------------------------------------------------------------------------- diff --git a/config.example.toml b/config.example.toml index cc065442..f7745df4 100644 --- a/config.example.toml +++ b/config.example.toml @@ -49,9 +49,16 @@ min_bid_eth = 0.0 # to force local building and miniminzing the risk of missed slots. See also the timing games section below # OPTIONAL, DEFAULT: 2000 late_in_slot_time_ms = 2000 -# Whether to enable extra validation of get_header responses, if this is enabled `rpc_url` must also be set -# OPTIONAL, DEFAULT: false -extra_validation_enabled = false +# The level of validation to perform on get_header responses. Less is faster but not as safe.
Supported values: +# - "none": no validation, just accept the bid provided by the relay as-is and pass it back without decoding or checking it +# - "standard": perform standard validation of the header provided by the relay, which checks the bid's signature and several hashes to make sure it's legal (default) +# - "extra": perform extra validation on top of standard validation, which includes checking the bid against the execution layer via the `rpc_url` (requires `rpc_url` to be set) +# OPTIONAL, DEFAULT: standard +header_validation_mode = "standard" +# The level of validation to perform on submit_block responses. Less is faster but not as safe. Supported values: +# - "none": no validation, just accept the full unblinded block provided by the relay as-is and pass it back without decoding or checking it +# - "standard": perform standard validation of the unblinded block provided by the relay, which verifies things like the included KZG commitments and the block hash (default) +block_validation_mode = "standard" # Execution Layer RPC url to use for extra validation # OPTIONAL # rpc_url = "https://ethereum-holesky-rpc.publicnode.com" @@ -173,10 +180,10 @@ url = "http://0xa119589bb33ef52acbb8116832bec2b58fca590fe5c85eac5d3230b44d5bc09f # - Dirk: a remote Dirk instance # - Local: a local Signer module # More details on the docs (https://commit-boost.github.io/commit-boost-client/get_started/configuration/#signer-module) -# [signer] +[signer] # Docker image to use for the Signer module. 
# OPTIONAL, DEFAULT: ghcr.io/commit-boost/signer:latest -# docker_image = "ghcr.io/commit-boost/signer:latest" +docker_image = "ghcr.io/commit-boost/signer:latest" # Host to bind the Signer API server to # OPTIONAL, DEFAULT: 127.0.0.1 host = "127.0.0.1" @@ -186,10 +193,33 @@ port = 20000 # Number of JWT authentication attempts a client can fail before blocking that client temporarily from Signer access # OPTIONAL, DEFAULT: 3 jwt_auth_fail_limit = 3 -# How long to block a client from Signer access, in seconds, if it failed JWT authentication too many times +# How long to block a client from Signer access, in seconds, if it failed JWT authentication too many times. +# This also defines the interval at which failed attempts are regularly checked and expired ones are cleaned up. # OPTIONAL, DEFAULT: 300 jwt_auth_fail_timeout_seconds = 300 +# HTTP header to use to determine the real client IP, if the Signer is behind a proxy (e.g. nginx) +# OPTIONAL. If missing, the client IP will be taken directly from the TCP connection. +# [signer.reverse_proxy] +# Type of reverse proxy configuration. Supported values: +# - unique: use a single HTTP header value as the client IP. +# - rightmost: use the rightmost IP from a comma-separated list of IPs in the HTTP header. +# type = "unique" +# Unique: HTTP header name to use to determine the real client IP. If the header appears multiple times, the request will be rejected. +# header = "X-Real-IP" +# Rightmost: HTTP header name to use to determine the real client IP from a comma-separated list of IPs. If the header appears multiple times, the last value will be used. +# header = "X-Forwarded-For" +# Rightmost: number of trusted proxies in front of the Signer, whose IPs will be skipped when extracting the client IP from the rightmost side of the list. Must be greater than 0. 
+# trusted_count = 1 + +# [signer.tls_mode] +# How to use TLS for the Signer's HTTP server; two modes are supported: +# - type = "insecure": disable TLS, so the server runs in HTTP mode (not recommended for production). +# - type = "certificate": Use TLS. Include a property named "path" below this with the provided path; `path` should be a directory containing `cert.pem` and `key.pem` files to use. If they don't exist, they'll be automatically generated in self-signed mode. +# OPTIONAL, DEFAULT: +# type = "certificate" +# path = "./certs" + # For Remote signer: # [signer.remote] # URL of the Web3Signer instance @@ -272,6 +302,8 @@ proxy_dir = "./proxies" [[modules]] # Unique ID of the module id = "DA_COMMIT" +# Unique hash that the Signer service will combine with the incoming data in signing requests to generate a signature specific to this module +signing_id = "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" # Type of the module. Supported values: commit type = "commit" # Docker image of the module diff --git a/crates/cli/src/docker_init.rs b/crates/cli/src/docker_init.rs index 3cbde28a..7976ce17 100644 --- a/crates/cli/src/docker_init.rs +++ b/crates/cli/src/docker_init.rs @@ -6,16 +6,17 @@ use std::{ use cb_common::{ config::{ - CHAIN_SPEC_ENV, CONFIG_DEFAULT, CONFIG_ENV, CommitBoostConfig, DIRK_CA_CERT_DEFAULT, - DIRK_CA_CERT_ENV, DIRK_CERT_DEFAULT, DIRK_CERT_ENV, DIRK_DIR_SECRETS_DEFAULT, - DIRK_DIR_SECRETS_ENV, DIRK_KEY_DEFAULT, DIRK_KEY_ENV, JWTS_ENV, LOGS_DIR_DEFAULT, - LOGS_DIR_ENV, LogsSettings, METRICS_PORT_ENV, MODULE_ID_ENV, MODULE_JWT_ENV, ModuleKind, - PBS_ENDPOINT_ENV, PBS_SERVICE_NAME, PROXY_DIR_DEFAULT, PROXY_DIR_ENV, - PROXY_DIR_KEYS_DEFAULT, PROXY_DIR_KEYS_ENV, PROXY_DIR_SECRETS_DEFAULT, + ADMIN_JWT_ENV, CHAIN_SPEC_ENV, CONFIG_DEFAULT, CONFIG_ENV, CommitBoostConfig, + DIRK_CA_CERT_DEFAULT, DIRK_CA_CERT_ENV, DIRK_CERT_DEFAULT, DIRK_CERT_ENV, + DIRK_DIR_SECRETS_DEFAULT, DIRK_DIR_SECRETS_ENV, DIRK_KEY_DEFAULT, DIRK_KEY_ENV, 
JWTS_ENV, + LOGS_DIR_DEFAULT, LOGS_DIR_ENV, LogsSettings, METRICS_PORT_ENV, MODULE_ID_ENV, + MODULE_JWT_ENV, ModuleKind, PBS_ENDPOINT_ENV, PBS_SERVICE_NAME, PROXY_DIR_DEFAULT, + PROXY_DIR_ENV, PROXY_DIR_KEYS_DEFAULT, PROXY_DIR_KEYS_ENV, PROXY_DIR_SECRETS_DEFAULT, PROXY_DIR_SECRETS_ENV, SIGNER_DEFAULT, SIGNER_DIR_KEYS_DEFAULT, SIGNER_DIR_KEYS_ENV, SIGNER_DIR_SECRETS_DEFAULT, SIGNER_DIR_SECRETS_ENV, SIGNER_ENDPOINT_ENV, SIGNER_KEYS_ENV, - SIGNER_PORT_DEFAULT, SIGNER_SERVICE_NAME, SIGNER_URL_ENV, SignerConfig, SignerType, - StaticModuleConfig, + SIGNER_PORT_DEFAULT, SIGNER_SERVICE_NAME, SIGNER_TLS_CERTIFICATE_NAME, + SIGNER_TLS_CERTIFICATES_PATH_DEFAULT, SIGNER_TLS_CERTIFICATES_PATH_ENV, + SIGNER_TLS_KEY_NAME, SIGNER_URL_ENV, SignerConfig, SignerType, StaticModuleConfig, }, pbs::{BUILDER_V1_API_PATH, GET_STATUS_PATH}, signer::{ProxyStore, SignerLoader}, @@ -130,10 +131,7 @@ pub async fn handle_docker_init(config_path: PathBuf, output_dir: PathBuf) -> Re .as_ref() .map(|m| m.start_port) .unwrap_or_default(); - let needs_signer_module = service_config.config_info.cb_config.pbs.with_signer || - service_config.config_info.cb_config.modules.as_ref().is_some_and(|modules| { - modules.iter().any(|module| matches!(module.kind, ModuleKind::Commit)) - }); + let needs_signer_module = service_config.config_info.cb_config.needs_signer_module(); let signer_config = if needs_signer_module { Some(service_config.config_info.cb_config.signer.clone().ok_or_else(|| { eyre::eyre!( @@ -143,26 +141,21 @@ pub async fn handle_docker_init(config_path: PathBuf, output_dir: PathBuf) -> Re } else { None }; - let signer_server = if let Some(SignerConfig { inner: SignerType::Remote { url }, .. 
}) = - &service_config.config_info.cb_config.signer - { - url.to_string() - } else { - let signer_port = service_config - .config_info - .cb_config - .signer - .as_ref() - .map(|s| s.port) - .unwrap_or(SIGNER_PORT_DEFAULT); - format!("http://cb_signer:{signer_port}") - }; + let signer_server_url = + service_config.config_info.cb_config.signer_server_url(SIGNER_PORT_DEFAULT); + + // Warn if the certificates path is not set for a TLS signer + if service_config.config_info.cb_config.signer_certs_path().is_none() { + service_config.warnings.push( + "Signer TLS mode is set to Insecure, using HTTP instead of HTTPS for signer communication".to_string(), + ); + } // setup modules if let Some(ref modules_config) = service_config.config_info.cb_config.modules { for module in modules_config.clone() { let (module_cid, module_service) = - create_module_service(&module, signer_server.as_str(), &mut service_config)?; + create_module_service(&module, signer_server_url.as_str(), &mut service_config)?; services.insert(module_cid, Some(module_service)); } }; @@ -296,14 +289,24 @@ fn create_pbs_service(service_config: &mut ServiceCreationInfo) -> eyre::Result< service_config.metrics_port += 1; } - // Logging + // Logging env/volume if cb_config.logs.file.enabled { let (key, val) = get_env_val(LOGS_DIR_ENV, LOGS_DIR_DEFAULT); envs.insert(key, val); } + volumes.extend(get_log_volume(&cb_config.logs, PBS_SERVICE_NAME)?); + + // Certs env/volume + if cb_config.needs_signer_module() && + let Some(certs_path) = cb_config.signer_certs_path() + { + volumes.push(create_cert_binding(certs_path)); + let (key, val) = + get_env_val(SIGNER_TLS_CERTIFICATES_PATH_ENV, SIGNER_TLS_CERTIFICATES_PATH_DEFAULT); + envs.insert(key, val); + } // Create the service - volumes.extend(get_log_volume(&cb_config.logs, PBS_SERVICE_NAME)?); let pbs_service = Service { container_name: Some("cb_pbs".to_owned()), image: Some(cb_config.pbs.docker_image.clone()), @@ -338,8 +341,12 @@ fn create_signer_service_local( let 
cb_config = &service_config.config_info.cb_config; let config_volume = &service_config.config_info.config_volume; let metrics_port = service_config.metrics_port; - let mut envs = - IndexMap::from([get_env_val(CONFIG_ENV, CONFIG_DEFAULT), get_env_same(JWTS_ENV)]); + let mut envs = IndexMap::from([ + get_env_val(CONFIG_ENV, CONFIG_DEFAULT), + get_env_same(JWTS_ENV), + get_env_same(ADMIN_JWT_ENV), + get_env_val(SIGNER_TLS_CERTIFICATES_PATH_ENV, SIGNER_TLS_CERTIFICATES_PATH_DEFAULT), + ]); let mut volumes = vec![config_volume.clone()]; // Bind the API to 0.0.0.0 @@ -373,7 +380,7 @@ fn create_signer_service_local( service_config.metrics_port += 1; } - // Logging + // Logging envs/volume if cb_config.logs.file.enabled { let (key, val) = get_env_val(LOGS_DIR_ENV, LOGS_DIR_DEFAULT); envs.insert(key, val); @@ -382,6 +389,7 @@ fn create_signer_service_local( // write jwts to env service_config.envs.insert(JWTS_ENV.into(), format_comma_separated(&service_config.jwts)); + service_config.envs.insert(ADMIN_JWT_ENV.into(), random_jwt_secret()); // Signer loader volumes and envs match loader { @@ -441,6 +449,11 @@ fn create_signer_service_local( } } + // Add TLS support if needed + if let Some(certs_path) = cb_config.signer_certs_path() { + add_tls_certs_volume(&mut volumes, certs_path)? 
+ } + // Create the service let signer_networks = vec![SIGNER_NETWORK.to_owned()]; let signer_service = Service { @@ -452,8 +465,8 @@ fn create_signer_service_local( environment: Environment::KvPair(envs), healthcheck: Some(Healthcheck { test: Some(HealthcheckTest::Single(format!( - "curl -f http://localhost:{}/status", - signer_config.port, + "curl -k -f {}/status", + cb_config.signer_server_url(SIGNER_PORT_DEFAULT), ))), interval: Some("30s".into()), timeout: Some("5s".into()), @@ -526,7 +539,7 @@ fn create_signer_service_dirk( service_config.metrics_port += 1; } - // Logging + // Logging env/volume if cb_config.logs.file.enabled { let (key, val) = get_env_val(LOGS_DIR_ENV, LOGS_DIR_DEFAULT); envs.insert(key, val); @@ -560,6 +573,11 @@ fn create_signer_service_dirk( None => {} } + // Add TLS support if needed + if let Some(certs_path) = cb_config.signer_certs_path() { + add_tls_certs_volume(&mut volumes, certs_path)? + } + // Create the service let signer_networks = vec![SIGNER_NETWORK.to_owned()]; let signer_service = Service { @@ -614,6 +632,14 @@ fn create_module_service( get_env_val(SIGNER_URL_ENV, signer_server), ]); + if cb_config.signer_uses_tls() { + let env_val = get_env_val( + SIGNER_TLS_CERTIFICATES_PATH_ENV, + SIGNER_TLS_CERTIFICATES_PATH_DEFAULT, + ); + module_envs.insert(env_val.0, env_val.1); + } + // Pass on the env variables if let Some(envs) = &module.env { for (k, v) in envs { @@ -624,6 +650,9 @@ fn create_module_service( // volumes let mut module_volumes = vec![config_volume.clone()]; module_volumes.extend(get_log_volume(&cb_config.logs, &module.id)?); + if let Some(certs_path) = cb_config.signer_certs_path() { + module_volumes.push(create_cert_binding(certs_path)); + } // Chain spec env/volume if let Some(spec) = &service_config.chain_spec { @@ -749,10 +778,6 @@ fn get_env_uval(k: &str, v: u64) -> (String, Option) { (k.into(), Some(SingleValue::Unsigned(v))) } -// fn get_env_bool(k: &str, v: bool) -> (String, Option) { -// (k.into(), 
Some(SingleValue::Bool(v))) -// } - fn get_log_volume(config: &LogsSettings, module_id: &str) -> eyre::Result> { if !config.file.enabled { return Ok(None); @@ -769,11 +794,47 @@ fn format_comma_separated(map: &IndexMap) -> String { map.iter().map(|(k, v)| format!("{k}={v}")).collect::>().join(",") } +fn create_cert_binding(certs_path: &Path) -> Volumes { + Volumes::Simple(format!( + "{}:{}/{}:ro", + certs_path.join(SIGNER_TLS_CERTIFICATE_NAME).display(), + SIGNER_TLS_CERTIFICATES_PATH_DEFAULT, + SIGNER_TLS_CERTIFICATE_NAME + )) +} + +/// Adds the TLS cert and key bindings to the provided volumes list +fn add_tls_certs_volume(volumes: &mut Vec, certs_path: &Path) -> Result<()> { + if !certs_path.try_exists()? { + std::fs::create_dir(certs_path)?; + } + + if !certs_path.join(SIGNER_TLS_CERTIFICATE_NAME).try_exists()? || + !certs_path.join(SIGNER_TLS_KEY_NAME).try_exists()? + { + return Err(eyre::eyre!( + "Signer TLS certificate or key not found at {}, please provide a valid certificate and key or create them", + certs_path.display() + )); + } + + volumes.push(create_cert_binding(certs_path)); + volumes.push(Volumes::Simple(format!( + "{}:{}/{}:ro", + certs_path.join(SIGNER_TLS_KEY_NAME).display(), + SIGNER_TLS_CERTIFICATES_PATH_DEFAULT, + SIGNER_TLS_KEY_NAME + ))); + + Ok(()) +} + #[cfg(test)] mod tests { use cb_common::{ config::{ CommitBoostConfig, FileLogSettings, LogsSettings, MetricsConfig, StdoutLogSettings, + TlsMode, }, signer::{ProxyStore, SignerLoader}, }; @@ -1344,6 +1405,7 @@ mod tests { id = "DA_COMMIT" type = "commit" docker_image = "test_da_commit" + signing_id = "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" "#, ) .expect("valid module config") @@ -1416,4 +1478,241 @@ mod tests { } Ok(()) } + + // ------------------------------------------------------------------------- + // Helpers for TLS tests + // ------------------------------------------------------------------------- + + fn local_signer_config_with_tls(certs_path: PathBuf) 
-> SignerConfig { + let mut config = local_signer_config(); + config.tls_mode = TlsMode::Certificate(certs_path); + config + } + + /// Returns a `ServiceCreationInfo` whose CB config has `pbs.with_signer = + /// true` and a local signer with `TlsMode::Certificate(certs_path)`. + fn service_config_with_tls(certs_path: PathBuf) -> ServiceCreationInfo { + let mut sc = minimal_service_config(); + sc.config_info.cb_config.pbs.with_signer = true; + sc.config_info.cb_config.signer = Some(local_signer_config_with_tls(certs_path)); + sc + } + + // ------------------------------------------------------------------------- + // create_cert_binding + // ------------------------------------------------------------------------- + + #[test] + fn test_create_cert_binding_volume_string() { + let certs_path = Path::new("/my/certs"); + let vol = create_cert_binding(certs_path); + let expected = format!( + "/my/certs/{}:{}/{}:ro", + SIGNER_TLS_CERTIFICATE_NAME, + SIGNER_TLS_CERTIFICATES_PATH_DEFAULT, + SIGNER_TLS_CERTIFICATE_NAME + ); + assert_eq!(vol, Volumes::Simple(expected)); + } + + // ------------------------------------------------------------------------- + // add_tls_certs_volume + // ------------------------------------------------------------------------- + + #[test] + fn test_add_tls_certs_volume_happy_path() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + let certs_path = dir.path(); + std::fs::write(certs_path.join(SIGNER_TLS_CERTIFICATE_NAME), b"cert")?; + std::fs::write(certs_path.join(SIGNER_TLS_KEY_NAME), b"key")?; + + let mut volumes = vec![]; + add_tls_certs_volume(&mut volumes, certs_path)?; + + assert_eq!(volumes.len(), 2); + assert!( + matches!(&volumes[0], Volumes::Simple(s) if s.contains(SIGNER_TLS_CERTIFICATE_NAME)) + ); + assert!(matches!(&volumes[1], Volumes::Simple(s) if s.contains(SIGNER_TLS_KEY_NAME))); + Ok(()) + } + + #[test] + fn test_add_tls_certs_volume_missing_cert_returns_error() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + 
let certs_path = dir.path(); + std::fs::write(certs_path.join(SIGNER_TLS_KEY_NAME), b"key")?; + + let result = add_tls_certs_volume(&mut vec![], certs_path); + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("certificate or key not found")); + Ok(()) + } + + #[test] + fn test_add_tls_certs_volume_missing_key_returns_error() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + let certs_path = dir.path(); + std::fs::write(certs_path.join(SIGNER_TLS_CERTIFICATE_NAME), b"cert")?; + + let result = add_tls_certs_volume(&mut vec![], certs_path); + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("certificate or key not found")); + Ok(()) + } + + #[test] + fn test_add_tls_certs_volume_missing_both_returns_error() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + let result = add_tls_certs_volume(&mut vec![], dir.path()); + assert!(result.is_err()); + Ok(()) + } + + #[test] + fn test_add_tls_certs_volume_creates_missing_directory() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + let certs_path = dir.path().join("new_certs_dir"); + assert!(!certs_path.exists(), "pre-condition: directory must not exist yet"); + + let result = add_tls_certs_volume(&mut vec![], &certs_path); + + // Directory created even though cert/key are absent + assert!(certs_path.exists(), "directory should have been created"); + // cert/key still missing → error + assert!(result.is_err()); + Ok(()) + } + + // ------------------------------------------------------------------------- + // create_pbs_service – TLS cert volume/env + // ------------------------------------------------------------------------- + + #[test] + fn test_create_pbs_service_with_tls_adds_cert_env_and_volume() -> eyre::Result<()> { + let mut sc = service_config_with_tls(PathBuf::from("/my/certs")); + let service = create_pbs_service(&mut sc)?; + + assert!(has_env_key(&service, SIGNER_TLS_CERTIFICATES_PATH_ENV)); + assert!(has_volume(&service, 
SIGNER_TLS_CERTIFICATE_NAME)); + Ok(()) + } + + #[test] + fn test_create_pbs_service_without_tls_no_cert_env() -> eyre::Result<()> { + let mut sc = minimal_service_config(); + let service = create_pbs_service(&mut sc)?; + + assert!(!has_env_key(&service, SIGNER_TLS_CERTIFICATES_PATH_ENV)); + assert!(!has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + Ok(()) + } + + // ------------------------------------------------------------------------- + // create_signer_service_local – TLS cert volumes + // ------------------------------------------------------------------------- + + #[test] + fn test_create_signer_service_local_with_tls_adds_cert_and_key_volumes() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + let certs_path = dir.path().to_path_buf(); + std::fs::write(certs_path.join(SIGNER_TLS_CERTIFICATE_NAME), b"cert")?; + std::fs::write(certs_path.join(SIGNER_TLS_KEY_NAME), b"key")?; + + let mut sc = service_config_with_tls(certs_path); + let signer_config = sc.config_info.cb_config.signer.clone().unwrap(); + let loader = SignerLoader::File { key_path: "/keys/keys.json".into() }; + let service = create_signer_service_local(&mut sc, &signer_config, &loader, &None)?; + + assert!(has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + assert!(has_volume(&service, SIGNER_TLS_KEY_NAME)); + Ok(()) + } + + #[test] + fn test_create_signer_service_local_without_tls_no_cert_key_volumes() -> eyre::Result<()> { + let mut sc = minimal_service_config(); + let signer_config = local_signer_config(); + let loader = SignerLoader::File { key_path: "/keys/keys.json".into() }; + let service = create_signer_service_local(&mut sc, &signer_config, &loader, &None)?; + + // SIGNER_TLS_CERTIFICATES_PATH_ENV is always emitted by the signer service, + // but no cert.pem / key.pem volume bindings should exist in insecure mode. 
+ assert!(!has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + assert!(!has_volume(&service, SIGNER_TLS_KEY_NAME)); + Ok(()) + } + + // ------------------------------------------------------------------------- + // create_signer_service_dirk – TLS cert volumes + // ------------------------------------------------------------------------- + + #[test] + fn test_create_signer_service_dirk_with_tls_adds_cert_and_key_volumes() -> eyre::Result<()> { + let dir = tempfile::tempdir()?; + let certs_path = dir.path().to_path_buf(); + std::fs::write(certs_path.join(SIGNER_TLS_CERTIFICATE_NAME), b"cert")?; + std::fs::write(certs_path.join(SIGNER_TLS_KEY_NAME), b"key")?; + + let mut sc = service_config_with_tls(certs_path); + let signer_config = dirk_signer_config(); + let service = create_signer_service_dirk( + &mut sc, + &signer_config, + Path::new("/certs/client.crt"), + Path::new("/certs/client.key"), + Path::new("/dirk_secrets"), + &None, + &None, + )?; + + assert!(has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + assert!(has_volume(&service, SIGNER_TLS_KEY_NAME)); + Ok(()) + } + + #[test] + fn test_create_signer_service_dirk_without_tls_no_cert_key_volumes() -> eyre::Result<()> { + let mut sc = minimal_service_config(); + let signer_config = dirk_signer_config(); + let service = create_signer_service_dirk( + &mut sc, + &signer_config, + Path::new("/certs/client.crt"), + Path::new("/certs/client.key"), + Path::new("/dirk_secrets"), + &None, + &None, + )?; + + assert!(!has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + assert!(!has_volume(&service, SIGNER_TLS_KEY_NAME)); + Ok(()) + } + + // ------------------------------------------------------------------------- + // create_module_service – TLS cert env/volume + // ------------------------------------------------------------------------- + + #[test] + fn test_create_module_service_with_signer_tls_adds_cert_env_and_volume() -> eyre::Result<()> { + let module = commit_module(); + let mut sc = 
service_config_with_tls(PathBuf::from("/my/certs")); + let (_, service) = create_module_service(&module, "https://cb_signer:20000", &mut sc)?; + + assert!(has_env_key(&service, SIGNER_TLS_CERTIFICATES_PATH_ENV)); + assert!(has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + Ok(()) + } + + #[test] + fn test_create_module_service_without_signer_tls_no_cert_env() -> eyre::Result<()> { + let module = commit_module(); + let mut sc = minimal_service_config(); + let (_, service) = create_module_service(&module, "http://cb_signer:20000", &mut sc)?; + + assert!(!has_env_key(&service, SIGNER_TLS_CERTIFICATES_PATH_ENV)); + assert!(!has_volume(&service, SIGNER_TLS_CERTIFICATE_NAME)); + Ok(()) + } } diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 5faaf031..b0ab7aab 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -17,6 +17,7 @@ base64.workspace = true bimap.workspace = true bytes.workspace = true cipher.workspace = true +const_format.workspace = true ctr.workspace = true derive_more.workspace = true docker-image.workspace = true @@ -25,11 +26,13 @@ ethereum_ssz.workspace = true ethereum_ssz_derive.workspace = true eyre.workspace = true futures.workspace = true +headers-accept.workspace = true jsonwebtoken.workspace = true lazy_static.workspace = true lh_eth2.workspace = true lh_eth2_keystore.workspace = true lh_types.workspace = true +mediatype.workspace = true notify.workspace = true pbkdf2.workspace = true rand.workspace = true @@ -51,3 +54,7 @@ tree_hash.workspace = true tree_hash_derive.workspace = true unicode-normalization.workspace = true url.workspace = true +reqwest-eventsource = "=0.5.0" + +[dev-dependencies] + tempfile.workspace = true \ No newline at end of file diff --git a/crates/common/src/commit/client.rs b/crates/common/src/commit/client.rs index 4e8e0961..98d8c26d 100644 --- a/crates/common/src/commit/client.rs +++ b/crates/common/src/commit/client.rs @@ -1,24 +1,29 @@ -use std::time::{Duration, Instant}; +use 
std::path::PathBuf; use alloy::primitives::Address; use eyre::WrapErr; -use reqwest::header::{AUTHORIZATION, HeaderMap, HeaderValue}; -use serde::Deserialize; +use reqwest::Certificate; +use serde::{Deserialize, Serialize}; use url::Url; use super::{ - constants::{GENERATE_PROXY_KEY_PATH, GET_PUBKEYS_PATH, REQUEST_SIGNATURE_PATH}, + constants::{GENERATE_PROXY_KEY_PATH, GET_PUBKEYS_PATH}, error::SignerClientError, request::{ EncryptionScheme, GenerateProxyRequest, GetPubkeysResponse, ProxyId, SignConsensusRequest, - SignProxyRequest, SignRequest, SignedProxyDelegation, + SignProxyRequest, SignedProxyDelegation, }, }; use crate::{ DEFAULT_REQUEST_TIMEOUT, - constants::SIGNER_JWT_EXPIRATION, - signer::EcdsaSignature, - types::{BlsPublicKey, BlsSignature, Jwt, ModuleId}, + commit::{ + constants::{ + REQUEST_SIGNATURE_BLS_PATH, REQUEST_SIGNATURE_PROXY_BLS_PATH, + REQUEST_SIGNATURE_PROXY_ECDSA_PATH, + }, + response::{BlsSignResponse, EcdsaSignResponse}, + }, + types::{BlsPublicKey, Jwt, ModuleId}, utils::create_jwt, }; @@ -28,65 +33,51 @@ pub struct SignerClient { /// Url endpoint of the Signer Module url: Url, client: reqwest::Client, - last_jwt_refresh: Instant, module_id: ModuleId, jwt_secret: Jwt, } impl SignerClient { /// Create a new SignerClient - pub fn new(signer_server_url: Url, jwt_secret: Jwt, module_id: ModuleId) -> eyre::Result { - let jwt = create_jwt(&module_id, &jwt_secret)?; - - let mut auth_value = - HeaderValue::from_str(&format!("Bearer {jwt}")).wrap_err("invalid jwt")?; - auth_value.set_sensitive(true); - - let mut headers = HeaderMap::new(); - headers.insert(AUTHORIZATION, auth_value); - - let client = reqwest::Client::builder() - .timeout(DEFAULT_REQUEST_TIMEOUT) - .default_headers(headers) - .build()?; - - Ok(Self { - url: signer_server_url, - client, - last_jwt_refresh: Instant::now(), - module_id, - jwt_secret, - }) - } - - fn refresh_jwt(&mut self) -> Result<(), SignerClientError> { - if self.last_jwt_refresh.elapsed() > 
Duration::from_secs(SIGNER_JWT_EXPIRATION) { - let jwt = create_jwt(&self.module_id, &self.jwt_secret)?; - - let mut auth_value = - HeaderValue::from_str(&format!("Bearer {jwt}")).wrap_err("invalid jwt")?; - auth_value.set_sensitive(true); - - let mut headers = HeaderMap::new(); - headers.insert(AUTHORIZATION, auth_value); - - self.client = reqwest::Client::builder() - .timeout(DEFAULT_REQUEST_TIMEOUT) - .default_headers(headers) - .build()?; + pub fn new( + signer_server_url: Url, + cert_path: Option, + jwt_secret: Jwt, + module_id: ModuleId, + ) -> eyre::Result { + let mut builder = reqwest::Client::builder().timeout(DEFAULT_REQUEST_TIMEOUT); + + // If a certificate path is provided, use it + if let Some(cert_path) = cert_path { + builder = builder + .use_rustls_tls() + .add_root_certificate(Certificate::from_pem(&std::fs::read(cert_path)?)?); } - Ok(()) + Ok(Self { url: signer_server_url, client: builder.build()?, module_id, jwt_secret }) + } + + fn create_jwt_for_payload( + &mut self, + route: &str, + payload: &T, + ) -> Result { + let payload_vec = serde_json::to_vec(payload)?; + create_jwt(&self.module_id, &self.jwt_secret, route, Some(&payload_vec)) + .wrap_err("failed to create JWT for payload") + .map_err(SignerClientError::JWTError) } /// Request a list of validator pubkeys for which signatures can be /// requested. 
// TODO: add more docs on how proxy keys work pub async fn get_pubkeys(&mut self) -> Result { - self.refresh_jwt()?; + let jwt = create_jwt(&self.module_id, &self.jwt_secret, GET_PUBKEYS_PATH, None) + .wrap_err("failed to create JWT for payload") + .map_err(SignerClientError::JWTError)?; let url = self.url.join(GET_PUBKEYS_PATH)?; - let res = self.client.get(url).send().await?; + let res = self.client.get(url).bearer_auth(jwt).send().await?; if !res.status().is_success() { return Err(SignerClientError::FailedRequest { @@ -99,14 +90,19 @@ impl SignerClient { } /// Send a signature request - async fn request_signature(&mut self, request: &SignRequest) -> Result + async fn request_signature( + &mut self, + route: &str, + request: &Q, + ) -> Result where + Q: Serialize, T: for<'de> Deserialize<'de>, { - self.refresh_jwt()?; + let jwt = self.create_jwt_for_payload(route, request)?; - let url = self.url.join(REQUEST_SIGNATURE_PATH)?; - let res = self.client.post(url).json(&request).send().await?; + let url = self.url.join(route)?; + let res = self.client.post(url).json(&request).bearer_auth(jwt).send().await?; let status = res.status(); let response_bytes = res.bytes().await?; @@ -126,22 +122,22 @@ impl SignerClient { pub async fn request_consensus_signature( &mut self, request: SignConsensusRequest, - ) -> Result { - self.request_signature(&request.into()).await + ) -> Result { + self.request_signature(REQUEST_SIGNATURE_BLS_PATH, &request).await } pub async fn request_proxy_signature_ecdsa( &mut self, request: SignProxyRequest
, - ) -> Result { - self.request_signature(&request.into()).await + ) -> Result { + self.request_signature(REQUEST_SIGNATURE_PROXY_ECDSA_PATH, &request).await } pub async fn request_proxy_signature_bls( &mut self, request: SignProxyRequest, - ) -> Result { - self.request_signature(&request.into()).await + ) -> Result { + self.request_signature(REQUEST_SIGNATURE_PROXY_BLS_PATH, &request).await } async fn generate_proxy_key( @@ -151,10 +147,10 @@ impl SignerClient { where T: ProxyId + for<'de> Deserialize<'de>, { - self.refresh_jwt()?; + let jwt = self.create_jwt_for_payload(GENERATE_PROXY_KEY_PATH, request)?; let url = self.url.join(GENERATE_PROXY_KEY_PATH)?; - let res = self.client.post(url).json(&request).send().await?; + let res = self.client.post(url).json(&request).bearer_auth(jwt).send().await?; let status = res.status(); let response_bytes = res.bytes().await?; diff --git a/crates/common/src/commit/constants.rs b/crates/common/src/commit/constants.rs index 7c9f948c..f2d5e94c 100644 --- a/crates/common/src/commit/constants.rs +++ b/crates/common/src/commit/constants.rs @@ -1,5 +1,13 @@ +use const_format::concatcp; + pub const GET_PUBKEYS_PATH: &str = "/signer/v1/get_pubkeys"; -pub const REQUEST_SIGNATURE_PATH: &str = "/signer/v1/request_signature"; +pub const REQUEST_SIGNATURE_BASE_PATH: &str = "/signer/v1/request_signature"; +pub const REQUEST_SIGNATURE_BLS_PATH: &str = concatcp!(REQUEST_SIGNATURE_BASE_PATH, "/bls"); +pub const REQUEST_SIGNATURE_PROXY_BLS_PATH: &str = + concatcp!(REQUEST_SIGNATURE_BASE_PATH, "/proxy-bls"); +pub const REQUEST_SIGNATURE_PROXY_ECDSA_PATH: &str = + concatcp!(REQUEST_SIGNATURE_BASE_PATH, "/proxy-ecdsa"); pub const GENERATE_PROXY_KEY_PATH: &str = "/signer/v1/generate_proxy_key"; pub const STATUS_PATH: &str = "/status"; pub const RELOAD_PATH: &str = "/reload"; +pub const REVOKE_MODULE_PATH: &str = "/revoke_jwt"; diff --git a/crates/common/src/commit/mod.rs b/crates/common/src/commit/mod.rs index 205785ff..193db630 100644 --- 
a/crates/common/src/commit/mod.rs +++ b/crates/common/src/commit/mod.rs @@ -2,3 +2,4 @@ pub mod client; pub mod constants; pub mod error; pub mod request; +pub mod response; diff --git a/crates/common/src/commit/request.rs b/crates/common/src/commit/request.rs index afa01807..a64e9a67 100644 --- a/crates/common/src/commit/request.rs +++ b/crates/common/src/commit/request.rs @@ -1,21 +1,22 @@ use std::{ + collections::HashMap, fmt::{self, Debug, Display}, str::FromStr, }; use alloy::{ hex, - primitives::{Address, B256}, + primitives::{Address, B256, aliases::B32}, }; -use derive_more::derive::From; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; use crate::{ + config::decode_string_to_map, constants::COMMIT_BOOST_DOMAIN, signature::verify_signed_message, - types::{BlsPublicKey, BlsSignature, Chain}, + types::{BlsPublicKey, BlsSignature, Chain, ModuleId}, }; pub trait ProxyId: Debug + Clone + TreeHash + Display { @@ -67,7 +68,8 @@ impl SignedProxyDelegation { &self.message.delegator, &self.message, &self.signature, - COMMIT_BOOST_DOMAIN, + None, + &B32::from(COMMIT_BOOST_DOMAIN), ) } } @@ -78,53 +80,20 @@ impl fmt::Display for SignedProxyDelegation { } } -// TODO(David): This struct shouldn't be visible to module authors -#[derive(Debug, Clone, Serialize, Deserialize, From)] -#[serde(tag = "type", rename_all = "snake_case")] -pub enum SignRequest { - Consensus(SignConsensusRequest), - ProxyBls(SignProxyRequest), - ProxyEcdsa(SignProxyRequest
), -} - -impl Display for SignRequest { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - SignRequest::Consensus(req) => write!( - f, - "Consensus(pubkey: {}, object_root: {})", - req.pubkey, - hex::encode_prefixed(req.object_root) - ), - SignRequest::ProxyBls(req) => write!( - f, - "BLS(proxy: {}, object_root: {})", - req.proxy, - hex::encode_prefixed(req.object_root) - ), - SignRequest::ProxyEcdsa(req) => write!( - f, - "ECDSA(proxy: {}, object_root: {})", - req.proxy, - hex::encode_prefixed(req.object_root) - ), - } - } -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SignConsensusRequest { pub pubkey: BlsPublicKey, pub object_root: B256, + pub nonce: u64, } impl SignConsensusRequest { - pub fn new(pubkey: BlsPublicKey, object_root: B256) -> Self { - Self { pubkey, object_root } + pub fn new(pubkey: BlsPublicKey, object_root: B256, nonce: u64) -> Self { + Self { pubkey, object_root, nonce } } pub fn builder(pubkey: BlsPublicKey) -> Self { - Self::new(pubkey, B256::ZERO) + Self::new(pubkey, B256::ZERO, u64::MAX - 1) } pub fn with_root>(self, object_root: R) -> Self { @@ -134,21 +103,38 @@ impl SignConsensusRequest { pub fn with_msg(self, msg: &impl TreeHash) -> Self { self.with_root(msg.tree_hash_root().0) } + + pub fn with_nonce(self, nonce: u64) -> Self { + Self { nonce, ..self } + } +} + +impl Display for SignConsensusRequest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Consensus(pubkey: {}, object_root: {}, nonce: {})", + self.pubkey, + hex::encode_prefixed(self.object_root), + self.nonce + ) + } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SignProxyRequest { pub proxy: T, pub object_root: B256, + pub nonce: u64, } impl SignProxyRequest { - pub fn new(proxy: T, object_root: B256) -> Self { - Self { proxy, object_root } + pub fn new(proxy: T, object_root: B256, nonce: u64) -> Self { + Self { proxy, object_root, nonce } } pub fn builder(proxy: T) -> Self { - 
Self::new(proxy, B256::ZERO) + Self::new(proxy, B256::ZERO, u64::MAX - 1) } pub fn with_root>(self, object_root: R) -> Self { @@ -158,6 +144,34 @@ impl SignProxyRequest { pub fn with_msg(self, msg: &impl TreeHash) -> Self { self.with_root(msg.tree_hash_root().0) } + + pub fn with_nonce(self, nonce: u64) -> Self { + Self { nonce, ..self } + } +} + +impl Display for SignProxyRequest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "BLS(proxy: {}, object_root: {}, nonce: {})", + self.proxy, + hex::encode_prefixed(self.object_root), + self.nonce + ) + } +} + +impl Display for SignProxyRequest
{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ECDSA(proxy: {}, object_root: {}, nonce: {})", + self.proxy, + hex::encode_prefixed(self.object_root), + self.nonce + ) + } } #[derive(Debug, Clone, Copy, Serialize, Deserialize)] @@ -208,6 +222,31 @@ pub struct GetPubkeysResponse { pub keys: Vec, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReloadRequest { + #[serde(default, deserialize_with = "deserialize_jwt_secrets")] + pub jwt_secrets: Option>, + pub admin_secret: Option, +} + +pub fn deserialize_jwt_secrets<'de, D>( + deserializer: D, +) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let raw: String = Deserialize::deserialize(deserializer)?; + + decode_string_to_map(&raw) + .map(Some) + .map_err(|_| serde::de::Error::custom("Invalid format".to_string())) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RevokeModuleRequest { + pub module_id: ModuleId, +} + /// Map of consensus pubkeys to proxies #[derive(Debug, Clone, Deserialize, Serialize)] pub struct ConsensusProxyMap { @@ -228,36 +267,6 @@ mod tests { use super::*; use crate::signer::EcdsaSignature; - #[test] - fn test_decode_request_signature() { - let data = r#"{ - "type": "consensus", - "pubkey": "0xa3366b54f28e4bf1461926a3c70cdb0ec432b5c92554ecaae3742d33fb33873990cbed1761c68020e6d3c14d30a22050", - "object_root": "0x5c89913beafa0472168e0ec05e349b4ceb9985d25ab9fa8de53a60208c85b3a5" - }"#; - - let request: SignRequest = serde_json::from_str(data).unwrap(); - assert!(matches!(request, SignRequest::Consensus(..))); - - let data = r#"{ - "type": "proxy_bls", - "proxy": "0xa3366b54f28e4bf1461926a3c70cdb0ec432b5c92554ecaae3742d33fb33873990cbed1761c68020e6d3c14d30a22050", - "object_root": "0x5c89913beafa0472168e0ec05e349b4ceb9985d25ab9fa8de53a60208c85b3a5" - }"#; - - let request: SignRequest = serde_json::from_str(data).unwrap(); - assert!(matches!(request, SignRequest::ProxyBls(..))); - - let data = r#"{ - "type": "proxy_ecdsa", - 
"proxy": "0x4ca9939a8311a7cab3dde201b70157285fa81a9d", - "object_root": "0x5c89913beafa0472168e0ec05e349b4ceb9985d25ab9fa8de53a60208c85b3a5" - }"#; - - let request: SignRequest = serde_json::from_str(data).unwrap(); - assert!(matches!(request, SignRequest::ProxyEcdsa(..))); - } - #[test] fn test_decode_response_signature() { let data = r#""0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000""#; @@ -298,7 +307,7 @@ mod tests { let _: SignedProxyDelegationBls = serde_json::from_str(data).unwrap(); - let data = r#"{ + let data = r#"{ "message": { "delegator": "0xa3366b54f28e4bf1461926a3c70cdb0ec432b5c92554ecaae3742d33fb33873990cbed1761c68020e6d3c14d30a22050", "proxy": "0x4ca9939a8311a7cab3dde201b70157285fa81a9d" @@ -309,6 +318,29 @@ mod tests { let _: SignedProxyDelegationEcdsa = serde_json::from_str(data).unwrap(); } + #[test] + fn test_reload_request_jwt_secrets_present() { + let data = r#"{"jwt_secrets": "module_a=secret1,module_b=secret2"}"#; + let req: ReloadRequest = serde_json::from_str(data).unwrap(); + let secrets = req.jwt_secrets.expect("should have secrets"); + assert_eq!(secrets.get(&ModuleId("module_a".into())), Some(&"secret1".to_string())); + assert_eq!(secrets.get(&ModuleId("module_b".into())), Some(&"secret2".to_string())); + } + + #[test] + fn test_reload_request_jwt_secrets_absent() { + let data = r#"{}"#; + let req: ReloadRequest = serde_json::from_str(data).unwrap(); + assert!(req.jwt_secrets.is_none()); + } + + #[test] + fn test_reload_request_jwt_secrets_invalid_format() { + // Missing '=' separator — decode_string_to_map should fail + let data = r#"{"jwt_secrets": "bad_value_no_equals"}"#; + assert!(serde_json::from_str::(data).is_err()); + } + #[test] fn test_decode_response_proxy_map() { let data = r#"{ diff --git a/crates/common/src/commit/response.rs b/crates/common/src/commit/response.rs new file 
mode 100644 index 00000000..0e984144 --- /dev/null +++ b/crates/common/src/commit/response.rs @@ -0,0 +1,53 @@ +use alloy::primitives::{Address, B256, U256}; +use serde::{Deserialize, Serialize}; + +use crate::{ + signer::EcdsaSignature, + types::{BlsPublicKey, BlsSignature}, +}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct BlsSignResponse { + pub pubkey: BlsPublicKey, + pub object_root: B256, + pub module_signing_id: B256, + pub nonce: u64, + pub chain_id: U256, + pub signature: BlsSignature, +} + +impl BlsSignResponse { + pub fn new( + pubkey: BlsPublicKey, + object_root: B256, + module_signing_id: B256, + nonce: u64, + chain_id: U256, + signature: BlsSignature, + ) -> Self { + Self { pubkey, object_root, module_signing_id, nonce, chain_id, signature } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct EcdsaSignResponse { + pub address: Address, + pub object_root: B256, + pub module_signing_id: B256, + pub nonce: u64, + pub chain_id: U256, + pub signature: EcdsaSignature, +} + +impl EcdsaSignResponse { + pub fn new( + address: Address, + object_root: B256, + module_signing_id: B256, + nonce: u64, + chain_id: U256, + signature: EcdsaSignature, + ) -> Self { + Self { address, object_root, module_signing_id, nonce, chain_id, signature } + } +} diff --git a/crates/common/src/config/constants.rs b/crates/common/src/config/constants.rs index a2942f3a..fb5f3b08 100644 --- a/crates/common/src/config/constants.rs +++ b/crates/common/src/config/constants.rs @@ -44,7 +44,14 @@ pub const SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_DEFAULT: u32 = 5 * 60; /// Comma separated list module_id=jwt_secret pub const JWTS_ENV: &str = "CB_JWTS"; - +pub const ADMIN_JWT_ENV: &str = "CB_SIGNER_ADMIN_JWT"; + +/// Path to the certificates folder where the cert.pem and key.pem files are +/// stored/generated +pub const SIGNER_TLS_CERTIFICATES_PATH_ENV: &str = "CB_SIGNER_TLS_CERTIFICATES"; +pub const 
SIGNER_TLS_CERTIFICATES_PATH_DEFAULT: &str = "/certs"; +pub const SIGNER_TLS_CERTIFICATE_NAME: &str = "cert.pem"; +pub const SIGNER_TLS_KEY_NAME: &str = "key.pem"; /// Path to json file with plaintext keys (testing only) pub const SIGNER_KEYS_ENV: &str = "CB_SIGNER_LOADER_FILE"; pub const SIGNER_DEFAULT: &str = "/keys.json"; diff --git a/crates/common/src/config/mod.rs b/crates/common/src/config/mod.rs index 340bb888..e0958342 100644 --- a/crates/common/src/config/mod.rs +++ b/crates/common/src/config/mod.rs @@ -128,6 +128,40 @@ impl CommitBoostConfig { Err(_) => None, } } + + /// Helper to return if the signer module is needed based on the config + pub fn needs_signer_module(&self) -> bool { + self.pbs.with_signer || + self.modules.as_ref().is_some_and(|modules| { + modules.iter().any(|module| matches!(module.kind, ModuleKind::Commit)) + }) + } + + pub fn signer_uses_tls(&self) -> bool { + self.signer + .as_ref() + .is_some_and(|signer_config| matches!(signer_config.tls_mode, TlsMode::Certificate(_))) + } + + pub fn signer_server_url(&self, default_port: u16) -> String { + if let Some(SignerConfig { inner: SignerType::Remote { url }, .. 
}) = &self.signer { + url.to_string() + } else { + let signer_http_prefix = if self.signer_uses_tls() { "https" } else { "http" }; + let port = self.signer.as_ref().map(|s| s.port).unwrap_or(default_port); + format!("{signer_http_prefix}://cb_signer:{port}") + } + } + + pub fn signer_certs_path(&self) -> Option<&PathBuf> { + self.signer + .as_ref() + .map(|config| match &config.tls_mode { + TlsMode::Insecure => None, + TlsMode::Certificate(path) => Some(path), + }) + .unwrap_or_default() + } } /// Helper struct to load the chain spec file diff --git a/crates/common/src/config/module.rs b/crates/common/src/config/module.rs index 332560eb..22884551 100644 --- a/crates/common/src/config/module.rs +++ b/crates/common/src/config/module.rs @@ -1,5 +1,6 @@ -use std::collections::HashMap; +use std::{collections::HashMap, path::PathBuf}; +use alloy::primitives::B256; use eyre::{ContextCompat, Result}; use serde::{Deserialize, Serialize, de::DeserializeOwned}; use toml::Table; @@ -7,6 +8,7 @@ use toml::Table; use crate::{ commit::client::SignerClient, config::{ + SIGNER_TLS_CERTIFICATE_NAME, SIGNER_TLS_CERTIFICATES_PATH_ENV, SignerConfig, TlsMode, constants::{CONFIG_ENV, MODULE_ID_ENV, MODULE_JWT_ENV, SIGNER_URL_ENV}, load_env_var, utils::load_file_from_env, @@ -34,6 +36,8 @@ pub struct StaticModuleConfig { /// Type of the module #[serde(rename = "type")] pub kind: ModuleKind, + /// Signing ID for the module to use when requesting signatures + pub signing_id: B256, } /// Runtime config to start a module @@ -79,6 +83,7 @@ pub fn load_commit_module_config() -> Result { chain: Chain, modules: Vec>, + signer: SignerConfig, } // load module config including the extra data (if any) @@ -101,7 +106,16 @@ pub fn load_commit_module_config() -> Result None, + TlsMode::Certificate(path) => Some( + load_env_var(SIGNER_TLS_CERTIFICATES_PATH_ENV) + .map(PathBuf::from) + .unwrap_or(path) + .join(SIGNER_TLS_CERTIFICATE_NAME), + ), + }; + let signer_client = 
SignerClient::new(signer_server_url, certs_path, module_jwt, module_id)?; Ok(StartCommitModuleConfig { id: module_config.static_config.id, diff --git a/crates/common/src/config/mux.rs b/crates/common/src/config/mux.rs index d67f8487..afd3075c 100644 --- a/crates/common/src/config/mux.rs +++ b/crates/common/src/config/mux.rs @@ -330,7 +330,7 @@ where offset += limit; - if offset % 1000 == 0 { + if offset.is_multiple_of(1000) { debug!("fetched {offset} keys"); } } diff --git a/crates/common/src/config/pbs.rs b/crates/common/src/config/pbs.rs index 1021815c..3fb49ee6 100644 --- a/crates/common/src/config/pbs.rs +++ b/crates/common/src/config/pbs.rs @@ -24,7 +24,8 @@ use crate::{ commit::client::SignerClient, config::{ CONFIG_ENV, MODULE_JWT_ENV, MuxKeysLoader, PBS_IMAGE_DEFAULT, PBS_SERVICE_NAME, PbsMuxes, - SIGNER_URL_ENV, load_env_var, load_file_from_env, + SIGNER_TLS_CERTIFICATE_NAME, SIGNER_TLS_CERTIFICATES_PATH_ENV, SIGNER_URL_ENV, + SignerConfig, TlsMode, load_env_var, load_file_from_env, }, pbs::{ DEFAULT_PBS_PORT, DEFAULT_REGISTRY_REFRESH_SECONDS, DefaultTimeout, LATE_IN_SLOT_TIME_MS, @@ -37,6 +38,34 @@ use crate::{ }, }; +/// Header validation modes for get_header responses +#[derive(Debug, Copy, Clone, Deserialize, Serialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum HeaderValidationMode { + // Bypass all validation and minimize decoding, which is faster but requires complete trust in + // the relays + None, + + // Validate the header itself, ensuring that it's for a correct block on the correct chain and + // fork. This is the default mode. 
+ Standard, + + // Standard header validation, plus validation that the parent block is correct as well + Extra, +} + +/// Block validation modes for submit_block responses +#[derive(Debug, Copy, Clone, Deserialize, Serialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum BlockValidationMode { + // Bypass all validation, which is faster but requires complete trust in the relays + None, + + // Validate the block matches the header previously received from get_header and that it's for + // the correct chain and fork. This is the default mode. + Standard, +} + #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(deny_unknown_fields)] pub struct RelayConfig { @@ -121,8 +150,11 @@ pub struct PbsConfig { #[serde(default = "default_u64::")] pub late_in_slot_time_ms: u64, /// Enable extra validation of get_header responses - #[serde(default = "default_bool::")] - pub extra_validation_enabled: bool, + #[serde(default = "default_header_validation_mode")] + pub header_validation_mode: HeaderValidationMode, + /// Enable extra validation of submit_block requests + #[serde(default = "default_block_validation_mode")] + pub block_validation_mode: BlockValidationMode, /// Execution Layer RPC url to use for extra validation pub rpc_url: Option, /// URL for the user's own SSV node API endpoint @@ -174,26 +206,24 @@ impl PbsConfig { format!("min bid is too high: {} ETH", format_ether(self.min_bid_wei)) ); - if self.extra_validation_enabled { + if self.header_validation_mode == HeaderValidationMode::Extra { ensure!( self.rpc_url.is_some(), - "rpc_url is required if extra_validation_enabled is true" + "rpc_url is required if header_validation_mode is set to extra" ); } if let Some(rpc_url) = &self.rpc_url { - // TODO: remove this once we support chain ids for custom chains - if !matches!(chain, Chain::Custom { .. 
}) { - let provider = ProviderBuilder::new().connect_http(rpc_url.clone()); - let chain_id = provider.get_chain_id().await?; - ensure!( - chain_id == chain.id(), - "Rpc url is for the wrong chain, expected: {} ({:?}) got {}", - chain.id(), - chain, - chain_id - ); - } + let provider = ProviderBuilder::new().connect_http(rpc_url.clone()); + let chain_id = provider.get_chain_id().await?; + let chain_id_big = U256::from(chain_id); + ensure!( + chain_id_big == chain.id(), + "Rpc url is for the wrong chain, expected: {} ({:?}) got {}", + chain.id(), + chain, + chain_id_big + ); } ensure!( @@ -348,6 +378,7 @@ pub async fn load_pbs_custom_config() -> Result<(PbsModuleC chain: Chain, relays: Vec, pbs: CustomPbsConfig, + signer: SignerConfig, muxes: Option, } @@ -404,8 +435,18 @@ pub async fn load_pbs_custom_config() -> Result<(PbsModuleC // if custom pbs requires a signer client, load jwt let module_jwt = Jwt(load_env_var(MODULE_JWT_ENV)?); let signer_server_url = load_env_var(SIGNER_URL_ENV)?.parse()?; + let certs_path = match cb_config.signer.tls_mode { + TlsMode::Insecure => None, + TlsMode::Certificate(path) => Some( + load_env_var(SIGNER_TLS_CERTIFICATES_PATH_ENV) + .map(PathBuf::from) + .unwrap_or(path) + .join(SIGNER_TLS_CERTIFICATE_NAME), + ), + }; Some(SignerClient::new( signer_server_url, + certs_path, module_jwt, ModuleId(PBS_SERVICE_NAME.to_string()), )?) @@ -428,6 +469,16 @@ pub async fn load_pbs_custom_config() -> Result<(PbsModuleC )) } +/// Default value for header validation mode +fn default_header_validation_mode() -> HeaderValidationMode { + HeaderValidationMode::Standard +} + +/// Default value for block validation mode +fn default_block_validation_mode() -> BlockValidationMode { + BlockValidationMode::Standard +} + /// Default URL for the user's SSV node API endpoint (/v1/validators). 
fn default_ssv_node_api_url() -> Url { Url::parse("http://localhost:16000/v1/").expect("default URL is valid") diff --git a/crates/common/src/config/signer.rs b/crates/common/src/config/signer.rs index 2aa555e6..343ec213 100644 --- a/crates/common/src/config/signer.rs +++ b/crates/common/src/config/signer.rs @@ -1,11 +1,14 @@ use std::{ collections::HashMap, + fmt::Display, net::{Ipv4Addr, SocketAddr}, + num::NonZeroUsize, path::PathBuf, }; +use alloy::primitives::B256; use docker_image::DockerImage; -use eyre::{OptionExt, Result, bail, ensure}; +use eyre::{Context, OptionExt, Result, bail, ensure}; use serde::{Deserialize, Serialize}; use tonic::transport::{Certificate, Identity}; use url::Url; @@ -13,8 +16,9 @@ use url::Url; use super::{ CommitBoostConfig, SIGNER_ENDPOINT_ENV, SIGNER_JWT_AUTH_FAIL_LIMIT_DEFAULT, SIGNER_JWT_AUTH_FAIL_LIMIT_ENV, SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_DEFAULT, - SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_ENV, SIGNER_PORT_DEFAULT, load_jwt_secrets, - load_optional_env_var, utils::load_env_var, + SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_ENV, SIGNER_PORT_DEFAULT, SIGNER_TLS_CERTIFICATE_NAME, + SIGNER_TLS_CERTIFICATES_PATH_ENV, SIGNER_TLS_KEY_NAME, load_jwt_secrets, load_optional_env_var, + utils::load_env_var, }; use crate::{ config::{ @@ -25,6 +29,81 @@ use crate::{ utils::{default_host, default_u16, default_u32}, }; +/// The signing configuration for a commitment module. +#[derive(Clone, Debug, PartialEq)] +pub struct ModuleSigningConfig { + /// Human-readable name of the module. + pub module_name: ModuleId, + + /// The JWT secret for the module to communicate with the signer module. + pub jwt_secret: String, + + /// A unique identifier for the module, which is used when signing requests + /// to generate signatures for this module. Must be a 32-byte hex string. + /// A leading 0x prefix is optional. 
+ pub signing_id: B256, +} + +impl ModuleSigningConfig { + pub fn validate(&self) -> Result<()> { + if self.jwt_secret.is_empty() { + bail!("JWT secret cannot be empty"); + } + + if self.signing_id.is_zero() { + bail!("Signing ID cannot be zero"); + } + + Ok(()) + } +} + +/// Mode to use for TLS support when starting the signer service +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(tag = "type", content = "path", rename_all = "snake_case")] +pub enum TlsMode { + /// Don't use TLS (regular HTTP) + Insecure, + + /// Use TLS with a certificate and key file in the provided directory + Certificate(PathBuf), +} + +/// Reverse proxy setup, used to extract real client's IP +#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum ReverseProxyHeaderSetup { + #[default] + None, + Unique { + header: String, + }, + Rightmost { + header: String, + trusted_count: NonZeroUsize, + }, +} + +impl Display for ReverseProxyHeaderSetup { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ReverseProxyHeaderSetup::None => write!(f, "None"), + ReverseProxyHeaderSetup::Unique { header } => { + write!(f, "\"{header} (unique)\"") + } + ReverseProxyHeaderSetup::Rightmost { header, trusted_count } => { + let suffix = match trusted_count.get() % 10 { + 1 => "st", + 2 => "nd", + 3 => "rd", + _ => "th", + }; + write!(f, "\"{header} ({trusted_count}{suffix} from the right)\"") + } + } + } +} + #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(rename_all = "snake_case")] pub struct SignerConfig { @@ -45,10 +124,21 @@ pub struct SignerConfig { pub jwt_auth_fail_limit: u32, /// Duration in seconds to rate limit an endpoint after the JWT auth failure - /// limit has been reached + /// limit has been reached. This also defines the interval at which failed + /// attempts are regularly checked and expired ones are cleaned up. 
#[serde(default = "default_u32::")] pub jwt_auth_fail_timeout_seconds: u32, + /// Mode to use for TLS support. + /// If using Certificate mode, this must include a path to the TLS + /// certificates directory (with a `cert.pem` and a `key.pem` file). + #[serde(default = "default_tls_mode")] + pub tls_mode: TlsMode, + + /// Reverse proxy setup to extract real client's IP + #[serde(default)] + pub reverse_proxy: ReverseProxyHeaderSetup, + /// Inner type-specific configuration #[serde(flatten)] pub inner: SignerType, @@ -75,6 +165,11 @@ fn default_signer_image() -> String { SIGNER_IMAGE_DEFAULT.to_string() } +fn default_tls_mode() -> TlsMode { + TlsMode::Insecure // To make the default use TLS, do + // TlsMode::Certificate(PathBuf::from("./certs")) +} + #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(rename_all = "snake_case")] pub struct DirkHostConfig { @@ -137,17 +232,23 @@ pub struct StartSignerConfig { pub loader: Option, pub store: Option, pub endpoint: SocketAddr, - pub jwts: HashMap, + pub mod_signing_configs: HashMap, + pub admin_secret: String, pub jwt_auth_fail_limit: u32, pub jwt_auth_fail_timeout_seconds: u32, pub dirk: Option, + pub tls_certificates: Option<(Vec, Vec)>, + pub reverse_proxy: ReverseProxyHeaderSetup, } impl StartSignerConfig { pub fn load_from_env() -> Result { let (config, _) = CommitBoostConfig::from_env_path()?; - let jwts = load_jwt_secrets()?; + let (admin_secret, jwt_secrets) = load_jwt_secrets()?; + + let mod_signing_configs = load_module_signing_configs(&config, &jwt_secrets) + .wrap_err("Failed to load module signing configs")?; let signer_config = config.signer.ok_or_eyre("Signer config is missing")?; @@ -176,16 +277,35 @@ impl StartSignerConfig { signer_config.jwt_auth_fail_timeout_seconds }; + // Load the TLS certificates if requested, generating self-signed ones if + // necessary + let tls_certificates = match signer_config.tls_mode { + TlsMode::Insecure => None, + TlsMode::Certificate(path) => { + let certs_path = 
load_env_var(SIGNER_TLS_CERTIFICATES_PATH_ENV) + .map(PathBuf::from) + .unwrap_or(path); + let cert_path = certs_path.join(SIGNER_TLS_CERTIFICATE_NAME); + let key_path = certs_path.join(SIGNER_TLS_KEY_NAME); + Some((std::fs::read(cert_path)?, std::fs::read(key_path)?)) + } + }; + + let reverse_proxy = signer_config.reverse_proxy; + match signer_config.inner { SignerType::Local { loader, store, .. } => Ok(StartSignerConfig { chain: config.chain, loader: Some(loader), endpoint, - jwts, + mod_signing_configs, + admin_secret, jwt_auth_fail_limit, jwt_auth_fail_timeout_seconds, store, dirk: None, + tls_certificates, + reverse_proxy, }), SignerType::Dirk { @@ -211,7 +331,8 @@ impl StartSignerConfig { Ok(StartSignerConfig { chain: config.chain, endpoint, - jwts, + mod_signing_configs, + admin_secret, jwt_auth_fail_limit, jwt_auth_fail_timeout_seconds, loader: None, @@ -231,6 +352,8 @@ impl StartSignerConfig { }, max_response_size_bytes, }), + tls_certificates, + reverse_proxy, }) } @@ -240,3 +363,552 @@ impl StartSignerConfig { } } } + +/// Loads the signing configurations for each module defined in the Commit Boost +/// config, coupling them with their JWT secrets and handling any potential +/// duplicates or missing values. 
+pub fn load_module_signing_configs( + config: &CommitBoostConfig, + jwt_secrets: &HashMap, +) -> Result> { + let mut mod_signing_configs = HashMap::new(); + let modules = config.modules.as_ref().ok_or_eyre("No modules defined in the config")?; + + let mut seen_jwt_secrets = HashMap::new(); + let mut seen_signing_ids = HashMap::new(); + for module in modules { + ensure!(!module.id.is_empty(), "Module ID cannot be empty"); + + ensure!( + !mod_signing_configs.contains_key(&module.id), + "Duplicate module config detected: ID {} is already used", + module.id + ); + + let jwt_secret = match jwt_secrets.get(&module.id) { + Some(secret) => secret.clone(), + None => bail!("JWT secret for module {} is missing", module.id), + }; + let module_signing_config = ModuleSigningConfig { + module_name: module.id.clone(), + jwt_secret, + signing_id: module.signing_id, + }; + module_signing_config + .validate() + .wrap_err(format!("Invalid signing config for module {}", module.id))?; + + if let Some(existing_module) = + seen_jwt_secrets.insert(module_signing_config.jwt_secret.clone(), &module.id) + { + bail!("Duplicate JWT secret detected for modules {} and {}", existing_module, module.id) + }; + if let Some(existing_module) = + seen_signing_ids.insert(module_signing_config.signing_id, &module.id) + { + bail!("Duplicate signing ID detected for modules {} and {}", existing_module, module.id) + }; + + mod_signing_configs.insert(module.id.clone(), module_signing_config); + } + + Ok(mod_signing_configs) +} + +#[cfg(test)] +mod tests { + use std::num::NonZeroUsize; + + use alloy::primitives::{Uint, b256}; + + use super::*; + use crate::config::{ + BlockValidationMode, HeaderValidationMode, LogsSettings, ModuleKind, PbsConfig, + StaticModuleConfig, StaticPbsConfig, + }; + + // Wrapper needed because TOML requires a top-level struct (can't serialize + // a bare enum). 
+ #[derive(Serialize, Deserialize, Debug)] + struct TlsWrapper { + tls_mode: TlsMode, + } + + fn make_local_signer_config(tls_mode: TlsMode) -> SignerConfig { + SignerConfig { + host: Ipv4Addr::LOCALHOST, + port: 20000, + docker_image: SIGNER_IMAGE_DEFAULT.to_string(), + jwt_auth_fail_limit: 3, + jwt_auth_fail_timeout_seconds: 300, + tls_mode, + reverse_proxy: ReverseProxyHeaderSetup::None, + inner: SignerType::Local { + loader: SignerLoader::File { key_path: PathBuf::from("/keys.json") }, + store: None, + }, + } + } + + async fn get_config_with_signer(tls_mode: TlsMode) -> CommitBoostConfig { + let mut cfg = get_base_config().await; + cfg.signer = Some(make_local_signer_config(tls_mode)); + cfg + } + + async fn get_base_config() -> CommitBoostConfig { + CommitBoostConfig { + chain: Chain::Hoodi, + relays: vec![], + pbs: StaticPbsConfig { + docker_image: String::from("cb-fake-repo/fake-cb:latest"), + pbs_config: PbsConfig { + host: Ipv4Addr::LOCALHOST, + port: 0, + relay_check: false, + wait_all_registrations: false, + timeout_get_header_ms: 0, + timeout_get_payload_ms: 0, + timeout_register_validator_ms: 0, + skip_sigverify: false, + min_bid_wei: Uint::<256, 4>::from(0), + late_in_slot_time_ms: 0, + header_validation_mode: HeaderValidationMode::Standard, + block_validation_mode: BlockValidationMode::Standard, + rpc_url: None, + http_timeout_seconds: 30, + register_validator_retry_limit: 3, + validator_registration_batch_size: None, + mux_registry_refresh_interval_seconds: 5, + ssv_node_api_url: Url::parse("https://example.net").unwrap(), + ssv_public_api_url: Url::parse("https://example.net").unwrap(), + }, + with_signer: true, + }, + muxes: None, + modules: Some(vec![]), + signer: None, + metrics: None, + logs: LogsSettings::default(), + } + } + + async fn create_module_config(id: ModuleId, signing_id: B256) -> StaticModuleConfig { + StaticModuleConfig { + id: id.clone(), + signing_id, + docker_image: String::from(""), + env: None, + env_file: None, + kind: 
ModuleKind::Commit, + } + } + + #[tokio::test] + async fn test_good_config() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0101010101010101010101010101010101010101010101010101010101010101"); + let second_module_id = ModuleId("2nd_test_module".to_string()); + let second_signing_id = + b256!("0202020202020202020202020202020202020202020202020202020202020202"); + + cfg.modules = Some(vec![ + create_module_config(first_module_id.clone(), first_signing_id).await, + create_module_config(second_module_id.clone(), second_signing_id).await, + ]); + + let jwts = HashMap::from([ + (first_module_id.clone(), "supersecret".to_string()), + (second_module_id.clone(), "another-secret".to_string()), + ]); + + // Load the mod signing configuration + let mod_signing_configs = load_module_signing_configs(&cfg, &jwts) + .wrap_err("Failed to load module signing configs")?; + assert!(mod_signing_configs.len() == 2, "Expected 2 mod signing configurations"); + + // Check the first module + let module_1 = mod_signing_configs + .get(&first_module_id) + .unwrap_or_else(|| panic!("Missing '{first_module_id}' in mod signing configs")); + assert_eq!(module_1.module_name, first_module_id, "Module name mismatch for 'test_module'"); + assert_eq!( + module_1.jwt_secret, jwts[&first_module_id], + "JWT secret mismatch for '{first_module_id}'" + ); + assert_eq!( + module_1.signing_id, first_signing_id, + "Signing ID mismatch for '{first_module_id}'" + ); + + // Check the second module + let module_2 = mod_signing_configs + .get(&second_module_id) + .unwrap_or_else(|| panic!("Missing '{second_module_id}' in mod signing configs")); + assert_eq!( + module_2.module_name, second_module_id, + "Module name mismatch for '{second_module_id}'" + ); + assert_eq!( + module_2.jwt_secret, jwts[&second_module_id], + "JWT secret mismatch for '{second_module_id}'" + ); + assert_eq!( + module_2.signing_id, 
second_signing_id, + "Signing ID mismatch for '{second_module_id}'" + ); + + Ok(()) + } + + #[tokio::test] + async fn test_duplicate_module_names() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0101010101010101010101010101010101010101010101010101010101010101"); + let second_module_id = ModuleId("2nd_test_module".to_string()); + let second_signing_id = + b256!("0202020202020202020202020202020202020202020202020202020202020202"); + + cfg.modules = Some(vec![ + create_module_config(first_module_id.clone(), first_signing_id).await, + create_module_config(first_module_id.clone(), second_signing_id).await, /* Duplicate + * module + * name */ + ]); + + let jwts = HashMap::from([ + (first_module_id.clone(), "supersecret".to_string()), + (second_module_id.clone(), "another-secret".to_string()), + ]); + + // Make sure there was an error + let result = load_module_signing_configs(&cfg, &jwts); + assert!(result.is_err(), "Expected error due to duplicate module names"); + if let Err(e) = result { + assert_eq!( + e.to_string(), + format!("Duplicate module config detected: ID {first_module_id} is already used") + ); + } + Ok(()) + } + + #[tokio::test] + async fn test_duplicate_jwt_secrets() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0101010101010101010101010101010101010101010101010101010101010101"); + let second_module_id = ModuleId("2nd_test_module".to_string()); + let second_signing_id = + b256!("0202020202020202020202020202020202020202020202020202020202020202"); + + cfg.modules = Some(vec![ + create_module_config(first_module_id.clone(), first_signing_id).await, + create_module_config(second_module_id.clone(), second_signing_id).await, + ]); + + let jwts = HashMap::from([ + (first_module_id.clone(), "supersecret".to_string()), + (second_module_id.clone(), 
"supersecret".to_string()), /* Duplicate JWT secret */ + ]); + + // Make sure there was an error + let result = load_module_signing_configs(&cfg, &jwts); + assert!(result.is_err(), "Expected error due to duplicate JWT secrets"); + if let Err(e) = result { + assert_eq!( + e.to_string(), + format!( + "Duplicate JWT secret detected for modules {first_module_id} and {second_module_id}", + ) + ); + } + Ok(()) + } + + #[tokio::test] + async fn test_duplicate_signing_ids() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0101010101010101010101010101010101010101010101010101010101010101"); + let second_module_id = ModuleId("2nd_test_module".to_string()); + + cfg.modules = Some(vec![ + create_module_config(first_module_id.clone(), first_signing_id).await, + create_module_config(second_module_id.clone(), first_signing_id).await, /* Duplicate signing ID */ + ]); + + let jwts = HashMap::from([ + (first_module_id.clone(), "supersecret".to_string()), + (second_module_id.clone(), "another-secret".to_string()), + ]); + + // Make sure there was an error + let result = load_module_signing_configs(&cfg, &jwts); + assert!(result.is_err(), "Expected error due to duplicate signing IDs"); + if let Err(e) = result { + assert_eq!( + e.to_string(), + format!( + "Duplicate signing ID detected for modules {first_module_id} and {second_module_id}", + ) + ); + } + Ok(()) + } + + #[tokio::test] + async fn test_missing_jwt_secret() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0101010101010101010101010101010101010101010101010101010101010101"); + let second_module_id = ModuleId("2nd_test_module".to_string()); + let second_signing_id = + b256!("0202020202020202020202020202020202020202020202020202020202020202"); + + cfg.modules = Some(vec![ + create_module_config(first_module_id.clone(), 
first_signing_id).await, + create_module_config(second_module_id.clone(), second_signing_id).await, + ]); + + let jwts = HashMap::from([(second_module_id.clone(), "another-secret".to_string())]); + + // Make sure there was an error + let result = load_module_signing_configs(&cfg, &jwts); + assert!(result.is_err(), "Expected error due to missing JWT secret"); + if let Err(e) = result { + assert_eq!( + e.to_string(), + format!("JWT secret for module {first_module_id} is missing") + ); + } + Ok(()) + } + + #[tokio::test] + async fn test_empty_jwt_secret() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0101010101010101010101010101010101010101010101010101010101010101"); + + cfg.modules = + Some(vec![create_module_config(first_module_id.clone(), first_signing_id).await]); + + let jwts = HashMap::from([(first_module_id.clone(), "".to_string())]); + + // Make sure there was an error + let result = load_module_signing_configs(&cfg, &jwts); + assert!(result.is_err(), "Expected error due to empty JWT secret"); + if let Err(e) = result { + assert!(format!("{:?}", e).contains("JWT secret cannot be empty")); + } + + Ok(()) + } + + #[tokio::test] + async fn test_zero_signing_id() -> Result<()> { + let mut cfg = get_base_config().await; + let first_module_id = ModuleId("test_module".to_string()); + let first_signing_id = + b256!("0000000000000000000000000000000000000000000000000000000000000000"); + + cfg.modules = + Some(vec![create_module_config(first_module_id.clone(), first_signing_id).await]); + + let jwts = HashMap::from([(first_module_id.clone(), "supersecret".to_string())]); + + // Make sure there was an error + let result = load_module_signing_configs(&cfg, &jwts); + assert!(result.is_err(), "Expected error due to zero signing ID"); + if let Err(e) = result { + assert!(format!("{:?}", e).contains("Signing ID cannot be zero")); + } + Ok(()) + } + + // ── TlsMode serde 
──────────────────────────────────────────────────────── + + #[test] + fn test_tls_mode_insecure_roundtrip() -> Result<()> { + let original = TlsWrapper { tls_mode: TlsMode::Insecure }; + let toml_str = toml::to_string(&original)?; + let parsed: TlsWrapper = toml::from_str(&toml_str)?; + assert!(matches!(parsed.tls_mode, TlsMode::Insecure)); + Ok(()) + } + + #[test] + fn test_tls_mode_certificate_roundtrip() -> Result<()> { + let path = PathBuf::from("/certs"); + let original = TlsWrapper { tls_mode: TlsMode::Certificate(path.clone()) }; + let toml_str = toml::to_string(&original)?; + let parsed: TlsWrapper = toml::from_str(&toml_str)?; + match parsed.tls_mode { + TlsMode::Certificate(p) => assert_eq!(p, path), + TlsMode::Insecure => panic!("Expected Certificate variant"), + } + Ok(()) + } + + #[test] + fn test_tls_mode_insecure_from_toml() -> Result<()> { + let toml_str = r#" + [tls_mode] + type = "insecure" + "#; + let parsed: TlsWrapper = toml::from_str(toml_str)?; + assert!(matches!(parsed.tls_mode, TlsMode::Insecure)); + Ok(()) + } + + #[test] + fn test_tls_mode_certificate_from_toml() -> Result<()> { + let toml_str = r#" + [tls_mode] + type = "certificate" + path = "/custom/certs" + "#; + let parsed: TlsWrapper = toml::from_str(toml_str)?; + match parsed.tls_mode { + TlsMode::Certificate(p) => assert_eq!(p, PathBuf::from("/custom/certs")), + TlsMode::Insecure => panic!("Expected Certificate variant"), + } + Ok(()) + } + + // ── signer_uses_tls ─────────────────────────────────────────────────────── + + #[tokio::test] + async fn test_signer_uses_tls_no_signer() { + let cfg = get_base_config().await; + assert!(!cfg.signer_uses_tls()); + } + + #[tokio::test] + async fn test_signer_uses_tls_insecure() { + let cfg = get_config_with_signer(TlsMode::Insecure).await; + assert!(!cfg.signer_uses_tls()); + } + + #[tokio::test] + async fn test_signer_uses_tls_certificate() { + let cfg = get_config_with_signer(TlsMode::Certificate(PathBuf::from("/certs"))).await; + 
assert!(cfg.signer_uses_tls()); + } + + // ── signer_certs_path ───────────────────────────────────────────────────── + + #[tokio::test] + async fn test_signer_certs_path_no_signer() { + let cfg = get_base_config().await; + assert!(cfg.signer_certs_path().is_none()); + } + + #[tokio::test] + async fn test_signer_certs_path_insecure() { + let cfg = get_config_with_signer(TlsMode::Insecure).await; + assert!(cfg.signer_certs_path().is_none()); + } + + #[tokio::test] + async fn test_signer_certs_path_certificate() { + let certs_path = PathBuf::from("/my/certs"); + let cfg = get_config_with_signer(TlsMode::Certificate(certs_path.clone())).await; + assert_eq!(cfg.signer_certs_path(), Some(&certs_path)); + } + + // ── signer_server_url ───────────────────────────────────────────────────── + + #[tokio::test] + async fn test_signer_server_url_no_signer_uses_default_port() { + let cfg = get_base_config().await; + assert_eq!(cfg.signer_server_url(12345), "http://cb_signer:12345"); + } + + #[tokio::test] + async fn test_signer_server_url_insecure_uses_http() { + let cfg = get_config_with_signer(TlsMode::Insecure).await; + assert_eq!(cfg.signer_server_url(9999), "http://cb_signer:20000"); + } + + #[tokio::test] + async fn test_signer_server_url_certificate_uses_https() { + let cfg = get_config_with_signer(TlsMode::Certificate(PathBuf::from("/certs"))).await; + assert_eq!(cfg.signer_server_url(9999), "https://cb_signer:20000"); + } + + #[tokio::test] + async fn test_signer_server_url_remote_returned_as_is() { + let remote_url = Url::parse("https://remote-signer.example.com:8080").unwrap(); + let mut cfg = get_base_config().await; + cfg.signer = Some(SignerConfig { + host: Ipv4Addr::new(127, 0, 0, 1), + port: 20000, + docker_image: SIGNER_IMAGE_DEFAULT.to_string(), + jwt_auth_fail_limit: 3, + jwt_auth_fail_timeout_seconds: 300, + tls_mode: TlsMode::Insecure, + reverse_proxy: ReverseProxyHeaderSetup::None, + inner: SignerType::Remote { url: remote_url.clone() }, + }); + 
assert_eq!(cfg.signer_server_url(9999), remote_url.to_string()); + } + + // ── ReverseProxyHeaderSetup Display ────────────────────────────────────── + + #[test] + fn test_reverse_proxy_display_none() { + assert_eq!(ReverseProxyHeaderSetup::None.to_string(), "None"); + } + + #[test] + fn test_reverse_proxy_display_unique() { + let rp = ReverseProxyHeaderSetup::Unique { header: "X-Forwarded-For".to_string() }; + assert_eq!(rp.to_string(), r#""X-Forwarded-For (unique)""#); + } + + #[test] + fn test_reverse_proxy_display_rightmost_1st() { + let rp = ReverseProxyHeaderSetup::Rightmost { + header: "X-Real-IP".to_string(), + trusted_count: NonZeroUsize::new(1).unwrap(), + }; + assert_eq!(rp.to_string(), r#""X-Real-IP (1st from the right)""#); + } + + #[test] + fn test_reverse_proxy_display_rightmost_2nd() { + let rp = ReverseProxyHeaderSetup::Rightmost { + header: "X-Real-IP".to_string(), + trusted_count: NonZeroUsize::new(2).unwrap(), + }; + assert_eq!(rp.to_string(), r#""X-Real-IP (2nd from the right)""#); + } + + #[test] + fn test_reverse_proxy_display_rightmost_3rd() { + let rp = ReverseProxyHeaderSetup::Rightmost { + header: "X-Real-IP".to_string(), + trusted_count: NonZeroUsize::new(3).unwrap(), + }; + assert_eq!(rp.to_string(), r#""X-Real-IP (3rd from the right)""#); + } + + #[test] + fn test_reverse_proxy_display_rightmost_nth() { + let rp = ReverseProxyHeaderSetup::Rightmost { + header: "CF-Connecting-IP".to_string(), + trusted_count: NonZeroUsize::new(5).unwrap(), + }; + assert_eq!(rp.to_string(), r#""CF-Connecting-IP (5th from the right)""#); + } +} diff --git a/crates/common/src/config/utils.rs b/crates/common/src/config/utils.rs index a8fcbacd..579825b6 100644 --- a/crates/common/src/config/utils.rs +++ b/crates/common/src/config/utils.rs @@ -6,9 +6,8 @@ use std::{ use eyre::{Context, Result, bail}; use serde::de::DeserializeOwned; -use super::JWTS_ENV; use crate::{ - config::MUXER_HTTP_MAX_LENGTH, + config::{ADMIN_JWT_ENV, JWTS_ENV, MUXER_HTTP_MAX_LENGTH}, 
types::{BlsPublicKey, ModuleId}, utils::read_chunked_body_with_max, }; @@ -37,9 +36,10 @@ pub fn load_file_from_env(env: &str) -> Result<(T, PathBuf) } /// Loads a map of module id -> jwt secret from a json env -pub fn load_jwt_secrets() -> Result> { +pub fn load_jwt_secrets() -> Result<(String, HashMap)> { + let admin_jwt = std::env::var(ADMIN_JWT_ENV).wrap_err(format!("{ADMIN_JWT_ENV} is not set"))?; let jwt_secrets = std::env::var(JWTS_ENV).wrap_err(format!("{JWTS_ENV} is not set"))?; - decode_string_to_map(&jwt_secrets) + decode_string_to_map(&jwt_secrets).map(|secrets| (admin_jwt, secrets)) } /// Reads an HTTP response safely, erroring out if it failed or if the body is @@ -82,7 +82,7 @@ pub fn remove_duplicate_keys(keys: Vec) -> Vec { unique_keys } -fn decode_string_to_map(raw: &str) -> Result> { +pub fn decode_string_to_map(raw: &str) -> Result> { // trim the string and split for comma raw.trim() .split(',') @@ -98,19 +98,68 @@ fn decode_string_to_map(raw: &str) -> Result> { #[cfg(test)] mod tests { + use std::sync::Mutex; + use super::*; use crate::utils::TestRandomSeed; + // Serializes all tests that read/write environment variables. + // std::env::set_var is unsafe (Rust 1.81+) because mutating `environ` + // while another thread reads it is UB at the OS level. Holding this + // lock ensures our Rust threads don't race each other. + static ENV_LOCK: Mutex<()> = Mutex::new(()); + + /// Sets or removes env vars for the duration of `f`, then restores the + /// original values. Pass `Some("val")` to set, `None` to ensure absent. 
+ fn with_env(vars: &[(&str, Option<&str>)], f: impl FnOnce() -> R) -> R { + let _guard = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + let saved: Vec<(&str, Option)> = + vars.iter().map(|(k, _)| (*k, std::env::var(k).ok())).collect(); + for (k, v) in vars { + match v { + Some(val) => unsafe { std::env::set_var(k, val) }, + None => unsafe { std::env::remove_var(k) }, + } + } + let result = f(); + for (k, old) in &saved { + match old { + Some(v) => unsafe { std::env::set_var(k, v) }, + None => unsafe { std::env::remove_var(k) }, + } + } + result + } + + // Minimal TOML-deserializable type used by load_from_file / load_file_from_env + // tests. + #[derive(serde::Deserialize, Debug, PartialEq)] + struct TestConfig { + value: String, + } + + // ── decode_string_to_map ───────────────────────────────────────────────── + #[test] - fn test_decode_string_to_map() { - let raw = " KEY=VALUE , KEY2=value2 "; + fn test_decode_string_to_map_single_pair() { + let map = decode_string_to_map("ONLY=ONE").unwrap(); + assert_eq!(map.len(), 1); + assert_eq!(map.get(&ModuleId("ONLY".into())), Some(&"ONE".to_string())); + } - let map = decode_string_to_map(raw).unwrap(); + #[test] + fn test_decode_string_to_map_empty_string() { + // An empty string yields one token with no `=`, which is invalid. 
+ assert!(decode_string_to_map("").is_err()); + } - assert_eq!(map.get(&ModuleId("KEY".into())), Some(&"VALUE".to_string())); - assert_eq!(map.get(&ModuleId("KEY2".into())), Some(&"value2".to_string())); + #[test] + fn test_decode_string_to_map_malformed_no_equals() { + assert!(decode_string_to_map("KEYONLY").is_err()); } + // ── remove_duplicate_keys ──────────────────────────────────────────────── + #[test] fn test_remove_duplicate_keys() { let key1 = BlsPublicKey::test_random(); @@ -122,4 +171,134 @@ mod tests { assert!(unique_keys.contains(&key1)); assert!(unique_keys.contains(&key2)); } + + // ── load_env_var ───────────────────────────────────────────────────────── + + #[test] + fn test_load_env_var_present() { + with_env(&[("CB_TEST_LOAD_ENV_VAR", Some("hello"))], || { + assert_eq!(load_env_var("CB_TEST_LOAD_ENV_VAR").unwrap(), "hello"); + }); + } + + #[test] + fn test_load_env_var_absent() { + with_env(&[("CB_TEST_LOAD_ENV_VAR_ABSENT", None)], || { + let err = load_env_var("CB_TEST_LOAD_ENV_VAR_ABSENT").unwrap_err(); + assert!(err.to_string().contains("CB_TEST_LOAD_ENV_VAR_ABSENT")); + }); + } + + // ── load_optional_env_var ──────────────────────────────────────────────── + + #[test] + fn test_load_optional_env_var_present() { + with_env(&[("CB_TEST_OPT_VAR", Some("world"))], || { + assert_eq!(load_optional_env_var("CB_TEST_OPT_VAR"), Some("world".to_string())); + }); + } + + #[test] + fn test_load_optional_env_var_absent() { + with_env(&[("CB_TEST_OPT_VAR_ABSENT", None)], || { + assert_eq!(load_optional_env_var("CB_TEST_OPT_VAR_ABSENT"), None); + }); + } + + // ── load_from_file ─────────────────────────────────────────────────────── + + #[test] + fn test_load_from_file_valid() { + use std::io::Write as _; + let mut file = tempfile::NamedTempFile::new().unwrap(); + file.write_all(b"value = \"hello\"").unwrap(); + let path = file.path().to_path_buf(); + + let (config, returned_path): (TestConfig, _) = load_from_file(&path).unwrap(); + 
assert_eq!(config.value, "hello"); + assert_eq!(returned_path, path); + } + + #[test] + fn test_load_from_file_missing() { + let result: eyre::Result<(TestConfig, _)> = + load_from_file("/nonexistent/cb_test_path/file.toml"); + assert!(result.is_err()); + } + + #[test] + fn test_load_from_file_invalid_toml() { + use std::io::Write as _; + let mut file = tempfile::NamedTempFile::new().unwrap(); + file.write_all(b"not valid toml !!!{{").unwrap(); + + let result: eyre::Result<(TestConfig, _)> = load_from_file(file.path()); + assert!(result.is_err()); + } + + // ── load_file_from_env ─────────────────────────────────────────────────── + + #[test] + fn test_load_file_from_env_ok() { + use std::io::Write as _; + let mut file = tempfile::NamedTempFile::new().unwrap(); + file.write_all(b"value = \"from_env\"").unwrap(); + let path = file.path().to_str().unwrap().to_owned(); + + with_env(&[("CB_TEST_FILE_ENV", Some(&path))], || { + let (config, _): (TestConfig, _) = load_file_from_env("CB_TEST_FILE_ENV").unwrap(); + assert_eq!(config.value, "from_env"); + }); + } + + #[test] + fn test_load_file_from_env_var_not_set() { + with_env(&[("CB_TEST_FILE_ENV_ABSENT", None)], || { + let result: eyre::Result<(TestConfig, _)> = + load_file_from_env("CB_TEST_FILE_ENV_ABSENT"); + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("CB_TEST_FILE_ENV_ABSENT")); + }); + } + + // ── load_jwt_secrets ───────────────────────────────────────────────────── + + #[test] + fn test_load_jwt_secrets_ok() { + with_env( + &[ + (ADMIN_JWT_ENV, Some("admin_secret")), + (JWTS_ENV, Some("MODULE1=secret1,MODULE2=secret2")), + ], + || { + let (admin_jwt, secrets) = load_jwt_secrets().unwrap(); + assert_eq!(admin_jwt, "admin_secret"); + assert_eq!(secrets.get(&ModuleId("MODULE1".into())), Some(&"secret1".to_string())); + assert_eq!(secrets.get(&ModuleId("MODULE2".into())), Some(&"secret2".to_string())); + }, + ); + } + + #[test] + fn test_load_jwt_secrets_missing_admin_jwt() { + 
with_env(&[(ADMIN_JWT_ENV, None), (JWTS_ENV, Some("MODULE1=secret1"))], || { + let err = load_jwt_secrets().unwrap_err(); + assert!(err.to_string().contains(ADMIN_JWT_ENV)); + }); + } + + #[test] + fn test_load_jwt_secrets_missing_jwts() { + with_env(&[(ADMIN_JWT_ENV, Some("admin_secret")), (JWTS_ENV, None)], || { + let err = load_jwt_secrets().unwrap_err(); + assert!(err.to_string().contains(JWTS_ENV)); + }); + } + + #[test] + fn test_load_jwt_secrets_malformed_jwts() { + with_env(&[(ADMIN_JWT_ENV, Some("admin_secret")), (JWTS_ENV, Some("MALFORMED"))], || { + assert!(load_jwt_secrets().is_err()); + }); + } } diff --git a/crates/common/src/pbs/error.rs b/crates/common/src/pbs/error.rs index 77d942cd..16ebdc35 100644 --- a/crates/common/src/pbs/error.rs +++ b/crates/common/src/pbs/error.rs @@ -14,6 +14,9 @@ pub enum PbsError { #[error("json decode error: {err:?}, raw: {raw}")] JsonDecode { err: serde_json::Error, raw: String }, + #[error("error with request: {0}")] + GeneralRequest(String), + #[error("{0}")] ReadResponse(#[from] ResponseReadError), @@ -107,3 +110,25 @@ pub enum ValidationError { #[error("unsupported fork")] UnsupportedFork, } + +#[derive(Debug, Error, PartialEq, Eq)] +pub enum SszValueError { + #[error("invalid payload length: required {required} but payload was {actual}")] + InvalidPayloadLength { required: usize, actual: usize }, + + #[error("unsupported fork")] + UnsupportedFork { name: String }, +} + +impl From for PbsError { + fn from(err: SszValueError) -> Self { + match err { + SszValueError::InvalidPayloadLength { required, actual } => PbsError::GeneralRequest( + format!("invalid payload length: required {required} but payload was {actual}"), + ), + SszValueError::UnsupportedFork { name } => { + PbsError::GeneralRequest(format!("unsupported fork: {name}")) + } + } + } +} diff --git a/crates/common/src/pbs/mod.rs b/crates/common/src/pbs/mod.rs index af2c07b4..a1152b58 100644 --- a/crates/common/src/pbs/mod.rs +++ 
b/crates/common/src/pbs/mod.rs @@ -6,5 +6,6 @@ mod types; pub use builder::*; pub use constants::*; +pub use lh_types::ForkVersionDecode; pub use relay::*; pub use types::*; diff --git a/crates/common/src/pbs/types/mod.rs b/crates/common/src/pbs/types/mod.rs index 8ad87c08..ca147c06 100644 --- a/crates/common/src/pbs/types/mod.rs +++ b/crates/common/src/pbs/types/mod.rs @@ -26,15 +26,25 @@ pub type PayloadAndBlobs = lh_eth2::types::ExecutionPayloadAndBlobs; pub type ExecutionPayloadHeader = lh_types::ExecutionPayloadHeader; +pub type ExecutionPayloadHeaderBellatrix = + lh_types::ExecutionPayloadHeaderBellatrix; +pub type ExecutionPayloadHeaderCapella = lh_types::ExecutionPayloadHeaderCapella; +pub type ExecutionPayloadHeaderDeneb = lh_types::ExecutionPayloadHeaderDeneb; pub type ExecutionPayloadHeaderElectra = lh_types::ExecutionPayloadHeaderElectra; pub type ExecutionPayloadHeaderFulu = lh_types::ExecutionPayloadHeaderFulu; +pub type ExecutionPayloadHeaderGloas = lh_types::ExecutionPayloadHeaderGloas; pub type ExecutionPayloadHeaderRef<'a> = lh_types::ExecutionPayloadHeaderRef<'a, MainnetEthSpec>; pub type ExecutionPayload = lh_types::ExecutionPayload; pub type ExecutionPayloadElectra = lh_types::ExecutionPayloadElectra; pub type ExecutionPayloadFulu = lh_types::ExecutionPayloadFulu; pub type SignedBuilderBid = lh_types::builder_bid::SignedBuilderBid; pub type BuilderBid = lh_types::builder_bid::BuilderBid; +pub type BuilderBidBellatrix = lh_types::builder_bid::BuilderBidBellatrix; +pub type BuilderBidCapella = lh_types::builder_bid::BuilderBidCapella; +pub type BuilderBidDeneb = lh_types::builder_bid::BuilderBidDeneb; pub type BuilderBidElectra = lh_types::builder_bid::BuilderBidElectra; +pub type BuilderBidFulu = lh_types::builder_bid::BuilderBidFulu; +pub type BuilderBidGloas = lh_types::builder_bid::BuilderBidGloas; /// Response object of GET /// `/eth/v1/builder/header/{slot}/{parent_hash}/{pubkey}` @@ -42,6 +52,8 @@ pub type GetHeaderResponse = 
lh_types::ForkVersionedResponse; pub type KzgCommitments = lh_types::beacon_block_body::KzgCommitments; +pub type Uint256 = lh_types::Uint256; + /// Response params of GET /// `/eth/v1/builder/header/{slot}/{parent_hash}/{pubkey}` #[derive(Debug, Serialize, Deserialize, Clone)] @@ -54,6 +66,17 @@ pub struct GetHeaderParams { pub pubkey: BlsPublicKey, } +/// Which encoding types the original requester accepts in the response. +/// As the builder spec adds more encoding types, this struct can be expanded. +#[derive(Clone)] +pub struct AcceptTypes { + /// Whether SSZ encoding is accepted + pub ssz: bool, + + /// Whether JSON encoding is accepted + pub json: bool, +} + pub trait GetHeaderInfo { fn block_hash(&self) -> B256; fn value(&self) -> &U256; diff --git a/crates/common/src/signature.rs b/crates/common/src/signature.rs index d899842b..18c10d4a 100644 --- a/crates/common/src/signature.rs +++ b/crates/common/src/signature.rs @@ -1,32 +1,44 @@ -use alloy::primitives::B256; +use alloy::primitives::{Address, B256, aliases::B32}; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; use crate::{ constants::{COMMIT_BOOST_DOMAIN, GENESIS_VALIDATORS_ROOT}, - signer::verify_bls_signature, - types::{BlsPublicKey, BlsSecretKey, BlsSignature, Chain}, + signer::{EcdsaSignature, verify_bls_signature, verify_ecdsa_signature}, + types::{self, BlsPublicKey, BlsSecretKey, BlsSignature, Chain, SignatureRequestInfo}, }; pub fn sign_message(secret_key: &BlsSecretKey, msg: B256) -> BlsSignature { secret_key.sign(msg) } -pub fn compute_signing_root(object_root: B256, signing_domain: B256) -> B256 { - #[derive(Default, Debug, TreeHash)] - struct SigningData { - object_root: B256, - signing_domain: B256, +pub fn compute_prop_commit_signing_root( + chain: Chain, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, + domain_mask: &B32, +) -> B256 { + let domain = compute_domain(chain, domain_mask); + match signature_request_info { + Some(SignatureRequestInfo { 
module_signing_id, nonce }) => { + let object_root = types::PropCommitSigningInfo { + data: *object_root, + module_signing_id: *module_signing_id, + nonce: *nonce, + chain_id: chain.id(), + } + .tree_hash_root(); + types::SigningData { object_root, signing_domain: domain }.tree_hash_root() + } + None => types::SigningData { object_root: *object_root, signing_domain: domain } + .tree_hash_root(), } - - let signing_data = SigningData { object_root, signing_domain }; - signing_data.tree_hash_root() } // NOTE: this currently works only for builder domain signatures and // verifications // ref: https://github.com/ralexstokes/ethereum-consensus/blob/cf3c404043230559660810bc0c9d6d5a8498d819/ethereum-consensus/src/builder/mod.rs#L26-L29 -pub fn compute_domain(chain: Chain, domain_mask: [u8; 4]) -> B256 { +pub fn compute_domain(chain: Chain, domain_mask: &B32) -> B256 { #[derive(Debug, TreeHash)] struct ForkData { fork_version: [u8; 4], @@ -34,7 +46,7 @@ pub fn compute_domain(chain: Chain, domain_mask: [u8; 4]) -> B256 { } let mut domain = [0u8; 32]; - domain[..4].copy_from_slice(&domain_mask); + domain[..4].copy_from_slice(&domain_mask.0); let fork_version = chain.genesis_fork_version(); let fd = ForkData { fork_version, genesis_validators_root: GENESIS_VALIDATORS_ROOT.into() }; @@ -42,7 +54,7 @@ pub fn compute_domain(chain: Chain, domain_mask: [u8; 4]) -> B256 { domain[4..].copy_from_slice(&fork_data_root[..28]); - domain.into() + B256::from(domain) } pub fn verify_signed_message( @@ -50,65 +62,120 @@ pub fn verify_signed_message( pubkey: &BlsPublicKey, msg: &T, signature: &BlsSignature, - domain_mask: [u8; 4], + signature_request_info: Option<&SignatureRequestInfo>, + domain_mask: &B32, ) -> bool { - let domain = compute_domain(chain, domain_mask); - let signing_root = compute_signing_root(msg.tree_hash_root(), domain); - + let signing_root = compute_prop_commit_signing_root( + chain, + &msg.tree_hash_root(), + signature_request_info, + domain_mask, + ); 
verify_bls_signature(pubkey, signing_root, signature) } +/// Signs a message with the Beacon builder domain. pub fn sign_builder_message( chain: Chain, secret_key: &BlsSecretKey, msg: &impl TreeHash, ) -> BlsSignature { - sign_builder_root(chain, secret_key, msg.tree_hash_root()) + sign_builder_root(chain, secret_key, &msg.tree_hash_root()) } pub fn sign_builder_root( chain: Chain, secret_key: &BlsSecretKey, - object_root: B256, + object_root: &B256, ) -> BlsSignature { - let domain = chain.builder_domain(); - let signing_root = compute_signing_root(object_root, domain); + let signing_domain = chain.builder_domain(); + let signing_data = + types::SigningData { object_root: object_root.tree_hash_root(), signing_domain }; + let signing_root = signing_data.tree_hash_root(); sign_message(secret_key, signing_root) } pub fn sign_commit_boost_root( chain: Chain, secret_key: &BlsSecretKey, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> BlsSignature { - let domain = compute_domain(chain, COMMIT_BOOST_DOMAIN); - let signing_root = compute_signing_root(object_root, domain); + let signing_root = compute_prop_commit_signing_root( + chain, + object_root, + signature_request_info, + &B32::from(COMMIT_BOOST_DOMAIN), + ); sign_message(secret_key, signing_root) } +// ============================== +// === Signature Verification === +// ============================== + +/// Verifies that a proposer commitment signature was generated by the given BLS +/// key for the provided message, chain ID, and module signing ID. 
+pub fn verify_proposer_commitment_signature_bls( + chain: Chain, + pubkey: &BlsPublicKey, + msg: &impl TreeHash, + signature: &BlsSignature, + module_signing_id: &B256, + nonce: u64, +) -> bool { + let signing_domain = compute_domain(chain, &B32::from(COMMIT_BOOST_DOMAIN)); + let object_root = types::PropCommitSigningInfo { + data: msg.tree_hash_root(), + module_signing_id: *module_signing_id, + nonce, + chain_id: chain.id(), + } + .tree_hash_root(); + let signing_root = types::SigningData { object_root, signing_domain }.tree_hash_root(); + verify_bls_signature(pubkey, signing_root, signature) +} + +/// Verifies that a proposer commitment signature was generated by the given +/// ECDSA key for the provided message, chain ID, and module signing ID. +pub fn verify_proposer_commitment_signature_ecdsa( + chain: Chain, + address: &Address, + msg: &impl TreeHash, + signature: &EcdsaSignature, + module_signing_id: &B256, + nonce: u64, +) -> Result<(), eyre::Report> { + let signing_domain = compute_domain(chain, &B32::from(COMMIT_BOOST_DOMAIN)); + let object_root = types::PropCommitSigningInfo { + data: msg.tree_hash_root(), + module_signing_id: *module_signing_id, + nonce, + chain_id: chain.id(), + } + .tree_hash_root(); + let signing_root = types::SigningData { object_root, signing_domain }.tree_hash_root(); + verify_ecdsa_signature(address, &signing_root, signature) +} + +// =============== +// === Testing === +// =============== + #[cfg(test)] mod tests { + use alloy::primitives::aliases::B32; + use super::compute_domain; use crate::{constants::APPLICATION_BUILDER_DOMAIN, types::Chain}; #[test] fn test_builder_domains() { - assert_eq!( - compute_domain(Chain::Mainnet, APPLICATION_BUILDER_DOMAIN), - Chain::Mainnet.builder_domain() - ); - assert_eq!( - compute_domain(Chain::Holesky, APPLICATION_BUILDER_DOMAIN), - Chain::Holesky.builder_domain() - ); - assert_eq!( - compute_domain(Chain::Sepolia, APPLICATION_BUILDER_DOMAIN), - Chain::Sepolia.builder_domain() - ); - 
assert_eq!( - compute_domain(Chain::Hoodi, APPLICATION_BUILDER_DOMAIN), - Chain::Hoodi.builder_domain() - ); + let domain = &B32::from(APPLICATION_BUILDER_DOMAIN); + assert_eq!(compute_domain(Chain::Mainnet, domain), Chain::Mainnet.builder_domain()); + assert_eq!(compute_domain(Chain::Holesky, domain), Chain::Holesky.builder_domain()); + assert_eq!(compute_domain(Chain::Sepolia, domain), Chain::Sepolia.builder_domain()); + assert_eq!(compute_domain(Chain::Hoodi, domain), Chain::Hoodi.builder_domain()); } } diff --git a/crates/common/src/signer/schemes/bls.rs b/crates/common/src/signer/schemes/bls.rs index 8525f015..07f5e6dd 100644 --- a/crates/common/src/signer/schemes/bls.rs +++ b/crates/common/src/signer/schemes/bls.rs @@ -3,7 +3,7 @@ use tree_hash::TreeHash; use crate::{ signature::sign_commit_boost_root, - types::{BlsPublicKey, BlsSecretKey, BlsSignature, Chain}, + types::{BlsPublicKey, BlsSecretKey, BlsSignature, Chain, SignatureRequestInfo}, }; #[derive(Clone)] @@ -28,20 +28,32 @@ impl BlsSigner { } } - pub fn secret(&self) -> [u8; 32] { + pub fn secret(&self) -> B256 { match self { BlsSigner::Local(secret) => secret.serialize().as_bytes().try_into().unwrap(), } } - pub async fn sign(&self, chain: Chain, object_root: B256) -> BlsSignature { + pub async fn sign( + &self, + chain: Chain, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, + ) -> BlsSignature { match self { - BlsSigner::Local(sk) => sign_commit_boost_root(chain, sk, object_root), + BlsSigner::Local(sk) => { + sign_commit_boost_root(chain, sk, object_root, signature_request_info) + } } } - pub async fn sign_msg(&self, chain: Chain, msg: &impl TreeHash) -> BlsSignature { - self.sign(chain, msg.tree_hash_root()).await + pub async fn sign_msg( + &self, + chain: Chain, + msg: &impl TreeHash, + signature_request_info: Option<&SignatureRequestInfo>, + ) -> BlsSignature { + self.sign(chain, &msg.tree_hash_root(), signature_request_info).await } } diff --git 
a/crates/common/src/signer/schemes/ecdsa.rs b/crates/common/src/signer/schemes/ecdsa.rs index 37fc18b4..2ff0acce 100644 --- a/crates/common/src/signer/schemes/ecdsa.rs +++ b/crates/common/src/signer/schemes/ecdsa.rs @@ -1,7 +1,7 @@ use std::{ops::Deref, str::FromStr}; use alloy::{ - primitives::{Address, B256, Signature}, + primitives::{Address, B256, Signature, aliases::B32}, signers::{SignerSync, local::PrivateKeySigner}, }; use eyre::ensure; @@ -9,8 +9,8 @@ use tree_hash::TreeHash; use crate::{ constants::COMMIT_BOOST_DOMAIN, - signature::{compute_domain, compute_signing_root}, - types::Chain, + signature::compute_domain, + types::{self, Chain, SignatureRequestInfo}, }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -86,32 +86,46 @@ impl EcdsaSigner { pub async fn sign( &self, chain: Chain, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { match self { EcdsaSigner::Local(sk) => { - let domain = compute_domain(chain, COMMIT_BOOST_DOMAIN); - let signing_root = compute_signing_root(object_root, domain); + let signing_domain = compute_domain(chain, &B32::from(COMMIT_BOOST_DOMAIN)); + let signing_root = match signature_request_info { + Some(SignatureRequestInfo { module_signing_id, nonce }) => { + let object_root = types::PropCommitSigningInfo { + data: *object_root, + module_signing_id: *module_signing_id, + nonce: *nonce, + chain_id: chain.id(), + } + .tree_hash_root(); + types::SigningData { object_root, signing_domain }.tree_hash_root() + } + None => types::SigningData { object_root: *object_root, signing_domain } + .tree_hash_root(), + }; sk.sign_hash_sync(&signing_root).map(EcdsaSignature::from) } } } - pub async fn sign_msg( &self, chain: Chain, msg: &impl TreeHash, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { - self.sign(chain, msg.tree_hash_root()).await + self.sign(chain, &msg.tree_hash_root(), signature_request_info).await } } pub fn verify_ecdsa_signature( address: 
&Address, - msg: &[u8; 32], + msg: &B256, signature: &EcdsaSignature, ) -> eyre::Result<()> { - let recovered = signature.recover_address_from_prehash(msg.into())?; + let recovered = signature.recover_address_from_prehash(msg)?; ensure!(recovered == *address, "invalid signature"); Ok(()) } @@ -119,20 +133,24 @@ pub fn verify_ecdsa_signature( #[cfg(test)] mod test { - use alloy::{hex, primitives::bytes}; + use alloy::{ + hex, + primitives::{b256, bytes}, + }; use super::*; #[tokio::test] - async fn test_ecdsa_signer() { + async fn test_ecdsa_signer_noncommit() { let pk = bytes!("88bcd6672d95bcba0d52a3146494ed4d37675af4ed2206905eb161aa99a6c0d1"); let signer = EcdsaSigner::new_from_bytes(&pk).unwrap(); let object_root = B256::from([1; 32]); - let signature = signer.sign(Chain::Holesky, object_root).await.unwrap(); + let signature = signer.sign(Chain::Holesky, &object_root, None).await.unwrap(); - let domain = compute_domain(Chain::Holesky, COMMIT_BOOST_DOMAIN); - let msg = compute_signing_root(object_root, domain); + let domain = compute_domain(Chain::Holesky, &B32::from(COMMIT_BOOST_DOMAIN)); + let signing_data = types::SigningData { object_root, signing_domain: domain }; + let msg = signing_data.tree_hash_root(); assert_eq!(msg, hex!("219ca7a673b2cbbf67bec6c9f60f78bd051336d57b68d1540190f30667e86725")); @@ -140,4 +158,41 @@ mod test { let verified = verify_ecdsa_signature(&address, &msg, &signature); assert!(verified.is_ok()); } + + #[tokio::test] + async fn test_ecdsa_signer_prop_commit() { + let pk = bytes!("88bcd6672d95bcba0d52a3146494ed4d37675af4ed2206905eb161aa99a6c0d1"); + let signer = EcdsaSigner::new_from_bytes(&pk).unwrap(); + + let object_root = B256::from([1; 32]); + let module_signing_id = B256::from([2; 32]); + let nonce = 42; + let signature = signer + .sign( + Chain::Hoodi, + &object_root, + Some(&SignatureRequestInfo { module_signing_id, nonce }), + ) + .await + .unwrap(); + + let signing_domain = compute_domain(Chain::Hoodi, 
&B32::from(COMMIT_BOOST_DOMAIN)); + let object_root = types::PropCommitSigningInfo { + data: object_root, + module_signing_id, + nonce, + chain_id: Chain::Hoodi.id(), + } + .tree_hash_root(); + let msg = types::SigningData { object_root, signing_domain }.tree_hash_root(); + + assert_eq!( + msg, + b256!("0x0b95fcdb3f003fc6f0fd3238d906f359809e97fe7ec71f56771cb05bee4150bd") + ); + + let address = signer.address(); + let verified = verify_ecdsa_signature(&address, &msg, &signature); + assert!(verified.is_ok()); + } } diff --git a/crates/common/src/signer/store.rs b/crates/common/src/signer/store.rs index 7cc0fc17..d70ea8a0 100644 --- a/crates/common/src/signer/store.rs +++ b/crates/common/src/signer/store.rs @@ -244,14 +244,14 @@ impl ProxyStore { serde_json::from_str(&file_content)?; let signer = EcdsaSigner::new_from_bytes(&key_and_delegation.secret)?; - let pubkey = signer.address(); + let address = signer.address(); let proxy_signer = EcdsaProxySigner { signer, delegation: key_and_delegation.delegation, }; - proxy_signers.ecdsa_signers.insert(pubkey, proxy_signer); - ecdsa_map.entry(module_id.clone()).or_default().push(pubkey); + proxy_signers.ecdsa_signers.insert(address, proxy_signer); + ecdsa_map.entry(module_id.clone()).or_default().push(address); } } } @@ -564,7 +564,8 @@ mod test { delegator: consensus_signer.pubkey(), proxy: proxy_signer.pubkey(), }; - let signature = consensus_signer.sign(Chain::Mainnet, message.tree_hash_root()).await; + let signature = + consensus_signer.sign(Chain::Mainnet, &message.tree_hash_root(), None).await; let delegation = SignedProxyDelegationBls { signature: signature.clone(), message }; let proxy_signer = BlsProxySigner { signer: proxy_signer, delegation }; @@ -679,7 +680,8 @@ mod test { delegator: consensus_signer.pubkey(), proxy: proxy_signer.pubkey(), }; - let signature = consensus_signer.sign(Chain::Mainnet, message.tree_hash_root()).await; + let signature = + consensus_signer.sign(Chain::Mainnet, &message.tree_hash_root(), 
None).await; let delegation = SignedProxyDelegationBls { signature, message }; let proxy_signer = BlsProxySigner { signer: proxy_signer, delegation }; diff --git a/crates/common/src/types.rs b/crates/common/src/types.rs index 695681e0..b347c187 100644 --- a/crates/common/src/types.rs +++ b/crates/common/src/types.rs @@ -1,10 +1,11 @@ use std::path::PathBuf; -use alloy::primitives::{B256, Bytes, b256, hex}; +use alloy::primitives::{B256, Bytes, U256, aliases::B32, b256, hex}; use derive_more::{Deref, Display, From, Into}; use eyre::{Context, bail}; use lh_types::ForkName; use serde::{Deserialize, Serialize}; +use tree_hash_derive::TreeHash; use crate::{constants::APPLICATION_BUILDER_DOMAIN, signature::compute_domain}; @@ -26,7 +27,17 @@ pub struct Jwt(pub String); #[derive(Debug, Serialize, Deserialize)] pub struct JwtClaims { pub exp: u64, - pub module: String, + pub module: ModuleId, + pub route: String, + pub payload_hash: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct JwtAdminClaims { + pub exp: u64, + pub admin: bool, + pub route: String, + pub payload_hash: Option, } #[derive(Clone, Copy, PartialEq, Eq, Hash)] @@ -40,7 +51,7 @@ pub enum Chain { slot_time_secs: u64, genesis_fork_version: ForkVersion, fulu_fork_slot: u64, - chain_id: u64, + chain_id: U256, }, } @@ -103,7 +114,9 @@ impl std::fmt::Debug for Chain { } impl Chain { - pub fn id(&self) -> u64 { + // Chain IDs are 256-bit unsigned integers because they need to support + // Keccak256 hashes + pub fn id(&self) -> U256 { match self { Chain::Mainnet => KnownChain::Mainnet.id(), Chain::Holesky => KnownChain::Holesky.id(), @@ -119,7 +132,7 @@ impl Chain { Chain::Holesky => KnownChain::Holesky.builder_domain(), Chain::Sepolia => KnownChain::Sepolia.builder_domain(), Chain::Hoodi => KnownChain::Hoodi.builder_domain(), - Chain::Custom { .. } => compute_domain(*self, APPLICATION_BUILDER_DOMAIN), + Chain::Custom { .. 
} => compute_domain(*self, &B32::from(APPLICATION_BUILDER_DOMAIN)), } } @@ -182,12 +195,12 @@ pub enum KnownChain { // Constants impl KnownChain { - pub fn id(&self) -> u64 { + pub fn id(&self) -> U256 { match self { - KnownChain::Mainnet => 1, - KnownChain::Holesky => 17000, - KnownChain::Sepolia => 11155111, - KnownChain::Hoodi => 560048, + KnownChain::Mainnet => U256::from(1), + KnownChain::Holesky => U256::from(17000), + KnownChain::Sepolia => U256::from(11155111), + KnownChain::Hoodi => U256::from(560048), } } @@ -272,7 +285,7 @@ pub enum ChainLoader { slot_time_secs: u64, genesis_fork_version: Bytes, fulu_fork_slot: u64, - chain_id: u64, + chain_id: U256, }, } @@ -346,13 +359,38 @@ impl<'de> Deserialize<'de> for Chain { } } +/// Structure for signatures used in Beacon chain operations +#[derive(Default, Debug, TreeHash)] +pub struct SigningData { + pub object_root: B256, + pub signing_domain: B256, +} + +/// Structure for signatures used for proposer commitments in Commit Boost. +/// The signing root of this struct must be used as the object_root of a +/// SigningData for signatures. +#[derive(Default, Debug, TreeHash)] +pub struct PropCommitSigningInfo { + pub data: B256, + pub module_signing_id: B256, + pub nonce: u64, // As per https://eips.ethereum.org/EIPS/eip-2681 + pub chain_id: U256, +} + +/// Information about a signature request, including the module signing ID and +/// nonce. +pub struct SignatureRequestInfo { + pub module_signing_id: B256, + pub nonce: u64, +} + /// Returns seconds_per_slot, genesis_fork_version, fulu_fork_epoch, and /// deposit_chain_id from a spec, such as returned by /eth/v1/config/spec ref: https://ethereum.github.io/beacon-APIs/#/Config/getSpec /// Try to load two formats: /// - JSON as return the getSpec endpoint, either with or without the `data` /// field /// - YAML as used e.g. 
in Kurtosis/Ethereum Package -pub fn load_chain_from_file(path: PathBuf) -> eyre::Result<(u64, ForkVersion, u64, u64)> { +pub fn load_chain_from_file(path: PathBuf) -> eyre::Result<(u64, ForkVersion, u64, U256)> { #[derive(Deserialize)] #[serde(rename_all = "UPPERCASE")] struct QuotedSpecFile { @@ -363,12 +401,12 @@ pub fn load_chain_from_file(path: PathBuf) -> eyre::Result<(u64, ForkVersion, u6 slots_per_epoch: u64, #[serde(with = "serde_utils::quoted_u64")] fulu_fork_epoch: u64, - #[serde(with = "serde_utils::quoted_u64")] - deposit_chain_id: u64, + #[serde(with = "serde_utils::quoted_u256")] + deposit_chain_id: U256, } impl QuotedSpecFile { - fn to_chain(&self) -> eyre::Result<(u64, ForkVersion, u64, u64)> { + fn to_chain(&self) -> eyre::Result<(u64, ForkVersion, u64, U256)> { let genesis_fork_version: ForkVersion = self.genesis_fork_version.as_ref().try_into()?; let fulu_fork_slot = self.fulu_fork_epoch.saturating_mul(self.slots_per_epoch); @@ -388,11 +426,11 @@ pub fn load_chain_from_file(path: PathBuf) -> eyre::Result<(u64, ForkVersion, u6 genesis_fork_version: u32, slots_per_epoch: Option, fulu_fork_epoch: u64, - deposit_chain_id: u64, + deposit_chain_id: U256, } impl SpecFile { - fn to_chain(&self) -> (u64, ForkVersion, u64, u64) { + fn to_chain(&self) -> (u64, ForkVersion, u64, U256) { let genesis_fork_version: ForkVersion = self.genesis_fork_version.to_be_bytes(); let fulu_fork_slot = self.fulu_fork_epoch.saturating_mul(self.slots_per_epoch.unwrap_or(32)); @@ -432,14 +470,14 @@ mod tests { #[test] fn test_load_custom() { - let s = r#"chain = { genesis_time_secs = 1, slot_time_secs = 2, genesis_fork_version = "0x01000000", fulu_fork_slot = 1, chain_id = 123 }"#; + let s = r#"chain = { genesis_time_secs = 1, slot_time_secs = 2, genesis_fork_version = "0x01000000", fulu_fork_slot = 1, chain_id = "123" }"#; let decoded: MockConfig = toml::from_str(s).unwrap(); assert_eq!(decoded.chain, Chain::Custom { genesis_time_secs: 1, slot_time_secs: 2, 
genesis_fork_version: [1, 0, 0, 0], fulu_fork_slot: 1, - chain_id: 123, + chain_id: U256::from(123), }) } @@ -548,7 +586,7 @@ mod tests { slot_time_secs: 12, genesis_fork_version: hex!("0x10000038"), fulu_fork_slot: 0, - chain_id: 3151908, + chain_id: U256::from(3151908), }) } } diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 764ab188..d0540201 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -1,19 +1,39 @@ #[cfg(feature = "testing-flags")] use std::cell::Cell; use std::{ + collections::{HashMap, HashSet}, + fmt::Display, net::Ipv4Addr, + str::FromStr, time::{SystemTime, UNIX_EPOCH}, }; -use alloy::{hex, primitives::U256}; -use axum::http::HeaderValue; +use alloy::{ + hex, + primitives::{U256, keccak256}, +}; +use axum::{ + extract::{FromRequest, Request}, + http::HeaderValue, + response::{IntoResponse, Response as AxumResponse}, +}; +use bytes::Bytes; use futures::StreamExt; -use lh_types::test_utils::{SeedableRng, TestRandom, XorShiftRng}; +use headers_accept::Accept; +use lazy_static::lazy_static; +pub use lh_types::ForkName; +use lh_types::{ + BeaconBlock, Signature, + test_utils::{SeedableRng, TestRandom, XorShiftRng}, +}; use rand::{Rng, distr::Alphanumeric}; -use reqwest::{Response, header::HeaderMap}; +use reqwest::{ + Response, + header::{ACCEPT, CONTENT_TYPE, HeaderMap}, +}; use serde::{Serialize, de::DeserializeOwned}; use serde_json::Value; -use ssz::{Decode, Encode}; +use ssz::{BYTES_PER_LENGTH_OFFSET, Decode, Encode}; use thiserror::Error; use tracing::Level; use tracing_appender::{non_blocking::WorkerGuard, rolling::Rotation}; @@ -26,11 +46,41 @@ use tracing_subscriber::{ use crate::{ config::LogsSettings, constants::SIGNER_JWT_EXPIRATION, - pbs::HEADER_VERSION_VALUE, - types::{BlsPublicKey, Chain, Jwt, JwtClaims, ModuleId}, + pbs::{ + BuilderBidBellatrix, BuilderBidCapella, BuilderBidDeneb, BuilderBidElectra, BuilderBidFulu, + BuilderBidGloas, ExecutionPayloadHeaderBellatrix, 
ExecutionPayloadHeaderCapella, + ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, + ExecutionPayloadHeaderGloas, ExecutionRequests, HEADER_VERSION_VALUE, KzgCommitments, + SignedBlindedBeaconBlock, error::SszValueError, + }, + types::{BlsPublicKey, Chain, Jwt, JwtAdminClaims, JwtClaims, ModuleId}, }; +pub const APPLICATION_JSON: &str = "application/json"; +pub const APPLICATION_OCTET_STREAM: &str = "application/octet-stream"; +pub const WILDCARD: &str = "*/*"; + const MILLIS_PER_SECOND: u64 = 1_000; +pub const CONSENSUS_VERSION_HEADER: &str = "Eth-Consensus-Version"; + +lazy_static! { + static ref SSZ_VALUE_OFFSETS_BY_FORK: HashMap = { + let mut map: HashMap = HashMap::new(); + let forks = [ + ForkName::Bellatrix, + ForkName::Capella, + ForkName::Deneb, + ForkName::Electra, + ForkName::Fulu, + ForkName::Gloas, + ]; + for fork in forks { + let offset = get_ssz_value_offset_for_fork(fork).unwrap(); // If there isn't a supported fork, this needs to be updated prior to release so panicking is fine + map.insert(fork, offset); + } + map + }; +} #[derive(Debug, Error)] pub enum ResponseReadError { @@ -344,12 +394,19 @@ pub fn print_logo() { } /// Create a JWT for the given module id with expiration -pub fn create_jwt(module_id: &ModuleId, secret: &str) -> eyre::Result { +pub fn create_jwt( + module_id: &ModuleId, + secret: &str, + route: &str, + payload: Option<&[u8]>, +) -> eyre::Result { jsonwebtoken::encode( &jsonwebtoken::Header::default(), &JwtClaims { - module: module_id.to_string(), + module: module_id.clone(), + route: route.to_string(), exp: jsonwebtoken::get_current_timestamp() + SIGNER_JWT_EXPIRATION, + payload_hash: payload.map(keccak256), }, &jsonwebtoken::EncodingKey::from_secret(secret.as_ref()), ) @@ -357,36 +414,134 @@ pub fn create_jwt(module_id: &ModuleId, secret: &str) -> eyre::Result { .map(Jwt::from) } -/// Decode a JWT and return the module id. 
IMPORTANT: This function does not -/// validate the JWT, it only obtains the module id from the claims. -pub fn decode_jwt(jwt: Jwt) -> eyre::Result { +// Creates a JWT for module administration +pub fn create_admin_jwt( + admin_secret: String, + route: &str, + payload: Option<&[u8]>, +) -> eyre::Result { + jsonwebtoken::encode( + &jsonwebtoken::Header::default(), + &JwtAdminClaims { + admin: true, + route: route.to_string(), + exp: jsonwebtoken::get_current_timestamp() + SIGNER_JWT_EXPIRATION, + payload_hash: payload.map(keccak256), + }, + &jsonwebtoken::EncodingKey::from_secret(admin_secret.as_ref()), + ) + .map_err(Into::into) + .map(Jwt::from) +} + +/// Decode a JWT and return the JWT claims. IMPORTANT: This function does not +/// validate the JWT, it only obtains the claims. +pub fn decode_jwt(jwt: Jwt) -> eyre::Result { + let mut validation = jsonwebtoken::Validation::default(); + validation.insecure_disable_signature_validation(); + + let claims = jsonwebtoken::decode::( + jwt.as_str(), + &jsonwebtoken::DecodingKey::from_secret(&[]), + &validation, + )? + .claims; + + Ok(claims) +} + +/// Decode an administrator JWT and return the JWT claims. IMPORTANT: This +/// function does not validate the JWT, it only obtains the claims. +pub fn decode_admin_jwt(jwt: Jwt) -> eyre::Result { let mut validation = jsonwebtoken::Validation::default(); validation.insecure_disable_signature_validation(); - let module = jsonwebtoken::decode::( + let claims = jsonwebtoken::decode::( jwt.as_str(), &jsonwebtoken::DecodingKey::from_secret(&[]), &validation, )? 
- .claims - .module - .into(); + .claims; - Ok(module) + Ok(claims) } -/// Validate a JWT with the given secret -pub fn validate_jwt(jwt: Jwt, secret: &str) -> eyre::Result<()> { +pub fn validate_jwt( + jwt: Jwt, + secret: &str, + route: &str, + payload: Option<&[u8]>, +) -> eyre::Result<()> { let mut validation = jsonwebtoken::Validation::default(); validation.leeway = 10; - jsonwebtoken::decode::( + let claims = jsonwebtoken::decode::( jwt.as_str(), &jsonwebtoken::DecodingKey::from_secret(secret.as_ref()), &validation, - ) - .map(|_| ()) - .map_err(From::from) + )? + .claims; + + // Validate the route + if claims.route != route { + eyre::bail!("Token route does not match"); + } + + // Validate the payload hash if provided + if let Some(payload_bytes) = payload { + if let Some(expected_hash) = claims.payload_hash { + let actual_hash = keccak256(payload_bytes); + if actual_hash != expected_hash { + eyre::bail!("Payload hash does not match"); + } + } else { + eyre::bail!("JWT does not contain a payload hash"); + } + } else if claims.payload_hash.is_some() { + eyre::bail!("JWT contains a payload hash but no payload was provided"); + } + Ok(()) +} + +pub fn validate_admin_jwt( + jwt: Jwt, + secret: &str, + route: &str, + payload: Option<&[u8]>, +) -> eyre::Result<()> { + let mut validation = jsonwebtoken::Validation::default(); + validation.leeway = 10; + + let claims = jsonwebtoken::decode::( + jwt.as_str(), + &jsonwebtoken::DecodingKey::from_secret(secret.as_ref()), + &validation, + )? 
+ .claims; + + if !claims.admin { + eyre::bail!("Token is not admin") + } + + // Validate the route + if claims.route != route { + eyre::bail!("Token route does not match"); + } + + // Validate the payload hash if provided + if let Some(payload_bytes) = payload { + if let Some(expected_hash) = claims.payload_hash { + let actual_hash = keccak256(payload_bytes); + if actual_hash != expected_hash { + eyre::bail!("Payload hash does not match"); + } + } else { + eyre::bail!("JWT does not contain a payload hash"); + } + } else if claims.payload_hash.is_some() { + eyre::bail!("JWT contains a payload hash but no payload was provided"); + } + Ok(()) } /// Generates a random string @@ -409,6 +564,167 @@ pub fn get_user_agent_with_version(req_headers: &HeaderMap) -> eyre::Result eyre::Result> { + let mut accepted_types = HashSet::new(); + let mut unsupported_type = false; + for header in req_headers.get_all(ACCEPT).iter() { + let accept = Accept::from_str(header.to_str()?) + .map_err(|e| eyre::eyre!("invalid accept header: {e}"))?; + for mt in accept.media_types() { + match mt.essence().to_string().as_str() { + APPLICATION_OCTET_STREAM => { + accepted_types.insert(EncodingType::Ssz); + } + APPLICATION_JSON | WILDCARD => { + accepted_types.insert(EncodingType::Json); + } + _ => unsupported_type = true, + }; + } + } + + if accepted_types.is_empty() { + if unsupported_type { + return Err(eyre::eyre!("unsupported accept type")); + } + + // No accept header so just return the same type as the content type + accepted_types.insert(get_content_type(req_headers)); + } + Ok(accepted_types) +} + +/// Parse CONTENT TYPE header to get the encoding type of the body, defaulting +/// to JSON if missing or malformed. 
+pub fn get_content_type(req_headers: &HeaderMap) -> EncodingType { + EncodingType::from_str( + req_headers + .get(CONTENT_TYPE) + .and_then(|value| value.to_str().ok()) + .unwrap_or(APPLICATION_JSON), + ) + .unwrap_or(EncodingType::Json) +} + +/// Parse CONSENSUS_VERSION header +pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option { + ForkName::from_str( + req_headers + .get(CONSENSUS_VERSION_HEADER) + .and_then(|value| value.to_str().ok()) + .unwrap_or(""), + ) + .ok() +} + +/// Enum for types that can be used to encode incoming request bodies or +/// outgoing response bodies +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum EncodingType { + /// Body is UTF-8 encoded as JSON + Json, + + /// Body is raw bytes representing an SSZ object + Ssz, +} + +impl EncodingType { + /// Get the content type string for the encoding type + pub fn content_type(&self) -> &str { + match self { + EncodingType::Json => APPLICATION_JSON, + EncodingType::Ssz => APPLICATION_OCTET_STREAM, + } + } +} + +impl std::fmt::Display for EncodingType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.content_type()) + } +} + +impl FromStr for EncodingType { + type Err = String; + fn from_str(value: &str) -> Result { + match value.to_ascii_lowercase().as_str() { + APPLICATION_JSON | "" => Ok(EncodingType::Json), + APPLICATION_OCTET_STREAM => Ok(EncodingType::Ssz), + _ => Err(format!("unsupported encoding type: {value}")), + } + } +} + +pub enum BodyDeserializeError { + SerdeJsonError(serde_json::Error), + SszDecodeError(ssz::DecodeError), + UnsupportedMediaType, + MissingVersionHeader, +} + +impl Display for BodyDeserializeError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BodyDeserializeError::SerdeJsonError(e) => write!(f, "JSON deserialization error: {e}"), + BodyDeserializeError::SszDecodeError(e) => { + write!(f, "SSZ deserialization error: {e:?}") + } + 
BodyDeserializeError::UnsupportedMediaType => write!(f, "unsupported media type"), + BodyDeserializeError::MissingVersionHeader => { + write!(f, "missing consensus version header") + } + } + } +} + +pub async fn deserialize_body( + headers: &HeaderMap, + body: Bytes, +) -> Result { + if headers.contains_key(CONTENT_TYPE) { + return match get_content_type(headers) { + EncodingType::Json => serde_json::from_slice::(&body) + .map_err(BodyDeserializeError::SerdeJsonError), + EncodingType::Ssz => { + // Get the version header + match get_consensus_version_header(headers) { + Some(version) => { + SignedBlindedBeaconBlock::from_ssz_bytes_with(&body, |bytes| { + BeaconBlock::from_ssz_bytes_for_fork(bytes, version) + }) + .map_err(BodyDeserializeError::SszDecodeError) + } + None => Err(BodyDeserializeError::MissingVersionHeader), + } + } + }; + } + + Err(BodyDeserializeError::UnsupportedMediaType) +} + +#[must_use] +#[derive(Debug, Clone, Default)] +pub struct RawRequest { + pub body_bytes: Bytes, +} + +impl FromRequest for RawRequest +where + S: Send + Sync, +{ + type Rejection = AxumResponse; + + async fn from_request(req: Request, _state: &S) -> Result { + let bytes = Bytes::from_request(req, _state).await.map_err(IntoResponse::into_response)?; + Ok(Self { body_bytes: bytes }) + } +} + #[cfg(unix)] pub async fn wait_for_signal() -> eyre::Result<()> { use tokio::signal::unix::{SignalKind, signal}; @@ -458,30 +774,648 @@ pub fn bls_pubkey_from_hex_unchecked(hex: &str) -> BlsPublicKey { bls_pubkey_from_hex(hex).unwrap() } +// Get the offset of the message in a SignedBuilderBid SSZ structure +fn get_ssz_value_offset_for_fork(fork: ForkName) -> Option { + match fork { + ForkName::Bellatrix => { + // Message goes header -> value -> pubkey + Some( + get_message_offset::() + + ::ssz_fixed_len(), + ) + } + + ForkName::Capella => { + // Message goes header -> value -> pubkey + Some( + get_message_offset::() + + ::ssz_fixed_len(), + ) + } + + ForkName::Deneb => { + // Message goes 
header -> blob_kzg_commitments -> value -> pubkey + Some( + get_message_offset::() + + ::ssz_fixed_len() + + ::ssz_fixed_len(), + ) + } + + ForkName::Electra => { + // Message goes header -> blob_kzg_commitments -> execution_requests -> value -> + // pubkey + Some( + get_message_offset::() + + ::ssz_fixed_len() + + ::ssz_fixed_len() + + ::ssz_fixed_len(), + ) + } + + ForkName::Fulu => { + // Message goes header -> blob_kzg_commitments -> execution_requests -> value -> + // pubkey + Some( + get_message_offset::() + + ::ssz_fixed_len() + + ::ssz_fixed_len() + + ::ssz_fixed_len(), + ) + } + + ForkName::Gloas => { + // Message goes header -> blob_kzg_commitments -> execution_requests -> value -> + // pubkey + Some( + get_message_offset::() + + ::ssz_fixed_len() + + ::ssz_fixed_len() + + ::ssz_fixed_len(), + ) + } + _ => None, + } +} + +/// Extracts the bid value from SSZ-encoded SignedBuilderBid response bytes. +pub fn get_bid_value_from_signed_builder_bid_ssz( + response_bytes: &[u8], + fork: ForkName, +) -> Result { + let value_offset = SSZ_VALUE_OFFSETS_BY_FORK + .get(&fork) + .ok_or(SszValueError::UnsupportedFork { name: fork.to_string() })?; + + // Sanity check the response length so we don't panic trying to slice it + let end_offset = value_offset + 32; // U256 is 32 bytes + if response_bytes.len() < end_offset { + return Err(SszValueError::InvalidPayloadLength { + required: end_offset, + actual: response_bytes.len(), + }); + } + + // Extract the value bytes and convert to U256 + let value_bytes = &response_bytes[*value_offset..end_offset]; + let value = U256::from_le_slice(value_bytes); + Ok(value) +} + +// Get the offset where the `message` field starts in some SignedBuilderBid SSZ +// data. Requires that SignedBuilderBid always has the following structure: +// message -> signature +// where `message` is a BuilderBid type determined by the fork choice, and +// `signature` is a fixed-length Signature type. 
+fn get_message_offset() -> usize +where + BuilderBidType: ssz::Encode, +{ + // Since `message` is the first field, its offset is always 0 + let mut offset = 0; + + // If it's variable length, then it will be represented by a pointer to + // the actual data, so we need to get the location of where that data starts + if !BuilderBidType::is_ssz_fixed_len() { + offset += BYTES_PER_LENGTH_OFFSET + ::ssz_fixed_len(); + } + + offset +} + #[cfg(test)] mod test { - use super::{create_jwt, decode_jwt, validate_jwt}; - use crate::types::{Jwt, ModuleId}; + use alloy::primitives::keccak256; + use axum::http::{HeaderMap, HeaderName, HeaderValue}; + use bytes::Bytes; + use reqwest::header::{ACCEPT, CONTENT_TYPE}; + + use super::{ + BodyDeserializeError, CONSENSUS_VERSION_HEADER, create_admin_jwt, create_jwt, + decode_admin_jwt, decode_jwt, deserialize_body, get_consensus_version_header, + get_content_type, random_jwt_secret, validate_admin_jwt, validate_jwt, + }; + use crate::{ + constants::SIGNER_JWT_EXPIRATION, + pbs::error::SszValueError, + types::{Jwt, JwtAdminClaims, ModuleId}, + utils::{ + APPLICATION_JSON, APPLICATION_OCTET_STREAM, EncodingType, ForkName, WILDCARD, + get_accept_types, get_bid_value_from_signed_builder_bid_ssz, + }, + }; + + const APPLICATION_TEXT: &str = "application/text"; #[test] - fn test_jwt_validation() { + fn test_jwt_validation_no_payload_hash() { // Check valid JWT - let jwt = create_jwt(&ModuleId("DA_COMMIT".to_string()), "secret").unwrap(); - let module_id = decode_jwt(jwt.clone()).unwrap(); + let jwt = + create_jwt(&ModuleId("DA_COMMIT".to_string()), "secret", "/test/route", None).unwrap(); + let claims = decode_jwt(jwt.clone()).unwrap(); + let module_id = claims.module; + let payload_hash = claims.payload_hash; assert_eq!(module_id, ModuleId("DA_COMMIT".to_string())); - let response = validate_jwt(jwt, "secret"); + assert!(payload_hash.is_none()); + let response = validate_jwt(jwt, "secret", "/test/route", None); assert!(response.is_ok()); // 
Check expired JWT - let expired_jwt = Jwt::from("eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NDI5OTU5NDYsIm1vZHVsZSI6IkRBX0NPTU1JVCJ9.iiq4Z2ed2hk3c3c-cn2QOQJWE5XUOc5BoaIPT-I8q-s".to_string()); - let response = validate_jwt(expired_jwt, "secret"); + let expired_jwt = Jwt::from("eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NTgyOTkxNzIsIm1vZHVsZSI6IkRBX0NPTU1JVCIsInJvdXRlIjoiL3Rlc3Qvcm91dGUiLCJwYXlsb2FkX2hhc2giOm51bGx9._OBsNC67KLkk6f6ZQ2_CDbhYUJ2OtZ9egKAmi1L-ymA".to_string()); + let response = validate_jwt(expired_jwt, "secret", "/test/route", None); assert!(response.is_err()); assert_eq!(response.unwrap_err().to_string(), "ExpiredSignature"); // Check invalid signature JWT - let invalid_jwt = Jwt::from("eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NDI5OTU5NDYsIm1vZHVsZSI6IkRBX0NPTU1JVCJ9.w9WYdDNzgDjYTvjBkk4GGzywGNBYPxnzU2uJWzPUT1s".to_string()); - let response = validate_jwt(invalid_jwt, "secret"); + let invalid_jwt = Jwt::from("eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NTgyOTkxMzQsIm1vZHVsZSI6IkRBX0NPTU1JVCIsInJvdXRlIjoiL3Rlc3Qvcm91dGUiLCJwYXlsb2FkX2hhc2giOm51bGx9.58QXayg2XeX5lXhIPw-a8kl04DWBEj5wBsqsedTeClo".to_string()); + let response = validate_jwt(invalid_jwt, "secret", "/test/route", None); assert!(response.is_err()); assert_eq!(response.unwrap_err().to_string(), "InvalidSignature"); } + + /// Make sure a missing Accept header is interpreted as JSON + #[test] + fn test_missing_accept_header() { + let headers = HeaderMap::new(); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Json)); + } + + /// Test accepting JSON + #[test] + fn test_accept_header_json() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_JSON).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Json)); + } + + /// Test accepting SSZ + #[test] + fn test_accept_header_ssz() 
{ + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test accepting wildcards + #[test] + fn test_accept_header_wildcard() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(WILDCARD).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Json)); + } + + /// Test accepting one header with multiple values + #[test] + fn test_accept_header_multiple_values() { + let header_string = format!("{APPLICATION_JSON}, {APPLICATION_OCTET_STREAM}"); + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.contains(&EncodingType::Json)); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test accepting multiple headers + #[test] + fn test_multiple_accept_headers() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_JSON).unwrap()); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.contains(&EncodingType::Json)); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test accepting one header with multiple values, including a type that + /// can't be used + #[test] + fn test_accept_header_multiple_values_including_unknown() { + let header_string = + format!("{APPLICATION_JSON}, {APPLICATION_OCTET_STREAM}, {APPLICATION_TEXT}"); + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap()); + let result = get_accept_types(&headers).unwrap(); + 
assert_eq!(result.len(), 2); + assert!(result.contains(&EncodingType::Json)); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test rejecting an unknown accept type + #[test] + fn test_invalid_accept_header_type() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_TEXT).unwrap()); + let result = get_accept_types(&headers); + assert!(result.is_err()); + } + + /// Test accepting one header with multiple values + #[test] + fn test_accept_header_invalid_parse() { + let header_string = format!("{APPLICATION_JSON}, a?;ef)"); + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap()); + let result = get_accept_types(&headers); + assert!(result.is_err()); + } + + #[test] + fn test_jwt_validation_with_payload() { + // Pretend payload + let payload = serde_json::json!({ + "data": "test" + }); + let payload_bytes = serde_json::to_vec(&payload).unwrap(); + + // Check valid JWT + let jwt = create_jwt( + &ModuleId("DA_COMMIT".to_string()), + "secret", + "/test/route", + Some(&payload_bytes), + ) + .unwrap(); + let claims = decode_jwt(jwt.clone()).unwrap(); + let module_id = claims.module; + let payload_hash = claims.payload_hash; + assert_eq!(module_id, ModuleId("DA_COMMIT".to_string())); + assert_eq!(payload_hash, Some(keccak256(&payload_bytes))); + let response = validate_jwt(jwt, "secret", "/test/route", Some(&payload_bytes)); + assert!(response.is_ok()); + + // Check expired JWT + let expired_jwt = Jwt::from("eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NTgyOTgzNDQsIm1vZHVsZSI6IkRBX0NPTU1JVCIsInJvdXRlIjoiL3Rlc3Qvcm91dGUiLCJwYXlsb2FkX2hhc2giOiIweGFmODk2MjY0MzUzNTFmYzIwMDBkYmEwM2JiNTlhYjcyZWE0ODJiOWEwMDBmZWQzNmNkMjBlMDU0YjE2NjZmZjEifQ.PYrSxLXadKBgYZlmLam8RBSL32I1T_zAxlZpG6xnnII".to_string()); + let response = validate_jwt(expired_jwt, "secret", "/test/route", Some(&payload_bytes)); + assert!(response.is_err()); + assert_eq!(response.unwrap_err().to_string(), 
"ExpiredSignature"); + + // Check invalid signature JWT + let invalid_jwt = Jwt::from("eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NTgyOTkwMDAsIm1vZHVsZSI6IkRBX0NPTU1JVCIsInJvdXRlIjoiL3Rlc3Qvcm91dGUiLCJwYXlsb2FkX2hhc2giOiIweGFmODk2MjY0MzUzNTFmYzIwMDBkYmEwM2JiNTlhYjcyZWE0ODJiOWEwMDBmZWQzNmNkMjBlMDU0YjE2NjZmZjEifQ.mnC-AexkLlR9l98SJbln3DmV6r9XyHYdbjcUVcWdi_8".to_string()); + let response = validate_jwt(invalid_jwt, "secret", "/test/route", Some(&payload_bytes)); + assert!(response.is_err()); + assert_eq!(response.unwrap_err().to_string(), "InvalidSignature"); + } + + // ── validate_jwt: route and secret errors ──────────────────────────────── + + #[test] + fn test_validate_jwt_wrong_route() { + let jwt = create_jwt(&ModuleId("MOD".into()), "secret", "/correct/route", None).unwrap(); + let err = validate_jwt(jwt, "secret", "/wrong/route", None).unwrap_err(); + assert!(err.to_string().contains("Token route does not match")); + } + + #[test] + fn test_validate_jwt_wrong_secret() { + let jwt = create_jwt(&ModuleId("MOD".into()), "correct_secret", "/route", None).unwrap(); + let err = validate_jwt(jwt, "wrong_secret", "/route", None).unwrap_err(); + assert_eq!(err.to_string(), "InvalidSignature"); + } + + // ── validate_jwt: payload hash mismatch branches ───────────────────────── + + #[test] + fn test_validate_jwt_payload_hash_mismatch() { + let payload_a = b"payload_a"; + let payload_b = b"payload_b"; + let jwt = create_jwt(&ModuleId("MOD".into()), "secret", "/route", Some(payload_a)).unwrap(); + let err = validate_jwt(jwt, "secret", "/route", Some(payload_b)).unwrap_err(); + assert!(err.to_string().contains("Payload hash does not match")); + } + + #[test] + fn test_validate_jwt_hash_present_but_no_payload_provided() { + let payload = b"some payload"; + let jwt = create_jwt(&ModuleId("MOD".into()), "secret", "/route", Some(payload)).unwrap(); + let err = validate_jwt(jwt, "secret", "/route", None).unwrap_err(); + assert!( + err.to_string().contains("JWT contains a 
payload hash but no payload was provided") + ); + } + + #[test] + fn test_validate_jwt_no_hash_but_payload_provided() { + let jwt = create_jwt(&ModuleId("MOD".into()), "secret", "/route", None).unwrap(); + let err = validate_jwt(jwt, "secret", "/route", Some(b"unexpected")).unwrap_err(); + assert!(err.to_string().contains("JWT does not contain a payload hash")); + } + + // ── admin JWT roundtrip ────────────────────────────────────────────────── + + #[test] + fn test_admin_jwt_roundtrip_no_payload() { + let jwt = create_admin_jwt("admin_secret".into(), "/admin/route", None).unwrap(); + let claims = decode_admin_jwt(jwt.clone()).unwrap(); + assert!(claims.admin); + assert_eq!(claims.route, "/admin/route"); + assert!(claims.payload_hash.is_none()); + validate_admin_jwt(jwt, "admin_secret", "/admin/route", None).unwrap(); + } + + #[test] + fn test_admin_jwt_roundtrip_with_payload() { + let payload = b"admin payload"; + let jwt = create_admin_jwt("admin_secret".into(), "/admin/route", Some(payload)).unwrap(); + let claims = decode_admin_jwt(jwt.clone()).unwrap(); + assert!(claims.admin); + assert_eq!(claims.payload_hash, Some(keccak256(payload))); + validate_admin_jwt(jwt, "admin_secret", "/admin/route", Some(payload)).unwrap(); + } + + // ── validate_admin_jwt: route, secret, admin flag errors ───────────────── + + #[test] + fn test_validate_admin_jwt_wrong_route() { + let jwt = create_admin_jwt("admin_secret".into(), "/correct/route", None).unwrap(); + let err = validate_admin_jwt(jwt, "admin_secret", "/wrong/route", None).unwrap_err(); + assert!(err.to_string().contains("Token route does not match")); + } + + #[test] + fn test_validate_admin_jwt_wrong_secret() { + let jwt = create_admin_jwt("correct_secret".into(), "/route", None).unwrap(); + let err = validate_admin_jwt(jwt, "wrong_secret", "/route", None).unwrap_err(); + assert_eq!(err.to_string(), "InvalidSignature"); + } + + #[test] + fn test_validate_admin_jwt_admin_false() { + // Craft a JWT whose claims have 
admin: false — something create_admin_jwt + // never produces — to exercise the explicit admin flag guard. + let claims = JwtAdminClaims { + admin: false, + route: "/route".into(), + exp: jsonwebtoken::get_current_timestamp() + SIGNER_JWT_EXPIRATION, + payload_hash: None, + }; + let token = jsonwebtoken::encode( + &jsonwebtoken::Header::default(), + &claims, + &jsonwebtoken::EncodingKey::from_secret(b"secret"), + ) + .unwrap(); + let jwt = Jwt::from(token); + let err = validate_admin_jwt(jwt, "secret", "/route", None).unwrap_err(); + assert!(err.to_string().contains("Token is not admin")); + } + + // ── validate_admin_jwt: payload hash mismatch branches ─────────────────── + + #[test] + fn test_validate_admin_jwt_payload_hash_mismatch() { + let payload_a = b"admin_payload_a"; + let payload_b = b"admin_payload_b"; + let jwt = create_admin_jwt("secret".into(), "/route", Some(payload_a)).unwrap(); + let err = validate_admin_jwt(jwt, "secret", "/route", Some(payload_b)).unwrap_err(); + assert!(err.to_string().contains("Payload hash does not match")); + } + + #[test] + fn test_validate_admin_jwt_hash_present_but_no_payload_provided() { + let payload = b"admin payload"; + let jwt = create_admin_jwt("secret".into(), "/route", Some(payload)).unwrap(); + let err = validate_admin_jwt(jwt, "secret", "/route", None).unwrap_err(); + assert!( + err.to_string().contains("JWT contains a payload hash but no payload was provided") + ); + } + + #[test] + fn test_validate_admin_jwt_no_hash_but_payload_provided() { + let jwt = create_admin_jwt("secret".into(), "/route", None).unwrap(); + let err = validate_admin_jwt(jwt, "secret", "/route", Some(b"unexpected")).unwrap_err(); + assert!(err.to_string().contains("JWT does not contain a payload hash")); + } + + // ── random_jwt_secret ──────────────────────────────────────────────────── + + #[test] + fn test_random_jwt_secret() { + let secret = random_jwt_secret(); + assert_eq!(secret.len(), 32); + assert!(secret.chars().all(|c| 
c.is_ascii_alphanumeric())); + // Two calls should produce distinct values with overwhelming probability. + assert_ne!(secret, random_jwt_secret()); + } + + // ── get_content_type ───────────────────────────────────────────────────── + + #[test] + fn test_content_type_missing_defaults_to_json() { + let headers = HeaderMap::new(); + assert_eq!(get_content_type(&headers), EncodingType::Json); + } + + #[test] + fn test_content_type_json() { + let mut headers = HeaderMap::new(); + headers.insert(CONTENT_TYPE, HeaderValue::from_str(APPLICATION_JSON).unwrap()); + assert_eq!(get_content_type(&headers), EncodingType::Json); + } + + #[test] + fn test_content_type_ssz() { + let mut headers = HeaderMap::new(); + headers.insert(CONTENT_TYPE, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap()); + assert_eq!(get_content_type(&headers), EncodingType::Ssz); + } + + #[test] + fn test_content_type_unknown_defaults_to_json() { + let mut headers = HeaderMap::new(); + headers.insert(CONTENT_TYPE, HeaderValue::from_str("application/xml").unwrap()); + assert_eq!(get_content_type(&headers), EncodingType::Json); + } + + // ── get_consensus_version_header ───────────────────────────────────────── + + #[test] + fn test_consensus_version_header_electra() { + let mut headers = HeaderMap::new(); + let name = HeaderName::try_from(CONSENSUS_VERSION_HEADER).unwrap(); + headers.insert(name, HeaderValue::from_str("electra").unwrap()); + assert_eq!(get_consensus_version_header(&headers), Some(ForkName::Electra)); + } + + #[test] + fn test_consensus_version_header_missing() { + let headers = HeaderMap::new(); + assert_eq!(get_consensus_version_header(&headers), None); + } + + #[test] + fn test_consensus_version_header_invalid() { + let mut headers = HeaderMap::new(); + let name = HeaderName::try_from(CONSENSUS_VERSION_HEADER).unwrap(); + headers.insert(name, HeaderValue::from_str("not_a_fork").unwrap()); + assert_eq!(get_consensus_version_header(&headers), None); + } + + // ── EncodingType 
───────────────────────────────────────────────────────── + + #[test] + fn test_encoding_type_from_str_variants() { + use std::str::FromStr; + assert_eq!(EncodingType::from_str(APPLICATION_JSON).unwrap(), EncodingType::Json); + assert_eq!(EncodingType::from_str(APPLICATION_OCTET_STREAM).unwrap(), EncodingType::Ssz); + // empty string defaults to JSON per the impl + assert_eq!(EncodingType::from_str("").unwrap(), EncodingType::Json); + assert!(EncodingType::from_str("application/xml").is_err()); + } + + #[test] + fn test_encoding_type_display() { + assert_eq!(EncodingType::Json.to_string(), APPLICATION_JSON); + assert_eq!(EncodingType::Ssz.to_string(), APPLICATION_OCTET_STREAM); + } + + // ── get_bid_value_from_signed_builder_bid_ssz ──────────────────────────── + + #[test] + fn test_ssz_value_extraction_unsupported_fork() { + let dummy_bytes = vec![0u8; 1000]; + let err = + get_bid_value_from_signed_builder_bid_ssz(&dummy_bytes, ForkName::Altair).unwrap_err(); + assert!(matches!(err, SszValueError::UnsupportedFork { .. })); + } + + #[test] + fn test_ssz_value_extraction_truncated_payload() { + // A payload that is far too short for any supported fork's value offset + let tiny_bytes = vec![0u8; 4]; + let err = + get_bid_value_from_signed_builder_bid_ssz(&tiny_bytes, ForkName::Electra).unwrap_err(); + assert!(matches!(err, SszValueError::InvalidPayloadLength { .. })); + } + + /// Per-fork positive tests: construct a `SignedBuilderBid` with a known + /// value for each supported fork, SSZ-encode it, and verify + /// `get_bid_value_from_signed_builder_bid_ssz` round-trips correctly. 
+ #[test] + fn test_ssz_value_extraction_with_known_bid() { + use alloy::primitives::U256; + use ssz::Encode; + + use crate::{ + pbs::{ + BuilderBid, BuilderBidBellatrix, BuilderBidCapella, BuilderBidDeneb, + BuilderBidElectra, BuilderBidFulu, ExecutionPayloadHeaderBellatrix, + ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, + ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, ExecutionRequests, + SignedBuilderBid, + }, + types::{BlsPublicKeyBytes, BlsSignature}, + utils::TestRandomSeed, + }; + + // Distinctive value — large enough that endianness bugs produce a + // different number and zero-matches are impossible. + let known_value = U256::from(0x0102_0304_0506_0708_u64); + let pubkey = BlsPublicKeyBytes::test_random(); + let sig = BlsSignature::test_random(); + + // ── Bellatrix ──────────────────────────────────────────────────────── + { + let message = BuilderBid::Bellatrix(BuilderBidBellatrix { + header: ExecutionPayloadHeaderBellatrix::test_random(), + value: known_value, + pubkey, + }); + let bid = SignedBuilderBid { message, signature: sig.clone() }; + let got = + get_bid_value_from_signed_builder_bid_ssz(&bid.as_ssz_bytes(), ForkName::Bellatrix) + .expect("Bellatrix extraction failed"); + assert_eq!(got, known_value, "Bellatrix: value mismatch"); + } + + // ── Capella ────────────────────────────────────────────────────────── + { + let message = BuilderBid::Capella(BuilderBidCapella { + header: ExecutionPayloadHeaderCapella::test_random(), + value: known_value, + pubkey, + }); + let bid = SignedBuilderBid { message, signature: sig.clone() }; + let got = + get_bid_value_from_signed_builder_bid_ssz(&bid.as_ssz_bytes(), ForkName::Capella) + .expect("Capella extraction failed"); + assert_eq!(got, known_value, "Capella: value mismatch"); + } + + // ── Deneb ──────────────────────────────────────────────────────────── + { + let message = BuilderBid::Deneb(BuilderBidDeneb { + header: ExecutionPayloadHeaderDeneb::test_random(), + 
blob_kzg_commitments: Default::default(), + value: known_value, + pubkey, + }); + let bid = SignedBuilderBid { message, signature: sig.clone() }; + let got = + get_bid_value_from_signed_builder_bid_ssz(&bid.as_ssz_bytes(), ForkName::Deneb) + .expect("Deneb extraction failed"); + assert_eq!(got, known_value, "Deneb: value mismatch"); + } + + // ── Electra ────────────────────────────────────────────────────────── + { + let message = BuilderBid::Electra(BuilderBidElectra { + header: ExecutionPayloadHeaderElectra::test_random(), + blob_kzg_commitments: Default::default(), + execution_requests: ExecutionRequests::default(), + value: known_value, + pubkey, + }); + let bid = SignedBuilderBid { message, signature: sig.clone() }; + let got = + get_bid_value_from_signed_builder_bid_ssz(&bid.as_ssz_bytes(), ForkName::Electra) + .expect("Electra extraction failed"); + assert_eq!(got, known_value, "Electra: value mismatch"); + } + + // ── Fulu ───────────────────────────────────────────────────────────── + { + let message = BuilderBid::Fulu(BuilderBidFulu { + header: ExecutionPayloadHeaderFulu::test_random(), + blob_kzg_commitments: Default::default(), + execution_requests: ExecutionRequests::default(), + value: known_value, + pubkey, + }); + let bid = SignedBuilderBid { message, signature: sig }; + let got = + get_bid_value_from_signed_builder_bid_ssz(&bid.as_ssz_bytes(), ForkName::Fulu) + .expect("Fulu extraction failed"); + assert_eq!(got, known_value, "Fulu: value mismatch"); + } + } + + // ── deserialize_body error paths ───────────────────────────────────────── + + #[tokio::test] + async fn test_deserialize_body_missing_content_type() { + let headers = HeaderMap::new(); + let body = Bytes::from_static(b"{}"); + let err = deserialize_body(&headers, body).await.unwrap_err(); + assert!(matches!(err, BodyDeserializeError::UnsupportedMediaType)); + } + + #[tokio::test] + async fn test_deserialize_body_ssz_missing_version_header() { + let mut headers = HeaderMap::new(); + 
headers.insert(CONTENT_TYPE, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap()); + let body = Bytes::from_static(b"\x00\x01\x02\x03"); + let err = deserialize_body(&headers, body).await.unwrap_err(); + assert!(matches!(err, BodyDeserializeError::MissingVersionHeader)); + } } diff --git a/crates/pbs/Cargo.toml b/crates/pbs/Cargo.toml index a9124c06..9d9df214 100644 --- a/crates/pbs/Cargo.toml +++ b/crates/pbs/Cargo.toml @@ -12,9 +12,13 @@ axum.workspace = true axum-extra.workspace = true cb-common.workspace = true cb-metrics.workspace = true +ethereum_serde_utils.workspace = true +ethereum_ssz.workspace = true eyre.workspace = true futures.workspace = true +headers.workspace = true lazy_static.workspace = true +lh_types.workspace = true notify.workspace = true parking_lot.workspace = true prometheus.workspace = true diff --git a/crates/pbs/src/api.rs b/crates/pbs/src/api.rs index 594b7d36..74d92fb2 100644 --- a/crates/pbs/src/api.rs +++ b/crates/pbs/src/api.rs @@ -1,14 +1,14 @@ -use std::sync::Arc; +use std::{collections::HashSet, sync::Arc}; use async_trait::async_trait; use axum::{Router, http::HeaderMap}; -use cb_common::pbs::{ - BuilderApiVersion, GetHeaderParams, GetHeaderResponse, SignedBlindedBeaconBlock, - SubmitBlindedBlockResponse, +use cb_common::{ + pbs::{BuilderApiVersion, GetHeaderParams, SignedBlindedBeaconBlock}, + utils::EncodingType, }; use crate::{ - mev_boost, + CompoundGetHeaderResponse, CompoundSubmitBlockResponse, mev_boost, state::{BuilderApiState, PbsState, PbsStateGuard}, }; @@ -24,8 +24,9 @@ pub trait BuilderApi: 'static { params: GetHeaderParams, req_headers: HeaderMap, state: PbsState, - ) -> eyre::Result> { - mev_boost::get_header(params, req_headers, state).await + accepted_types: HashSet, + ) -> eyre::Result> { + mev_boost::get_header(params, req_headers, state, accepted_types).await } /// https://ethereum.github.io/builder-specs/#/Builder/status @@ -40,8 +41,16 @@ pub trait BuilderApi: 'static { req_headers: HeaderMap, state: 
PbsState, api_version: BuilderApiVersion, - ) -> eyre::Result> { - mev_boost::submit_block(signed_blinded_block, req_headers, state, api_version).await + accepted_types: HashSet, + ) -> eyre::Result { + mev_boost::submit_block( + signed_blinded_block, + req_headers, + state, + api_version, + accepted_types, + ) + .await } /// https://ethereum.github.io/builder-specs/#/Builder/registerValidator diff --git a/crates/pbs/src/error.rs b/crates/pbs/src/error.rs index 590c03d4..1214fd6a 100644 --- a/crates/pbs/src/error.rs +++ b/crates/pbs/src/error.rs @@ -1,4 +1,5 @@ use axum::{http::StatusCode, response::IntoResponse}; +use cb_common::utils::BodyDeserializeError; #[derive(Debug)] /// Errors that the PbsService returns to client @@ -6,6 +7,8 @@ pub enum PbsClientError { NoResponse, NoPayload, Internal, + DecodeError(String), + RelayError(String), } impl PbsClientError { @@ -14,16 +17,26 @@ impl PbsClientError { PbsClientError::NoResponse => StatusCode::BAD_GATEWAY, PbsClientError::NoPayload => StatusCode::BAD_GATEWAY, PbsClientError::Internal => StatusCode::INTERNAL_SERVER_ERROR, + PbsClientError::DecodeError(_) => StatusCode::BAD_REQUEST, + PbsClientError::RelayError(_) => StatusCode::FAILED_DEPENDENCY, } } } +impl From for PbsClientError { + fn from(e: BodyDeserializeError) -> Self { + PbsClientError::DecodeError(format!("failed to deserialize body: {e}")) + } +} + impl IntoResponse for PbsClientError { fn into_response(self) -> axum::response::Response { let msg = match &self { PbsClientError::NoResponse => "no response from relays".to_string(), PbsClientError::NoPayload => "no payload from relays".to_string(), PbsClientError::Internal => "internal server error".to_string(), + PbsClientError::DecodeError(e) => format!("error decoding request: {e}"), + PbsClientError::RelayError(e) => format!("error processing relay response: {e}"), }; (self.status_code(), msg).into_response() diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs 
index 86743703..751987af 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -1,31 +1,39 @@ use std::{ + collections::HashSet, sync::Arc, time::{Duration, Instant}, }; use alloy::{ - primitives::{B256, U256, utils::format_ether}, + primitives::{B256, U256, aliases::B32, utils::format_ether}, providers::Provider, rpc::types::Block, }; use axum::http::{HeaderMap, HeaderValue}; use cb_common::{ + config::HeaderValidationMode, constants::APPLICATION_BUILDER_DOMAIN, pbs::{ - EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, GetHeaderInfo, GetHeaderParams, - GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS, RelayClient, + EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, ForkName, ForkVersionDecode, GetHeaderInfo, + GetHeaderParams, GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS, + RelayClient, SignedBuilderBid, error::{PbsError, ValidationError}, }, signature::verify_signed_message, types::{BlsPublicKey, BlsPublicKeyBytes, BlsSignature, Chain}, utils::{ + EncodingType, get_bid_value_from_signed_builder_bid_ssz, get_consensus_version_header, get_user_agent_with_version, ms_into_slot, read_chunked_body_with_max, timestamp_of_slot_start_sec, utcnow_ms, }, }; use futures::future::join_all; use parking_lot::RwLock; -use reqwest::{StatusCode, header::USER_AGENT}; +use reqwest::{ + StatusCode, + header::{ACCEPT, CONTENT_TYPE, USER_AGENT}, +}; +use serde::Deserialize; use tokio::time::sleep; use tracing::{Instrument, debug, error, warn}; use tree_hash::TreeHash; @@ -37,21 +45,83 @@ use crate::{ TIMEOUT_ERROR_CODE_STR, }, metrics::{RELAY_HEADER_VALUE, RELAY_LAST_SLOT, RELAY_LATENCY, RELAY_STATUS_CODE}, + mev_boost::{CompoundGetHeaderResponse, LightGetHeaderResponse}, state::{BuilderApiState, PbsState}, utils::check_gas_limit, }; +/// Info about an incoming get_header request. +/// Sent from get_header to each send_timed_get_header call. 
+#[derive(Clone)] +struct RequestInfo { + /// The blockchain parameters of the get_header request (what slot it's for, + /// which pubkey is requesting it, etc) + params: GetHeaderParams, + + /// Common baseline of headers to send with each request + headers: Arc, + + /// The chain the request is for + chain: Chain, + + /// Context for validating the header returned by the relay + validation: ValidationContext, + + /// The accepted encoding types from the original request + accepted_types: HashSet, +} + +/// Used internally to provide info and context about a get_header request and +/// its response +struct GetHeaderResponseInfo { + /// ID of the relay the response came from + relay_id: Arc, + + /// The raw body of the response + response_bytes: Vec, + + /// The content type the response is encoded with + content_type: EncodingType, + + /// Which fork the response bid is for (if provided as a header, rather than + /// part of the body) + fork: Option, + + /// The status code of the response, for logging + code: StatusCode, + + /// The round-trip latency of the request + request_latency: Duration, +} + +/// Context for validating the header +#[derive(Clone)] +struct ValidationContext { + /// Whether to skip signature verification + skip_sigverify: bool, + + /// Minimum acceptable bid, in wei + min_bid_wei: U256, + + /// The mode used for response validation + mode: HeaderValidationMode, + + /// The parent block, if fetched + parent_block: Arc>>, +} + /// Implements https://ethereum.github.io/builder-specs/#/Builder/getHeader /// Returns 200 if at least one relay returns 200, else 204 pub async fn get_header( params: GetHeaderParams, req_headers: HeaderMap, state: PbsState, -) -> eyre::Result> { + accepted_types: HashSet, +) -> eyre::Result> { let parent_block = Arc::new(RwLock::new(None)); - if state.extra_validation_enabled() && - let Some(rpc_url) = state.pbs_config().rpc_url.clone() - { + let extra_validation_enabled = + 
state.config.pbs_config.header_validation_mode == HeaderValidationMode::Extra; + if extra_validation_enabled && let Some(rpc_url) = state.pbs_config().rpc_url.clone() { tokio::spawn( fetch_parent_block(rpc_url, params.parent_hash, parent_block.clone()).in_current_span(), ); @@ -97,22 +167,47 @@ pub async fn get_header( let mut send_headers = HeaderMap::new(); send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); + // Create the Accept headers for requests + let mode = state.pbs_config().header_validation_mode; + let accept_types = match mode { + HeaderValidationMode::None => { + // No validation mode, so only request what the user wants because the response + // will be forwarded directly + accepted_types.iter().map(|t| t.content_type()).collect::>().join(",") + } + _ => { + // We're unpacking the body, so request both types since we can handle both + [EncodingType::Ssz.content_type(), EncodingType::Json.content_type()].join(",") + } + }; + send_headers.insert( + ACCEPT, + HeaderValue::from_str(&accept_types) + .map_err(|e| PbsError::GeneralRequest(format!("invalid accept header value: {e}")))?, + ); + + // Send requests to all relays concurrently + let slot = params.slot as i64; + let request_info = Arc::new(RequestInfo { + params, + headers: Arc::new(send_headers), + chain: state.config.chain, + validation: ValidationContext { + skip_sigverify: state.pbs_config().skip_sigverify, + min_bid_wei: state.pbs_config().min_bid_wei, + mode, + parent_block, + }, + accepted_types, + }); let mut handles = Vec::with_capacity(relays.len()); for relay in relays.iter() { handles.push( send_timed_get_header( - params.clone(), + request_info.clone(), relay.clone(), - state.config.chain, - send_headers.clone(), ms_into_slot, max_timeout_ms, - ValidationContext { - skip_sigverify: state.pbs_config().skip_sigverify, - min_bid_wei: state.pbs_config().min_bid_wei, - extra_validation_enabled: state.extra_validation_enabled(), - parent_block: parent_block.clone(), 
- }, ) .in_current_span(), ); @@ -125,10 +220,12 @@ pub async fn get_header( match res { Ok(Some(res)) => { - RELAY_LAST_SLOT.with_label_values(&[relay_id]).set(params.slot as i64); - let value_gwei = (res.data.message.value() / U256::from(1_000_000_000)) - .try_into() - .unwrap_or_default(); + let value = match &res { + CompoundGetHeaderResponse::Full(full) => *full.value(), + CompoundGetHeaderResponse::Light(light) => light.value, + }; + RELAY_LAST_SLOT.with_label_values(&[relay_id]).set(slot); + let value_gwei = (value / U256::from(1_000_000_000)).try_into().unwrap_or_default(); RELAY_HEADER_VALUE.with_label_values(&[relay_id]).set(value_gwei); relay_bids.push(res) @@ -139,7 +236,10 @@ pub async fn get_header( } } - let max_bid = relay_bids.into_iter().max_by_key(|bid| *bid.value()); + let max_bid = relay_bids.into_iter().max_by_key(|bid| match bid { + CompoundGetHeaderResponse::Full(full) => *full.value(), + CompoundGetHeaderResponse::Light(light) => light.value, + }); Ok(max_bid) } @@ -170,15 +270,13 @@ async fn fetch_parent_block( } async fn send_timed_get_header( - params: GetHeaderParams, + request_info: Arc, relay: RelayClient, - chain: Chain, - headers: HeaderMap, ms_into_slot: u64, mut timeout_left_ms: u64, - validation: ValidationContext, -) -> Result, PbsError> { - let url = relay.get_header_url(params.slot, ¶ms.parent_hash, ¶ms.pubkey)?; +) -> Result, PbsError> { + let params = &request_info.params; + let url = Arc::new(relay.get_header_url(params.slot, ¶ms.parent_hash, ¶ms.pubkey)?); if relay.config.enable_timing_games { if let Some(target_ms) = relay.config.target_first_request_ms { @@ -209,18 +307,12 @@ async fn send_timed_get_header( ); loop { - let params = params.clone(); handles.push(tokio::spawn( send_one_get_header( - params, + request_info.clone(), relay.clone(), - chain, - RequestContext { - timeout_ms: timeout_left_ms, - url: url.clone(), - headers: headers.clone(), - }, - validation.clone(), + url.clone(), + timeout_left_ms, ) 
.in_current_span(), )); @@ -276,54 +368,271 @@ async fn send_timed_get_header( } // if no timing games or no repeated send, just send one request - send_one_get_header( - params, - relay, - chain, - RequestContext { timeout_ms: timeout_left_ms, url, headers }, - validation, - ) - .await - .map(|(_, maybe_header)| maybe_header) + send_one_get_header(request_info, relay, url, timeout_left_ms) + .await + .map(|(_, maybe_header)| maybe_header) +} + +/// Handles requesting a header from a relay, decoding, and validation. +/// Used by send_timed_get_header to handle each individual request. +async fn send_one_get_header( + request_info: Arc, + relay: RelayClient, + url: Arc, + timeout_left_ms: u64, +) -> Result<(u64, Option), PbsError> { + match request_info.validation.mode { + HeaderValidationMode::None => { + // Minimal processing: extract fork and value, forward response bytes directly. + // Expensive crypto/structural validation is skipped (sigverify, parent hash, + // timestamp), but the min_bid check is applied. 
+ let (start_request_time, get_header_response) = send_get_header_light( + &relay, + url, + timeout_left_ms, + (*request_info.headers).clone(), /* Create a copy of the HeaderMap because the + * impl + * will + * modify it */ + ) + .await?; + match get_header_response { + None => Ok((start_request_time, None)), + Some(res) => { + let min_bid = request_info.validation.min_bid_wei; + if res.value < min_bid { + return Err(PbsError::Validation(ValidationError::BidTooLow { + min: min_bid, + got: res.value, + })); + } + + // Make sure the response is encoded in one of the accepted + // types since we're passing the raw response directly to the client + if !request_info.accepted_types.contains(&res.encoding_type) { + return Err(PbsError::RelayResponse { + error_msg: format!( + "relay returned unsupported encoding type for get_header in no-validation mode: {:?}", + res.encoding_type + ), + code: 406, // Not Acceptable + }); + } + Ok((start_request_time, Some(CompoundGetHeaderResponse::Light(res)))) + } + } + } + _ => { + // Full processing: decode full response and validate + let (start_request_time, get_header_response) = send_get_header_full( + &relay, + url, + timeout_left_ms, + (*request_info.headers).clone(), /* Create a copy of the HeaderMap because the + * impl + * will + * modify it */ + ) + .await?; + let get_header_response = match get_header_response { + None => { + // Break if there's no header + return Ok((start_request_time, None)); + } + Some(res) => res, + }; + + // Extract the basic header data needed for validation + let header_data = match &get_header_response.data.message.header() { + ExecutionPayloadHeaderRef::Bellatrix(_) | + ExecutionPayloadHeaderRef::Capella(_) | + ExecutionPayloadHeaderRef::Deneb(_) | + ExecutionPayloadHeaderRef::Gloas(_) => { + Err(PbsError::Validation(ValidationError::UnsupportedFork)) + } + ExecutionPayloadHeaderRef::Electra(res) => Ok(HeaderData { + block_hash: res.block_hash.0, + parent_hash: res.parent_hash.0, + tx_root: 
res.transactions_root, + value: *get_header_response.value(), + timestamp: res.timestamp, + }), + ExecutionPayloadHeaderRef::Fulu(res) => Ok(HeaderData { + block_hash: res.block_hash.0, + parent_hash: res.parent_hash.0, + tx_root: res.transactions_root, + value: *get_header_response.value(), + timestamp: res.timestamp, + }), + }?; + + // Validate the header + let chain = request_info.chain; + let params = &request_info.params; + let validation = &request_info.validation; + validate_header_data( + &header_data, + chain, + params.parent_hash, + validation.min_bid_wei, + params.slot, + )?; + + // Validate the relay signature + if !validation.skip_sigverify { + validate_signature( + chain, + relay.pubkey(), + get_header_response.data.message.pubkey(), + &get_header_response.data.message, + &get_header_response.data.signature, + )?; + } + + // Validate the parent block if enabled + if validation.mode == HeaderValidationMode::Extra { + let parent_block = validation.parent_block.read(); + if let Some(parent_block) = parent_block.as_ref() { + extra_validation(parent_block, &get_header_response)?; + } else { + warn!( + relay_id = relay.id.as_ref(), + "parent block not found, skipping extra validation" + ); + } + } + + Ok(( + start_request_time, + Some(CompoundGetHeaderResponse::Full(Box::new(get_header_response))), + )) + } + } } -struct RequestContext { - url: Url, - timeout_ms: u64, +/// Send and decode a full get_header response, with all of the fields. 
+async fn send_get_header_full( + relay: &RelayClient, + url: Arc, + timeout_left_ms: u64, headers: HeaderMap, +) -> Result<(u64, Option), PbsError> { + // Send the request + let (start_request_time, info) = + send_get_header_impl(relay, url, timeout_left_ms, headers).await?; + let info = match info { + Some(info) => info, + None => { + return Ok((start_request_time, None)); + } + }; + + // Decode the response + let get_header_response = match info.content_type { + EncodingType::Json => decode_json_payload(&info.response_bytes)?, + EncodingType::Ssz => { + let fork = info.fork.ok_or(PbsError::RelayResponse { + error_msg: "relay did not provide consensus version header for ssz payload" + .to_string(), + code: info.code.as_u16(), + })?; + decode_ssz_payload(&info.response_bytes, fork)? + } + }; + + // Log and return + debug!( + relay_id = info.relay_id.as_ref(), + header_size_bytes = info.response_bytes.len(), + latency = ?info.request_latency, + version =? get_header_response.version, + value_eth = format_ether(*get_header_response.value()), + block_hash = %get_header_response.block_hash(), + content_type = ?info.content_type, + "received new header" + ); + Ok((start_request_time, Some(get_header_response))) } -#[derive(Clone)] -struct ValidationContext { - skip_sigverify: bool, - min_bid_wei: U256, - extra_validation_enabled: bool, - parent_block: Arc>>, +/// Send a get_header request and decode only the fork and bid value from the +/// response, leaving the raw bytes intact for direct forwarding to the caller. +/// Used in `HeaderValidationMode::None` where expensive crypto/structural +/// checks are skipped. 
+async fn send_get_header_light( + relay: &RelayClient, + url: Arc, + timeout_left_ms: u64, + headers: HeaderMap, +) -> Result<(u64, Option), PbsError> { + // Send the request + let (start_request_time, info) = + send_get_header_impl(relay, url, timeout_left_ms, headers).await?; + let info = match info { + Some(info) => info, + None => { + return Ok((start_request_time, None)); + } + }; + + // Decode the value / fork from the response + let (fork, value) = match info.content_type { + EncodingType::Json => get_light_info_from_json(&info.response_bytes)?, + EncodingType::Ssz => { + let fork = info.fork.ok_or(PbsError::RelayResponse { + error_msg: "relay did not provide consensus version header for ssz payload" + .to_string(), + code: info.code.as_u16(), + })?; + (fork, get_bid_value_from_signed_builder_bid_ssz(&info.response_bytes, fork)?) + } + }; + + // Log and return + debug!( + relay_id = info.relay_id.as_ref(), + header_size_bytes = info.response_bytes.len(), + latency = ?info.request_latency, + version =? fork, + value_eth = format_ether(value), + content_type = ?info.content_type, + "received new header (light processing)" + ); + Ok(( + start_request_time, + Some(LightGetHeaderResponse { + version: fork, + value, + raw_bytes: info.response_bytes, + encoding_type: info.content_type, + }), + )) } -async fn send_one_get_header( - params: GetHeaderParams, - relay: RelayClient, - chain: Chain, - mut req_config: RequestContext, - validation: ValidationContext, -) -> Result<(u64, Option), PbsError> { +/// Sends a get_header request to a relay, returning the response, the time the +/// request was started, and the encoding type of the response (if any). +/// Used by send_one_get_header to perform the actual request submission. 
+async fn send_get_header_impl( + relay: &RelayClient, + url: Arc, + timeout_left_ms: u64, + mut headers: HeaderMap, +) -> Result<(u64, Option), PbsError> { // the timestamp in the header is the consensus block time which is fixed, // use the beginning of the request as proxy to make sure we use only the // last one received + let start_request = Instant::now(); let start_request_time = utcnow_ms(); - req_config.headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); + headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); // The timeout header indicating how long a relay has to respond, so they can // minimize timing games without losing the bid - req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms)); + headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(timeout_left_ms)); - let start_request = Instant::now(); let res = match relay .client - .get(req_config.url) - .timeout(Duration::from_millis(req_config.timeout_ms)) - .headers(req_config.headers) + .get(url.as_ref().clone()) + .timeout(Duration::from_millis(timeout_left_ms)) + .headers(headers) .send() .await { @@ -336,129 +645,130 @@ async fn send_one_get_header( } }; + // Log the response code and latency + let code = res.status(); let request_latency = start_request.elapsed(); RELAY_LATENCY .with_label_values(&[GET_HEADER_ENDPOINT_TAG, &relay.id]) .observe(request_latency.as_secs_f64()); - - let code = res.status(); RELAY_STATUS_CODE.with_label_values(&[code.as_str(), GET_HEADER_ENDPOINT_TAG, &relay.id]).inc(); - let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; - let header_size_bytes = response_bytes.len(); - if !code.is_success() { - return Err(PbsError::RelayResponse { - error_msg: String::from_utf8_lossy(&response_bytes).into_owned(), - code: code.as_u16(), - }); - }; - if code == StatusCode::NO_CONTENT { - debug!( - relay_id = relay.id.as_ref(), - ?code, - latency = 
?request_latency, - response = ?response_bytes, - "no header from relay" - ); - return Ok((start_request_time, None)); + // According to the spec, OK is the only allowed success code so this can break + // early + if code != StatusCode::OK { + if code == StatusCode::NO_CONTENT { + let response_bytes = + read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; + debug!( + relay_id = relay.id.as_ref(), + ?code, + latency = ?request_latency, + response = ?response_bytes, + "no header from relay" + ); + return Ok((start_request_time, None)); + } else { + return Err(PbsError::RelayResponse { + error_msg: format!("unexpected status code from relay: {code}"), + code: code.as_u16(), + }); + } } - let get_header_response = match serde_json::from_slice::(&response_bytes) { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); + // Get the content type + let content_type = match res.headers().get(CONTENT_TYPE) { + None => { + // Assume a missing content type means JSON; shouldn't happen in practice with + // any respectable HTTP server but just in case + EncodingType::Json } + Some(header_value) => match header_value.to_str().map_err(|e| PbsError::RelayResponse { + error_msg: format!("cannot decode content-type header: {e}").to_string(), + code: (code.as_u16()), + })? { + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) => { + EncodingType::Ssz + } + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) => { + EncodingType::Json + } + header_str => { + return Err(PbsError::RelayResponse { + error_msg: format!("unsupported content type: {header_str}"), + code: code.as_u16(), + }) + } + }, }; - debug!( - relay_id = relay.id.as_ref(), - header_size_bytes, - latency = ?request_latency, - version =? 
get_header_response.version, - value_eth = format_ether(*get_header_response.value()), - block_hash = %get_header_response.block_hash(), - "received new header" - ); - - match &get_header_response.data.message.header() { - ExecutionPayloadHeaderRef::Bellatrix(_) | - ExecutionPayloadHeaderRef::Capella(_) | - ExecutionPayloadHeaderRef::Deneb(_) | - ExecutionPayloadHeaderRef::Gloas(_) => { - return Err(PbsError::Validation(ValidationError::UnsupportedFork)) - } - ExecutionPayloadHeaderRef::Electra(res) => { - let header_data = HeaderData { - block_hash: res.block_hash.0, - parent_hash: res.parent_hash.0, - tx_root: res.transactions_root, - value: *get_header_response.value(), - timestamp: res.timestamp, - }; + // Decode the body + let fork = get_consensus_version_header(res.headers()); + let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; + Ok(( + start_request_time, + Some(GetHeaderResponseInfo { + relay_id: relay.id.clone(), + response_bytes, + content_type, + fork, + code, + request_latency, + }), + )) +} - validate_header_data( - &header_data, - chain, - params.parent_hash, - validation.min_bid_wei, - params.slot, - )?; +/// Decode a JSON-encoded get_header response +fn decode_json_payload(response_bytes: &[u8]) -> Result { + match serde_json::from_slice::(response_bytes) { + Ok(parsed) => Ok(parsed), + Err(err) => Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(response_bytes).into_owned(), + }), + } +} - if !validation.skip_sigverify { - validate_signature( - chain, - relay.pubkey(), - get_header_response.data.message.pubkey(), - &get_header_response.data.message, - &get_header_response.data.signature, - )?; - } - } - ExecutionPayloadHeaderRef::Fulu(res) => { - let header_data = HeaderData { - block_hash: res.block_hash.0, - parent_hash: res.parent_hash.0, - tx_root: res.transactions_root, - value: *get_header_response.value(), - timestamp: res.timestamp, - }; +/// Get the value of a builder bid and the 
fork name from a get_header JSON +/// response (used for light-level processing) +fn get_light_info_from_json(response_bytes: &[u8]) -> Result<(ForkName, U256), PbsError> { + #[derive(Deserialize)] + struct LightBuilderBid { + #[serde(with = "serde_utils::quoted_u256")] + pub value: U256, + } - validate_header_data( - &header_data, - chain, - params.parent_hash, - validation.min_bid_wei, - params.slot, - )?; + #[derive(Deserialize)] + struct LightSignedBuilderBid { + pub message: LightBuilderBid, + } - if !validation.skip_sigverify { - validate_signature( - chain, - relay.pubkey(), - get_header_response.data.message.pubkey(), - &get_header_response.data.message, - &get_header_response.data.signature, - )?; - } - } + #[derive(Deserialize)] + struct LightHeaderResponse { + version: ForkName, + data: LightSignedBuilderBid, } - if validation.extra_validation_enabled { - let parent_block = validation.parent_block.read(); - if let Some(parent_block) = parent_block.as_ref() { - extra_validation(parent_block, &get_header_response)?; - } else { - warn!( - relay_id = relay.id.as_ref(), - "parent block not found, skipping extra validation" - ); - } + match serde_json::from_slice::(response_bytes) { + Ok(parsed) => Ok((parsed.version, parsed.data.message.value)), + Err(err) => Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(response_bytes).into_owned(), + }), } +} - Ok((start_request_time, Some(get_header_response))) +/// Decode an SSZ-encoded get_header response +fn decode_ssz_payload( + response_bytes: &[u8], + fork: ForkName, +) -> Result { + let data = SignedBuilderBid::from_ssz_bytes_by_fork(response_bytes, fork).map_err(|e| { + PbsError::RelayResponse { + error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), + code: 200, + } + })?; + Ok(GetHeaderResponse { version: fork, data, metadata: Default::default() }) } struct HeaderData { @@ -525,7 +835,8 @@ fn validate_signature( expected_relay_pubkey, &message, signature, - 
APPLICATION_BUILDER_DOMAIN, + None, + &B32::from(APPLICATION_BUILDER_DOMAIN), ) { return Err(ValidationError::Sigverify); } @@ -556,13 +867,16 @@ fn extra_validation( #[cfg(test)] mod tests { + use std::{fs, path::Path}; + use alloy::primitives::{B256, U256}; use cb_common::{ - pbs::{EMPTY_TX_ROOT_HASH, error::ValidationError}, + pbs::*, signature::sign_builder_message, - types::{BlsSecretKey, Chain}, + types::{BlsPublicKeyBytes, BlsSecretKey, BlsSignature, Chain}, utils::{TestRandomSeed, timestamp_of_slot_start_sec}, }; + use ssz::Encode; use super::{validate_header_data, *}; @@ -664,4 +978,42 @@ mod tests { .is_ok() ); } + + #[test] + fn test_ssz_value_extraction() { + for fork_name in ForkName::list_all() { + match fork_name { + // Handle forks that didn't have builder bids yet + ForkName::Altair | ForkName::Base => continue, + + // Handle supported forks + ForkName::Bellatrix | + ForkName::Capella | + ForkName::Deneb | + ForkName::Electra | + ForkName::Fulu => {} + + // Skip unsupported forks + ForkName::Gloas => continue, + } + + // Load get_header JSON from test data + let fork_name_str = fork_name.to_string().to_lowercase(); + let path_str = format!("../../tests/data/get_header/{fork_name_str}.json"); + let path = Path::new(path_str.as_str()); + let json_bytes = fs::read(path).expect("file not found"); + let decoded = decode_json_payload(&json_bytes).expect("failed to decode JSON"); + + // Extract the bid value from the SSZ + let encoded = decoded.data.as_ssz_bytes(); + let bid_value = get_bid_value_from_signed_builder_bid_ssz(&encoded, fork_name) + .expect("failed to extract bid value from SSZ"); + + // Compare to the original value + println!("Testing fork: {}", fork_name); + println!("Original value: {}", decoded.value()); + println!("Extracted value: {}", bid_value); + assert_eq!(*decoded.value(), bid_value); + } + } } diff --git a/crates/pbs/src/mev_boost/mod.rs b/crates/pbs/src/mev_boost/mod.rs index a41b79db..81dc4bf6 100644 --- 
a/crates/pbs/src/mev_boost/mod.rs +++ b/crates/pbs/src/mev_boost/mod.rs @@ -4,8 +4,73 @@ mod reload; mod status; mod submit_block; +use alloy::primitives::U256; +use cb_common::{ + pbs::{GetHeaderResponse, SubmitBlindedBlockResponse}, + utils::EncodingType, +}; pub use get_header::get_header; +use lh_types::ForkName; pub use register_validator::register_validator; pub use reload::reload; pub use status::get_status; pub use submit_block::submit_block; + +/// Enum that handles different GetHeader response types based on the level of +/// validation required +pub enum CompoundGetHeaderResponse { + /// Standard response type, fully parsing the response from a relay into a + /// complete response struct + Full(Box), + + /// Light response type, only extracting the fork and value from the builder + /// bid with the entire (undecoded) payload for forwarding + Light(LightGetHeaderResponse), +} + +/// Core details of a GetHeaderResponse, used for light processing when +/// validation mode is set to none. 
+#[derive(Clone)] +pub struct LightGetHeaderResponse { + /// The fork name for the bid + pub version: ForkName, + + /// The bid value in wei + pub value: U256, + + /// The raw bytes of the response, for forwarding to the caller + pub raw_bytes: Vec, + + /// The format the response bytes are encoded with + pub encoding_type: EncodingType, +} + +/// Enum that handles different SubmitBlock response types based on the level of +/// validation required +pub enum CompoundSubmitBlockResponse { + /// Standard response type, fully parsing the response from a relay into a + /// complete response struct + Full(Box), + + /// Light response type, only extracting the fork from the response with the + /// entire (undecoded) payload for forwarding + Light(LightSubmitBlockResponse), + + /// Response with no body, used for v2 requests when the relay does not + /// return any content intentionally + EmptyBody, +} + +/// Core details of a SubmitBlockResponse, used for light processing when +/// validation mode is set to none. 
+#[derive(Clone, Debug)] +pub struct LightSubmitBlockResponse { + /// The fork name for the bid + pub version: ForkName, + + /// The raw bytes of the response, for forwarding to the caller + pub raw_bytes: Vec, + + /// The format the response bytes are encoded with + pub encoding_type: EncodingType, +} diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs index b416dba2..11e0e289 100644 --- a/crates/pbs/src/mev_boost/submit_block.rs +++ b/crates/pbs/src/mev_boost/submit_block.rs @@ -1,5 +1,5 @@ use std::{ - str::FromStr, + collections::HashSet, sync::Arc, time::{Duration, Instant}, }; @@ -7,27 +7,75 @@ use std::{ use alloy::{eips::eip7594::CELLS_PER_EXT_BLOB, primitives::B256}; use axum::http::{HeaderMap, HeaderValue}; use cb_common::{ + config::BlockValidationMode, pbs::{ - BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, ForkName, HEADER_CONSENSUS_VERSION, - HEADER_START_TIME_UNIX_MS, KzgCommitments, RelayClient, SignedBlindedBeaconBlock, - SubmitBlindedBlockResponse, + BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, ForkName, ForkVersionDecode, + HEADER_START_TIME_UNIX_MS, KzgCommitments, PayloadAndBlobs, RelayClient, + SignedBlindedBeaconBlock, SubmitBlindedBlockResponse, error::{PbsError, ValidationError}, }, - utils::{get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms}, + utils::{ + CONSENSUS_VERSION_HEADER, EncodingType, get_consensus_version_header, + get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms, + }, }; use futures::{FutureExt, future::select_ok}; -use reqwest::header::USER_AGENT; +use reqwest::{ + StatusCode, + header::{ACCEPT, CONTENT_TYPE, USER_AGENT}, +}; +use serde::Deserialize; +use ssz::Encode; use tracing::{debug, warn}; use url::Url; use crate::{ - constants::{ - MAX_SIZE_SUBMIT_BLOCK_RESPONSE, SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, TIMEOUT_ERROR_CODE_STR, - }, + CompoundSubmitBlockResponse, LightSubmitBlockResponse, TIMEOUT_ERROR_CODE_STR, + 
constants::{MAX_SIZE_SUBMIT_BLOCK_RESPONSE, SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG}, metrics::{RELAY_LATENCY, RELAY_STATUS_CODE}, state::{BuilderApiState, PbsState}, }; +/// Info about a proposal submission request. +/// Sent from submit_block to the submit_block_with_timeout function. +#[derive(Clone)] +struct ProposalInfo { + /// The signed blinded block to submit + signed_blinded_block: Arc, + + /// Common baseline of headers to send with each request + headers: Arc, + + /// The version of the submit_block route being used + api_version: BuilderApiVersion, + + /// How to validate the block returned by the relay + validation_mode: BlockValidationMode, + + /// The accepted encoding types from the original request + accepted_types: HashSet, +} + +/// Used interally to provide info and context about a submit_block request and +/// its response +struct SubmitBlockResponseInfo { + /// The raw body of the response + response_bytes: Vec, + + /// The content type the response is encoded with + content_type: EncodingType, + + /// Which fork the response bid is for (if provided as a header, rather than + /// part of the body) + fork: Option, + + /// The status code of the response, for logging + code: StatusCode, + + /// The round-trip latency of the request + request_latency: Duration, +} + /// Implements https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlock and /// https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlockV2. Use `api_version` to /// distinguish between the two. 
@@ -36,39 +84,45 @@ pub async fn submit_block( req_headers: HeaderMap, state: PbsState, api_version: BuilderApiVersion, -) -> eyre::Result> { + accepted_types: HashSet, +) -> eyre::Result { debug!(?req_headers, "received headers"); - let fork_name = req_headers - .get(HEADER_CONSENSUS_VERSION) - .and_then(|h| { - let str = h.to_str().ok()?; - ForkName::from_str(str).ok() - }) - .unwrap_or_else(|| { - let slot = signed_blinded_block.slot().as_u64(); - state.config.chain.fork_by_slot(slot) - }); - - // safe because ForkName is visible ASCII chars - let consensus_version = HeaderValue::from_str(&fork_name.to_string()).unwrap(); - // prepare headers let mut send_headers = HeaderMap::new(); send_headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(utcnow_ms())); send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); - send_headers.insert(HEADER_CONSENSUS_VERSION, consensus_version); + // Create the Accept headers for requests + let mode = state.pbs_config().block_validation_mode; + let accept_types = match mode { + BlockValidationMode::None => { + // No validation mode, so only request what the user wants because the response + // will be forwarded directly + accepted_types.iter().map(|t| t.content_type()).collect::>().join(",") + } + _ => { + // We're unpacking the body, so request both types since we can handle both + [EncodingType::Ssz.content_type(), EncodingType::Json.content_type()].join(",") + } + }; + send_headers.insert(ACCEPT, HeaderValue::from_str(&accept_types).unwrap()); + + // Send requests to all relays concurrently + let proposal_info = Arc::new(ProposalInfo { + signed_blinded_block, + headers: Arc::new(send_headers), + api_version, + validation_mode: mode, + accepted_types, + }); let mut handles = Vec::with_capacity(state.all_relays().len()); - for relay in state.all_relays().iter().cloned() { + for relay in state.all_relays().iter() { handles.push( tokio::spawn(submit_block_with_timeout( - signed_blinded_block.clone(), - 
relay, - send_headers.clone(), + proposal_info.clone(), + relay.clone(), state.pbs_config().timeout_get_payload_ms, - api_version, - fork_name, )) .map(|join_result| match join_result { Ok(res) => res, @@ -87,30 +141,25 @@ pub async fn submit_block( /// Submit blinded block to relay, retry connection errors until the /// given timeout has passed async fn submit_block_with_timeout( - signed_blinded_block: Arc, + proposal_info: Arc, relay: RelayClient, - headers: HeaderMap, timeout_ms: u64, - api_version: BuilderApiVersion, - fork_name: ForkName, -) -> Result, PbsError> { - let mut url = relay.submit_block_url(api_version)?; +) -> Result { + let mut url = Arc::new(relay.submit_block_url(proposal_info.api_version)?); let mut remaining_timeout_ms = timeout_ms; let mut retry = 0; let mut backoff = Duration::from_millis(250); - let mut request_api_version = api_version; + let mut request_api_version = proposal_info.api_version; loop { let start_request = Instant::now(); match send_submit_block( + proposal_info.clone(), url.clone(), - &signed_blinded_block, &relay, - headers.clone(), remaining_timeout_ms, retry, - &request_api_version, - fork_name, + request_api_version, ) .await { @@ -118,9 +167,9 @@ async fn submit_block_with_timeout( // If the original request was for v2 but we had to fall back to v1, return a v2 // response if request_api_version == BuilderApiVersion::V1 && - api_version != request_api_version + proposal_info.api_version != request_api_version { - return Ok(None); + return Ok(CompoundSubmitBlockResponse::EmptyBody); } return Ok(response); } @@ -144,7 +193,7 @@ async fn submit_block_with_timeout( relay_id = relay.id.as_ref(), "relay does not support v2 endpoint, retrying with v1" ); - url = relay.submit_block_url(BuilderApiVersion::V1)?; + url = Arc::new(relay.submit_block_url(BuilderApiVersion::V1)?); request_api_version = BuilderApiVersion::V1; } @@ -159,22 +208,233 @@ async fn submit_block_with_timeout( // back #[allow(clippy::too_many_arguments)] 
async fn send_submit_block( - url: Url, - signed_blinded_block: &SignedBlindedBeaconBlock, + proposal_info: Arc, + url: Arc, relay: &RelayClient, - headers: HeaderMap, timeout_ms: u64, retry: u32, - api_version: &BuilderApiVersion, - fork_name: ForkName, + api_version: BuilderApiVersion, +) -> Result { + match proposal_info.validation_mode { + BlockValidationMode::None => { + // No validation so do some light processing and forward the response directly + let response = + send_submit_block_light(proposal_info.clone(), url, relay, timeout_ms, retry) + .await?; + match response { + None => Ok(CompoundSubmitBlockResponse::EmptyBody), + Some(res) => { + // Make sure the response is encoded in one of the accepted + // types since we're passing the raw response directly to the client + if !proposal_info.accepted_types.contains(&res.encoding_type) { + return Err(PbsError::RelayResponse { + error_msg: format!( + "relay returned unsupported encoding type for submit_block in no-validation mode: {:?}", + res.encoding_type + ), + code: 406, // Not Acceptable + }); + } + Ok(CompoundSubmitBlockResponse::Light(res)) + } + } + } + _ => { + // Full processing: decode full response and validate + let response = send_submit_block_full( + proposal_info.clone(), + url, + relay, + timeout_ms, + retry, + api_version, + ) + .await?; + let response = match response { + None => { + // v2 request with no body + return Ok(CompoundSubmitBlockResponse::EmptyBody); + } + Some(res) => res, + }; + // Extract the info needed for validation + let got_block_hash = response.data.execution_payload.block_hash().0; + + // request has different type so cant be deserialized in the wrong version, + // response has a "version" field + match &proposal_info.signed_blinded_block.message() { + BlindedBeaconBlock::Electra(blinded_block) => { + let expected_block_hash = + blinded_block.body.execution_payload.execution_payload_header.block_hash.0; + let expected_commitments = 
&blinded_block.body.blob_kzg_commitments; + + validate_unblinded_block( + expected_block_hash, + got_block_hash, + expected_commitments, + &response.data.blobs_bundle, + response.version, + ) + } + + BlindedBeaconBlock::Fulu(blinded_block) => { + let expected_block_hash = + blinded_block.body.execution_payload.execution_payload_header.block_hash.0; + let expected_commitments = &blinded_block.body.blob_kzg_commitments; + + validate_unblinded_block( + expected_block_hash, + got_block_hash, + expected_commitments, + &response.data.blobs_bundle, + response.version, + ) + } + + _ => return Err(PbsError::Validation(ValidationError::UnsupportedFork)), + }?; + Ok(CompoundSubmitBlockResponse::Full(Box::new(response))) + } + } +} + +/// Send and fully process a submit_block request, returning a complete decoded +/// response +async fn send_submit_block_full( + proposal_info: Arc, + url: Arc, + relay: &RelayClient, + timeout_ms: u64, + retry: u32, + api_version: BuilderApiVersion, ) -> Result, PbsError> { + // Send the request + let block_response = send_submit_block_impl( + relay, + url, + timeout_ms, + (*proposal_info.headers).clone(), + &proposal_info.signed_blinded_block, + retry, + api_version, + ) + .await?; + + // If this is not v1, there's no body to decode + if api_version != BuilderApiVersion::V1 { + return Ok(None); + } + + // Decode the payload based on content type + let decoded_response = match block_response.content_type { + EncodingType::Json => decode_json_payload(&block_response.response_bytes)?, + EncodingType::Ssz => { + let fork = match block_response.fork { + Some(fork) => fork, + None => { + return Err(PbsError::RelayResponse { + error_msg: "missing fork version header in SSZ submit_block response" + .to_string(), + code: block_response.code.as_u16(), + }); + } + }; + decode_ssz_payload(&block_response.response_bytes, fork)? 
+ } + }; + + // Log and return + debug!( + relay_id = relay.id.as_ref(), + retry, + latency = ?block_response.request_latency, + version =% decoded_response.version, + "received unblinded block" + ); + + Ok(Some(decoded_response)) +} + +/// Send and lightly process a submit_block request, minimizing the amount of +/// decoding and validation done +async fn send_submit_block_light( + proposal_info: Arc, + url: Arc, + relay: &RelayClient, + timeout_ms: u64, + retry: u32, +) -> Result, PbsError> { + // Send the request + let block_response = send_submit_block_impl( + relay, + url, + timeout_ms, + (*proposal_info.headers).clone(), + &proposal_info.signed_blinded_block, + retry, + proposal_info.api_version, + ) + .await?; + + // If this is not v1, there's no body to decode + if proposal_info.api_version != BuilderApiVersion::V1 { + return Ok(None); + } + + // Decode the payload based on content type + let fork = match block_response.content_type { + EncodingType::Json => get_light_info_from_json(&block_response.response_bytes)?, + EncodingType::Ssz => match block_response.fork { + Some(fork) => fork, + None => { + return Err(PbsError::RelayResponse { + error_msg: "missing fork version header in SSZ submit_block response" + .to_string(), + code: block_response.code.as_u16(), + }); + } + }, + }; + + // Log and return + debug!( + relay_id = relay.id.as_ref(), + retry, + latency = ?block_response.request_latency, + version =% fork, + "received unblinded block (light processing)" + ); + + Ok(Some(LightSubmitBlockResponse { + version: fork, + encoding_type: block_response.content_type, + raw_bytes: block_response.response_bytes, + })) +} + +/// Sends the actual HTTP request to the relay's submit_block endpoint, +/// returning the response (if applicable), the round-trip time, and the +/// encoding type used for the body (if any). Used by send_submit_block. 
+async fn send_submit_block_impl( + relay: &RelayClient, + url: Arc, + timeout_ms: u64, + headers: HeaderMap, + signed_blinded_block: &SignedBlindedBeaconBlock, + retry: u32, + api_version: BuilderApiVersion, +) -> Result { let start_request = Instant::now(); - let res = match relay + + // Try SSZ first + let mut res = match relay .client - .post(url) + .post(url.as_ref().clone()) .timeout(Duration::from_millis(timeout_ms)) - .headers(headers) - .json(&signed_blinded_block) + .headers(headers.clone()) + .body(signed_blinded_block.as_ssz_bytes()) + .header(CONTENT_TYPE, EncodingType::Ssz.to_string()) + .header(CONSENSUS_VERSION_HEADER, signed_blinded_block.fork_name_unchecked().to_string()) .send() .await { @@ -190,96 +450,178 @@ async fn send_submit_block( return Err(err.into()); } }; + + // If we got a client error, retry with JSON - the spec says that this should be + // a 406 or 415, but we're a little more permissive here + if res.status().is_client_error() { + warn!( + relay_id = relay.id.as_ref(), + "relay does not support SSZ, resubmitting block with JSON content-type" + ); + res = match relay + .client + .post(url.as_ref().clone()) + .timeout(Duration::from_millis(timeout_ms)) + .headers(headers) + .body(serde_json::to_vec(&signed_blinded_block).unwrap()) + .header(CONTENT_TYPE, EncodingType::Json.to_string()) + .send() + .await + { + Ok(res) => res, + Err(err) => { + RELAY_STATUS_CODE + .with_label_values(&[ + TIMEOUT_ERROR_CODE_STR, + SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, + &relay.id, + ]) + .inc(); + return Err(err.into()); + } + }; + } + + // Log the response code and latency + let code = res.status(); let request_latency = start_request.elapsed(); RELAY_LATENCY .with_label_values(&[SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) .observe(request_latency.as_secs_f64()); - - let code = res.status(); RELAY_STATUS_CODE .with_label_values(&[code.as_str(), SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) .inc(); - let response_bytes = 
read_chunked_body_with_max(res, MAX_SIZE_SUBMIT_BLOCK_RESPONSE).await?; - if !code.is_success() { + // If this was API v2 and succeeded then we can just return here + if api_version != BuilderApiVersion::V1 { + debug!( + relay_id = relay.id.as_ref(), + retry, + latency = ?request_latency, + "received 202 Accepted for v2 submit_block" + ); + + match code { + StatusCode::ACCEPTED => { + return Ok(SubmitBlockResponseInfo { + response_bytes: Vec::new(), + content_type: EncodingType::Json, // dummy value + fork: None, + code, + request_latency, + }); + } + StatusCode::OK => { + warn!( + relay_id = relay.id.as_ref(), + "relay sent OK response for v2 submit_block, expected 202 Accepted" + ); + return Ok(SubmitBlockResponseInfo { + response_bytes: Vec::new(), + content_type: EncodingType::Json, // dummy value + fork: None, + code, + request_latency, + }); + } + _ => { + return Err(PbsError::RelayResponse { + error_msg: format!( + "relay sent unexpected code for builder route v2 {}: {code}", + relay.id.as_ref() + ), + code: code.as_u16(), + }); + } + } + } + + // If the code is not OK, return early + if code != StatusCode::OK { + let response_bytes = + read_chunked_body_with_max(res, MAX_SIZE_SUBMIT_BLOCK_RESPONSE).await?; let err = PbsError::RelayResponse { error_msg: String::from_utf8_lossy(&response_bytes).into_owned(), code: code.as_u16(), }; // we requested the payload from all relays, but some may have not received it - warn!(relay_id = relay.id.as_ref(), retry, %err, "failed to get payload (this might be ok if other relays have it)"); + warn!(relay_id = relay.id.as_ref(), %err, "failed to get payload (this might be ok if other relays have it)"); return Err(err); - }; - - if api_version != &BuilderApiVersion::V1 { - // v2 response is going to be empty, so just break here - debug!( - relay_id = relay.id.as_ref(), - retry, - latency = ?request_latency, - "successful request" - ); - - return Ok(None); } - let block_response = match 
serde_json::from_slice::(&response_bytes) - { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); + // We're on v1 so decode the payload normally - get the content type + let content_type = match res.headers().get(CONTENT_TYPE) { + None => { + // Assume a missing content type means JSON; shouldn't happen in practice with + // any respectable HTTP server but just in case + EncodingType::Json } + Some(header_value) => match header_value.to_str().map_err(|e| PbsError::RelayResponse { + error_msg: format!("cannot decode content-type header: {e}").to_string(), + code: (code.as_u16()), + })? { + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) => { + EncodingType::Ssz + } + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) => { + EncodingType::Json + } + header_str => { + return Err(PbsError::RelayResponse { + error_msg: format!("unsupported content type: {header_str}"), + code: code.as_u16(), + }) + } + }, }; - debug!( - relay_id = relay.id.as_ref(), - retry, - latency = ?request_latency, - version =% block_response.version, - "received unblinded block" - ); + // Decode the body + let fork = get_consensus_version_header(res.headers()); + let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_SUBMIT_BLOCK_RESPONSE).await?; + Ok(SubmitBlockResponseInfo { response_bytes, content_type, fork, code, request_latency }) +} - let got_block_hash = block_response.data.execution_payload.block_hash().0; - - // request has different type so cant be deserialized in the wrong version, - // response has a "version" field - match &signed_blinded_block.message() { - BlindedBeaconBlock::Electra(blinded_block) => { - let expected_block_hash = - blinded_block.body.execution_payload.execution_payload_header.block_hash.0; - let expected_commitments = &blinded_block.body.blob_kzg_commitments; - - validate_unblinded_block( - 
expected_block_hash, - got_block_hash, - expected_commitments, - &block_response.data.blobs_bundle, - fork_name, - ) - } +/// Decode a JSON-encoded submit_block response +fn decode_json_payload(response_bytes: &[u8]) -> Result { + match serde_json::from_slice::(response_bytes) { + Ok(parsed) => Ok(parsed), + Err(err) => Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(response_bytes).into_owned(), + }), + } +} - BlindedBeaconBlock::Fulu(blinded_block) => { - let expected_block_hash = - blinded_block.body.execution_payload.execution_payload_header.block_hash.0; - let expected_commitments = &blinded_block.body.blob_kzg_commitments; - - validate_unblinded_block( - expected_block_hash, - got_block_hash, - expected_commitments, - &block_response.data.blobs_bundle, - fork_name, - ) - } +/// Get the fork name from a submit_block JSON response (used for light +/// processing) +fn get_light_info_from_json(response_bytes: &[u8]) -> Result { + #[derive(Deserialize)] + struct LightSubmitBlockResponse { + version: ForkName, + } - _ => return Err(PbsError::Validation(ValidationError::UnsupportedFork)), - }?; + match serde_json::from_slice::(response_bytes) { + Ok(parsed) => Ok(parsed.version), + Err(err) => Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(response_bytes).into_owned(), + }), + } +} - Ok(Some(block_response)) +/// Decode an SSZ-encoded submit_block response +fn decode_ssz_payload( + response_bytes: &[u8], + fork: ForkName, +) -> Result { + let data = PayloadAndBlobs::from_ssz_bytes_by_fork(response_bytes, fork).map_err(|e| { + PbsError::RelayResponse { + error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), + code: 200, + } + })?; + Ok(SubmitBlindedBlockResponse { version: fork, data, metadata: Default::default() }) } fn validate_unblinded_block( diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 9ed312af..c550d92f 100644 --- a/crates/pbs/src/routes/get_header.rs +++ 
b/crates/pbs/src/routes/get_header.rs @@ -1,17 +1,21 @@ use alloy::primitives::utils::format_ether; use axum::{ extract::{Path, State}, - http::HeaderMap, + http::{HeaderMap, HeaderValue}, response::IntoResponse, }; use cb_common::{ pbs::{GetHeaderInfo, GetHeaderParams}, - utils::{get_user_agent, ms_into_slot}, + utils::{ + CONSENSUS_VERSION_HEADER, EncodingType, get_accept_types, get_user_agent, ms_into_slot, + }, }; -use reqwest::StatusCode; +use reqwest::{StatusCode, header::CONTENT_TYPE}; +use ssz::Encode; use tracing::{error, info}; use crate::{ + CompoundGetHeaderResponse, api::BuilderApi, constants::GET_HEADER_ENDPOINT_TAG, error::PbsClientError, @@ -32,16 +36,99 @@ pub async fn handle_get_header>( let ua = get_user_agent(&req_headers); let ms_into_slot = ms_into_slot(params.slot, state.config.chain); + let accept_types = get_accept_types(&req_headers).map_err(|e| { + error!(%e, "error parsing accept header"); + PbsClientError::DecodeError(format!("error parsing accept header: {e}")) + })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); info!(ua, ms_into_slot, "new request"); - match A::get_header(params, req_headers, state).await { + match A::get_header(params, req_headers, state, accept_types).await { Ok(res) => { if let Some(max_bid) = res { - info!(value_eth = format_ether(*max_bid.data.message.value()), block_hash =% max_bid.block_hash(), "received header"); - BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); - Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + match max_bid { + CompoundGetHeaderResponse::Light(light_bid) => { + // Light validation mode, so just forward the raw response + info!( + value_eth = format_ether(light_bid.value), + "received header (unvalidated)" + ); + + // Create the headers + let consensus_version_header = + match HeaderValue::from_str(&light_bid.version.to_string()) { + Ok(consensus_version_header) => { + 
Ok::(consensus_version_header) + } + Err(e) => { + return Err(PbsClientError::RelayError(format!( + "error decoding consensus version from relay payload: {e}" + ))); + } + }?; + let content_type = light_bid.encoding_type.content_type(); + let content_type_header = HeaderValue::from_str(content_type).unwrap(); + + // Build response + let mut res = light_bid.raw_bytes.into_response(); + res.headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as {} (light)", content_type); + Ok(res) + } + CompoundGetHeaderResponse::Full(max_bid) => { + // Full validation mode, so respond based on requester accept types + info!(value_eth = format_ether(*max_bid.data.message.value()), block_hash =% max_bid.block_hash(), "received header"); + + // Handle SSZ + if accepts_ssz { + let mut res = max_bid.data.as_ssz_bytes().into_response(); + let consensus_version_header = match HeaderValue::from_str( + &max_bid.version.to_string(), + ) { + Ok(consensus_version_header) => { + Ok::(consensus_version_header) + } + Err(e) => { + if accepts_json { + info!("sending response as JSON"); + return Ok( + (StatusCode::OK, axum::Json(max_bid)).into_response() + ); + } else { + return Err(PbsClientError::RelayError(format!( + "error decoding consensus version from relay payload: {e}" + ))); + } + } + }?; + + // This won't actually fail since the string is a const + let content_type_header = + HeaderValue::from_str(EncodingType::Ssz.content_type()).unwrap(); + + res.headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + return Ok(res); + } + + // Handle JSON + if accepts_json { + Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + } else { + // This shouldn't ever happen but the compiler needs it + Err(PbsClientError::DecodeError( + "no viable accept types in 
request".to_string(), + )) + } + } + } } else { // spec: return 204 if request is valid but no bid available info!("no header available for slot"); diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 004b601e..1cf442e0 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -1,14 +1,23 @@ use std::sync::Arc; -use axum::{Json, extract::State, http::HeaderMap, response::IntoResponse}; +use axum::{ + extract::State, + http::{HeaderMap, HeaderValue}, + response::IntoResponse, +}; use cb_common::{ - pbs::{BuilderApiVersion, GetPayloadInfo, SignedBlindedBeaconBlock}, - utils::{get_user_agent, timestamp_of_slot_start_millis, utcnow_ms}, + pbs::{BuilderApiVersion, GetPayloadInfo}, + utils::{ + CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, deserialize_body, get_accept_types, + get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, + }, }; -use reqwest::StatusCode; +use reqwest::{StatusCode, header::CONTENT_TYPE}; +use ssz::Encode; use tracing::{error, info, trace}; use crate::{ + CompoundSubmitBlockResponse, api::BuilderApi, constants::SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, error::PbsClientError, @@ -19,37 +28,27 @@ use crate::{ pub async fn handle_submit_block_v1>( state: State>, req_headers: HeaderMap, - Json(signed_blinded_block): Json>, + raw_request: RawRequest, ) -> Result { - handle_submit_block_impl::( - state, - req_headers, - signed_blinded_block, - BuilderApiVersion::V1, - ) - .await + handle_submit_block_impl::(state, req_headers, raw_request, BuilderApiVersion::V1).await } pub async fn handle_submit_block_v2>( state: State>, req_headers: HeaderMap, - Json(signed_blinded_block): Json>, + raw_request: RawRequest, ) -> Result { - handle_submit_block_impl::( - state, - req_headers, - signed_blinded_block, - BuilderApiVersion::V2, - ) - .await + handle_submit_block_impl::(state, req_headers, raw_request, BuilderApiVersion::V2).await } async fn handle_submit_block_impl>( 
State(state): State>, req_headers: HeaderMap, - signed_blinded_block: Arc, + raw_request: RawRequest, api_version: BuilderApiVersion, ) -> Result { + let signed_blinded_block = + Arc::new(deserialize_body(&req_headers, raw_request.body_bytes).await?); tracing::Span::current().record("slot", signed_blinded_block.slot().as_u64() as i64); tracing::Span::current() .record("block_hash", tracing::field::debug(signed_blinded_block.block_hash())); @@ -64,27 +63,91 @@ async fn handle_submit_block_impl>( let block_hash = signed_blinded_block.block_hash(); let slot_start_ms = timestamp_of_slot_start_millis(slot.into(), state.config.chain); let ua = get_user_agent(&req_headers); + let accept_types = get_accept_types(&req_headers).map_err(|e| { + error!(%e, "error parsing accept header"); + PbsClientError::DecodeError(format!("error parsing accept header: {e}")) + })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); info!(ua, ms_into_slot = now.saturating_sub(slot_start_ms), "new request"); - match A::submit_block(signed_blinded_block, req_headers, state, api_version).await { + match A::submit_block(signed_blinded_block, req_headers, state, api_version, accept_types).await + { Ok(res) => match res { - Some(block_response) => { - trace!(?block_response); - info!("received unblinded block (v1)"); + crate::CompoundSubmitBlockResponse::EmptyBody => { + info!("received unblinded block (v2)"); + + // Note: this doesn't provide consensus_version_header because it doesn't pass + // the body through, and there's no content-type header since the body is empty. 
+ BEACON_NODE_STATUS + .with_label_values(&["202", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) + .inc(); + Ok((StatusCode::ACCEPTED, "").into_response()) + } + CompoundSubmitBlockResponse::Light(payload_and_blobs) => { + trace!(?payload_and_blobs); + info!("received unblinded block (v1, unvalidated)"); BEACON_NODE_STATUS .with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - Ok((StatusCode::OK, Json(block_response).into_response())) + + // Create the headers + let consensus_version_header = + match HeaderValue::from_str(&payload_and_blobs.version.to_string()) { + Ok(consensus_version_header) => { + Ok::(consensus_version_header) + } + Err(e) => { + return Err(PbsClientError::RelayError(format!( + "error decoding consensus version from relay payload: {e}" + ))); + } + }?; + let content_type = payload_and_blobs.encoding_type.content_type(); + let content_type_header = HeaderValue::from_str(content_type).unwrap(); + + // Build response + let mut res = payload_and_blobs.raw_bytes.into_response(); + res.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as {} (light)", content_type); + Ok(res) } - None => { - info!("received unblinded block (v2)"); + CompoundSubmitBlockResponse::Full(payload_and_blobs) => { + trace!(?payload_and_blobs); + info!("received unblinded block (v1)"); BEACON_NODE_STATUS - .with_label_values(&["202", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) + .with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - Ok((StatusCode::ACCEPTED, "".into_response())) + + // Try SSZ + if accepts_ssz { + let mut response = payload_and_blobs.data.as_ssz_bytes().into_response(); + + // This won't actually fail since the string is a const + let content_type_header = + HeaderValue::from_str(EncodingType::Ssz.content_type()).unwrap(); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response.headers_mut().insert( + 
CONSENSUS_VERSION_HEADER, + HeaderValue::from_str(&payload_and_blobs.version.to_string()).unwrap(), + ); + info!("sending response as SSZ"); + return Ok(response); + } + + // Handle JSON + if accepts_json { + Ok((StatusCode::OK, axum::Json(payload_and_blobs)).into_response()) + } else { + // This shouldn't ever happen but the compiler needs it + Err(PbsClientError::DecodeError( + "no viable accept types in request".to_string(), + )) + } } }, diff --git a/crates/pbs/src/state.rs b/crates/pbs/src/state.rs index bd683e5f..cbe86af9 100644 --- a/crates/pbs/src/state.rs +++ b/crates/pbs/src/state.rs @@ -64,8 +64,4 @@ where None => (self.pbs_config(), &self.config.relays, None), } } - - pub fn extra_validation_enabled(&self) -> bool { - self.config.pbs_config.extra_validation_enabled - } } diff --git a/crates/signer/Cargo.toml b/crates/signer/Cargo.toml index 569797ac..7c6e63fa 100644 --- a/crates/signer/Cargo.toml +++ b/crates/signer/Cargo.toml @@ -9,6 +9,7 @@ version.workspace = true alloy.workspace = true axum.workspace = true axum-extra.workspace = true +axum-server.workspace = true bimap.workspace = true blsful.workspace = true cb-common.workspace = true @@ -22,6 +23,7 @@ parking_lot.workspace = true prometheus.workspace = true prost.workspace = true rand.workspace = true +rustls.workspace = true thiserror.workspace = true tokio.workspace = true tonic.workspace = true diff --git a/crates/signer/src/constants.rs b/crates/signer/src/constants.rs index 268cd2e2..e5884d27 100644 --- a/crates/signer/src/constants.rs +++ b/crates/signer/src/constants.rs @@ -1,3 +1,5 @@ pub const GET_PUBKEYS_ENDPOINT_TAG: &str = "get_pubkeys"; pub const GENERATE_PROXY_KEY_ENDPOINT_TAG: &str = "generate_proxy_key"; -pub const REQUEST_SIGNATURE_ENDPOINT_TAG: &str = "request_signature"; +pub const REQUEST_SIGNATURE_BLS_ENDPOINT_TAG: &str = "request_signature_bls"; +pub const REQUEST_SIGNATURE_PROXY_BLS_ENDPOINT_TAG: &str = "request_signature_proxy_bls"; +pub const 
REQUEST_SIGNATURE_PROXY_ECDSA_ENDPOINT_TAG: &str = "request_signature_proxy_ecdsa"; diff --git a/crates/signer/src/error.rs b/crates/signer/src/error.rs index a2a113f3..64a3e5b8 100644 --- a/crates/signer/src/error.rs +++ b/crates/signer/src/error.rs @@ -25,11 +25,17 @@ pub enum SignerModuleError { #[error("Dirk signer does not support this operation")] DirkNotSupported, + #[error("module id not found")] + ModuleIdNotFound, + #[error("internal error: {0}")] Internal(String), #[error("rate limited for {0} more seconds")] RateLimited(f64), + + #[error("request error: {0}")] + RequestError(String), } impl IntoResponse for SignerModuleError { @@ -48,9 +54,13 @@ impl IntoResponse for SignerModuleError { (StatusCode::INTERNAL_SERVER_ERROR, "internal error".to_string()) } SignerModuleError::SignerError(err) => (StatusCode::BAD_REQUEST, err.to_string()), + SignerModuleError::ModuleIdNotFound => (StatusCode::NOT_FOUND, self.to_string()), SignerModuleError::RateLimited(duration) => { (StatusCode::TOO_MANY_REQUESTS, format!("rate limited for {duration:?}")) } + SignerModuleError::RequestError(err) => { + (StatusCode::BAD_REQUEST, format!("bad request: {err}")) + } } .into_response() } diff --git a/crates/signer/src/lib.rs b/crates/signer/src/lib.rs index 4b5e1451..b4b9ecc4 100644 --- a/crates/signer/src/lib.rs +++ b/crates/signer/src/lib.rs @@ -4,3 +4,4 @@ pub mod manager; mod metrics; mod proto; pub mod service; +mod utils; diff --git a/crates/signer/src/manager/dirk.rs b/crates/signer/src/manager/dirk.rs index fe438ef0..45dcc733 100644 --- a/crates/signer/src/manager/dirk.rs +++ b/crates/signer/src/manager/dirk.rs @@ -1,6 +1,9 @@ use std::{collections::HashMap, io::Write, path::PathBuf}; -use alloy::{hex, primitives::B256}; +use alloy::{ + hex, + primitives::{B256, aliases::B32}, +}; use blsful::inner_types::{Field, G2Affine, G2Projective, Group, Scalar}; use cb_common::{ commit::request::{ConsensusProxyMap, ProxyDelegation, SignedProxyDelegation}, @@ -8,7 +11,7 @@ use 
cb_common::{ constants::COMMIT_BOOST_DOMAIN, signature::compute_domain, signer::ProxyStore, - types::{BlsPublicKey, BlsSignature, Chain, ModuleId}, + types::{self, BlsPublicKey, BlsSignature, Chain, ModuleId, SignatureRequestInfo}, }; use eyre::{OptionExt, bail}; use futures::{FutureExt, StreamExt, future::join_all, stream::FuturesUnordered}; @@ -151,6 +154,11 @@ impl DirkManager { }) } + /// Get the chain config for the manager + pub fn get_chain(&self) -> Chain { + self.chain + } + /// Set the proxy store to use for storing proxy delegations pub fn with_proxy_store(self, store: ProxyStore) -> eyre::Result { if let ProxyStore::ERC2335 { .. } = store { @@ -199,14 +207,16 @@ impl DirkManager { pub async fn request_consensus_signature( &self, pubkey: &BlsPublicKey, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { match self.consensus_accounts.get(pubkey) { Some(Account::Simple(account)) => { - self.request_simple_signature(account, object_root).await + self.request_simple_signature(account, object_root, signature_request_info).await } Some(Account::Distributed(account)) => { - self.request_distributed_signature(account, object_root).await + self.request_distributed_signature(account, object_root, signature_request_info) + .await } None => Err(SignerModuleError::UnknownConsensusSigner(pubkey.serialize().to_vec())), } @@ -216,14 +226,16 @@ impl DirkManager { pub async fn request_proxy_signature( &self, pubkey: &BlsPublicKey, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { match self.proxy_accounts.get(pubkey) { Some(ProxyAccount { inner: Account::Simple(account), .. }) => { - self.request_simple_signature(account, object_root).await + self.request_simple_signature(account, object_root, signature_request_info).await } Some(ProxyAccount { inner: Account::Distributed(account), .. 
}) => { - self.request_distributed_signature(account, object_root).await + self.request_distributed_signature(account, object_root, signature_request_info) + .await } None => Err(SignerModuleError::UnknownProxySigner(pubkey.serialize().to_vec())), } @@ -233,13 +245,28 @@ impl DirkManager { async fn request_simple_signature( &self, account: &SimpleAccount, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { - let domain = compute_domain(self.chain, COMMIT_BOOST_DOMAIN); + let domain = compute_domain(self.chain, &B32::from(COMMIT_BOOST_DOMAIN)); + + let data = match signature_request_info { + Some(SignatureRequestInfo { module_signing_id, nonce }) => { + types::PropCommitSigningInfo { + data: *object_root, + module_signing_id: *module_signing_id, + nonce: *nonce, + chain_id: self.chain.id(), + } + .tree_hash_root() + .to_vec() + } + None => object_root.to_vec(), + }; let response = SignerClient::new(account.connection.clone()) .sign(SignRequest { - data: object_root.to_vec(), + data, domain: domain.to_vec(), id: Some(sign_request::Id::PublicKey(account.public_key.serialize().to_vec())), }) @@ -263,17 +290,34 @@ impl DirkManager { async fn request_distributed_signature( &self, account: &DistributedAccount, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { let mut partials = Vec::with_capacity(account.participants.len()); let mut requests = Vec::with_capacity(account.participants.len()); + let data = match signature_request_info { + Some(SignatureRequestInfo { module_signing_id, nonce }) => { + types::PropCommitSigningInfo { + data: *object_root, + module_signing_id: *module_signing_id, + nonce: *nonce, + chain_id: self.chain.id(), + } + .tree_hash_root() + .to_vec() + } + None => object_root.to_vec(), + }; + for (id, channel) in account.participants.iter() { + let data_copy = data.clone(); let request = async move { 
SignerClient::new(channel.clone()) .sign(SignRequest { - data: object_root.to_vec(), - domain: compute_domain(self.chain, COMMIT_BOOST_DOMAIN).to_vec(), + data: data_copy, + domain: compute_domain(self.chain, &B32::from(COMMIT_BOOST_DOMAIN)) + .to_vec(), id: Some(sign_request::Id::Account(account.name.clone())), }) .map(|res| (res, *id)) @@ -328,9 +372,9 @@ impl DirkManager { pub async fn generate_proxy_key( &mut self, module: &ModuleId, - consensus: BlsPublicKey, + consensus: &BlsPublicKey, ) -> Result, SignerModuleError> { - let proxy_account = match self.consensus_accounts.get(&consensus) { + let proxy_account = match self.consensus_accounts.get(consensus) { Some(Account::Simple(account)) => { self.generate_simple_proxy_account(account, module).await? } @@ -349,7 +393,7 @@ impl DirkManager { proxy: proxy_account.inner.public_key().clone(), }; let delegation_signature = - self.request_consensus_signature(&consensus, message.tree_hash_root()).await?; + self.request_consensus_signature(consensus, &message.tree_hash_root(), None).await?; let delegation = SignedProxyDelegation { message, signature: delegation_signature }; diff --git a/crates/signer/src/manager/local.rs b/crates/signer/src/manager/local.rs index 17832fdf..fc2eabae 100644 --- a/crates/signer/src/manager/local.rs +++ b/crates/signer/src/manager/local.rs @@ -3,14 +3,14 @@ use std::collections::HashMap; use alloy::primitives::{Address, B256}; use cb_common::{ commit::request::{ - ConsensusProxyMap, ProxyDelegationBls, ProxyDelegationEcdsa, ProxyId, - SignedProxyDelegationBls, SignedProxyDelegationEcdsa, + ConsensusProxyMap, ProxyDelegationBls, ProxyDelegationEcdsa, SignedProxyDelegationBls, + SignedProxyDelegationEcdsa, }, signer::{ BlsProxySigner, BlsSigner, ConsensusSigner, EcdsaProxySigner, EcdsaSignature, EcdsaSigner, ProxySigners, ProxyStore, }, - types::{BlsPublicKey, BlsSignature, Chain, ModuleId}, + types::{BlsPublicKey, BlsSignature, Chain, ModuleId, SignatureRequestInfo}, }; use 
tree_hash::TreeHash; @@ -50,6 +50,11 @@ impl LocalSigningManager { Ok(manager) } + /// Get the chain config for the manager + pub fn get_chain(&self) -> Chain { + self.chain + } + pub fn add_consensus_signer(&mut self, signer: ConsensusSigner) { self.consensus_signers.insert(signer.pubkey(), signer); } @@ -89,13 +94,13 @@ impl LocalSigningManager { pub async fn create_proxy_bls( &mut self, module_id: ModuleId, - delegator: BlsPublicKey, + delegator: &BlsPublicKey, ) -> Result { let signer = BlsSigner::new_random(); let proxy_pubkey = signer.pubkey(); let message = ProxyDelegationBls { delegator: delegator.clone(), proxy: proxy_pubkey }; - let signature = self.sign_consensus(&delegator, message.tree_hash_root()).await?; + let signature = self.sign_consensus(delegator, &message.tree_hash_root(), None).await?; let delegation = SignedProxyDelegationBls { signature, message }; let proxy_signer = BlsProxySigner { signer, delegation: delegation.clone() }; @@ -108,13 +113,13 @@ impl LocalSigningManager { pub async fn create_proxy_ecdsa( &mut self, module_id: ModuleId, - delegator: BlsPublicKey, + delegator: &BlsPublicKey, ) -> Result { let signer = EcdsaSigner::new_random(); let proxy_address = signer.address(); let message = ProxyDelegationEcdsa { delegator: delegator.clone(), proxy: proxy_address }; - let signature = self.sign_consensus(&delegator, message.tree_hash_root()).await?; + let signature = self.sign_consensus(delegator, &message.tree_hash_root(), None).await?; let delegation = SignedProxyDelegationEcdsa { signature, message }; let proxy_signer = EcdsaProxySigner { signer, delegation: delegation.clone() }; @@ -129,13 +134,14 @@ impl LocalSigningManager { pub async fn sign_consensus( &self, pubkey: &BlsPublicKey, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { let signer = self .consensus_signers .get(pubkey) - .ok_or(SignerModuleError::UnknownConsensusSigner(pubkey.to_bytes()))?; - let signature = 
signer.sign(self.chain, object_root).await; + .ok_or(SignerModuleError::UnknownConsensusSigner(pubkey.serialize().to_vec()))?; + let signature = signer.sign(self.chain, object_root, signature_request_info).await; Ok(signature) } @@ -143,28 +149,30 @@ impl LocalSigningManager { pub async fn sign_proxy_bls( &self, pubkey: &BlsPublicKey, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { let bls_proxy = self .proxy_signers .bls_signers .get(pubkey) .ok_or(SignerModuleError::UnknownProxySigner(pubkey.serialize().to_vec()))?; - let signature = bls_proxy.sign(self.chain, object_root).await; + let signature = bls_proxy.sign(self.chain, object_root, signature_request_info).await; Ok(signature) } pub async fn sign_proxy_ecdsa( &self, address: &Address, - object_root: B256, + object_root: &B256, + signature_request_info: Option<&SignatureRequestInfo>, ) -> Result { let ecdsa_proxy = self .proxy_signers .ecdsa_signers .get(address) .ok_or(SignerModuleError::UnknownProxySigner(address.to_vec()))?; - let signature = ecdsa_proxy.sign(self.chain, object_root).await?; + let signature = ecdsa_proxy.sign(self.chain, object_root, signature_request_info).await?; Ok(signature) } @@ -265,7 +273,6 @@ impl LocalSigningManager { #[cfg(test)] mod tests { use alloy::primitives::B256; - use cb_common::signature::compute_signing_root; use lazy_static::lazy_static; use super::*; @@ -287,10 +294,54 @@ mod tests { (signing_manager, consensus_pk) } + mod test_bls { + use alloy::primitives::aliases::B32; + use cb_common::{ + constants::COMMIT_BOOST_DOMAIN, signature::compute_domain, + signer::verify_bls_signature, types, + }; + + use super::*; + + #[tokio::test] + async fn test_key_signs_message() { + let (signing_manager, consensus_pk) = init_signing_manager(); + + let data_root = B256::random(); + let module_signing_id = B256::random(); + let nonce = 43; + + let sig = signing_manager + .sign_consensus( + &consensus_pk, + &data_root, + 
Some(&SignatureRequestInfo { module_signing_id, nonce }), + ) + .await + .unwrap(); + + // Verify signature + let signing_domain = compute_domain(CHAIN, &B32::from(COMMIT_BOOST_DOMAIN)); + let object_root = types::PropCommitSigningInfo { + data: data_root.tree_hash_root(), + module_signing_id, + nonce, + chain_id: CHAIN.id(), + } + .tree_hash_root(); + let signing_root = types::SigningData { object_root, signing_domain }.tree_hash_root(); + + let validation_result = verify_bls_signature(&consensus_pk, signing_root, &sig); + + assert!(validation_result, "Keypair must produce valid signatures of messages.") + } + } + mod test_proxy_bls { + use alloy::primitives::aliases::B32; use cb_common::{ constants::COMMIT_BOOST_DOMAIN, signature::compute_domain, - signer::verify_bls_signature, utils::TestRandomSeed, + signer::verify_bls_signature, types, utils::TestRandomSeed, }; use super::*; @@ -300,7 +351,7 @@ mod tests { let (mut signing_manager, consensus_pk) = init_signing_manager(); let signed_delegation = - signing_manager.create_proxy_bls(MODULE_ID.clone(), consensus_pk).await.unwrap(); + signing_manager.create_proxy_bls(MODULE_ID.clone(), &consensus_pk).await.unwrap(); let validation_result = signed_delegation.validate(CHAIN); @@ -321,7 +372,7 @@ mod tests { let (mut signing_manager, consensus_pk) = init_signing_manager(); let mut signed_delegation = - signing_manager.create_proxy_bls(MODULE_ID.clone(), consensus_pk).await.unwrap(); + signing_manager.create_proxy_bls(MODULE_ID.clone(), &consensus_pk).await.unwrap(); signed_delegation.signature = BlsSignature::test_random(); @@ -335,16 +386,32 @@ mod tests { let (mut signing_manager, consensus_pk) = init_signing_manager(); let signed_delegation = - signing_manager.create_proxy_bls(MODULE_ID.clone(), consensus_pk).await.unwrap(); + signing_manager.create_proxy_bls(MODULE_ID.clone(), &consensus_pk).await.unwrap(); let proxy_pk = signed_delegation.message.proxy; let data_root = B256::random(); - - let sig = 
signing_manager.sign_proxy_bls(&proxy_pk, data_root).await.unwrap(); + let module_signing_id = B256::random(); + let nonce = 44; + + let sig = signing_manager + .sign_proxy_bls( + &proxy_pk, + &data_root, + Some(&SignatureRequestInfo { module_signing_id, nonce }), + ) + .await + .unwrap(); // Verify signature - let domain = compute_domain(CHAIN, COMMIT_BOOST_DOMAIN); - let signing_root = compute_signing_root(data_root.tree_hash_root(), domain); + let signing_domain = compute_domain(CHAIN, &B32::from(COMMIT_BOOST_DOMAIN)); + let object_root = types::PropCommitSigningInfo { + data: data_root.tree_hash_root(), + module_signing_id, + nonce, + chain_id: CHAIN.id(), + } + .tree_hash_root(); + let signing_root = types::SigningData { object_root, signing_domain }.tree_hash_root(); let validation_result = verify_bls_signature(&proxy_pk, signing_root, &sig); @@ -353,9 +420,10 @@ mod tests { } mod test_proxy_ecdsa { + use alloy::primitives::aliases::B32; use cb_common::{ constants::COMMIT_BOOST_DOMAIN, signature::compute_domain, - signer::verify_ecdsa_signature, utils::TestRandomSeed, + signer::verify_ecdsa_signature, types, utils::TestRandomSeed, }; use super::*; @@ -365,7 +433,7 @@ mod tests { let (mut signing_manager, consensus_pk) = init_signing_manager(); let signed_delegation = - signing_manager.create_proxy_ecdsa(MODULE_ID.clone(), consensus_pk).await.unwrap(); + signing_manager.create_proxy_ecdsa(MODULE_ID.clone(), &consensus_pk).await.unwrap(); let validation_result = signed_delegation.validate(CHAIN); @@ -386,7 +454,7 @@ mod tests { let (mut signing_manager, consensus_pk) = init_signing_manager(); let mut signed_delegation = - signing_manager.create_proxy_ecdsa(MODULE_ID.clone(), consensus_pk).await.unwrap(); + signing_manager.create_proxy_ecdsa(MODULE_ID.clone(), &consensus_pk).await.unwrap(); signed_delegation.signature = BlsSignature::test_random(); @@ -400,16 +468,32 @@ mod tests { let (mut signing_manager, consensus_pk) = init_signing_manager(); let 
signed_delegation = - signing_manager.create_proxy_ecdsa(MODULE_ID.clone(), consensus_pk).await.unwrap(); + signing_manager.create_proxy_ecdsa(MODULE_ID.clone(), &consensus_pk).await.unwrap(); let proxy_pk = signed_delegation.message.proxy; let data_root = B256::random(); - - let sig = signing_manager.sign_proxy_ecdsa(&proxy_pk, data_root).await.unwrap(); + let module_signing_id = B256::random(); + let nonce = 45; + + let sig = signing_manager + .sign_proxy_ecdsa( + &proxy_pk, + &data_root, + Some(&SignatureRequestInfo { module_signing_id, nonce }), + ) + .await + .unwrap(); // Verify signature - let domain = compute_domain(CHAIN, COMMIT_BOOST_DOMAIN); - let signing_root = compute_signing_root(data_root.tree_hash_root(), domain); + let signing_domain = compute_domain(CHAIN, &B32::from(COMMIT_BOOST_DOMAIN)); + let object_root = types::PropCommitSigningInfo { + data: data_root.tree_hash_root(), + module_signing_id, + nonce, + chain_id: CHAIN.id(), + } + .tree_hash_root(); + let signing_root = types::SigningData { object_root, signing_domain }.tree_hash_root(); let validation_result = verify_ecdsa_signature(&proxy_pk, &signing_root, &sig); diff --git a/crates/signer/src/metrics.rs b/crates/signer/src/metrics.rs index beaeefce..4110ec72 100644 --- a/crates/signer/src/metrics.rs +++ b/crates/signer/src/metrics.rs @@ -2,13 +2,15 @@ use axum::http::Uri; use cb_common::commit::constants::{ - GENERATE_PROXY_KEY_PATH, GET_PUBKEYS_PATH, REQUEST_SIGNATURE_PATH, + GENERATE_PROXY_KEY_PATH, GET_PUBKEYS_PATH, REQUEST_SIGNATURE_BLS_PATH, + REQUEST_SIGNATURE_PROXY_BLS_PATH, REQUEST_SIGNATURE_PROXY_ECDSA_PATH, }; use lazy_static::lazy_static; use prometheus::{IntCounterVec, Registry, register_int_counter_vec_with_registry}; use crate::constants::{ - GENERATE_PROXY_KEY_ENDPOINT_TAG, GET_PUBKEYS_ENDPOINT_TAG, REQUEST_SIGNATURE_ENDPOINT_TAG, + GENERATE_PROXY_KEY_ENDPOINT_TAG, GET_PUBKEYS_ENDPOINT_TAG, REQUEST_SIGNATURE_BLS_ENDPOINT_TAG, + REQUEST_SIGNATURE_PROXY_BLS_ENDPOINT_TAG, 
REQUEST_SIGNATURE_PROXY_ECDSA_ENDPOINT_TAG, }; lazy_static! { @@ -28,7 +30,9 @@ pub fn uri_to_tag(uri: &Uri) -> &str { match uri.path() { GET_PUBKEYS_PATH => GET_PUBKEYS_ENDPOINT_TAG, GENERATE_PROXY_KEY_PATH => GENERATE_PROXY_KEY_ENDPOINT_TAG, - REQUEST_SIGNATURE_PATH => REQUEST_SIGNATURE_ENDPOINT_TAG, + REQUEST_SIGNATURE_BLS_PATH => REQUEST_SIGNATURE_BLS_ENDPOINT_TAG, + REQUEST_SIGNATURE_PROXY_BLS_PATH => REQUEST_SIGNATURE_PROXY_BLS_ENDPOINT_TAG, + REQUEST_SIGNATURE_PROXY_ECDSA_PATH => REQUEST_SIGNATURE_PROXY_ECDSA_ENDPOINT_TAG, _ => "unknown endpoint", } } diff --git a/crates/signer/src/service.rs b/crates/signer/src/service.rs index af96c51e..fdf86879 100644 --- a/crates/signer/src/service.rs +++ b/crates/signer/src/service.rs @@ -5,45 +5,53 @@ use std::{ time::{Duration, Instant}, }; +use alloy::primitives::{Address, B256, U256}; use axum::{ Extension, Json, + body::{Body, to_bytes}, extract::{ConnectInfo, Request, State}, - http::StatusCode, + http::{HeaderMap, StatusCode}, middleware::{self, Next}, response::{IntoResponse, Response}, routing::{get, post}, }; use axum_extra::TypedHeader; +use axum_server::tls_rustls::RustlsConfig; use cb_common::{ commit::{ constants::{ - GENERATE_PROXY_KEY_PATH, GET_PUBKEYS_PATH, RELOAD_PATH, REQUEST_SIGNATURE_PATH, - STATUS_PATH, + GENERATE_PROXY_KEY_PATH, GET_PUBKEYS_PATH, RELOAD_PATH, REQUEST_SIGNATURE_BLS_PATH, + REQUEST_SIGNATURE_PROXY_BLS_PATH, REQUEST_SIGNATURE_PROXY_ECDSA_PATH, + REVOKE_MODULE_PATH, STATUS_PATH, }, request::{ - EncryptionScheme, GenerateProxyRequest, GetPubkeysResponse, SignConsensusRequest, - SignProxyRequest, SignRequest, + EncryptionScheme, GenerateProxyRequest, GetPubkeysResponse, ReloadRequest, + RevokeModuleRequest, SignConsensusRequest, SignProxyRequest, }, + response::{BlsSignResponse, EcdsaSignResponse}, }, - config::StartSignerConfig, + config::{ModuleSigningConfig, ReverseProxyHeaderSetup, StartSignerConfig}, constants::{COMMIT_BOOST_COMMIT, COMMIT_BOOST_VERSION}, - types::{Chain, Jwt, 
ModuleId}, - utils::{decode_jwt, validate_jwt}, + types::{BlsPublicKey, Chain, Jwt, ModuleId, SignatureRequestInfo}, + utils::{decode_jwt, validate_admin_jwt, validate_jwt}, }; use cb_metrics::provider::MetricsProvider; use eyre::Context; use headers::{Authorization, authorization::Bearer}; use parking_lot::RwLock as ParkingRwLock; -use tokio::{net::TcpListener, sync::RwLock}; +use rustls::crypto::{CryptoProvider, aws_lc_rs}; +use tokio::sync::RwLock; use tracing::{debug, error, info, warn}; -use uuid::Uuid; use crate::{ error::SignerModuleError, manager::{SigningManager, dirk::DirkManager, local::LocalSigningManager}, metrics::{SIGNER_METRICS_REGISTRY, SIGNER_STATUS, uri_to_tag}, + utils::get_true_ip, }; +pub const REQUEST_MAX_BODY_LENGTH: usize = 1024 * 1024; // 1 MB + /// Implements the Signer API and provides a service for signing requests pub struct SigningService; @@ -61,9 +69,12 @@ struct SigningState { /// Manager handling different signing methods manager: Arc>, - /// Map of modules ids to JWT secrets. This also acts as registry of all - /// modules running - jwts: Arc>, + /// Map of modules ids to JWT configurations. This also acts as registry of + /// all modules running + jwts: Arc>>, + + /// Secret for the admin JWT + admin_secret: Arc>, /// Map of JWT failures per peer jwt_auth_failures: Arc>>, @@ -71,23 +82,29 @@ struct SigningState { // JWT auth failure settings jwt_auth_fail_limit: u32, jwt_auth_fail_timeout: Duration, + + /// Header to extract the trusted client IP from + reverse_proxy: ReverseProxyHeaderSetup, } impl SigningService { pub async fn run(config: StartSignerConfig) -> eyre::Result<()> { - if config.jwts.is_empty() { + if config.mod_signing_configs.is_empty() { warn!("Signing service was started but no module is registered. 
Exiting"); return Ok(()); } - let module_ids: Vec = config.jwts.keys().cloned().map(Into::into).collect(); + let module_ids: Vec = + config.mod_signing_configs.keys().cloned().map(Into::into).collect(); let state = SigningState { manager: Arc::new(RwLock::new(start_manager(config.clone()).await?)), - jwts: config.jwts.into(), + jwts: Arc::new(ParkingRwLock::new(config.mod_signing_configs)), + admin_secret: Arc::new(ParkingRwLock::new(config.admin_secret)), jwt_auth_failures: Arc::new(ParkingRwLock::new(HashMap::new())), jwt_auth_fail_limit: config.jwt_auth_fail_limit, jwt_auth_fail_timeout: Duration::from_secs(config.jwt_auth_fail_timeout_seconds as u64), + reverse_proxy: config.reverse_proxy, }; // Get the signer counts @@ -108,25 +125,92 @@ impl SigningService { loaded_proxies, jwt_auth_fail_limit =? state.jwt_auth_fail_limit, jwt_auth_fail_timeout =? state.jwt_auth_fail_timeout, + reverse_proxy =% state.reverse_proxy, "Starting signing service" ); SigningService::init_metrics(config.chain)?; - let app = axum::Router::new() - .route(REQUEST_SIGNATURE_PATH, post(handle_request_signature)) + let signer_app = axum::Router::new() + .route(REQUEST_SIGNATURE_BLS_PATH, post(handle_request_signature_bls)) + .route(REQUEST_SIGNATURE_PROXY_BLS_PATH, post(handle_request_signature_proxy_bls)) + .route(REQUEST_SIGNATURE_PROXY_ECDSA_PATH, post(handle_request_signature_proxy_ecdsa)) .route(GET_PUBKEYS_PATH, get(handle_get_pubkeys)) .route(GENERATE_PROXY_KEY_PATH, post(handle_generate_proxy)) .route_layer(middleware::from_fn_with_state(state.clone(), jwt_auth)) + .with_state(state.clone()) + .route_layer(middleware::from_fn(log_request)); + + let admin_app = axum::Router::new() .route(RELOAD_PATH, post(handle_reload)) + .route(REVOKE_MODULE_PATH, post(handle_revoke_module)) + .route_layer(middleware::from_fn_with_state(state.clone(), admin_auth)) .with_state(state.clone()) .route_layer(middleware::from_fn(log_request)) - .route(STATUS_PATH, get(handle_status)) - 
.into_make_service_with_connect_info::(); + .route(STATUS_PATH, get(handle_status)); + + // Run the JWT cleaning task + let jwt_cleaning_task = tokio::spawn(async move { + let mut interval = tokio::time::interval(state.jwt_auth_fail_timeout); + loop { + interval.tick().await; + let mut failures = state.jwt_auth_failures.write(); + let before = failures.len(); + failures + .retain(|_, info| info.last_failure.elapsed() < state.jwt_auth_fail_timeout); + let after = failures.len(); + if before != after { + debug!("Cleaned up {} old JWT auth failure entries", before - after); + } + } + }); + + let server_result = if let Some(tls_config) = config.tls_certificates { + if CryptoProvider::get_default().is_none() { + // Install the AWS-LC provider if no default is set, usually for CI + debug!("Installing AWS-LC as default TLS provider"); + let mut attempts = 0; + loop { + match aws_lc_rs::default_provider().install_default() { + Ok(_) => { + debug!("Successfully installed AWS-LC as default TLS provider"); + break; + } + Err(e) => { + error!( + "Failed to install AWS-LC as default TLS provider: {e:?}. Retrying..." 
+ ); + if attempts >= 3 { + error!( + "Exceeded maximum attempts to install AWS-LC as default TLS provider" + ); + break; + } + attempts += 1; + } + } + } + } + + let tls_config = RustlsConfig::from_pem(tls_config.0, tls_config.1).await?; + axum_server::bind_rustls(config.endpoint, tls_config) + .serve( + signer_app.merge(admin_app).into_make_service_with_connect_info::(), + ) + .await + } else { + warn!("Running in insecure HTTP mode, no TLS certificates provided"); + axum_server::bind(config.endpoint) + .serve( + signer_app.merge(admin_app).into_make_service_with_connect_info::(), + ) + .await + }; - let listener = TcpListener::bind(config.endpoint).await?; + // Shutdown the JWT cleaning task + jwt_cleaning_task.abort(); - axum::serve(listener, app).await.wrap_err("signer server exited") + server_result.wrap_err("signer service exited") } fn init_metrics(network: Chain) -> eyre::Result<()> { @@ -134,34 +218,56 @@ impl SigningService { } } +/// Marks a JWT authentication failure for a given client IP +fn mark_jwt_failure(state: &SigningState, client_ip: IpAddr) { + let mut failures = state.jwt_auth_failures.write(); + let failure_info = failures + .entry(client_ip) + .or_insert(JwtAuthFailureInfo { failure_count: 0, last_failure: Instant::now() }); + failure_info.failure_count += 1; + failure_info.last_failure = Instant::now(); +} + /// Authentication middleware layer async fn jwt_auth( State(state): State, + req_headers: HeaderMap, TypedHeader(auth): TypedHeader>, addr: ConnectInfo, - mut req: Request, + req: Request, next: Next, ) -> Result { // Check if the request needs to be rate limited - let client_ip = addr.ip(); + let client_ip = get_true_ip(&req_headers, &addr, &state.reverse_proxy).map_err(|e| { + error!("Failed to get client IP: {e}"); + SignerModuleError::RequestError("failed to get client IP".to_string()) + })?; check_jwt_rate_limit(&state, &client_ip)?; + // Clone the request so we can read the body + let (parts, body) = req.into_parts(); + let path 
= parts.uri.path(); + let bytes = to_bytes(body, REQUEST_MAX_BODY_LENGTH).await.map_err(|e| { + error!("Failed to read request body: {e}"); + mark_jwt_failure(&state, client_ip); + SignerModuleError::RequestError(e.to_string()) + })?; + // Process JWT authorization - match check_jwt_auth(&auth, &state) { + match check_jwt_auth(&auth, &state, path, &bytes) { Ok(module_id) => { + let mut req = Request::from_parts(parts, Body::from(bytes)); req.extensions_mut().insert(module_id); Ok(next.run(req).await) } Err(SignerModuleError::Unauthorized) => { - let mut failures = state.jwt_auth_failures.write(); - let failure_info = failures - .entry(client_ip) - .or_insert(JwtAuthFailureInfo { failure_count: 0, last_failure: Instant::now() }); - failure_info.failure_count += 1; - failure_info.last_failure = Instant::now(); + mark_jwt_failure(&state, client_ip); Err(SignerModuleError::Unauthorized) } - Err(err) => Err(err), + Err(err) => { + mark_jwt_failure(&state, client_ip); + Err(err) + } } } @@ -206,26 +312,69 @@ fn check_jwt_rate_limit(state: &SigningState, client_ip: &IpAddr) -> Result<(), fn check_jwt_auth( auth: &Authorization, state: &SigningState, + path: &str, + body: &[u8], ) -> Result { let jwt: Jwt = auth.token().to_string().into(); // We first need to decode it to get the module id and then validate it // with the secret stored in the state - let module_id = decode_jwt(jwt.clone()).map_err(|e| { + let claims = decode_jwt(jwt.clone()).map_err(|e| { error!("Unauthorized request. Invalid JWT: {e}"); SignerModuleError::Unauthorized })?; - let jwt_secret = state.jwts.get(&module_id).ok_or_else(|| { + let guard = state.jwts.read(); + let jwt_config = guard.get(&claims.module).ok_or_else(|| { error!("Unauthorized request. 
Was the module started correctly?"); SignerModuleError::Unauthorized })?; - validate_jwt(jwt, jwt_secret).map_err(|e| { + let body_bytes = if body.is_empty() { None } else { Some(body) }; + validate_jwt(jwt, &jwt_config.jwt_secret, path, body_bytes).map_err(|e| { + error!("Unauthorized request. Invalid JWT: {e}"); + SignerModuleError::Unauthorized + })?; + + Ok(claims.module) +} + +async fn admin_auth( + State(state): State, + req_headers: HeaderMap, + TypedHeader(auth): TypedHeader>, + addr: ConnectInfo, + req: Request, + next: Next, +) -> Result { + // Check if the request needs to be rate limited + let client_ip = get_true_ip(&req_headers, &addr, &state.reverse_proxy).map_err(|e| { + error!("Failed to get client IP: {e}"); + SignerModuleError::RequestError("failed to get client IP".to_string()) + })?; + check_jwt_rate_limit(&state, &client_ip)?; + + // Clone the request so we can read the body + let (parts, body) = req.into_parts(); + let path = parts.uri.path(); + let bytes = to_bytes(body, REQUEST_MAX_BODY_LENGTH).await.map_err(|e| { + error!("Failed to read request body: {e}"); + mark_jwt_failure(&state, client_ip); + SignerModuleError::RequestError(e.to_string()) + })?; + + let jwt: Jwt = auth.token().to_string().into(); + + // Validate the admin JWT + let body_bytes: Option<&[u8]> = if bytes.is_empty() { None } else { Some(&bytes) }; + validate_admin_jwt(jwt, &state.admin_secret.read(), path, body_bytes).map_err(|e| { error!("Unauthorized request. 
Invalid JWT: {e}"); + mark_jwt_failure(&state, client_ip); SignerModuleError::Unauthorized })?; - Ok(module_id) + + let req = Request::from_parts(parts, Body::from(bytes)); + Ok(next.run(req).await) } /// Requests logging middleware layer @@ -246,9 +395,7 @@ async fn handle_get_pubkeys( Extension(module_id): Extension, State(state): State, ) -> Result { - let req_id = Uuid::new_v4(); - - debug!(event = "get_pubkeys", ?req_id, "New request"); + debug!(event = "get_pubkeys", ?module_id, "New request"); let keys = state .manager @@ -262,62 +409,184 @@ async fn handle_get_pubkeys( Ok((StatusCode::OK, Json(res)).into_response()) } -/// Implements request_signature from the Signer API -async fn handle_request_signature( +/// Validates a BLS key signature request and returns the signature +async fn handle_request_signature_bls( Extension(module_id): Extension, State(state): State, - Json(request): Json, + Json(request): Json, ) -> Result { - let req_id = Uuid::new_v4(); + debug!(event = "bls_request_signature", ?module_id, %request, "New request"); + handle_request_signature_bls_impl( + module_id, + state, + false, + request.pubkey, + request.object_root, + request.nonce, + ) + .await +} - debug!(event = "request_signature", ?module_id, %request, ?req_id, "New request"); +/// Validates a BLS key signature request using a proxy key and returns the +/// signature +async fn handle_request_signature_proxy_bls( + Extension(module_id): Extension, + State(state): State, + Json(request): Json>, +) -> Result { + debug!(event = "proxy_bls_request_signature", ?module_id, %request, "New request"); + handle_request_signature_bls_impl( + module_id, + state, + true, + request.proxy, + request.object_root, + request.nonce, + ) + .await +} - let manager = state.manager.read().await; - let res = match &*manager { - SigningManager::Local(local_manager) => match request { - SignRequest::Consensus(SignConsensusRequest { object_root, pubkey }) => local_manager - .sign_consensus(&pubkey, 
object_root) - .await - .map(|sig| Json(sig).into_response()), - SignRequest::ProxyBls(SignProxyRequest { object_root, proxy: bls_key }) => { +/// Implementation for handling a BLS signature request +async fn handle_request_signature_bls_impl( + module_id: ModuleId, + state: SigningState, + is_proxy: bool, + signing_pubkey: BlsPublicKey, + object_root: B256, + nonce: u64, +) -> Result { + let Some(signing_id) = state.jwts.read().get(&module_id).map(|m| m.signing_id) else { + error!( + event = "proxy_bls_request_signature", + ?module_id, + %signing_pubkey, + %object_root, + nonce, + "Module signing ID not found" + ); + return Err(SignerModuleError::RequestError("Module signing ID not found".to_string())); + }; + + let (chain_id, signature) = match &*state.manager.read().await { + SigningManager::Local(local_manager) => { + let sig = if is_proxy { local_manager - .sign_proxy_bls(&bls_key, object_root) + .sign_proxy_bls( + &signing_pubkey, + &object_root, + Some(&SignatureRequestInfo { module_signing_id: signing_id, nonce }), + ) .await - .map(|sig| Json(sig).into_response()) - } - SignRequest::ProxyEcdsa(SignProxyRequest { object_root, proxy: ecdsa_key }) => { + } else { local_manager - .sign_proxy_ecdsa(&ecdsa_key, object_root) + .sign_consensus( + &signing_pubkey, + &object_root, + Some(&SignatureRequestInfo { module_signing_id: signing_id, nonce }), + ) .await - .map(|sig| Json(sig).into_response()) - } - }, - SigningManager::Dirk(dirk_manager) => match request { - SignRequest::Consensus(SignConsensusRequest { object_root, pubkey }) => dirk_manager - .request_consensus_signature(&pubkey, object_root) - .await - .map(|sig| Json(sig).into_response()), - SignRequest::ProxyBls(SignProxyRequest { object_root, proxy: bls_key }) => dirk_manager - .request_proxy_signature(&bls_key, object_root) - .await - .map(|sig| Json(sig).into_response()), - SignRequest::ProxyEcdsa(_) => { - error!( - event = "request_signature", - ?module_id, - ?req_id, - "ECDSA proxy sign request 
not supported with Dirk" - ); - Err(SignerModuleError::DirkNotSupported) - } - }, + }; + (local_manager.get_chain().id(), sig) + } + SigningManager::Dirk(dirk_manager) => { + let sig = if is_proxy { + dirk_manager + .request_proxy_signature( + &signing_pubkey, + &object_root, + Some(&SignatureRequestInfo { module_signing_id: signing_id, nonce }), + ) + .await + } else { + dirk_manager + .request_consensus_signature( + &signing_pubkey, + &object_root, + Some(&SignatureRequestInfo { module_signing_id: signing_id, nonce }), + ) + .await + }; + (dirk_manager.get_chain().id(), sig) + } }; - if let Err(err) = &res { - error!(event = "request_signature", ?module_id, ?req_id, "{err}"); - } + signature + .inspect_err(|err| { + error!(event = "request_signature", ?module_id, %signing_pubkey, %object_root, nonce, "{err}") + }) + .map(|sig| { + Json(BlsSignResponse::new( + signing_pubkey.clone(), + object_root, + signing_id, + nonce, + chain_id, + sig, + )) + .into_response() + }) +} - res +/// Validates an ECDSA key signature request using a proxy key and returns the +/// signature +async fn handle_request_signature_proxy_ecdsa( + Extension(module_id): Extension, + State(state): State, + Json(request): Json>, +) -> Result { + let Some(signing_id) = state.jwts.read().get(&module_id).map(|m| m.signing_id) else { + error!( + event = "proxy_ecdsa_request_signature", + ?module_id, + proxy = %request.proxy, + object_root = %request.object_root, + nonce = request.nonce, + "Module signing ID not found" + ); + return Err(SignerModuleError::RequestError("Module signing ID not found".to_string())); + }; + debug!(event = "proxy_ecdsa_request_signature", ?module_id, %request, "New request"); + + let (chain_id, signature) = match &*state.manager.read().await { + SigningManager::Local(local_manager) => { + let sig = local_manager + .sign_proxy_ecdsa( + &request.proxy, + &request.object_root, + Some(&SignatureRequestInfo { + module_signing_id: signing_id, + nonce: request.nonce, + }), + ) + 
.await; + (local_manager.get_chain().id(), sig) + } + SigningManager::Dirk(_) => { + // Dirk does not support ECDSA proxy signing + error!( + event = "request_signature", + ?module_id, + proxy = %request.proxy, + object_root = %request.object_root, + nonce = request.nonce, + "ECDSA proxy sign request not supported with Dirk" + ); + (U256::ZERO, Err(SignerModuleError::DirkNotSupported)) + } + }; + signature + .inspect_err(|err| error!(event = "request_signature", ?module_id, proxy = %request.proxy, object_root = %request.object_root, nonce = request.nonce, "{err}")) + .map(|sig| { + Json(EcdsaSignResponse::new( + request.proxy, + request.object_root, + signing_id, + request.nonce, + chain_id, + sig, + )) + .into_response() + }) } async fn handle_generate_proxy( @@ -325,25 +594,23 @@ async fn handle_generate_proxy( State(state): State, Json(request): Json, ) -> Result { - let req_id = Uuid::new_v4(); - - debug!(event = "generate_proxy", ?module_id, scheme=?request.scheme, pubkey=%request.consensus_pubkey, ?req_id, "New request"); + debug!(event = "generate_proxy", ?module_id, scheme=?request.scheme, pubkey=%request.consensus_pubkey, "New request"); let mut manager = state.manager.write().await; let res = match &mut *manager { SigningManager::Local(local_manager) => match request.scheme { EncryptionScheme::Bls => local_manager - .create_proxy_bls(module_id.clone(), request.consensus_pubkey) + .create_proxy_bls(module_id.clone(), &request.consensus_pubkey) .await .map(|proxy_delegation| Json(proxy_delegation).into_response()), EncryptionScheme::Ecdsa => local_manager - .create_proxy_ecdsa(module_id.clone(), request.consensus_pubkey) + .create_proxy_ecdsa(module_id.clone(), &request.consensus_pubkey) .await .map(|proxy_delegation| Json(proxy_delegation).into_response()), }, SigningManager::Dirk(dirk_manager) => match request.scheme { EncryptionScheme::Bls => dirk_manager - .generate_proxy_key(&module_id, request.consensus_pubkey) + .generate_proxy_key(&module_id, 
&request.consensus_pubkey) .await .map(|proxy_delegation| Json(proxy_delegation).into_response()), EncryptionScheme::Ecdsa => { @@ -354,40 +621,78 @@ async fn handle_generate_proxy( }; if let Err(err) = &res { - error!(event = "generate_proxy", module_id=?module_id, ?req_id, "{err}"); + error!(event = "generate_proxy", ?module_id, scheme=?request.scheme, pubkey=%request.consensus_pubkey, "{err}"); } res } async fn handle_reload( - State(mut state): State, + State(state): State, + Json(request): Json, ) -> Result { - let req_id = Uuid::new_v4(); - - debug!(event = "reload", ?req_id, "New request"); + debug!(event = "reload", "New request"); + // Regenerate the config let config = match StartSignerConfig::load_from_env() { Ok(config) => config, Err(err) => { - error!(event = "reload", ?req_id, error = ?err, "Failed to reload config"); + error!(event = "reload", error = ?err, "Failed to reload config"); return Err(SignerModuleError::Internal("failed to reload config".to_string())); } }; + // Start a new manager with the updated config let new_manager = match start_manager(config).await { Ok(manager) => manager, Err(err) => { - error!(event = "reload", ?req_id, error = ?err, "Failed to reload manager"); + error!(event = "reload", error = ?err, "Failed to reload manager"); return Err(SignerModuleError::Internal("failed to reload config".to_string())); } }; - state.manager = Arc::new(RwLock::new(new_manager)); + // Update the JWT configs if provided in the request + if let Some(jwt_secrets) = request.jwt_secrets { + let mut jwt_configs = state.jwts.write(); + let mut new_configs = HashMap::new(); + for (module_id, jwt_secret) in jwt_secrets { + if let Some(signing_id) = jwt_configs.get(&module_id).map(|cfg| cfg.signing_id) { + new_configs.insert(module_id.clone(), ModuleSigningConfig { + module_name: module_id, + jwt_secret, + signing_id, + }); + } else { + let error_message = format!( + "Module {module_id} signing ID not found in commit-boost config, cannot reload" + ); 
+ error!(event = "reload", module_id = %module_id, error = %error_message); + return Err(SignerModuleError::RequestError(error_message)); + } + } + *jwt_configs = new_configs; + } + + // Update the rest of the state once everything has passed + if let Some(admin_secret) = request.admin_secret { + *state.admin_secret.write() = admin_secret; + } + *state.manager.write().await = new_manager; Ok(StatusCode::OK) } +async fn handle_revoke_module( + State(state): State, + Json(request): Json, +) -> Result { + let mut guard = state.jwts.write(); + guard + .remove(&request.module_id) + .ok_or(SignerModuleError::ModuleIdNotFound) + .map(|_| StatusCode::OK) +} + async fn start_manager(config: StartSignerConfig) -> eyre::Result { let proxy_store = if let Some(store) = config.store.clone() { Some(store.init_from_env()?) diff --git a/crates/signer/src/utils.rs b/crates/signer/src/utils.rs new file mode 100644 index 00000000..bfc28f9f --- /dev/null +++ b/crates/signer/src/utils.rs @@ -0,0 +1,242 @@ +use std::net::{IpAddr, SocketAddr}; + +use axum::http::HeaderMap; +use cb_common::config::ReverseProxyHeaderSetup; + +#[derive(Debug, thiserror::Error)] +pub enum IpError { + #[error("header `{0}` is not present")] + NotPresent(String), + #[error("header value has invalid characters")] + HasInvalidCharacters, + #[error("header value is not a valid IP address")] + InvalidValue, + #[error("header `{0}` appears multiple times but expected to be unique")] + NotUnique(String), + #[error("header does not contain enough values: found {found}, required {required}")] + NotEnoughValues { found: usize, required: usize }, +} + +/// Get the true client IP from the request headers or fallback to the socket +/// address +pub fn get_true_ip( + headers: &HeaderMap, + addr: &SocketAddr, + reverse_proxy: &ReverseProxyHeaderSetup, +) -> Result { + match reverse_proxy { + ReverseProxyHeaderSetup::None => Ok(addr.ip()), + ReverseProxyHeaderSetup::Unique { header } => get_ip_from_unique_header(headers, 
header), + ReverseProxyHeaderSetup::Rightmost { header, trusted_count } => { + get_ip_from_rightmost_value(headers, header, trusted_count.get()) + } + } +} + +fn get_ip_from_unique_header(headers: &HeaderMap, header_name: &str) -> Result { + let mut values = headers.get_all(header_name).iter(); + + let first_value = values.next().ok_or(IpError::NotPresent(header_name.to_string()))?; + + if values.next().is_some() { + return Err(IpError::NotUnique(header_name.to_string())); + } + + let ip = first_value + .to_str() + .map_err(|_| IpError::HasInvalidCharacters)? + .parse::() + .map_err(|_| IpError::InvalidValue)?; + + Ok(ip) +} + +fn get_ip_from_rightmost_value( + headers: &HeaderMap, + header_name: &str, + trusted_count: usize, +) -> Result { + let joined_values = headers + .get_all(header_name) + .iter() + .map(|x| x.to_str().map_err(|_| IpError::HasInvalidCharacters)) + .collect::, IpError>>()? + .join(","); + + if joined_values.is_empty() { + return Err(IpError::NotPresent(header_name.to_string())) + } + + // Selecting the first untrusted IP from the right according to: + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/X-Forwarded-For#selecting_an_ip_address + joined_values + .rsplit(",") + .nth(trusted_count - 1) + .ok_or(IpError::NotEnoughValues { + found: joined_values.split(",").count(), + required: trusted_count, + })? 
+ .trim() + .parse::() + .map_err(|_| IpError::InvalidValue) +} + +#[cfg(test)] +mod tests { + use std::net::Ipv4Addr; + + use super::*; + + #[test] + fn test_unique_header_pass() { + let header_name = "X-Real-IP"; + let real_ip = IpAddr::V4(Ipv4Addr::new(1, 1, 1, 1)); + + let mut headers = HeaderMap::new(); + headers.insert(header_name, real_ip.to_string().parse().unwrap()); + + let ip = get_ip_from_unique_header(&headers, header_name).unwrap(); + assert_eq!(ip, real_ip); + } + + #[test] + fn test_unique_header_duplicated() { + let header_name = "X-Real-IP"; + let real_ip = IpAddr::V4(Ipv4Addr::new(1, 1, 1, 1)); + let fake_ip = IpAddr::V4(Ipv4Addr::new(2, 2, 2, 2)); + + let mut headers = HeaderMap::new(); + headers.insert(header_name, real_ip.to_string().parse().unwrap()); + headers.append(header_name, fake_ip.to_string().parse().unwrap()); + + let err = get_ip_from_unique_header(&headers, header_name) + .expect_err("Not unique header should fail"); + assert!(matches!(err, IpError::NotUnique(_))); + } + #[test] + fn test_unique_header_not_present() { + let header_name = "X-Real-IP"; + let headers = HeaderMap::new(); + + let err = get_ip_from_unique_header(&headers, header_name) + .expect_err("Missing header should fail"); + assert!(matches!(err, IpError::NotPresent(_))); + } + + #[test] + fn test_unique_header_invalid_value() { + let header_name = "X-Real-IP"; + let mut headers = HeaderMap::new(); + headers.insert(header_name, "invalid-ip".parse().unwrap()); + + let err = + get_ip_from_unique_header(&headers, header_name).expect_err("Invalid IP should fail"); + assert!(matches!(err, IpError::InvalidValue)); + } + + #[test] + fn test_unique_header_empty_value() { + let header_name = "X-Real-IP"; + let mut headers = HeaderMap::new(); + headers.insert(header_name, "".parse().unwrap()); + + let err = + get_ip_from_unique_header(&headers, header_name).expect_err("Invalid IP should fail"); + assert!(matches!(err, IpError::InvalidValue)); + } + + #[test] + fn 
test_rightmost_header_comma_separated() { + let header_name = "X-Forwarded-For"; + let ip1 = IpAddr::V4(Ipv4Addr::new(1, 1, 1, 1)); + let ip2 = IpAddr::V4(Ipv4Addr::new(2, 2, 2, 2)); + let ip3 = IpAddr::V4(Ipv4Addr::new(3, 3, 3, 3)); + + let mut headers = HeaderMap::new(); + headers.insert(header_name, format!("{},{},{}", ip1, ip2, ip3).parse().unwrap()); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 1).unwrap(); + assert_eq!(ip, ip3); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 2).unwrap(); + assert_eq!(ip, ip2); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 3).unwrap(); + assert_eq!(ip, ip1); + + let err = get_ip_from_rightmost_value(&headers, header_name, 4) + .expect_err("Not enough values should fail"); + assert!(matches!(err, IpError::NotEnoughValues { .. })); + } + + #[test] + fn test_rightmost_header_comma_space_separated() { + let header_name = "X-Forwarded-For"; + let ip1 = IpAddr::V4(Ipv4Addr::new(1, 1, 1, 1)); + let ip2 = IpAddr::V4(Ipv4Addr::new(2, 2, 2, 2)); + let ip3 = IpAddr::V4(Ipv4Addr::new(3, 3, 3, 3)); + + let mut headers = HeaderMap::new(); + headers.insert(header_name, format!("{}, {}, {}", ip1, ip2, ip3).parse().unwrap()); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 1).unwrap(); + assert_eq!(ip, ip3); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 2).unwrap(); + assert_eq!(ip, ip2); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 3).unwrap(); + assert_eq!(ip, ip1); + + let err = get_ip_from_rightmost_value(&headers, header_name, 4) + .expect_err("Not enough values should fail"); + assert!(matches!(err, IpError::NotEnoughValues { .. })); + } + + #[test] + fn test_rightmost_header_duplicated() { + // If the header appears multiple times, they should be joined together + // as if they were a single value. 
+ let header_name = "X-Forwarded-For"; + let ip1 = IpAddr::V4(Ipv4Addr::new(1, 1, 1, 1)); + let ip2 = IpAddr::V4(Ipv4Addr::new(2, 2, 2, 2)); + let ip3 = IpAddr::V4(Ipv4Addr::new(3, 3, 3, 3)); + let ip4 = IpAddr::V4(Ipv4Addr::new(4, 4, 4, 4)); + let ip5 = IpAddr::V4(Ipv4Addr::new(5, 5, 5, 5)); + + let mut headers = HeaderMap::new(); + headers.insert(header_name, format!("{},{},{}", ip1, ip2, ip3).parse().unwrap()); + headers.append(header_name, format!("{},{}", ip4, ip5).parse().unwrap()); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 1).unwrap(); + assert_eq!(ip, ip5); + + let ip = get_ip_from_rightmost_value(&headers, header_name, 5).unwrap(); + assert_eq!(ip, ip1); + + let err = get_ip_from_rightmost_value(&headers, header_name, 6) + .expect_err("Not enough values should fail"); + assert!(matches!(err, IpError::NotEnoughValues { .. })); + } + + #[test] + fn test_rightmost_header_not_present() { + let header_name = "X-Forwarded-For"; + let headers = HeaderMap::new(); + + let err = get_ip_from_rightmost_value(&headers, header_name, 1) + .expect_err("Missing header should fail"); + assert!(matches!(err, IpError::NotPresent(_))); + } + + #[test] + fn test_rightmost_header_invalid_value() { + let header_name = "X-Forwarded-For"; + let mut headers = HeaderMap::new(); + headers.insert(header_name, "invalid-ip".parse().unwrap()); + + let err = get_ip_from_rightmost_value(&headers, header_name, 1) + .expect_err("Invalid IP should fail"); + assert!(matches!(err, IpError::InvalidValue)); + } +} diff --git a/docs/docs/developing/prop-commit-signing.md b/docs/docs/developing/prop-commit-signing.md new file mode 100644 index 00000000..1e8bd249 --- /dev/null +++ b/docs/docs/developing/prop-commit-signing.md @@ -0,0 +1,76 @@ +# Requesting Proposer Commitment Signatures with Commit-Boost + +When you create a new validator on the Ethereum network, one of the steps is the generation of a new BLS private key (commonly known as the "validator key" or the "signer 
key") and its corresponding BLS public key (the "validator pubkey", used as an identifier). Typically this private key will be used by an Ethereum consensus client to sign things such as attestations and blocks for publication on the Beacon chain. These signatures prove that you, as the owner of that private key, approve of the data being signed. However, as general-purpose private keys, they can also be used to sign *other* arbitrary messages not destined for the Beacon chain. + +Commit-Boost takes advantage of this by offering a standard known as **proposer commitments**. These are arbitrary messages (albeit with some important rules), similar to the kind used on the Beacon chain, that have been signed by one of the owner's private keys. Modules interested in leveraging Commit-Boost's proposer commitments can construct their own data in whatever format they like and request that Commit-Boost's **signer service** generate a signature for it with a particular private key. The module can then use that signature to verify the data was signed by that user. + +Commit-Boost supports proposer commitment signatures for both BLS private keys (identified by their public key) and ECDSA private keys (identified by their Ethereum address). + + +## Rules of Proposer Commitment Signatures + +Proposer commitment signatures produced by Commit-Boost's signer service conform to the following rules: + +- Signatures are **unique** to a given EVM chain (identified by its [chain ID](https://chainlist.org/)). Signatures generated for one chain will not work on a different chain. +- Signatures are **unique** to Commit-Boost proposer commitments. The signer service **cannot** be used to create signatures that could be used for other applications, such as for attestations on the Beacon chain. While the signer service has access to the same validator private keys used to attest on the Beacon chain, it cannot create signatures that would get you slashed on the Beacon chain. 
+- Signatures are **unique** to a particular module. One module cannot, for example, request an identical payload as another module and effectively "forge" a signature for the second module; identical payloads from two separate modules will result in two separate signatures.
+- The data payload being signed must be a **32-byte array**, typically serialized as a 64-character hex string with an optional `0x` prefix. The value itself is arbitrary, as long as it has meaning to the requester - though it is typically the 256-bit hash of some kind of data.
+- If requesting a signature from a BLS key, the resulting signature will be a standard BLS signature (96 bytes in length).
+- If requesting a signature from an ECDSA key, the resulting signature will be a standard Ethereum RSV signature (65 bytes in length).
+- Signatures **may** be **unique** per request, using the optional `nonce` field in their requests to indicate a unique sequence that this signature belongs to.
+
+
+## Configuring a Module for Proposer Commitments
+
+Commit-Boost's signer service must be configured prior to launching to expect requests from your module. There are two main parts:
+
+1. An entry for your module into [Commit-Boost's configuration file](../get_started/configuration.md#custom-module). This must include a unique ID for your module, the line `type = "commit"`, and include a unique [signing ID](#the-signing-id) for your module. Generally you should provide values for these in your documentation, so your users can reference it when configuring their own Commit-Boost node.
+
+2. A JWT secret used by your module to authenticate with the signer in HTTP requests. This must be a string that both the Commit-Boost signer can read and your module can read, but no other modules should be allowed to access it.
The user should be responsible for determining an appropriate secret and providing it to the Commit-Boost signer service securely; your module will need some way to accept this, typically via a command line argument that accepts a path to a file with the secret or as an environment variable. + +Once the user has configured both Commit-Boost and your module with these settings, your module will be able to authenticate with the signer service and request signatures. + + +## The Signing ID + +Your module's signing ID is a 32-byte value that is used as a unique identifier within the signing process. Proposer commitment signatures incorporate this value along with the data being signed as a way to create signatures that are exclusive to your module, so other modules can't maliciously construct signatures that appear to be from your module. Your module must have this ID incorporated into itself ahead of time, and the user must include this same ID within their Commit-Boost configuration file section for your module. Commit-Boost does not maintain a global registry of signing IDs, so this is a value you should provide to your users in your documentation. + +The Signing ID is decoupled from your module's human-readable name (the `module_id` field in the Commit-Boost configuration file) so that any changes to your module name will not invalidate signatures from previous versions. Similarly, if you don't change the module ID but *want* to invalidate previous signatures, you can modify the signing ID and it will do so. Just ensure your users are made aware of the change, so they can update it in their Commit-Boost configuration files accordingly. + + +## Nonces + +Your module has the option of using **Nonces** for each of its signature requests. Nonces are intended to be unique values that establish a sequence of signature requests, distinguishing one signature from another - even if all of their other payload information is identical. 
When making a request for a signature, you may include a unique nonce as part of the request; the signature will include it in its data, ensuring that things like replay attacks cannot be used for that signature. + +If you want to use them within your module, your module (or whatever remote backend system it connects to) **will be responsible** for storing, comparing, validating, and otherwise using the nonces. Commit-Boost's signer service by itself **does not** store nonces or track which ones have already been used by a given module. + +In terms of implementation, the nonce format conforms to the specification in [EIP-2681](https://eips.ethereum.org/EIPS/eip-2681). It is an unsigned 64-bit big-endian integer, with a minimum value of 0 and a maximum value of `2^64-2`. We recommend using `2^64-1` as a signifier indicating that your module doesn't use nonces, rather than using 0 for such a purpose. + + +## Structure of a Signature + +The form proposer commitment signatures take depends on the type of signature being requested. BLS signatures take the [standard form](https://eth2book.info/latest/part2/building_blocks/signatures/) (96-byte values). ECDSA (Ethereum EL) signatures take the [standard Ethereum ECDSA `r,s,v` signature form](https://forum.openzeppelin.com/t/sign-it-like-you-mean-it-creating-and-verifying-ethereum-signatures/697). In both cases, the data being signed is a 32-byte hash - the root hash of a composite two-stage [SSZ Merkle tree](https://thogiti.github.io/2024/05/02/Merkleization.html), described below: + +
+ + + +
+ +where, for the sub-tree in blue: + +- `Request Data` is a 32-byte array that serves as the data you want to sign. This is typically a hash of some more complex data on its own that your module constructs. + +- `Signing ID` is your module's 32-byte signing ID. The signer service will load this for your module from its configuration file. + +- `Nonce` is the nonce value for the signature request. While this value must be present, it can be effectively ignored by setting it to some arbitrary value if your module does not track nonces. Conforming with the tree specification, it must be added as a 256-bit unsigned little-endian integer. Most libraries will be able to do this conversion automatically if you specify the field as the language's primitive for 64-bit unsigned integers (e.g., `uint64`, `u64`, `ulong`, etc.). + +- `Chain ID` is the ID of the chain that the Signer service is currently configured to use, as indicated by the [Commit-Boost configuration file](../get_started/configuration.md). This must also be a 256-bit unsigned little-endian integer. + +A Merkle tree must be constructed from these four leaf nodes, and its root hash calculated according to the standard SSZ hash computation rules. This result will be called the "sub-tree root". With this, a second Merkle tree is created using this sub-tree root and a value called the Domain: + +- `Domain` is the 32-byte output of the [compute_domain()](https://eth2book.info/capella/part2/building_blocks/signatures/#domain-separation-and-forks) function in the Beacon specification. The 4-byte domain type in this case is not a standard Beacon domain type, but rather Commit Boost's own domain type: `0x6D6D6F43`. + +The data signed in a proposer commitment is the 32-byte hash root of this new tree (the green `Root` box). 
+ +Many languages provide libraries for computing the root of an SSZ Merkle tree, such as [fastssz for Go](https://github.com/ferranbt/fastssz) or [tree_hash for Rust](https://docs.rs/tree_hash/latest/tree_hash/). When verifying proposer commitment signatures, use a library that supports Merkle tree root hashing, the `compute_domain()` operation, and validation for signatures generated by your key of choice. diff --git a/docs/docs/get_started/building.md b/docs/docs/get_started/building.md index 81968dbc..dd860be2 100644 --- a/docs/docs/get_started/building.md +++ b/docs/docs/get_started/building.md @@ -34,7 +34,7 @@ If you don't want to use the Docker builder, you can compile the Commit-Boost ar Requirements: -- Rust 1.89+ +- Rust 1.91+ - GCC (or another C compiler of your choice) - OpenSSL development libraries - Protobuf Compiler (`protoc`) diff --git a/docs/docs/get_started/configuration.md b/docs/docs/get_started/configuration.md index 9764f821..60e55515 100644 --- a/docs/docs/get_started/configuration.md +++ b/docs/docs/get_started/configuration.md @@ -9,10 +9,31 @@ Commit-Boost needs a configuration file detailing all the services that you want - For a full explanation of all the fields, check out [here](https://github.com/Commit-Boost/commit-boost-client/blob/main/config.example.toml). - For some additional examples on config presets, check out [here](https://github.com/Commit-Boost/commit-boost-client/tree/main/configs). -## Minimal PBS setup on Holesky +## Validation + +The PBS service can be configured to perform various levels of validation against both builder bid requests and unblinded blocks returned by relays. This allows the user to trade-off between speed and safety. + +For requesting builder bids, you can specify the `header_validation_mode` setting within the `[pbs]` configuration section. 
It has three modes: + +- `header_validation_mode = "none"`: The bids returned by the relay will not undergo any validation, and they will only be partially decoded to check the fork version and the value. The bid with the highest value will still be returned, but the PBS service won't check to confirm whether or not the bid is actually legal. We recommend that this only gets used when you absolutely trust each relay you've configured. + +- `header_validation_mode = "standard"`: The bids returned by the relay will be fully decoded and validated against the expected request (such as a matching parent hash, correct relay signature, and so on). This takes a small amount of extra computing power but ensures any invalid bids will be ignored. + +- `header_validation_mode = "extra"`: Performs all of the `standard` validation, plus ensures the block number is correct and the block's gas limit is legal. Requires the `rpc_url` parameter to be set, so the PBS service can query an Execution Client to confirm those details. + +For submitting signed blinded blocks and retrieving unblinded blocks, you can specify the `block_validation_mode` setting: + +- `block_validation_mode = "none"`: The unblinded blocks returned by the relay will not undergo any validation, and they will only be partially decoded to check that the fork version is correct. The unblinded block won't be checked to verify that it matches the original blinded block you submitted. We recommend that this only gets used when you absolutely trust each relay you've configured. + + Blocks will be returned directly from the relay to the Beacon Node, and may not necessarily be in a format the Beacon Node requested. For example, if the Beacon Node sends the signed blinded block as SSZ, but the relay only accepts JSON, it will return the unblinded block to the Beacon Node as JSON rather than having the PBS service re-encode it into SSZ. 
Whether or not this is supported is an implementation detail of the particular Beacon Node you're using. + +- `block_validation_mode = "standard"`: The unblinded blocks returned by the relay will be fully decoded and validated to ensure they match the original request, and are valid according to the rules of the Beacon Chain. This takes a small amount of extra computing power but ensures the block was properly unblinded. + + +## Minimal PBS Setup on Hoodi ```toml -chain = "Holesky" +chain = "Hoodi" [pbs] port = 18550 @@ -24,20 +45,20 @@ url = "" enabled = true ``` -You can find a list of MEV-Boost Holesky relays [here](https://www.coincashew.com/coins/overview-eth/mev-boost/mev-relay-list#holesky-testnet-relays). +You can find a list of MEV-Boost Hoodi relays [here](https://www.coincashew.com/coins/overview-eth/mev-boost/mev-relay-list#hoodi-testnet-relays). After the sidecar is started, it will expose a port (`18550` in this example), that you need to point your CL to. This may be different depending on which CL you're running, check out [here](https://docs.flashbots.net/flashbots-mev-boost/getting-started/system-requirements#consensus-client-configuration-guides) for a list of configuration guides. :::note -In this setup, the signer module will not be started. +In this setup, the Signer service will not be started. ::: -## Signer module +## Signer Service -Commit-Boost supports both local and remote signers. The signer module is responsible for signing the transactions that other modules generates. Please note that only one signer at a time is allowed. +Commit-Boost supports both local and remote signers. The Signer service is responsible for signing the transactions that other modules generate. Please note that only one Signer at a time is allowed. 
-### Local signer +### Local Signer -To start a local signer module, you need to include its parameters in the config file +To start a local Signer Service, you need to include its parameters in the config file ```toml [pbs] @@ -219,9 +240,9 @@ All keys have the same password stored in `secrets/password.txt` ``` -### Proxy keys store +### Proxy Keys -Proxy keys can be used to sign transactions with a different key than the one used to sign the block. Proxy keys are generated by the Signer module and authorized by the validator key. Each module have their own proxy keys, that can be BLS or ECDSA. +Proxy keys can be used to sign transactions with a different key than the one used to sign the block. Proxy keys are generated by the Signer service and authorized by the validator key. Each service can have their own proxy keys, both BLS and ECDSA. To persist proxy keys across restarts, you must enable the proxy store in the config file. There are 2 options for this: @@ -230,7 +251,7 @@ To persist proxy keys across restarts, you must enable the proxy store in the co The keys are stored in plain text in a file. This method is unsafe and should only be used for testing. -#### File structure +#### File Structure ``` @@ -269,7 +290,7 @@ Where each `` file contains the following: The keys are stored in a ERC-2335 style keystore, along with a password. This way, you can safely share the keys directory as without the password they are useless. -#### File structure +#### File Structure ``` ├── @@ -305,13 +326,13 @@ Where the `.json` files contain ERC-2335 keystore, the ` -### Remote signer +### Remote Signer You might choose to use an external service to sign the transactions. For now, two types of remote signers are supported: Web3Signer and Dirk. #### Web3Signer -Web3Signer implements the same API as Commit-Boost, so there's no need to set up a Signer module. 
The parameters needed for the remote signer are: +Web3Signer implements the same API as Commit-Boost, so there's no need to set up a Signer service. The parameters needed for the remote signer are: ```toml [signer.remote] @@ -320,7 +341,7 @@ url = "https://remote.signer.url" #### Dirk -Dirk is a distributed key management system that can be used to sign transactions. In this case the Signer module is needed as an intermediary between the modules and Dirk. The following parameters are needed: +Dirk is a distributed key management system that can be used to sign transactions. In this case the Signer service is needed as an intermediary between the modules and Dirk. The following parameters are needed: ```toml [signer.dirk] @@ -344,7 +365,7 @@ wallets = ["AnotherWallet", "DistributedWallet"] ``` - `cert_path` and `key_path` are the paths to the client certificate and key used to authenticate with Dirk. -- `wallets` is a list of wallets from which the Signer module will load all accounts as consensus keys. Generated proxy keys will have format `///`, so accounts found with that pattern will be ignored. +- `wallets` is a list of wallets from which the Signer service will load all accounts as consensus keys. Generated proxy keys will have format `///`, so accounts found with that pattern will be ignored. - `secrets_path` is the path to the folder containing the passwords of the generated proxy accounts, which will be stored in `////.pass`. Additionally, you can set a proxy store so that the delegation signatures for generated proxy keys are stored locally. As these signatures are not sensitive, the only supported store type is `File`: @@ -358,9 +379,73 @@ Delegation signatures will be stored in files with the format `/deleg A full example of a config file with Dirk can be found [here](https://github.com/Commit-Boost/commit-boost-client/blob/main/examples/configs/dirk_signer.toml). 
+ +### TLS + +By default, the Signer service runs in **insecure** mode, so its API service uses HTTP without any TLS encryption. This is sufficient for testing or if you're running locally within your machine's isolated Docker network and only intend to access it within the confines of your machine. However, for larger production setups, it's recommended to enable TLS - especially for traffic that spans across multiple machines. + +The Signer service in TLS mode supports **TLS 1.2** and **TLS 1.3**. Older protocol versions are not supported. + +To enable TLS, you must first create a **certificate / key pair**. We **strongly advise** using a well-known Certificate Authority to create and sign the certificate, such as [Let's Encrypt](https://letsencrypt.org/getting-started/) (a free service) or [Bluehost](https://www.bluehost.com/help/article/how-to-set-up-an-ssl-certificate-for-website-security) (free but requires an account). We do not recommend using a self-signed certificate / key pair for production environments. + +When configuring TLS support, the Signer service expects a single folder (which you can specify) that contains the following two files: +- `cert.pem`: The SSL certificate file signed by a certificate authority, in PEM format +- `key.pem`: The private key corresponding to `cert.pem` that will be used for signing TLS traffic, in PEM format + +Specifying it is done within Commit-Boost's configuration file using the `[signer.tls_mode]` table as follows: + +```toml +[pbs] +... +with_signer = true + +[signer] +port = 20000 +... + +[signer.tls_mode] +type = "certificate" +path = "path/to/your/cert/folder" +``` + +Where `path` is the aforementioned folder. It defaults to `./certs` but can be replaced with whichever directory your certificate and private key file reside in, as long as they're readable by the Signer service (or its Docker container, if using Docker). 
+ +### Rate limit + +The Signer service implements a rate limit system of 3 failed authentications every 5 minutes. These values can be modified in the config file: + +```toml +[signer] +... +jwt_auth_fail_limit = 3 # The amount of failed requests allowed +jwt_auth_fail_timeout_seconds = 300 # The time window in seconds +``` + +The rate limit is applied to the IP address of the client making the request. By default, the IP is extracted directly from the TCP connection. If you're running the Signer service behind a reverse proxy (e.g. Nginx), you can configure it to extract the IP from a custom HTTP header instead. There are two options: + +- `unique`: Provides an HTTP header that contains the IP. This header is expected to appear only once in the request. This is common when using `X-Real-IP`, `True-Client-IP`, etc. If a request has multiple values for this header, it will be considered invalid and rejected. + +- `rightmost`: Provides an HTTP header that contains a comma-separated list of IPs. The nth rightmost IP in the list is used. If the header appears multiple times, the last occurrence is used. This is common when using `X-Forwarded-For`. + +Examples: + +```toml +[signer.reverse_proxy] +type = "unique" +header = "X-Real-IP" +``` + +```toml +[signer.reverse_proxy] +type = "rightmost" +header = "X-Forwarded-For" +trusted_count = 1 +``` + +Note: `trusted_count` is the number of trusted proxies in front of the Signer service, but the last proxy won't add its address, so the number of skipped IPs is `trusted_count - 1`. See [MDN docs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/X-Forwarded-For#trusted_proxy_count) for more info. + ## Custom module -We currently provide a test module that needs to be built locally. To build the module run: +We currently provide a test module that needs to be built locally. 
To build the module, run: ```bash just docker-build-test-modules @@ -396,19 +481,21 @@ enabled = true id = "DA_COMMIT" type = "commit" docker_image = "test_da_commit" +signing_id = "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" sleep_secs = 5 ``` A few things to note: -- We now added a `signer` section which will be used to create the Signer module. -- There is now a `[[modules]]` section which at a minimum needs to specify the module `id`, `type` and `docker_image`. Additional parameters needed for the business logic of the module will also be here, +- We now added a `signer` section which will be used to create the Signer service. +- There is now a `[[modules]]` section which at a minimum needs to specify the module `id`, `type` and `docker_image`. For modules with type `commit`, which will be used to access the Signer service and request signatures for proposer commitments, you will also need to specify the module's unique `signing_id` (see [the proposer commitment documentation](../developing/prop-commit-signing.md)). Additional parameters needed for the business logic of the module will also be here. To learn more about developing modules, check out [here](/category/developing). + ## Vouch -[Vouch](https://github.com/attestantio/vouch) is a multi-node validator client built by [Attestant](https://www.attestant.io/). Vouch is particular in that it also integrates an MEV-Boost client to interact with relays. The Commit-Boost PBS module is compatible with the Vouch `blockrelay` since it implements the same Builder-API as relays. For example, depending on your setup and preference, you may want to fetch headers from a given relay using Commit-Boost vs using the built-in Vouch `blockrelay`. +[Vouch](https://github.com/attestantio/vouch) is a multi-node validator client built by [Attestant](https://www.attestant.io/). Vouch is particular in that it also integrates an MEV-Boost client to interact with relays. 
The Commit-Boost PBS service is compatible with the Vouch `blockrelay` since it implements the same Builder-API as relays. For example, depending on your setup and preference, you may want to fetch headers from a given relay using Commit-Boost vs using the built-in Vouch `blockrelay`. ### Configuration @@ -431,7 +518,7 @@ Modify the `blockrelay.config` file to add Commit-Boost: #### Beacon Node to Commit-Boost -In this setup, the BN Builder-API endpoint will be pointing to the PBS module (e.g. for Lighthouse you will need the flag `--builder=http://127.0.0.0:18550`). +In this setup, the BN Builder-API endpoint will be pointing to the PBS service (e.g. for Lighthouse you will need the flag `--builder=http://127.0.0.0:18550`). This will bypass the `blockrelay` entirely so make sure all relays are properly configured in the `[[relays]]` section. @@ -446,15 +533,24 @@ This approach could also work if you have a multi-beacon-node setup, where some ## Hot Reload -Commit-Boost supports hot-reloading the configuration file. This means that you can modify the `cb-config.toml` file and apply the changes without needing to restart the modules. To do this, you need to send a `POST` request to the `/reload` endpoint on each module you want to reload the configuration. In the case the module is running in a Docker container without the port exposed (like the signer), you can use the following command: +Commit-Boost supports hot-reloading the configuration file. This means that you can modify the `cb-config.toml` file and apply the changes without needing to restart the services. To do this, you need to send a `POST` request to the `/reload` endpoint on each service you want to reload the configuration. 
In the case the service is running in a Docker container without the port exposed (like the signer), you can use the following command: ```bash docker compose -f cb.docker-compose.yml exec cb_signer curl -X POST http://localhost:20000/reload ``` +### Signer module reload + +The signer module takes 2 optional parameters in the JSON body: + +- `jwt_secrets`: a string with a comma-separated list of `=` for all modules. +- `admin_secret`: a string with the secret for the signer admin JWT. + +Parameters that are not provided will not be updated; they will be regenerated using their original on-disk data as though the signer service was being restarted. Note that any changes you made with calls to `/revoke_jwt` or `/reload` will be reverted, so make sure you provide any modifications again as part of this call. + ### Notes -- The hot reload feature is available for PBS modules (both default and custom) and signer module. +- The hot reload feature is available for both the PBS service (both default and custom) and Signer service. - Changes related to listening hosts and ports will not been applied, as it requires the server to be restarted. - If running in Docker containers, changes in `volumes` will not be applied, as it requires the container to be recreated. Be careful if changing a path to a local file as it may not be accessible from the container. - Custom PBS modules may override the default behaviour of the hot reload feature to parse extra configuration fields. Check the [examples](https://github.com/Commit-Boost/commit-boost-client/blob/main/examples/status_api/src/main.rs) for more details. 
diff --git a/docs/docs/get_started/overview.md b/docs/docs/get_started/overview.md index fb4f8a13..b5719567 100644 --- a/docs/docs/get_started/overview.md +++ b/docs/docs/get_started/overview.md @@ -28,7 +28,7 @@ The services are also published at [each release](https://github.com/orgs/Commit Requirements: -- Rust 1.89 +- Rust 1.91 :::note Run `rustup update` to update Rust and Cargo to the latest version diff --git a/docs/docs/get_started/running/binary.md b/docs/docs/get_started/running/binary.md index 74a09373..8f51fe65 100644 --- a/docs/docs/get_started/running/binary.md +++ b/docs/docs/get_started/running/binary.md @@ -28,7 +28,9 @@ Modules need some environment variables to work correctly. ### Signer Module +- `CB_SIGNER_ADMIN_JWT`: secret to use for admin JWT. - `CB_SIGNER_ENDPOINT`: optional, override to specify the `IP:port` endpoint to bind the signer server to. +- `CB_SIGNER_TLS_CERTIFICATES`: path to the TLS certificates for the server. - For loading keys we currently support: - `CB_SIGNER_LOADER_FILE`: path to a `.json` with plaintext keys (for testing purposes only). - `CB_SIGNER_LOADER_FORMAT`, `CB_SIGNER_LOADER_KEYS_DIR` and `CB_SIGNER_LOADER_SECRETS_DIR`: paths to the `keys` and `secrets` directories or files (ERC-2335 style keystores, see [Signer config](../configuration/#signer-module) for more info). 
diff --git a/docs/docs/res/img/prop_commit_tree.png b/docs/docs/res/img/prop_commit_tree.png new file mode 100644 index 00000000..2c0b1815 Binary files /dev/null and b/docs/docs/res/img/prop_commit_tree.png differ diff --git a/examples/da_commit/src/main.rs b/examples/da_commit/src/main.rs index 27b5ce86..646ace45 100644 --- a/examples/da_commit/src/main.rs +++ b/examples/da_commit/src/main.rs @@ -1,6 +1,6 @@ use std::time::Duration; -use alloy::primitives::Address; +use alloy::primitives::{Address, B256, b256}; use commit_boost::prelude::*; use eyre::{OptionExt, Result}; use lazy_static::lazy_static; @@ -9,6 +9,13 @@ use serde::Deserialize; use tokio::time::sleep; use tracing::{error, info}; +// This is the signing ID used for the DA Commit module. +// Signatures produced by the signer service will incorporate this ID as part of +// the signature, preventing other modules from using the same signature for +// different purposes. +pub const DA_COMMIT_SIGNING_ID: B256 = + b256!("0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b"); + // You can define custom metrics and a custom registry for the business logic of // your module. These will be automatically scaped by the Prometheus server lazy_static! 
{ @@ -25,6 +32,7 @@ struct Datagram { struct DaCommitService { config: StartCommitModuleConfig, + nonce: u64, } // Extra configurations parameters can be set here and will be automatically @@ -84,26 +92,65 @@ impl DaCommitService { ) -> Result<()> { let datagram = Datagram { data }; - let request = SignConsensusRequest::builder(pubkey).with_msg(&datagram); - let signature = self.config.signer_client.request_consensus_signature(request).await?; - - info!("Proposer commitment (consensus): {}", signature); + // Request a signature directly from a BLS key + let request = SignConsensusRequest::builder(pubkey.clone()).with_msg(&datagram); + let response = self.config.signer_client.request_consensus_signature(request).await?; + info!("Proposer commitment (consensus): {}", response.signature); + if verify_proposer_commitment_signature_bls( + self.config.chain, + &pubkey, + &datagram, + &response.signature, + &DA_COMMIT_SIGNING_ID, + self.nonce, + ) { + info!("Signature verified successfully"); + } else { + error!("Signature verification failed"); + } + self.nonce += 1; - let proxy_request_bls = SignProxyRequest::builder(proxy_bls).with_msg(&datagram); - let proxy_signature_bls = + // Request a signature from a proxy BLS key + let proxy_request_bls = SignProxyRequest::builder(proxy_bls.clone()).with_msg(&datagram); + let proxy_response_bls = self.config.signer_client.request_proxy_signature_bls(proxy_request_bls).await?; + info!("Proposer commitment (proxy BLS): {}", proxy_response_bls.signature); + if verify_proposer_commitment_signature_bls( + self.config.chain, + &proxy_bls, + &datagram, + &proxy_response_bls.signature, + &DA_COMMIT_SIGNING_ID, + self.nonce, + ) { + info!("Signature verified successfully"); + } else { + error!("Signature verification failed"); + } + self.nonce += 1; - info!("Proposer commitment (proxy BLS): {}", proxy_signature_bls); - + // If ECDSA keys are enabled, request a signature from a proxy ECDSA key if let Some(proxy_ecdsa) = proxy_ecdsa { let 
proxy_request_ecdsa = SignProxyRequest::builder(proxy_ecdsa).with_msg(&datagram); - let proxy_signature_ecdsa = self + let proxy_response_ecdsa = self .config .signer_client .request_proxy_signature_ecdsa(proxy_request_ecdsa) .await?; - info!("Proposer commitment (proxy ECDSA): {}", proxy_signature_ecdsa); + info!("Proposer commitment (proxy ECDSA): {}", proxy_response_ecdsa.signature); + match verify_proposer_commitment_signature_ecdsa( + self.config.chain, + &proxy_ecdsa, + &datagram, + &proxy_response_ecdsa.signature, + &DA_COMMIT_SIGNING_ID, + self.nonce, + ) { + Ok(_) => info!("Signature verified successfully"), + Err(err) => error!(%err, "Signature verification failed"), + }; } + self.nonce += 1; SIG_RECEIVED_COUNTER.inc(); @@ -131,7 +178,7 @@ async fn main() -> Result<()> { "Starting module with custom data" ); - let mut service = DaCommitService { config }; + let mut service = DaCommitService { config, nonce: 0 }; if let Err(err) = service.run().await { error!(%err, "Service failed"); diff --git a/justfile b/justfile index de70acfa..b4bd1b14 100644 --- a/justfile +++ b/justfile @@ -1,4 +1,4 @@ -toolchain := "nightly-2025-06-26" +toolchain := "nightly-2026-01-01" fmt: rustup toolchain install {{toolchain}} > /dev/null 2>&1 && \ @@ -17,6 +17,7 @@ checklist: just fmt just clippy just test + cargo audit # =================================== # === Build Commands for Services === diff --git a/provisioning/build.Dockerfile b/provisioning/build.Dockerfile index b28b62ff..21b42eb0 100644 --- a/provisioning/build.Dockerfile +++ b/provisioning/build.Dockerfile @@ -1,5 +1,5 @@ # This will be the main build image -FROM --platform=${BUILDPLATFORM} rust:1.89-slim-bookworm AS chef +FROM --platform=${BUILDPLATFORM} rust:1.91-slim-bookworm AS chef ARG TARGETOS TARGETARCH BUILDPLATFORM TARGET_CRATE ENV CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse WORKDIR /app diff --git a/provisioning/grafana/signer_public_dashboard.json b/provisioning/grafana/signer_public_dashboard.json index 
327d48bb..4b904b1a 100644 --- a/provisioning/grafana/signer_public_dashboard.json +++ b/provisioning/grafana/signer_public_dashboard.json @@ -539,13 +539,19 @@ "list": [ { "current": { - "text": "$__all", + "selected": true, + "text": "All", "value": "$__all" }, "description": "SignerAPI endpoint", "includeAll": true, "name": "endpoint", "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, { "selected": false, "text": "get_pubkeys", @@ -558,11 +564,21 @@ }, { "selected": false, - "text": "request_signature", - "value": "request_signature" + "text": "request_signature_bls", + "value": "request_signature_bls" + }, + { + "selected": false, + "text": "request_signature_proxy_bls", + "value": "request_signature_proxy_bls" + }, + { + "selected": false, + "text": "request_signature_proxy_ecdsa", + "value": "request_signature_proxy_ecdsa" } ], - "query": "get_pubkeys, generate_proxy_key, request_signature", + "query": "get_pubkeys, generate_proxy_key, request_signature_bls, request_signature_proxy_bls, request_signature_proxy_ecdsa", "type": "custom" } ] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index b67e7d53..d72668b0 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.89.0" +channel = "1.91.0" diff --git a/tests/Cargo.toml b/tests/Cargo.toml index cceba3bc..51fee87e 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -10,8 +10,11 @@ axum.workspace = true cb-common.workspace = true cb-pbs.workspace = true cb-signer.workspace = true +ethereum_ssz.workspace = true eyre.workspace = true +jsonwebtoken.workspace = true lh_types.workspace = true +rcgen.workspace = true reqwest.workspace = true serde.workspace = true serde_json.workspace = true @@ -25,4 +28,5 @@ tree_hash.workspace = true url.workspace = true [dev-dependencies] -cb-common = { path = "../crates/common", features = ["testing-flags"] } \ No newline at end of file +tracing-test.workspace = true +cb-common = { path = 
"../crates/common", features = ["testing-flags"] } diff --git a/tests/data/configs/pbs.happy.toml b/tests/data/configs/pbs.happy.toml index d77af2b6..67b39911 100644 --- a/tests/data/configs/pbs.happy.toml +++ b/tests/data/configs/pbs.happy.toml @@ -2,7 +2,8 @@ chain = "Holesky" [pbs] docker_image = "ghcr.io/commit-boost/pbs:latest" -extra_validation_enabled = false +header_validation_mode = "standard" +block_validation_mode = "standard" host = "127.0.0.1" late_in_slot_time_ms = 2000 min_bid_eth = 0.5 diff --git a/tests/data/configs/signer.happy.toml b/tests/data/configs/signer.happy.toml new file mode 100644 index 00000000..6fb76445 --- /dev/null +++ b/tests/data/configs/signer.happy.toml @@ -0,0 +1,52 @@ +chain = "Hoodi" + +[pbs] +docker_image = "ghcr.io/commit-boost/pbs:latest" +with_signer = true +host = "127.0.0.1" +port = 18550 +relay_check = true +wait_all_registrations = true +timeout_get_header_ms = 950 +timeout_get_payload_ms = 4000 +timeout_register_validator_ms = 3000 +skip_sigverify = false +min_bid_eth = 0.5 +late_in_slot_time_ms = 2000 +extra_validation_enabled = false +rpc_url = "https://ethereum-holesky-rpc.publicnode.com" + +[[relays]] +id = "example-relay" +url = "http://0xa1cec75a3f0661e99299274182938151e8433c61a19222347ea1313d839229cb4ce4e3e5aa2bdeb71c8fcf1b084963c2@abc.xyz" +headers = { X-MyCustomHeader = "MyCustomHeader" } +enable_timing_games = false +target_first_request_ms = 200 +frequency_get_header_ms = 300 + +[signer] +docker_image = "ghcr.io/commit-boost/signer:latest" +host = "127.0.0.1" +port = 20000 +jwt_auth_fail_limit = 3 +jwt_auth_fail_timeout_seconds = 300 + +[signer.local.loader] +key_path = "./tests/data/keys.example.json" + +[signer.local.store] +proxy_dir = "./proxies" + +[[modules]] +id = "test-module" +signing_id = "0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b" +type = "commit" +docker_image = "test_da_commit" +env_file = ".cb.env" + +[[modules]] +id = "another-module" +signing_id = 
"0x61fe00135d7b4912a8c63ada215ac2e62326e6e7b30f49a29fcf9779d7ad800d" +type = "commit" +docker_image = "test_da_commit" +env_file = ".cb.env" diff --git a/tests/data/get_header/bellatrix.json b/tests/data/get_header/bellatrix.json new file mode 100644 index 00000000..16dfb330 --- /dev/null +++ b/tests/data/get_header/bellatrix.json @@ -0,0 +1,26 @@ +{ + "version": "bellatrix", + "data": { + "message": { + "header": { + "parent_hash": "0x114d1897fefa402a01a653c21a7f1f1db049d1373a5e73a2d25d7a8045dc02a1", + "fee_recipient": "0x477cc10a5b54aed5c88544c2e71ea0581cf64593", + "state_root": "0x6724be16ef8e65681cb66f9c144da67347b8983aa5e3f4662c9b5dba90ab5bc6", + "receipts_root": "0xf2f6d2fe6960e4dedad18cca0c7881e6509d551d3e04c1879a627fb8aba30272", + "logs_bloom": "0x00000400000000000000848008100000000000000000000004000000010080000000000100000400000000000000000000000000020100000000000000000000080004000000000800008008000000000000000020004000000400000000000000000000000400000000000000000000000000000010000002000010000000000000000000800000200100000000000000004000000000200002000004000000000800000000000000000000000000008000000000000000800000008000000400012002000000000000000000000000000200000000000000000000000000040000000000000000000000000000000000408000000000040000000000000000", + "prev_randao": "0x0fde820be6404bcb71d7bbeee140c16cd28b1940a40fa8a4e2c493114a08b38a", + "block_number": "1598034", + "gas_limit": "30000000", + "gas_used": "1939652", + "timestamp": "1716481836", + "extra_data": "0xd983010d0c846765746889676f312e32312e3130856c696e7578", + "base_fee_per_gas": "1266581747", + "block_hash": "0x0d9eccac62175d903e4242783d7252f4ab6cdd35995810646bda627b4c35adac", + "transactions_root": "0x9dca93e8c6c9a1b5fcc850990ed95cd44af96ff0a6094c87b119a34259eb64b0" + }, + "value": "1234567890", + "pubkey": "0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + }, + "signature": 
"0xa9f158bca1d9d6b93a9104f48bd2d1e7689bef3fc974651fc755cc6f50d3649c5153a342a12f95cd8f9cac4f90144985189f498a7e0e1cb202ed5e7c98f3f504f371a53b9293bdd973fbb019c91242f808072d0ffcd9d17e2404baea3190fd18" + } +} \ No newline at end of file diff --git a/tests/data/get_header/capella.json b/tests/data/get_header/capella.json new file mode 100644 index 00000000..6cdbeb98 --- /dev/null +++ b/tests/data/get_header/capella.json @@ -0,0 +1,27 @@ +{ + "version": "capella", + "data": { + "message": { + "header": { + "parent_hash": "0x114d1897fefa402a01a653c21a7f1f1db049d1373a5e73a2d25d7a8045dc02a1", + "fee_recipient": "0x477cc10a5b54aed5c88544c2e71ea0581cf64593", + "state_root": "0x6724be16ef8e65681cb66f9c144da67347b8983aa5e3f4662c9b5dba90ab5bc6", + "receipts_root": "0xf2f6d2fe6960e4dedad18cca0c7881e6509d551d3e04c1879a627fb8aba30272", + "logs_bloom": "0x00000400000000000000848008100000000000000000000004000000010080000000000100000400000000000000000000000000020100000000000000000000080004000000000800008008000000000000000020004000000400000000000000000000000400000000000000000000000000000010000002000010000000000000000000800000200100000000000000004000000000200002000004000000000800000000000000000000000000008000000000000000800000008000000400012002000000000000000000000000000200000000000000000000000000040000000000000000000000000000000000408000000000040000000000000000", + "prev_randao": "0x0fde820be6404bcb71d7bbeee140c16cd28b1940a40fa8a4e2c493114a08b38a", + "block_number": "1598034", + "gas_limit": "30000000", + "gas_used": "1939652", + "timestamp": "1716481836", + "extra_data": "0xd983010d0c846765746889676f312e32312e3130856c696e7578", + "base_fee_per_gas": "1266581747", + "block_hash": "0x0d9eccac62175d903e4242783d7252f4ab6cdd35995810646bda627b4c35adac", + "transactions_root": "0x9dca93e8c6c9a1b5fcc850990ed95cd44af96ff0a6094c87b119a34259eb64b0", + "withdrawals_root": "0x2daccf0e476ca3e2644afbd13b2621d55b4d515b813a3b867cdacea24bb352d1" + }, + "value": "1234567890", + "pubkey": 
"0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + }, + "signature": "0xa9f158bca1d9d6b93a9104f48bd2d1e7689bef3fc974651fc755cc6f50d3649c5153a342a12f95cd8f9cac4f90144985189f498a7e0e1cb202ed5e7c98f3f504f371a53b9293bdd973fbb019c91242f808072d0ffcd9d17e2404baea3190fd18" + } +} \ No newline at end of file diff --git a/tests/data/get_header/deneb.json b/tests/data/get_header/deneb.json new file mode 100644 index 00000000..28d3426a --- /dev/null +++ b/tests/data/get_header/deneb.json @@ -0,0 +1,37 @@ +{ + "version": "deneb", + "data": { + "message": { + "header": { + "parent_hash": "0x114d1897fefa402a01a653c21a7f1f1db049d1373a5e73a2d25d7a8045dc02a1", + "fee_recipient": "0x477cc10a5b54aed5c88544c2e71ea0581cf64593", + "state_root": "0x6724be16ef8e65681cb66f9c144da67347b8983aa5e3f4662c9b5dba90ab5bc6", + "receipts_root": "0xf2f6d2fe6960e4dedad18cca0c7881e6509d551d3e04c1879a627fb8aba30272", + "logs_bloom": "0x00000400000000000000848008100000000000000000000004000000010080000000000100000400000000000000000000000000020100000000000000000000080004000000000800008008000000000000000020004000000400000000000000000000000400000000000000000000000000000010000002000010000000000000000000800000200100000000000000004000000000200002000004000000000800000000000000000000000000008000000000000000800000008000000400012002000000000000000000000000000200000000000000000000000000040000000000000000000000000000000000408000000000040000000000000000", + "prev_randao": "0x0fde820be6404bcb71d7bbeee140c16cd28b1940a40fa8a4e2c493114a08b38a", + "block_number": "1598034", + "gas_limit": "30000000", + "gas_used": "1939652", + "timestamp": "1716481836", + "extra_data": "0xd983010d0c846765746889676f312e32312e3130856c696e7578", + "base_fee_per_gas": "1266581747", + "blob_gas_used": "786432", + "excess_blob_gas": "95158272", + "block_hash": "0x0d9eccac62175d903e4242783d7252f4ab6cdd35995810646bda627b4c35adac", + "transactions_root": 
"0x9dca93e8c6c9a1b5fcc850990ed95cd44af96ff0a6094c87b119a34259eb64b0", + "withdrawals_root": "0x2daccf0e476ca3e2644afbd13b2621d55b4d515b813a3b867cdacea24bb352d1" + }, + "blob_kzg_commitments": [ + "0x9559cce9cd71a3416793c8e28d3aaaae9f53732180f57e046bf725c74ab348a7b16693fd03194cac9dd2199a526461b7", + "0xabc493f754d156c7156eb8365d28eee13e5b3413767356ce4cb30cb0306fbe0ed45eaba92936a94e81ed976aa0d787c2", + "0xa5d87332b5dd391ed3153fe36dbd67775dcbc1818cbf6a68d2089a5c6015de1de02e5138f039f2375e6b3511cc94764b", + "0xa49c576627561ec9ae1ef7494e7cee7ede7fa7695d4462436c3e549cc3ce78674b407e8b5f8903b80f77a68814642d6c", + "0x83155fbeb04758d267193800fb89fa30eb13ac0e217005ae7e271733205ca8a6cd80fba08bf5c9a4a5cc0c9d463ac633", + "0xa20c71d1985996098aa63e8b5dc7b7fedb70de31478fe309dad3ac0e9b6d28d82be8e5e543021a0203dc785742e94b2f" + ], + "value": "1234567890", + "pubkey": "0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + }, + "signature": "0xa9f158bca1d9d6b93a9104f48bd2d1e7689bef3fc974651fc755cc6f50d3649c5153a342a12f95cd8f9cac4f90144985189f498a7e0e1cb202ed5e7c98f3f504f371a53b9293bdd973fbb019c91242f808072d0ffcd9d17e2404baea3190fd18" + } +} \ No newline at end of file diff --git a/tests/data/get_header/electra.json b/tests/data/get_header/electra.json new file mode 100644 index 00000000..458018d6 --- /dev/null +++ b/tests/data/get_header/electra.json @@ -0,0 +1,62 @@ +{ + "version": "electra", + "data": { + "message": { + "header": { + "parent_hash": "0x114d1897fefa402a01a653c21a7f1f1db049d1373a5e73a2d25d7a8045dc02a1", + "fee_recipient": "0x477cc10a5b54aed5c88544c2e71ea0581cf64593", + "state_root": "0x6724be16ef8e65681cb66f9c144da67347b8983aa5e3f4662c9b5dba90ab5bc6", + "receipts_root": "0xf2f6d2fe6960e4dedad18cca0c7881e6509d551d3e04c1879a627fb8aba30272", + "logs_bloom": 
"0x00000400000000000000848008100000000000000000000004000000010080000000000100000400000000000000000000000000020100000000000000000000080004000000000800008008000000000000000020004000000400000000000000000000000400000000000000000000000000000010000002000010000000000000000000800000200100000000000000004000000000200002000004000000000800000000000000000000000000008000000000000000800000008000000400012002000000000000000000000000000200000000000000000000000000040000000000000000000000000000000000408000000000040000000000000000", + "prev_randao": "0x0fde820be6404bcb71d7bbeee140c16cd28b1940a40fa8a4e2c493114a08b38a", + "block_number": "1598034", + "gas_limit": "30000000", + "gas_used": "1939652", + "timestamp": "1716481836", + "extra_data": "0xd983010d0c846765746889676f312e32312e3130856c696e7578", + "base_fee_per_gas": "1266581747", + "blob_gas_used": "786432", + "excess_blob_gas": "95158272", + "block_hash": "0x0d9eccac62175d903e4242783d7252f4ab6cdd35995810646bda627b4c35adac", + "transactions_root": "0x9dca93e8c6c9a1b5fcc850990ed95cd44af96ff0a6094c87b119a34259eb64b0", + "withdrawals_root": "0x2daccf0e476ca3e2644afbd13b2621d55b4d515b813a3b867cdacea24bb352d1" + }, + "blob_kzg_commitments": [ + "0x9559cce9cd71a3416793c8e28d3aaaae9f53732180f57e046bf725c74ab348a7b16693fd03194cac9dd2199a526461b7", + "0xabc493f754d156c7156eb8365d28eee13e5b3413767356ce4cb30cb0306fbe0ed45eaba92936a94e81ed976aa0d787c2", + "0xa5d87332b5dd391ed3153fe36dbd67775dcbc1818cbf6a68d2089a5c6015de1de02e5138f039f2375e6b3511cc94764b", + "0xa49c576627561ec9ae1ef7494e7cee7ede7fa7695d4462436c3e549cc3ce78674b407e8b5f8903b80f77a68814642d6c", + "0x83155fbeb04758d267193800fb89fa30eb13ac0e217005ae7e271733205ca8a6cd80fba08bf5c9a4a5cc0c9d463ac633", + "0xa20c71d1985996098aa63e8b5dc7b7fedb70de31478fe309dad3ac0e9b6d28d82be8e5e543021a0203dc785742e94b2f" + ], + "execution_requests": { + "deposits": [ + { + "pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5", + 
"withdrawal_credentials": "0x0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f", + "amount": "100", + "signature": "0x8aeb4642fb2982039a43fd6a6d9cc0ebf7598dbf02343c4617d9a68d799393c162492add63f31099a25eacc2782ba27a190e977a8c58760b6636dccb503d528b3be9e885c93d5b79699e68fcca870b0c790cdb00d67604d8b4a3025ae75efa2f", + "index": "1" + } + ], + "withdrawals": [ + { + "source_address": "0x1100000000000000000000000000000000000000", + "validator_pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5", + "amount": "1" + } + ], + "consolidations": [ + { + "source_address": "0x1200000000000000000000000000000000000000", + "source_pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5", + "target_pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5" + } + ] + }, + "value": "1234567890", + "pubkey": "0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + }, + "signature": "0xa9f158bca1d9d6b93a9104f48bd2d1e7689bef3fc974651fc755cc6f50d3649c5153a342a12f95cd8f9cac4f90144985189f498a7e0e1cb202ed5e7c98f3f504f371a53b9293bdd973fbb019c91242f808072d0ffcd9d17e2404baea3190fd18" + } +} \ No newline at end of file diff --git a/tests/data/get_header/fulu.json b/tests/data/get_header/fulu.json new file mode 100644 index 00000000..b4cef51a --- /dev/null +++ b/tests/data/get_header/fulu.json @@ -0,0 +1,62 @@ +{ + "version": "fulu", + "data": { + "message": { + "header": { + "parent_hash": "0x114d1897fefa402a01a653c21a7f1f1db049d1373a5e73a2d25d7a8045dc02a1", + "fee_recipient": "0x477cc10a5b54aed5c88544c2e71ea0581cf64593", + "state_root": "0x6724be16ef8e65681cb66f9c144da67347b8983aa5e3f4662c9b5dba90ab5bc6", + "receipts_root": "0xf2f6d2fe6960e4dedad18cca0c7881e6509d551d3e04c1879a627fb8aba30272", + "logs_bloom": 
"0x00000400000000000000848008100000000000000000000004000000010080000000000100000400000000000000000000000000020100000000000000000000080004000000000800008008000000000000000020004000000400000000000000000000000400000000000000000000000000000010000002000010000000000000000000800000200100000000000000004000000000200002000004000000000800000000000000000000000000008000000000000000800000008000000400012002000000000000000000000000000200000000000000000000000000040000000000000000000000000000000000408000000000040000000000000000", + "prev_randao": "0x0fde820be6404bcb71d7bbeee140c16cd28b1940a40fa8a4e2c493114a08b38a", + "block_number": "1598034", + "gas_limit": "30000000", + "gas_used": "1939652", + "timestamp": "1716481836", + "extra_data": "0xd983010d0c846765746889676f312e32312e3130856c696e7578", + "base_fee_per_gas": "1266581747", + "blob_gas_used": "786432", + "excess_blob_gas": "95158272", + "block_hash": "0x0d9eccac62175d903e4242783d7252f4ab6cdd35995810646bda627b4c35adac", + "transactions_root": "0x9dca93e8c6c9a1b5fcc850990ed95cd44af96ff0a6094c87b119a34259eb64b0", + "withdrawals_root": "0x2daccf0e476ca3e2644afbd13b2621d55b4d515b813a3b867cdacea24bb352d1" + }, + "blob_kzg_commitments": [ + "0x9559cce9cd71a3416793c8e28d3aaaae9f53732180f57e046bf725c74ab348a7b16693fd03194cac9dd2199a526461b7", + "0xabc493f754d156c7156eb8365d28eee13e5b3413767356ce4cb30cb0306fbe0ed45eaba92936a94e81ed976aa0d787c2", + "0xa5d87332b5dd391ed3153fe36dbd67775dcbc1818cbf6a68d2089a5c6015de1de02e5138f039f2375e6b3511cc94764b", + "0xa49c576627561ec9ae1ef7494e7cee7ede7fa7695d4462436c3e549cc3ce78674b407e8b5f8903b80f77a68814642d6c", + "0x83155fbeb04758d267193800fb89fa30eb13ac0e217005ae7e271733205ca8a6cd80fba08bf5c9a4a5cc0c9d463ac633", + "0xa20c71d1985996098aa63e8b5dc7b7fedb70de31478fe309dad3ac0e9b6d28d82be8e5e543021a0203dc785742e94b2f" + ], + "execution_requests": { + "deposits": [ + { + "pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5", + 
"withdrawal_credentials": "0x0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f0f", + "amount": "100", + "signature": "0x8aeb4642fb2982039a43fd6a6d9cc0ebf7598dbf02343c4617d9a68d799393c162492add63f31099a25eacc2782ba27a190e977a8c58760b6636dccb503d528b3be9e885c93d5b79699e68fcca870b0c790cdb00d67604d8b4a3025ae75efa2f", + "index": "1" + } + ], + "withdrawals": [ + { + "source_address": "0x1100000000000000000000000000000000000000", + "validator_pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5", + "amount": "1" + } + ], + "consolidations": [ + { + "source_address": "0x1200000000000000000000000000000000000000", + "source_pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5", + "target_pubkey": "0xac0a230bd98a766b8e4156f0626ee679dd280dee5b0eedc2b9455ca3dacc4c7618da5010b9db609450a712f095c9f7a5" + } + ] + }, + "value": "1234567890", + "pubkey": "0x883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" + }, + "signature": "0xa9f158bca1d9d6b93a9104f48bd2d1e7689bef3fc974651fc755cc6f50d3649c5153a342a12f95cd8f9cac4f90144985189f498a7e0e1cb202ed5e7c98f3f504f371a53b9293bdd973fbb019c91242f808072d0ffcd9d17e2404baea3190fd18" + } +} \ No newline at end of file diff --git a/tests/src/lib.rs b/tests/src/lib.rs index d332711f..42eec95a 100644 --- a/tests/src/lib.rs +++ b/tests/src/lib.rs @@ -2,4 +2,5 @@ pub mod mock_relay; pub mod mock_ssv_node; pub mod mock_ssv_public; pub mod mock_validator; +pub mod signer_service; pub mod utils; diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 75532666..21accb34 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -1,4 +1,5 @@ use std::{ + collections::HashSet, net::SocketAddr, sync::{ Arc, RwLock, @@ -10,26 +11,32 @@ use alloy::{primitives::U256, rpc::types::beacon::relay::ValidatorRegistration}; use axum::{ Json, Router, extract::{Path, State}, - 
http::StatusCode, + http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Response}, routing::{get, post}, }; use cb_common::{ pbs::{ BUILDER_V1_API_PATH, BUILDER_V2_API_PATH, BlobsBundle, BuilderBid, BuilderBidElectra, - ExecutionPayloadElectra, ExecutionPayloadHeaderElectra, ExecutionRequests, ForkName, - GET_HEADER_PATH, GET_STATUS_PATH, GetHeaderParams, GetHeaderResponse, GetPayloadInfo, - PayloadAndBlobs, REGISTER_VALIDATOR_PATH, SUBMIT_BLOCK_PATH, SignedBlindedBeaconBlock, - SignedBuilderBid, SubmitBlindedBlockResponse, + BuilderBidFulu, ExecutionPayloadElectra, ExecutionPayloadHeaderElectra, + ExecutionPayloadHeaderFulu, ExecutionRequests, ForkName, GET_HEADER_PATH, GET_STATUS_PATH, + GetHeaderParams, GetHeaderResponse, GetPayloadInfo, PayloadAndBlobs, + REGISTER_VALIDATOR_PATH, SUBMIT_BLOCK_PATH, SignedBuilderBid, SubmitBlindedBlockResponse, }, signature::sign_builder_root, types::{BlsSecretKey, Chain}, - utils::{TestRandomSeed, timestamp_of_slot_start_sec}, + utils::{ + CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, TestRandomSeed, deserialize_body, + get_accept_types, get_consensus_version_header, get_content_type, + timestamp_of_slot_start_sec, + }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK_RESPONSE; use lh_types::KzgProof; +use reqwest::header::CONTENT_TYPE; +use ssz::Encode; use tokio::net::TcpListener; -use tracing::debug; +use tracing::{debug, error}; use tree_hash::TreeHash; pub async fn start_mock_relay_service(state: Arc, port: u16) -> eyre::Result<()> { @@ -45,6 +52,7 @@ pub async fn start_mock_relay_service(state: Arc, port: u16) -> pub struct MockRelayState { pub chain: Chain, pub signer: BlsSecretKey, + pub supported_content_types: Arc>, large_body: bool, supports_submit_block_v2: bool, use_not_found_for_submit_block: bool, @@ -53,6 +61,7 @@ pub struct MockRelayState { received_register_validator: Arc, received_submit_block: Arc, response_override: RwLock>, + bid_value: RwLock, } impl MockRelayState { @@ -95,9 +104,20 @@ impl 
MockRelayState { received_register_validator: Default::default(), received_submit_block: Default::default(), response_override: RwLock::new(None), + bid_value: RwLock::new(U256::from(10)), + supported_content_types: Arc::new( + [EncodingType::Json, EncodingType::Ssz].iter().cloned().collect(), + ), } } + /// Override the bid value returned by this relay. Defaults to + /// `U256::from(10)`. + pub fn with_bid_value(self, value: U256) -> Self { + *self.bid_value.write().unwrap() = value; + self + } + pub fn with_large_body(self) -> Self { Self { large_body: true, ..self } } @@ -132,36 +152,110 @@ pub fn mock_relay_app_router(state: Arc) -> Router { async fn handle_get_header( State(state): State>, Path(GetHeaderParams { parent_hash, .. }): Path, + headers: HeaderMap, ) -> Response { state.received_get_header.fetch_add(1, Ordering::Relaxed); + let accept_types = get_accept_types(&headers) + .map_err(|e| (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}"))); + if let Err(e) = accept_types { + return e.into_response(); + } + let accept_types = accept_types.unwrap(); + let consensus_version_header = + get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); - let mut header = ExecutionPayloadHeaderElectra { - parent_hash: parent_hash.into(), - block_hash: Default::default(), - timestamp: timestamp_of_slot_start_sec(0, state.chain), - ..ExecutionPayloadHeaderElectra::test_random() + let content_type = if state.supported_content_types.contains(&EncodingType::Ssz) && + accept_types.contains(&EncodingType::Ssz) + { + EncodingType::Ssz + } else if state.supported_content_types.contains(&EncodingType::Json) && + accept_types.contains(&EncodingType::Json) + { + EncodingType::Json + } else { + return (StatusCode::NOT_ACCEPTABLE, "No acceptable content type found".to_string()) + .into_response(); }; - header.block_hash.0[0] = 1; + let bid_value = *state.bid_value.read().unwrap(); - let message = BuilderBid::Electra(BuilderBidElectra { - header, - 
blob_kzg_commitments: Default::default(), - execution_requests: ExecutionRequests::default(), - value: U256::from(10), - pubkey: state.signer.public_key().into(), - }); + let data = match consensus_version_header { + ForkName::Electra => { + let mut header = ExecutionPayloadHeaderElectra { + parent_hash: parent_hash.into(), + block_hash: Default::default(), + timestamp: timestamp_of_slot_start_sec(0, state.chain), + ..ExecutionPayloadHeaderElectra::test_random() + }; + header.block_hash.0[0] = 1; - let object_root = message.tree_hash_root(); - let signature = sign_builder_root(state.chain, &state.signer, object_root); - let response = SignedBuilderBid { message, signature }; + let message = BuilderBid::Electra(BuilderBidElectra { + header, + blob_kzg_commitments: Default::default(), + execution_requests: ExecutionRequests::default(), + value: bid_value, + pubkey: state.signer.public_key().into(), + }); + let object_root = message.tree_hash_root(); + let signature = sign_builder_root(state.chain, &state.signer, &object_root); + let response = SignedBuilderBid { message, signature }; + if content_type == EncodingType::Ssz { + response.as_ssz_bytes() + } else { + let versioned_response = GetHeaderResponse { + version: ForkName::Electra, + data: response, + metadata: Default::default(), + }; + serde_json::to_vec(&versioned_response).unwrap() + } + } + ForkName::Fulu => { + let mut header = ExecutionPayloadHeaderFulu { + parent_hash: parent_hash.into(), + block_hash: Default::default(), + timestamp: timestamp_of_slot_start_sec(0, state.chain), + ..ExecutionPayloadHeaderFulu::test_random() + }; + header.block_hash.0[0] = 1; - let response = GetHeaderResponse { - version: ForkName::Electra, - data: response, - metadata: Default::default(), + let message = BuilderBid::Fulu(BuilderBidFulu { + header, + blob_kzg_commitments: Default::default(), + execution_requests: ExecutionRequests::default(), + value: bid_value, + pubkey: state.signer.public_key().into(), + }); + let 
object_root = message.tree_hash_root(); + let signature = sign_builder_root(state.chain, &state.signer, &object_root); + let response = SignedBuilderBid { message, signature }; + if content_type == EncodingType::Ssz { + response.as_ssz_bytes() + } else { + let versioned_response = GetHeaderResponse { + version: ForkName::Fulu, + data: response, + metadata: Default::default(), + }; + serde_json::to_vec(&versioned_response).unwrap() + } + } + _ => { + return ( + StatusCode::BAD_REQUEST, + format!("Unsupported fork {consensus_version_header}"), + ) + .into_response(); + } }; - (StatusCode::OK, Json(response)).into_response() + + let mut response = (StatusCode::OK, data).into_response(); + let consensus_version_header = + HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&content_type.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response } async fn handle_get_status(State(state): State>) -> impl IntoResponse { @@ -184,17 +278,53 @@ async fn handle_register_validator( } async fn handle_submit_block_v1( + headers: HeaderMap, State(state): State>, - Json(submit_block): Json, + raw_request: RawRequest, ) -> Response { if state.use_not_found_for_submit_block() { return StatusCode::NOT_FOUND.into_response(); } state.received_submit_block.fetch_add(1, Ordering::Relaxed); - if state.large_body() { - (StatusCode::OK, Json(vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE])).into_response() + let accept_types = get_accept_types(&headers) + .map_err(|e| (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}"))); + if let Err(e) = accept_types { + return e.into_response(); + } + let accept_types = accept_types.unwrap(); + let consensus_version_header = get_consensus_version_header(&headers); + let response_content_type = if 
state.supported_content_types.contains(&EncodingType::Ssz) && + accept_types.contains(&EncodingType::Ssz) + { + EncodingType::Ssz + } else if state.supported_content_types.contains(&EncodingType::Json) && + accept_types.contains(&EncodingType::Json) + { + EncodingType::Json + } else { + return (StatusCode::NOT_ACCEPTABLE, "No acceptable content type found".to_string()) + .into_response(); + }; + + // Error out if the request content type is not supported + let content_type = get_content_type(&headers); + if !state.supported_content_types.contains(&content_type) { + return (StatusCode::UNSUPPORTED_MEDIA_TYPE, "Unsupported content type".to_string()) + .into_response(); + }; + + let data = if state.large_body() { + vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE] } else { let mut execution_payload = ExecutionPayloadElectra::test_random(); + let submit_block = deserialize_body(&headers, raw_request.body_bytes).await.map_err(|e| { + error!(%e, "failed to deserialize signed blinded block"); + (StatusCode::BAD_REQUEST, format!("failed to deserialize body: {e}")) + }); + if let Err(e) = submit_block { + return e.into_response(); + } + let submit_block = submit_block.unwrap(); execution_payload.block_hash = submit_block.block_hash().into(); let mut blobs_bundle = BlobsBundle::default(); @@ -207,19 +337,49 @@ async fn handle_submit_block_v1( let response = PayloadAndBlobs { execution_payload: execution_payload.into(), blobs_bundle }; - let response = SubmitBlindedBlockResponse { - version: ForkName::Electra, - metadata: Default::default(), - data: response, - }; + if response_content_type == EncodingType::Ssz { + response.as_ssz_bytes() + } else { + // Return JSON for everything else; this is fine for the mock + let response = SubmitBlindedBlockResponse { + version: ForkName::Electra, + metadata: Default::default(), + data: response, + }; + serde_json::to_vec(&response).unwrap() + } + }; - (StatusCode::OK, Json(response)).into_response() + let mut response = (StatusCode::OK, 
data).into_response(); + if response_content_type == EncodingType::Ssz { + let consensus_version_header = match consensus_version_header { + Some(header) => header, + None => { + return (StatusCode::BAD_REQUEST, "Missing consensus version header".to_string()) + .into_response() + } + }; + let consensus_version_header = + HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); } + let content_type_header = HeaderValue::from_str(&response_content_type.to_string()).unwrap(); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + response } -async fn handle_submit_block_v2(State(state): State>) -> Response { + +async fn handle_submit_block_v2( + headers: HeaderMap, + State(state): State>, +) -> Response { if state.use_not_found_for_submit_block() { return StatusCode::NOT_FOUND.into_response(); } state.received_submit_block.fetch_add(1, Ordering::Relaxed); + let content_type = get_content_type(&headers); + if !state.supported_content_types.contains(&content_type) { + return (StatusCode::NOT_ACCEPTABLE, "No acceptable content type found".to_string()) + .into_response(); + }; (StatusCode::ACCEPTED, "").into_response() } diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index ab593277..092b97a5 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -1,10 +1,16 @@ +use std::collections::HashSet; + use alloy::{primitives::B256, rpc::types::beacon::relay::ValidatorRegistration}; use cb_common::{ pbs::{BuilderApiVersion, RelayClient, SignedBlindedBeaconBlock}, types::BlsPublicKey, - utils::bls_pubkey_from_hex, + utils::{CONSENSUS_VERSION_HEADER, EncodingType, ForkName, bls_pubkey_from_hex}, +}; +use reqwest::{ + Response, + header::{ACCEPT, CONTENT_TYPE}, }; -use reqwest::Response; +use ssz::Encode; use crate::utils::generate_mock_relay; @@ -20,13 +26,36 @@ impl MockValidator { Ok(Self { comm_boost: 
generate_mock_relay(port, pubkey)? }) } - pub async fn do_get_header(&self, pubkey: Option) -> eyre::Result { + pub async fn do_get_header( + &self, + pubkey: Option, + accept: HashSet, + fork_name: ForkName, + ) -> eyre::Result { let default_pubkey = bls_pubkey_from_hex( "0xac6e77dfe25ecd6110b8e780608cce0dab71fdd5ebea22a16c0205200f2f8e2e3ad3b71d3499c54ad14d6c21b41a37ae", )?; let url = self.comm_boost.get_header_url(0, &B256::ZERO, &pubkey.unwrap_or(default_pubkey))?; - Ok(self.comm_boost.client.get(url).send().await?) + let accept = match accept.len() { + 0 => None, + 1 => Some(accept.into_iter().next().unwrap().to_string()), + _ => { + let accept_strings: Vec = + accept.into_iter().map(|e| e.to_string()).collect(); + Some(accept_strings.join(", ")) + } + }; + let mut res = self + .comm_boost + .client + .get(url) + .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()); + if let Some(accept_header) = accept { + res = res.header(ACCEPT, accept_header); + } + let res = res.send().await?; + Ok(res) } pub async fn do_get_status(&self) -> eyre::Result { @@ -49,29 +78,76 @@ impl MockValidator { pub async fn do_submit_block_v1( &self, - signed_blinded_block: Option, + signed_blinded_block_opt: Option, + accept: HashSet, + content_type: EncodingType, + fork_name: ForkName, ) -> eyre::Result { - self.do_submit_block_impl(signed_blinded_block, BuilderApiVersion::V1).await + self.do_submit_block_impl( + signed_blinded_block_opt, + accept, + content_type, + fork_name, + BuilderApiVersion::V1, + ) + .await } pub async fn do_submit_block_v2( &self, - signed_blinded_block: Option, + signed_blinded_block_opt: Option, + accept: HashSet, + content_type: EncodingType, + fork_name: ForkName, ) -> eyre::Result { - self.do_submit_block_impl(signed_blinded_block, BuilderApiVersion::V2).await + self.do_submit_block_impl( + signed_blinded_block_opt, + accept, + content_type, + fork_name, + BuilderApiVersion::V2, + ) + .await } async fn do_submit_block_impl( &self, - 
signed_blinded_block: Option, + signed_blinded_block_opt: Option, + accept: HashSet, + content_type: EncodingType, + fork_name: ForkName, api_version: BuilderApiVersion, ) -> eyre::Result { let url = self.comm_boost.submit_block_url(api_version).unwrap(); let signed_blinded_block = - signed_blinded_block.unwrap_or_else(load_test_signed_blinded_block); + signed_blinded_block_opt.unwrap_or_else(load_test_signed_blinded_block); + let body = match content_type { + EncodingType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), + EncodingType::Ssz => signed_blinded_block.as_ssz_bytes(), + }; - Ok(self.comm_boost.client.post(url).json(&signed_blinded_block).send().await?) + let accept = match accept.len() { + 0 => None, + 1 => Some(accept.into_iter().next().unwrap().to_string()), + _ => { + let accept_strings: Vec = + accept.into_iter().map(|e| e.to_string()).collect(); + Some(accept_strings.join(", ")) + } + }; + let mut res = self + .comm_boost + .client + .post(url) + .body(body) + .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()) + .header(CONTENT_TYPE, &content_type.to_string()); + if let Some(accept_header) = accept { + res = res.header(ACCEPT, accept_header); + } + let res = res.send().await?; + Ok(res) } } diff --git a/tests/src/signer_service.rs b/tests/src/signer_service.rs new file mode 100644 index 00000000..550ac4ce --- /dev/null +++ b/tests/src/signer_service.rs @@ -0,0 +1,98 @@ +use std::{collections::HashMap, time::Duration}; + +use cb_common::{ + commit::{constants::STATUS_PATH, request::GetPubkeysResponse}, + config::{ModuleSigningConfig, StartSignerConfig}, + signer::{SignerLoader, ValidatorKeysFormat}, + types::{Chain, ModuleId}, + utils::bls_pubkey_from_hex, +}; +use cb_signer::service::SigningService; +use eyre::Result; +use reqwest::{Certificate, Response, StatusCode}; +use tracing::info; + +use crate::utils::{get_signer_config, get_start_signer_config}; + +// Starts the signer moduler server on a separate task and returns its +// 
configuration +pub async fn start_server( + port: u16, + mod_signing_configs: &HashMap, + admin_secret: String, + use_tls: bool, +) -> Result { + let chain = Chain::Hoodi; + + // Create a signer config + let loader = SignerLoader::ValidatorsDir { + keys_path: "data/keystores/keys".into(), + secrets_path: "data/keystores/secrets".into(), + format: ValidatorKeysFormat::Lighthouse, + }; + let mut config = get_signer_config(loader, use_tls); + config.port = port; + config.jwt_auth_fail_limit = 3; // Set a low fail limit for testing + config.jwt_auth_fail_timeout_seconds = 3; // Set a short timeout for testing + let start_config = get_start_signer_config(config, chain, mod_signing_configs, admin_secret); + + // Run the Signer + let server_handle = tokio::spawn(SigningService::run(start_config.clone())); + + // Wait for the server to start + let (url, client) = match start_config.tls_certificates { + Some(ref certificates) => { + let url = format!("https://{}{}", start_config.endpoint, STATUS_PATH); + let client = reqwest::Client::builder() + .add_root_certificate(Certificate::from_pem(&certificates.0)?) 
+ .build()?; + (url, client) + } + None => { + let url = format!("http://{}{}", start_config.endpoint, STATUS_PATH); + (url, reqwest::Client::new()) + } + }; + + let sleep_duration = Duration::from_millis(100); + for i in 0..100 { + // 10 second max wait + if i > 0 { + tokio::time::sleep(sleep_duration).await; + } + match client.get(&url).send().await { + Ok(_) => { + return Ok(start_config); + } + Err(e) => { + info!("Waiting for signer service to start: {}", e); + } + } + } + Err(eyre::eyre!("Signer service failed to start: {}", server_handle.await.unwrap_err())) +} + +// Verifies that the pubkeys returned by the server match the pubkeys in the +// test data +pub async fn verify_pubkeys(response: Response) -> Result<()> { + // Verify the expected pubkeys are returned + assert!(response.status() == StatusCode::OK); + let pubkey_json = response.json::().await?; + assert_eq!(pubkey_json.keys.len(), 2); + let expected_pubkeys = vec![ + bls_pubkey_from_hex( + "883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4", + )?, + bls_pubkey_from_hex( + "b3a22e4a673ac7a153ab5b3c17a4dbef55f7e47210b20c0cbb0e66df5b36bb49ef808577610b034172e955d2312a61b9", + )?, + ]; + for expected in expected_pubkeys { + assert!( + pubkey_json.keys.iter().any(|k| k.consensus == expected), + "Expected pubkey not found: {expected}" + ); + info!("Server returned expected pubkey: {:?}", expected); + } + Ok(()) +} diff --git a/tests/src/utils.rs b/tests/src/utils.rs index 253007c7..dd0ba733 100644 --- a/tests/src/utils.rs +++ b/tests/src/utils.rs @@ -1,15 +1,18 @@ use std::{ collections::HashMap, net::{Ipv4Addr, SocketAddr}, + path::PathBuf, sync::{Arc, Once}, }; -use alloy::primitives::U256; +use alloy::primitives::{B256, U256}; use cb_common::{ config::{ - PbsConfig, PbsModuleConfig, RelayConfig, SIGNER_IMAGE_DEFAULT, - SIGNER_JWT_AUTH_FAIL_LIMIT_DEFAULT, SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_DEFAULT, - SIGNER_PORT_DEFAULT, SignerConfig, SignerType, 
StartSignerConfig, + BlockValidationMode, CommitBoostConfig, HeaderValidationMode, LogsSettings, ModuleKind, + ModuleSigningConfig, PbsConfig, PbsModuleConfig, RelayConfig, ReverseProxyHeaderSetup, + SIGNER_IMAGE_DEFAULT, SIGNER_JWT_AUTH_FAIL_LIMIT_DEFAULT, + SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_DEFAULT, SIGNER_PORT_DEFAULT, SignerConfig, + SignerType, StartSignerConfig, StaticModuleConfig, StaticPbsConfig, TlsMode, }, pbs::{RelayClient, RelayEntry}, signer::SignerLoader, @@ -17,6 +20,7 @@ use cb_common::{ utils::{bls_pubkey_from_hex, default_host}, }; use eyre::Result; +use rcgen::generate_simple_self_signed; use url::Url; pub fn get_local_address(port: u16) -> String { @@ -66,7 +70,7 @@ pub fn generate_mock_relay_with_batch_size( RelayClient::new(config) } -pub fn get_pbs_static_config(port: u16) -> PbsConfig { +pub fn get_pbs_config(port: u16) -> PbsConfig { PbsConfig { host: Ipv4Addr::UNSPECIFIED, port, @@ -78,7 +82,8 @@ pub fn get_pbs_static_config(port: u16) -> PbsConfig { skip_sigverify: false, min_bid_wei: U256::ZERO, late_in_slot_time_ms: u64::MAX, - extra_validation_enabled: false, + header_validation_mode: HeaderValidationMode::Standard, + block_validation_mode: BlockValidationMode::Standard, ssv_node_api_url: Url::parse("http://localhost:0").unwrap(), ssv_public_api_url: Url::parse("http://localhost:0").unwrap(), rpc_url: None, @@ -89,6 +94,23 @@ pub fn get_pbs_static_config(port: u16) -> PbsConfig { } } +pub fn get_pbs_static_config(pbs_config: PbsConfig) -> StaticPbsConfig { + StaticPbsConfig { docker_image: String::from(""), pbs_config, with_signer: true } +} + +pub fn get_commit_boost_config(pbs_static_config: StaticPbsConfig) -> CommitBoostConfig { + CommitBoostConfig { + chain: Chain::Hoodi, + relays: vec![], + pbs: pbs_static_config, + muxes: None, + modules: Some(vec![]), + signer: None, + metrics: None, + logs: LogsSettings::default(), + } +} + pub fn to_pbs_config( chain: Chain, pbs_config: PbsConfig, @@ -106,7 +128,7 @@ pub fn to_pbs_config( 
} } -pub fn get_signer_config(loader: SignerLoader) -> SignerConfig { +pub fn get_signer_config(loader: SignerLoader, tls: bool) -> SignerConfig { SignerConfig { host: default_host(), port: SIGNER_PORT_DEFAULT, @@ -114,29 +136,60 @@ pub fn get_signer_config(loader: SignerLoader) -> SignerConfig { jwt_auth_fail_limit: SIGNER_JWT_AUTH_FAIL_LIMIT_DEFAULT, jwt_auth_fail_timeout_seconds: SIGNER_JWT_AUTH_FAIL_TIMEOUT_SECONDS_DEFAULT, inner: SignerType::Local { loader, store: None }, + tls_mode: if tls { TlsMode::Certificate(PathBuf::new()) } else { TlsMode::Insecure }, + reverse_proxy: ReverseProxyHeaderSetup::None, } } pub fn get_start_signer_config( signer_config: SignerConfig, chain: Chain, - jwts: HashMap, + mod_signing_configs: &HashMap, + admin_secret: String, ) -> StartSignerConfig { + let tls_certificates = match signer_config.tls_mode { + TlsMode::Insecure => None, + TlsMode::Certificate(_) => Some( + generate_simple_self_signed(vec![signer_config.host.to_string()]) + .map(|x| { + ( + x.cert.pem().as_bytes().to_vec(), + x.key_pair.serialize_pem().as_bytes().to_vec(), + ) + }) + .expect("Failed to generate TLS certificate"), + ), + }; + match signer_config.inner { SignerType::Local { loader, .. 
} => StartSignerConfig { chain, loader: Some(loader), store: None, endpoint: SocketAddr::new(signer_config.host.into(), signer_config.port), - jwts, + mod_signing_configs: mod_signing_configs.clone(), + admin_secret, jwt_auth_fail_limit: signer_config.jwt_auth_fail_limit, jwt_auth_fail_timeout_seconds: signer_config.jwt_auth_fail_timeout_seconds, dirk: None, + tls_certificates, + reverse_proxy: ReverseProxyHeaderSetup::None, }, _ => panic!("Only local signers are supported in tests"), } } +pub fn create_module_config(id: ModuleId, signing_id: B256) -> StaticModuleConfig { + StaticModuleConfig { + id, + signing_id, + docker_image: String::from(""), + env: None, + env_file: None, + kind: ModuleKind::Commit, + } +} + pub fn bls_pubkey_from_hex_unchecked(hex: &str) -> BlsPublicKey { bls_pubkey_from_hex(hex).unwrap() } diff --git a/tests/tests/config.rs b/tests/tests/config.rs index bffefcbc..27b02318 100644 --- a/tests/tests/config.rs +++ b/tests/tests/config.rs @@ -1,7 +1,11 @@ use std::{net::Ipv4Addr, path::PathBuf}; use alloy::primitives::U256; -use cb_common::{config::CommitBoostConfig, types::Chain, utils::WEI_PER_ETH}; +use cb_common::{ + config::{BlockValidationMode, CommitBoostConfig, HeaderValidationMode}, + types::Chain, + utils::WEI_PER_ETH, +}; use eyre::Result; use url::Url; @@ -54,7 +58,8 @@ async fn test_load_pbs_happy() -> Result<()> { dbg!(&U256::from(0.5)); assert_eq!(config.pbs.pbs_config.min_bid_wei, U256::from((0.5 * WEI_PER_ETH as f64) as u64)); assert_eq!(config.pbs.pbs_config.late_in_slot_time_ms, 2000); - assert!(!config.pbs.pbs_config.extra_validation_enabled); + assert_eq!(config.pbs.pbs_config.header_validation_mode, HeaderValidationMode::Standard); + assert_eq!(config.pbs.pbs_config.block_validation_mode, BlockValidationMode::Standard); // Relay specific settings let relay = &config.relays[0]; @@ -156,7 +161,7 @@ async fn test_validate_bad_min_bid() -> Result<()> { #[tokio::test] async fn test_validate_missing_rpc_url() -> Result<()> { let 
mut config = load_happy_config().await?; - config.pbs.pbs_config.extra_validation_enabled = true; + config.pbs.pbs_config.header_validation_mode = HeaderValidationMode::Extra; config.pbs.pbs_config.rpc_url = None; let result = config.validate().await; @@ -165,7 +170,7 @@ async fn test_validate_missing_rpc_url() -> Result<()> { result .unwrap_err() .to_string() - .contains("rpc_url is required if extra_validation_enabled is true") + .contains("rpc_url is required if header_validation_mode is set to extra") ); Ok(()) } diff --git a/tests/tests/pbs_cfg_file_update.rs b/tests/tests/pbs_cfg_file_update.rs index a1d4b94f..b70ab47a 100644 --- a/tests/tests/pbs_cfg_file_update.rs +++ b/tests/tests/pbs_cfg_file_update.rs @@ -1,8 +1,11 @@ -use std::{net::Ipv4Addr, sync::Arc, time::Duration}; +use std::{collections::HashSet, net::Ipv4Addr, sync::Arc, time::Duration}; use alloy::primitives::U256; use cb_common::{ - config::{CommitBoostConfig, LogsSettings, PbsConfig, RelayConfig, StaticPbsConfig}, + config::{ + BlockValidationMode, CommitBoostConfig, HeaderValidationMode, LogsSettings, PbsConfig, + RelayConfig, StaticPbsConfig, + }, pbs::RelayEntry, signer::random_secret, types::Chain, @@ -11,9 +14,10 @@ use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, mock_validator::MockValidator, - utils::{generate_mock_relay, get_pbs_static_config, setup_test_env, to_pbs_config}, + utils::{generate_mock_relay, get_pbs_config, setup_test_env, to_pbs_config}, }; use eyre::Result; +use lh_types::ForkName; use reqwest::StatusCode; use tracing::info; use url::Url; @@ -57,7 +61,8 @@ async fn test_cfg_file_update() -> Result<()> { min_bid_wei: U256::ZERO, late_in_slot_time_ms: u64::MAX / 2, /* serde gets very upset about serializing u64::MAX * or anything close to it */ - extra_validation_enabled: false, + block_validation_mode: BlockValidationMode::Standard, + header_validation_mode: HeaderValidationMode::Standard, 
rpc_url: None, ssv_node_api_url: Url::parse("http://example.com").unwrap(), ssv_public_api_url: Url::parse("http://example.com").unwrap(), @@ -102,7 +107,7 @@ async fn test_cfg_file_update() -> Result<()> { std::fs::write(config_path.clone(), config_toml.as_bytes())?; // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![relay1.clone()]); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), vec![relay1.clone()]); let state = PbsState::new(config, config_path.clone()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); @@ -112,7 +117,7 @@ async fn test_cfg_file_update() -> Result<()> { // Send a get header request - should go to relay 1 only let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None).await?; + let res = mock_validator.do_get_header(None, HashSet::new(), ForkName::Fulu).await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(relay1_state.received_get_header(), 1); assert_eq!(relay2_state.received_get_header(), 0); @@ -154,7 +159,7 @@ async fn test_cfg_file_update() -> Result<()> { // Send another get header request - should go to relay 2 only info!("Sending get header after config update"); - let res = mock_validator.do_get_header(None).await?; + let res = mock_validator.do_get_header(None, HashSet::new(), ForkName::Fulu).await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(relay1_state.received_get_header(), 1); // no change assert_eq!(relay2_state.received_get_header(), 1); // incremented diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 5ae4b656..b7f3c4a5 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -1,65 +1,304 @@ -use std::{path::PathBuf, sync::Arc, time::Duration}; +use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration}; use alloy::primitives::{B256, U256}; use cb_common::{ - pbs::GetHeaderResponse, + 
config::HeaderValidationMode, + pbs::{GetHeaderResponse, SignedBuilderBid}, signature::sign_builder_root, signer::random_secret, types::{BlsPublicKeyBytes, Chain}, - utils::timestamp_of_slot_start_sec, + utils::{ + EncodingType, ForkName, get_bid_value_from_signed_builder_bid_ssz, + get_consensus_version_header, timestamp_of_slot_start_sec, + }, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, mock_validator::MockValidator, - utils::{generate_mock_relay, get_pbs_static_config, setup_test_env, to_pbs_config}, + utils::{generate_mock_relay, get_pbs_config, setup_test_env, to_pbs_config}, }; use eyre::Result; -use lh_types::ForkName; +use lh_types::{ForkVersionDecode, beacon_response::EmptyMetadata}; use reqwest::StatusCode; use tracing::info; use tree_hash::TreeHash; +use url::Url; +/// Test requesting JSON when the relay supports JSON #[tokio::test] async fn test_get_header() -> Result<()> { + test_get_header_impl( + 3200, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + 1, + HeaderValidationMode::Standard, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting SSZ when the relay supports SSZ +#[tokio::test] +async fn test_get_header_ssz() -> Result<()> { + test_get_header_impl( + 3202, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + 1, + HeaderValidationMode::Standard, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting SSZ when the relay only supports JSON, which should be +/// handled because PBS supports both types internally and re-maps them on the +/// fly +#[tokio::test] +async fn test_get_header_ssz_into_json() -> Result<()> { + test_get_header_impl( + 3204, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + 1, + 
HeaderValidationMode::Standard, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting multiple types when the relay supports SSZ, which should +/// return SSZ +#[tokio::test] +async fn test_get_header_multitype_ssz() -> Result<()> { + test_get_header_impl( + 3206, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Ssz]), + 1, + HeaderValidationMode::Standard, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting multiple types when the relay supports JSON, which should +/// still work +#[tokio::test] +async fn test_get_header_multitype_json() -> Result<()> { + test_get_header_impl( + 3208, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Json]), + 1, + HeaderValidationMode::Standard, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +// === Light Mode Tests === + +/// Test requesting JSON without validation when the relay supports JSON +#[tokio::test] +async fn test_get_header_light() -> Result<()> { + test_get_header_impl( + 3210, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + 1, + HeaderValidationMode::None, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting SSZ without validation when the relay supports SSZ +#[tokio::test] +async fn test_get_header_ssz_light() -> Result<()> { + test_get_header_impl( + 3212, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + 1, + HeaderValidationMode::None, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting SSZ without validation when the relay only supports JSON. 
+/// This should actually fail because in no-validation mode we just forward the +/// response without re-encoding it. +#[tokio::test] +async fn test_get_header_ssz_into_json_light() -> Result<()> { + test_get_header_impl( + 3214, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + 1, + HeaderValidationMode::None, + StatusCode::NO_CONTENT, // Should fail because the only relay can't be used + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting multiple types without validation when the relay supports +/// SSZ, which should return SSZ +#[tokio::test] +async fn test_get_header_multitype_ssz_light() -> Result<()> { + test_get_header_impl( + 3216, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Ssz]), + 1, + HeaderValidationMode::None, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Test requesting multiple types without validation when the relay supports +/// JSON, which should still work +#[tokio::test] +async fn test_get_header_multitype_json_light() -> Result<()> { + test_get_header_impl( + 3218, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Json]), + 1, + HeaderValidationMode::None, + StatusCode::OK, + U256::from(10u64), + U256::ZERO, + None, + ForkName::Electra, + ) + .await +} + +/// Core implementation for get_header tests. +/// Pass `rpc_url: Some(url)` when testing `HeaderValidationMode::Extra` — PBS +/// requires a non-None rpc_url to start in that mode. A non-existent address is +/// fine; if the parent block fetch fails the relay response is still returned +/// (extra validation is skipped with a warning). 
+async fn test_get_header_impl( + pbs_port: u16, + accept_types: HashSet, + relay_types: HashSet, + expected_try_count: u64, + mode: HeaderValidationMode, + expected_code: StatusCode, + bid_value: U256, + min_bid_wei: U256, + rpc_url: Option, + fork_name: ForkName, +) -> Result<()> { + // Setup test environment setup_test_env(); let signer = random_secret(); let pubkey = signer.public_key(); - let chain = Chain::Holesky; - let pbs_port = 3200; let relay_port = pbs_port + 1; - // Run a mock relay - let mock_state = Arc::new(MockRelayState::new(chain, signer)); + let mut mock_state = MockRelayState::new(chain, signer).with_bid_value(bid_value); + mock_state.supported_content_types = Arc::new(relay_types); + let mock_state = Arc::new(mock_state); let mock_relay = generate_mock_relay(relay_port, pubkey)?; tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); + let mut pbs_config = get_pbs_config(pbs_port); + pbs_config.header_validation_mode = mode; + pbs_config.min_bid_wei = min_bid_wei; + pbs_config.rpc_url = rpc_url; + let config = to_pbs_config(chain, pbs_config, vec![mock_relay.clone()]); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); // leave some time to start servers tokio::time::sleep(Duration::from_millis(100)).await; + // Send the get_header request let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None).await?; - assert_eq!(res.status(), StatusCode::OK); + let res = mock_validator.do_get_header(None, accept_types.clone(), fork_name).await?; + assert_eq!(res.status(), expected_code); + assert_eq!(mock_state.received_get_header(), expected_try_count); + match expected_code { + StatusCode::OK => {} + _ => return Ok(()), + } - let res = serde_json::from_slice::(&res.bytes().await?)?; + // 
Get the content type + let content_type = match res + .headers() + .get(reqwest::header::CONTENT_TYPE) + .and_then(|ct| ct.to_str().ok()) + .unwrap() + { + ct if ct == EncodingType::Ssz.to_string() => EncodingType::Ssz, + ct if ct == EncodingType::Json.to_string() => EncodingType::Json, + _ => panic!("unexpected content type"), + }; + assert!(accept_types.contains(&content_type)); - assert_eq!(mock_state.received_get_header(), 1); - assert_eq!(res.version, ForkName::Electra); + // Get the data + let res = match content_type { + EncodingType::Json => serde_json::from_slice::(&res.bytes().await?)?, + EncodingType::Ssz => { + let fork = + get_consensus_version_header(res.headers()).expect("missing fork version header"); + let data = SignedBuilderBid::from_ssz_bytes_by_fork(&res.bytes().await?, fork).unwrap(); + GetHeaderResponse { version: fork, data, metadata: EmptyMetadata::default() } + } + }; assert_eq!(res.data.message.header().block_hash().0[0], 1); assert_eq!(res.data.message.header().parent_hash().0, B256::ZERO); - assert_eq!(*res.data.message.value(), U256::from(10)); + assert_eq!(*res.data.message.value(), bid_value); assert_eq!(*res.data.message.pubkey(), BlsPublicKeyBytes::from(mock_state.signer.public_key())); assert_eq!(res.data.message.header().timestamp(), timestamp_of_slot_start_sec(0, chain)); assert_eq!( res.data.signature, - sign_builder_root(chain, &mock_state.signer, res.data.message.tree_hash_root()) + sign_builder_root(chain, &mock_state.signer, &res.data.message.tree_hash_root()) ); Ok(()) } @@ -82,7 +321,7 @@ async fn test_get_header_returns_204_if_relay_down() -> Result<()> { // tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), vec![mock_relay.clone()]); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), 
DefaultBuilderApi>(state)); @@ -91,7 +330,7 @@ async fn test_get_header_returns_204_if_relay_down() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None).await?; + let res = mock_validator.do_get_header(None, HashSet::new(), ForkName::Electra).await?; assert_eq!(res.status(), StatusCode::NO_CONTENT); // 204 error assert_eq!(mock_state.received_get_header(), 0); // no header received @@ -114,7 +353,7 @@ async fn test_get_header_returns_400_if_request_is_invalid() -> Result<()> { tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), vec![mock_relay.clone()]); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); @@ -141,3 +380,304 @@ async fn test_get_header_returns_400_if_request_is_invalid() -> Result<()> { assert_eq!(mock_state.received_get_header(), 0); // no header received Ok(()) } + +/// All validation modes (None, Standard, Extra) enforce the min-bid threshold. +/// None skips expensive crypto checks; Standard adds sigverify + structural +/// checks; Extra adds the parent-block check via EL RPC (which is skipped with +/// a warning if the fetch fails, so a non-existent RPC URL still passes here). +#[tokio::test] +async fn test_get_header_all_modes_enforce_min_bid() -> Result<()> { + let relay_bid = U256::from(7u64); + let min_bid_above_relay = relay_bid + U256::from(1); + // A syntactically valid URL that will never connect — Extra mode config + // validation only requires rpc_url to be Some; the actual fetch failing is + // handled gracefully (extra validation is skipped with a warning). 
+ let fake_rpc: Url = "http://127.0.0.1:1".parse()?; + + for (pbs_port, mode, rpc_url) in [ + (3500u16, HeaderValidationMode::Standard, None), + (3502u16, HeaderValidationMode::None, None), + (3504u16, HeaderValidationMode::Extra, Some(fake_rpc.clone())), + ] { + // Bid below min → all modes reject (204). + test_get_header_impl( + pbs_port, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Json]), + 1, + mode, + StatusCode::NO_CONTENT, + relay_bid, + min_bid_above_relay, + rpc_url.clone(), + ForkName::Electra, + ) + .await?; + + // Bid above min → all modes accept (200). + test_get_header_impl( + pbs_port + 100, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Json]), + 1, + mode, + StatusCode::OK, + min_bid_above_relay, + U256::ZERO, + rpc_url, + ForkName::Electra, + ) + .await?; + } + Ok(()) +} + +/// SSZ round-trip: configure the relay with a specific bid value, request via +/// PBS in None mode with SSZ encoding, and verify the raw response bytes decode +/// to the exact value that was configured. This exercises the byte-offset +/// extraction logic (`get_bid_value_from_signed_builder_bid_ssz`) end-to-end +/// through a live HTTP relay for both currently-supported forks. +#[tokio::test] +async fn test_get_header_ssz_bid_value_round_trip() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let pubkey = signer.public_key(); + let chain = Chain::Holesky; + + // Use a distinctive value so accidental zero-matches are impossible. 
+ let relay_bid = U256::from(999_888_777u64); + + for (pbs_port, fork_name) in [(3508u16, ForkName::Electra), (3510u16, ForkName::Fulu)] { + let relay_port = pbs_port + 1; + let mock_state = + Arc::new(MockRelayState::new(chain, signer.clone()).with_bid_value(relay_bid)); + let mock_relay = generate_mock_relay(relay_port, pubkey.clone())?; + tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); + + let mut pbs_config = get_pbs_config(pbs_port); + // None mode: PBS forwards the raw SSZ bytes without re-encoding. + pbs_config.header_validation_mode = HeaderValidationMode::None; + pbs_config.min_bid_wei = U256::ZERO; + let config = to_pbs_config(chain, pbs_config, vec![mock_relay]); + let state = PbsState::new(config, PathBuf::new()); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(pbs_port)?; + let res = mock_validator + .do_get_header(None, HashSet::from([EncodingType::Ssz]), fork_name) + .await?; + assert_eq!(res.status(), StatusCode::OK, "fork {fork_name}: expected 200"); + + let bytes = res.bytes().await?; + let extracted = get_bid_value_from_signed_builder_bid_ssz(&bytes, fork_name) + .map_err(|e| eyre::eyre!("fork {fork_name}: SSZ extraction failed: {e}"))?; + assert_eq!( + extracted, relay_bid, + "fork {fork_name}: SSZ-extracted bid value does not match configured relay bid" + ); + } + Ok(()) +} + +/// Verify the mock relay returns 400 when the validator requests an unsupported +/// fork. Tested by pointing MockValidator directly at the relay (no PBS) so the +/// assertion is on the relay's raw response, not PBS's 204 fallback. 
+#[tokio::test] +async fn test_get_header_unsupported_fork_returns_400() -> Result<()> { + setup_test_env(); + let signer = random_secret(); + let chain = Chain::Holesky; + + let relay_port = 3512u16; + let mock_state = Arc::new(MockRelayState::new(chain, signer.clone())); + tokio::spawn(start_mock_relay_service(mock_state, relay_port)); + + tokio::time::sleep(Duration::from_millis(100)).await; + + // Point MockValidator directly at the relay (no PBS in the path). + let direct = MockValidator::new(relay_port)?; + for unsupported_fork in [ForkName::Base, ForkName::Altair] { + let res = direct + .do_get_header(None, HashSet::from([EncodingType::Json]), unsupported_fork) + .await?; + assert_eq!( + res.status(), + StatusCode::BAD_REQUEST, + "expected 400 for unsupported fork {unsupported_fork}" + ); + } + Ok(()) +} + +/// Exhaustive bid-acceptance matrix across every (fork, encoding, mode, bid) +/// combination. +#[tokio::test] +async fn test_get_header_bid_validation_matrix() -> Result<()> { + let bid_low = U256::from(5u64); + let bid_high = U256::from(100u64); + let min_bid = U256::from(50u64); + + // (fork, encoding, mode, relay_bid, expected_status) + let cases: &[(ForkName, EncodingType, HeaderValidationMode, U256, StatusCode)] = &[ + ( + ForkName::Electra, + EncodingType::Json, + HeaderValidationMode::None, + bid_low, + StatusCode::NO_CONTENT, + ), + ( + ForkName::Electra, + EncodingType::Json, + HeaderValidationMode::None, + bid_high, + StatusCode::OK, + ), + ( + ForkName::Electra, + EncodingType::Ssz, + HeaderValidationMode::None, + bid_low, + StatusCode::NO_CONTENT, + ), + ( + ForkName::Electra, + EncodingType::Ssz, + HeaderValidationMode::None, + bid_high, + StatusCode::OK, + ), + ( + ForkName::Fulu, + EncodingType::Json, + HeaderValidationMode::None, + bid_low, + StatusCode::NO_CONTENT, + ), + (ForkName::Fulu, EncodingType::Json, HeaderValidationMode::None, bid_high, StatusCode::OK), + ( + ForkName::Fulu, + EncodingType::Ssz, + HeaderValidationMode::None, + 
bid_low, + StatusCode::NO_CONTENT, + ), + (ForkName::Fulu, EncodingType::Ssz, HeaderValidationMode::None, bid_high, StatusCode::OK), + ( + ForkName::Electra, + EncodingType::Json, + HeaderValidationMode::Standard, + bid_low, + StatusCode::NO_CONTENT, + ), + ( + ForkName::Electra, + EncodingType::Json, + HeaderValidationMode::Standard, + bid_high, + StatusCode::OK, + ), + ( + ForkName::Electra, + EncodingType::Ssz, + HeaderValidationMode::Standard, + bid_low, + StatusCode::NO_CONTENT, + ), + ( + ForkName::Electra, + EncodingType::Ssz, + HeaderValidationMode::Standard, + bid_high, + StatusCode::OK, + ), + ( + ForkName::Fulu, + EncodingType::Json, + HeaderValidationMode::Standard, + bid_low, + StatusCode::NO_CONTENT, + ), + ( + ForkName::Fulu, + EncodingType::Json, + HeaderValidationMode::Standard, + bid_high, + StatusCode::OK, + ), + ( + ForkName::Fulu, + EncodingType::Ssz, + HeaderValidationMode::Standard, + bid_low, + StatusCode::NO_CONTENT, + ), + ( + ForkName::Fulu, + EncodingType::Ssz, + HeaderValidationMode::Standard, + bid_high, + StatusCode::OK, + ), + ]; + + for (i, &(fork, encoding, mode, relay_bid, expected_status)) in cases.iter().enumerate() { + test_get_header_impl( + 3900u16 + (i as u16 * 2), + HashSet::from([encoding]), + HashSet::from([encoding]), + 1, + mode, + expected_status, + relay_bid, + min_bid, + None, + fork, + ) + .await + .map_err(|e| eyre::eyre!("case {i} (fork={fork} enc={encoding} mode={mode:?} bid={relay_bid} min={min_bid}): {e}"))?; + } + Ok(()) +} + +/// Standard mode rejects a bid whose embedded pubkey does not match the relay's +/// configured pubkey; None mode forwards it unchecked, proving the bypass works +/// for the signature/pubkey validation check. 
+#[tokio::test] +async fn test_get_header_none_mode_bypasses_pubkey_validation() -> Result<()> { + setup_test_env(); + let chain = Chain::Holesky; + + // The mock relay signs with `signer` and embeds `signer.public_key()` in + // its message, but we register the relay in PBS with a *different* pubkey. + // Standard mode catches this mismatch; None mode does not check. + let signer = random_secret(); + let wrong_pubkey = random_secret().public_key(); + + for (pbs_port, mode, expected_status) in [ + (3514u16, HeaderValidationMode::Standard, StatusCode::NO_CONTENT), + (3516u16, HeaderValidationMode::None, StatusCode::OK), + ] { + let relay_port = pbs_port + 1; + let mock_state = Arc::new(MockRelayState::new(chain, signer.clone())); + // Register with `wrong_pubkey` — PBS will expect this key but the relay + // embeds `signer.public_key()`, causing a mismatch in Standard mode. + let mock_relay = generate_mock_relay(relay_port, wrong_pubkey.clone())?; + tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); + + let mut pbs_config = get_pbs_config(pbs_port); + pbs_config.header_validation_mode = mode; + let config = to_pbs_config(chain, pbs_config, vec![mock_relay]); + let state = PbsState::new(config, PathBuf::new()); + tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); + + tokio::time::sleep(Duration::from_millis(100)).await; + + let mock_validator = MockValidator::new(pbs_port)?; + let res = mock_validator.do_get_header(None, HashSet::new(), ForkName::Electra).await?; + assert_eq!(res.status(), expected_status, "unexpected status for mode {mode:?}"); + } + Ok(()) +} diff --git a/tests/tests/pbs_get_status.rs b/tests/tests/pbs_get_status.rs index 9dc8615f..cd2ab51d 100644 --- a/tests/tests/pbs_get_status.rs +++ b/tests/tests/pbs_get_status.rs @@ -5,7 +5,7 @@ use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, mock_validator::MockValidator, -
utils::{generate_mock_relay, get_pbs_static_config, setup_test_env, to_pbs_config}, + utils::{generate_mock_relay, get_pbs_config, setup_test_env, to_pbs_config}, }; use eyre::Result; use reqwest::StatusCode; @@ -30,7 +30,7 @@ async fn test_get_status() -> Result<()> { tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_0_port)); tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_1_port)); - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays.clone()); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), relays.clone()); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); @@ -63,7 +63,7 @@ async fn test_get_status_returns_502_if_relay_down() -> Result<()> { // Don't start the relay // tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays.clone()); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), relays.clone()); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); diff --git a/tests/tests/pbs_mux.rs b/tests/tests/pbs_mux.rs index 34da1dc7..93731aa5 100644 --- a/tests/tests/pbs_mux.rs +++ b/tests/tests/pbs_mux.rs @@ -1,4 +1,9 @@ -use std::{collections::HashMap, path::PathBuf, sync::Arc, time::Duration}; +use std::{ + collections::{HashMap, HashSet}, + path::PathBuf, + sync::Arc, + time::Duration, +}; use alloy::primitives::U256; use cb_common::{ @@ -12,7 +17,7 @@ use cb_common::{ }, signer::random_secret, types::Chain, - utils::{ResponseReadError, set_ignore_content_length}, + utils::{EncodingType, ForkName, ResponseReadError, set_ignore_content_length}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ @@ -21,7 +26,7 @@ use cb_tests::{ mock_ssv_public::{PublicSsvMockState, TEST_HTTP_TIMEOUT, create_mock_public_ssv_server}, mock_validator::MockValidator, 
utils::{ - bls_pubkey_from_hex_unchecked, generate_mock_relay, get_pbs_static_config, setup_test_env, + bls_pubkey_from_hex_unchecked, generate_mock_relay, get_pbs_config, setup_test_env, to_pbs_config, }, }; @@ -214,7 +219,7 @@ async fn test_mux() -> Result<()> { // Register all relays in PBS config let relays = vec![default_relay.clone()]; - let mut config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); + let mut config = to_pbs_config(chain, get_pbs_config(pbs_port), relays); config.all_relays = vec![mux_relay_1.clone(), mux_relay_2.clone(), default_relay.clone()]; // Configure mux for two relays @@ -238,13 +243,19 @@ async fn test_mux() -> Result<()> { // Send default request without specifying a validator key let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header with default"); - assert_eq!(mock_validator.do_get_header(None).await?.status(), StatusCode::OK); + assert_eq!( + mock_validator.do_get_header(None, HashSet::new(), ForkName::Electra).await?.status(), + StatusCode::OK + ); assert_eq!(mock_state.received_get_header(), 1); // only default relay was used // Send request specifying a validator key to use mux info!("Sending get header with mux"); assert_eq!( - mock_validator.do_get_header(Some(validator_pubkey)).await?.status(), + mock_validator + .do_get_header(Some(validator_pubkey), HashSet::new(), ForkName::Electra) + .await? + .status(), StatusCode::OK ); assert_eq!(mock_state.received_get_header(), 3); // two mux relays were used @@ -261,12 +272,34 @@ async fn test_mux() -> Result<()> { // v1 Submit block requests should go to all relays info!("Sending submit block v1"); - assert_eq!(mock_validator.do_submit_block_v1(None).await?.status(), StatusCode::OK); + assert_eq!( + mock_validator + .do_submit_block_v1( + None, + HashSet::from([EncodingType::Json]), + EncodingType::Json, + ForkName::Electra + ) + .await? 
+ .status(), + StatusCode::OK + ); assert_eq!(mock_state.received_submit_block(), 3); // default + 2 mux relays were used // v2 Submit block requests should go to all relays info!("Sending submit block v2"); - assert_eq!(mock_validator.do_submit_block_v2(None).await?.status(), StatusCode::ACCEPTED); + assert_eq!( + mock_validator + .do_submit_block_v2( + None, + HashSet::from([EncodingType::Json]), + EncodingType::Json, + ForkName::Electra + ) + .await? + .status(), + StatusCode::ACCEPTED + ); assert_eq!(mock_state.received_submit_block(), 6); // default + 2 mux relays were used Ok(()) @@ -333,7 +366,7 @@ async fn test_ssv_multi_with_node() -> Result<()> { }; // Set up the PBS config - let mut pbs_config = get_pbs_static_config(pbs_port); + let mut pbs_config = get_pbs_config(pbs_port); pbs_config.ssv_node_api_url = ssv_node_url.clone(); pbs_config.ssv_public_api_url = ssv_public_url.clone(); pbs_config.mux_registry_refresh_interval_seconds = 1; // Refresh the mux every second @@ -356,7 +389,9 @@ async fn test_ssv_multi_with_node() -> Result<()> { // relay only since it hasn't been seen in the mux yet let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(Some(pubkey2.clone())).await?; + let res = mock_validator + .do_get_header(Some(pubkey2.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(relay_state.received_get_header(), 1); // pubkey2 was loaded from the SSV node @@ -429,7 +464,7 @@ async fn test_ssv_multi_with_public() -> Result<()> { }; // Set up the PBS config - let mut pbs_config = get_pbs_static_config(pbs_port); + let mut pbs_config = get_pbs_config(pbs_port); pbs_config.ssv_node_api_url = ssv_node_url.clone(); pbs_config.ssv_public_api_url = ssv_public_url.clone(); pbs_config.mux_registry_refresh_interval_seconds = 1; // Refresh the mux every second @@ -452,7 +487,9 @@ async fn test_ssv_multi_with_public() -> Result<()> { // 
relay only since it hasn't been seen in the mux yet let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(Some(pubkey2.clone())).await?; + let res = mock_validator + .do_get_header(Some(pubkey2.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(relay_state.received_get_header(), 1); // pubkey2 was loaded from the SSV public API diff --git a/tests/tests/pbs_mux_refresh.rs b/tests/tests/pbs_mux_refresh.rs index ceb688cb..11a96712 100644 --- a/tests/tests/pbs_mux_refresh.rs +++ b/tests/tests/pbs_mux_refresh.rs @@ -1,4 +1,4 @@ -use std::{path::PathBuf, sync::Arc, time::Duration}; +use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration}; use cb_common::{ config::{MuxConfig, MuxKeysLoader, PbsMuxes}, @@ -11,9 +11,10 @@ use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, mock_ssv_public::{PublicSsvMockState, create_mock_public_ssv_server}, mock_validator::MockValidator, - utils::{generate_mock_relay, get_pbs_static_config, to_pbs_config}, + utils::{generate_mock_relay, get_pbs_config, to_pbs_config}, }; use eyre::Result; +use lh_types::ForkName; use reqwest::StatusCode; use tokio::sync::RwLock; use tracing::info; @@ -87,7 +88,7 @@ async fn test_auto_refresh() -> Result<()> { }; // Set up the PBS config - let mut pbs_config = get_pbs_static_config(pbs_port); + let mut pbs_config = get_pbs_config(pbs_port); pbs_config.ssv_public_api_url = ssv_api_url.clone(); pbs_config.mux_registry_refresh_interval_seconds = 1; // Refresh the mux every second let (mux_lookup, registry_muxes) = muxes.validate_and_fill(chain, &pbs_config).await?; @@ -109,7 +110,9 @@ async fn test_auto_refresh() -> Result<()> { // relay only since it hasn't been seen in the mux yet let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(Some(new_mux_pubkey.clone())).await?; + let res 
= mock_validator + .do_get_header(Some(new_mux_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 1); // default relay was used assert_eq!(mux_relay_state.received_get_header(), 0); // mux relay was not used @@ -137,14 +140,18 @@ async fn test_auto_refresh() -> Result<()> { assert!(logs_contain(&format!("fetched 2 pubkeys for registry mux {mux_relay_id}"))); // Try to run a get_header on the new pubkey - now it should use the mux relay - let res = mock_validator.do_get_header(Some(new_mux_pubkey.clone())).await?; + let res = mock_validator + .do_get_header(Some(new_mux_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 1); // default relay was not used here assert_eq!(mux_relay_state.received_get_header(), 1); // mux relay was used // Now try to do a get_header with the old pubkey - it should only use the // default relay - let res = mock_validator.do_get_header(Some(default_pubkey.clone())).await?; + let res = mock_validator + .do_get_header(Some(default_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 2); // default relay was used assert_eq!(mux_relay_state.received_get_header(), 1); // mux relay was not used @@ -161,7 +168,9 @@ async fn test_auto_refresh() -> Result<()> { // Try to do a get_header with the removed pubkey - it should only use the // default relay - let res = mock_validator.do_get_header(Some(existing_mux_pubkey.clone())).await?; + let res = mock_validator + .do_get_header(Some(existing_mux_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 3); // default relay was used assert_eq!(mux_relay_state.received_get_header(), 
1); // mux relay was not used diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index b5854829..36214c15 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -1,25 +1,38 @@ -use std::{path::PathBuf, sync::Arc, time::Duration}; +use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration}; use cb_common::{ - pbs::{BuilderApiVersion, GetPayloadInfo, SubmitBlindedBlockResponse}, + config::BlockValidationMode, + pbs::{BuilderApiVersion, GetPayloadInfo, PayloadAndBlobs, SubmitBlindedBlockResponse}, signer::random_secret, types::Chain, + utils::{EncodingType, ForkName}, }; use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, mock_validator::{MockValidator, load_test_signed_blinded_block}, - utils::{generate_mock_relay, get_pbs_static_config, setup_test_env, to_pbs_config}, + utils::{generate_mock_relay, get_pbs_config, setup_test_env, to_pbs_config}, }; use eyre::Result; +use lh_types::beacon_response::ForkVersionDecode; use reqwest::{Response, StatusCode}; use tracing::info; #[tokio::test] async fn test_submit_block_v1() -> Result<()> { - let res = submit_block_impl(3800, &BuilderApiVersion::V1, false, false).await?; - assert_eq!(res.status(), StatusCode::OK); - + let res = submit_block_impl( + 3800, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + 1, + BlockValidationMode::Standard, + StatusCode::OK, + false, + false, + ) + .await?; let signed_blinded_block = load_test_signed_blinded_block(); let response_body = serde_json::from_slice::(&res.bytes().await?)?; @@ -32,8 +45,19 @@ async fn test_submit_block_v1() -> Result<()> { #[tokio::test] async fn test_submit_block_v2() -> Result<()> { - let res = submit_block_impl(3802, &BuilderApiVersion::V2, false, false).await?; - assert_eq!(res.status(), 
StatusCode::ACCEPTED); + let res = submit_block_impl( + 3802, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + 1, + BlockValidationMode::Standard, + StatusCode::ACCEPTED, + false, + false, + ) + .await?; assert_eq!(res.bytes().await?.len(), 0); Ok(()) } @@ -42,8 +66,19 @@ async fn test_submit_block_v2() -> Result<()> { // v2, PBS falls back to v1 and successfully submits the block. #[tokio::test] async fn test_submit_block_v2_without_relay_support() -> Result<()> { - let res = submit_block_impl(3804, &BuilderApiVersion::V2, true, false).await?; - assert_eq!(res.status(), StatusCode::ACCEPTED); + let res = submit_block_impl( + 3804, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + 1, + BlockValidationMode::Standard, + StatusCode::ACCEPTED, + true, + false, + ) + .await?; assert_eq!(res.bytes().await?.len(), 0); Ok(()) } @@ -52,8 +87,354 @@ async fn test_submit_block_v2_without_relay_support() -> Result<()> { // for both v1 and v2, PBS doesn't loop forever. 
#[tokio::test] async fn test_submit_block_on_broken_relay() -> Result<()> { - let res = submit_block_impl(3806, &BuilderApiVersion::V2, true, true).await?; - assert_eq!(res.status(), StatusCode::BAD_GATEWAY); + let _res = submit_block_impl( + 3806, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + 1, + BlockValidationMode::Standard, + StatusCode::BAD_GATEWAY, + true, + true, + ) + .await?; + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v1_ssz() -> Result<()> { + let res = submit_block_impl( + 3808, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Ssz, + 1, + BlockValidationMode::Standard, + StatusCode::OK, + false, + false, + ) + .await?; + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v2_ssz() -> Result<()> { + let res = submit_block_impl( + 3810, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Ssz, + 1, + BlockValidationMode::Standard, + StatusCode::ACCEPTED, + false, + false, + ) + .await?; + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +/// Test that a v1 submit block request in SSZ is converted to JSON if the relay +/// only supports JSON +#[tokio::test] +async fn test_submit_block_v1_ssz_into_json() -> Result<()> { + let res = submit_block_impl( + 3812, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + BlockValidationMode::Standard, + StatusCode::OK, + false, + false, + ) + .await?; + let 
signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +/// Test that a v2 submit block request in SSZ is converted to JSON if the relay +/// only supports JSON +#[tokio::test] +async fn test_submit_block_v2_ssz_into_json() -> Result<()> { + let res = submit_block_impl( + 3814, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + BlockValidationMode::Standard, + StatusCode::ACCEPTED, + false, + false, + ) + .await?; + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +/// Test v1 requesting multiple types when the relay supports SSZ, which should +/// return SSZ +#[tokio::test] +async fn test_submit_block_v1_multitype_ssz() -> Result<()> { + let res = submit_block_impl( + 3816, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Ssz]), + EncodingType::Ssz, + 1, + BlockValidationMode::Standard, + StatusCode::OK, + false, + false, + ) + .await?; + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +/// Test v1 requesting multiple types when the relay supports JSON, which should +/// still return SSZ +#[tokio::test] +async fn test_submit_block_v1_multitype_json() -> Result<()> { + let res = submit_block_impl( + 3818, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + BlockValidationMode::Standard, + StatusCode::OK, + false, + false, + ) + .await?; 
+ let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v1_light() -> Result<()> { + let res = submit_block_impl( + 3820, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + 1, + BlockValidationMode::None, + StatusCode::OK, + false, + false, + ) + .await?; + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = serde_json::from_slice::(&res.bytes().await?)?; + assert_eq!( + response_body.data.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v2_light() -> Result<()> { + let res = submit_block_impl( + 3822, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + 1, + BlockValidationMode::None, + StatusCode::ACCEPTED, + false, + false, + ) + .await?; + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +#[tokio::test] +async fn test_submit_block_v1_ssz_light() -> Result<()> { + let res = submit_block_impl( + 3824, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Ssz, + 1, + BlockValidationMode::None, + StatusCode::OK, + false, + false, + ) + .await?; + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +#[tokio::test] +async fn 
test_submit_block_v2_ssz_light() -> Result<()> { + let res = submit_block_impl( + 3826, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Ssz, + 1, + BlockValidationMode::None, + StatusCode::ACCEPTED, + false, + false, + ) + .await?; + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +/// Test that a v1 submit block request in light mode, with SSZ, is converted to +/// JSON if the relay only supports JSON +#[tokio::test] +async fn test_submit_block_v1_ssz_into_json_light() -> Result<()> { + submit_block_impl( + 3828, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + BlockValidationMode::None, + StatusCode::BAD_GATEWAY, + false, + false, + ) + .await?; + Ok(()) +} + +/// Test that a v2 submit block request in light mode, with SSZ, is converted to +/// JSON if the relay only supports JSON +#[tokio::test] +async fn test_submit_block_v2_ssz_into_json_light() -> Result<()> { + let res = submit_block_impl( + 3830, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + BlockValidationMode::Standard, + StatusCode::ACCEPTED, + false, + false, + ) + .await?; + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +/// Test v1 requesting multiple types in light mode when the relay supports SSZ, +/// which should return SSZ +#[tokio::test] +async fn test_submit_block_v1_multitype_ssz_light() -> Result<()> { + let res = submit_block_impl( + 3832, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Ssz]), + EncodingType::Ssz, + 1, + BlockValidationMode::None, + StatusCode::OK, + false, + false, + ) + .await?; + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, 
ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +/// Test v1 requesting multiple types in light mode when the relay supports +/// JSON, which should be able to handle an SSZ request by converting to JSON +#[tokio::test] +async fn test_submit_block_v1_multitype_json_light() -> Result<()> { + let res = submit_block_impl( + 3834, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + BlockValidationMode::None, + StatusCode::OK, + false, + false, + ) + .await?; + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = serde_json::from_slice::(&res.bytes().await?)?; + assert_eq!( + response_body.data.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); Ok(()) } @@ -64,13 +445,13 @@ async fn test_submit_block_too_large() -> Result<()> { let pubkey = signer.public_key(); let chain = Chain::Holesky; - let pbs_port = 3900; + let pbs_port = 3836; let relays = vec![generate_mock_relay(pbs_port + 1, pubkey)?]; let mock_state = Arc::new(MockRelayState::new(chain, signer).with_large_body()); tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), relays); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); @@ -79,7 +460,14 @@ async fn test_submit_block_too_large() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending submit block"); - let res = mock_validator.do_submit_block_v1(None).await; + let res = mock_validator + .do_submit_block_v1( + None, + HashSet::from([EncodingType::Json]), + EncodingType::Json, + ForkName::Electra, + ) + .await; // response size exceeds max size: max: 
20971520 assert_eq!(res.unwrap().status(), StatusCode::BAD_GATEWAY); @@ -87,21 +475,30 @@ async fn test_submit_block_too_large() -> Result<()> { Ok(()) } +#[allow(clippy::too_many_arguments)] async fn submit_block_impl( pbs_port: u16, - api_version: &BuilderApiVersion, + api_version: BuilderApiVersion, + accept_types: HashSet, + relay_types: HashSet, + serialization_mode: EncodingType, + expected_try_count: u64, + mode: BlockValidationMode, + expected_code: StatusCode, remove_v2_support: bool, force_404s: bool, ) -> Result { + // Setup test environment setup_test_env(); let signer = random_secret(); let pubkey = signer.public_key(); - let chain = Chain::Holesky; + let relay_port = pbs_port + 1; // Run a mock relay - let relays = vec![generate_mock_relay(pbs_port + 1, pubkey)?]; + let mock_relay = generate_mock_relay(relay_port, pubkey)?; let mut mock_relay_state = MockRelayState::new(chain, signer); + mock_relay_state.supported_content_types = Arc::new(relay_types); if remove_v2_support { mock_relay_state = mock_relay_state.with_no_submit_block_v2(); } @@ -109,28 +506,46 @@ async fn submit_block_impl( mock_relay_state = mock_relay_state.with_not_found_for_submit_block(); } let mock_state = Arc::new(mock_relay_state); - tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); + tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); + let mut pbs_config = get_pbs_config(pbs_port); + pbs_config.block_validation_mode = mode; + let config = to_pbs_config(chain, pbs_config, vec![mock_relay]); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); // leave some time to start servers tokio::time::sleep(Duration::from_millis(100)).await; + // Send the submit block request let signed_blinded_block = load_test_signed_blinded_block(); let mock_validator = MockValidator::new(pbs_port)?; 
info!("Sending submit block"); let res = match api_version { BuilderApiVersion::V1 => { - mock_validator.do_submit_block_v1(Some(signed_blinded_block)).await? + mock_validator + .do_submit_block_v1( + Some(signed_blinded_block), + accept_types, + serialization_mode, + ForkName::Electra, + ) + .await? } BuilderApiVersion::V2 => { - mock_validator.do_submit_block_v2(Some(signed_blinded_block)).await? + mock_validator + .do_submit_block_v2( + Some(signed_blinded_block), + accept_types, + serialization_mode, + ForkName::Electra, + ) + .await? } }; - let expected_count = if force_404s { 0 } else { 1 }; + let expected_count = if force_404s { 0 } else { expected_try_count }; assert_eq!(mock_state.received_submit_block(), expected_count); + assert_eq!(res.status(), expected_code); Ok(res) } diff --git a/tests/tests/pbs_post_validators.rs b/tests/tests/pbs_post_validators.rs index ef2ac40b..12601cda 100644 --- a/tests/tests/pbs_post_validators.rs +++ b/tests/tests/pbs_post_validators.rs @@ -9,7 +9,7 @@ use cb_pbs::{DefaultBuilderApi, PbsService, PbsState}; use cb_tests::{ mock_relay::{MockRelayState, start_mock_relay_service}, mock_validator::MockValidator, - utils::{generate_mock_relay, get_pbs_static_config, setup_test_env, to_pbs_config}, + utils::{generate_mock_relay, get_pbs_config, setup_test_env, to_pbs_config}, }; use eyre::Result; use reqwest::StatusCode; @@ -30,7 +30,7 @@ async fn test_register_validators() -> Result<()> { tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), relays); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); @@ -79,7 +79,7 @@ async fn test_register_validators_does_not_retry_on_429() -> Result<()> { tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); // Run the PBS service - 
let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), relays); + let config = to_pbs_config(chain, get_pbs_config(pbs_port), relays); let state = PbsState::new(config, PathBuf::new()); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state.clone())); @@ -131,7 +131,7 @@ async fn test_register_validators_retries_on_500() -> Result<()> { tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); // Set retry limit to 3 - let mut pbs_config = get_pbs_static_config(pbs_port); + let mut pbs_config = get_pbs_config(pbs_port); pbs_config.register_validator_retry_limit = 3; let config = to_pbs_config(chain, pbs_config, relays); diff --git a/tests/tests/signer_jwt_auth.rs b/tests/tests/signer_jwt_auth.rs index 683a268a..d1b65b3f 100644 --- a/tests/tests/signer_jwt_auth.rs +++ b/tests/tests/signer_jwt_auth.rs @@ -1,29 +1,51 @@ use std::{collections::HashMap, time::Duration}; +use alloy::primitives::b256; use cb_common::{ - commit::{constants::GET_PUBKEYS_PATH, request::GetPubkeysResponse}, - config::StartSignerConfig, - signer::{SignerLoader, ValidatorKeysFormat}, - types::{Chain, ModuleId}, - utils::{bls_pubkey_from_hex, create_jwt}, + commit::{ + constants::{GET_PUBKEYS_PATH, REVOKE_MODULE_PATH}, + request::RevokeModuleRequest, + }, + config::{ModuleSigningConfig, load_module_signing_configs}, + types::ModuleId, + utils::{create_admin_jwt, create_jwt}, +}; +use cb_tests::{ + signer_service::{start_server, verify_pubkeys}, + utils::{self, setup_test_env}, }; -use cb_signer::service::SigningService; -use cb_tests::utils::{get_signer_config, get_start_signer_config, setup_test_env}; use eyre::Result; -use reqwest::{Response, StatusCode}; +use reqwest::StatusCode; use tracing::info; const JWT_MODULE: &str = "test-module"; const JWT_SECRET: &str = "test-jwt-secret"; +const ADMIN_SECRET: &str = "test-admin-secret"; + +async fn create_mod_signing_configs() -> HashMap { + let mut cfg = + 
utils::get_commit_boost_config(utils::get_pbs_static_config(utils::get_pbs_config(0))); + + let module_id = ModuleId(JWT_MODULE.to_string()); + let signing_id = b256!("0101010101010101010101010101010101010101010101010101010101010101"); + + cfg.modules = Some(vec![utils::create_module_config(module_id.clone(), signing_id)]); + + let jwts = HashMap::from([(module_id.clone(), JWT_SECRET.to_string())]); + + load_module_signing_configs(&cfg, &jwts).unwrap() +} #[tokio::test] async fn test_signer_jwt_auth_success() -> Result<()> { setup_test_env(); let module_id = ModuleId(JWT_MODULE.to_string()); - let start_config = start_server(20100).await?; + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20100, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let jwt_config = mod_cfgs.get(&module_id).expect("JWT config for test module not found"); // Run a pubkeys request - let jwt = create_jwt(&module_id, JWT_SECRET)?; + let jwt = create_jwt(&module_id, &jwt_config.jwt_secret, GET_PUBKEYS_PATH, None)?; let client = reqwest::Client::new(); let url = format!("http://{}{}", start_config.endpoint, GET_PUBKEYS_PATH); let response = client.get(&url).bearer_auth(&jwt).send().await?; @@ -38,10 +60,11 @@ async fn test_signer_jwt_auth_success() -> Result<()> { async fn test_signer_jwt_auth_fail() -> Result<()> { setup_test_env(); let module_id = ModuleId(JWT_MODULE.to_string()); - let start_config = start_server(20200).await?; + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20101, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; // Run a pubkeys request - this should fail due to invalid JWT - let jwt = create_jwt(&module_id, "incorrect secret")?; + let jwt = create_jwt(&module_id, "incorrect secret", GET_PUBKEYS_PATH, None)?; let client = reqwest::Client::new(); let url = format!("http://{}{}", start_config.endpoint, GET_PUBKEYS_PATH); let response = client.get(&url).bearer_auth(&jwt).send().await?; @@ 
-58,10 +81,12 @@ async fn test_signer_jwt_auth_fail() -> Result<()> { async fn test_signer_jwt_rate_limit() -> Result<()> { setup_test_env(); let module_id = ModuleId(JWT_MODULE.to_string()); - let start_config = start_server(20300).await?; + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20102, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let mod_cfg = mod_cfgs.get(&module_id).expect("JWT config for test module not found"); // Run as many pubkeys requests as the fail limit - let jwt = create_jwt(&module_id, "incorrect secret")?; + let jwt = create_jwt(&module_id, "incorrect secret", GET_PUBKEYS_PATH, None)?; let client = reqwest::Client::new(); let url = format!("http://{}{}", start_config.endpoint, GET_PUBKEYS_PATH); for _ in 0..start_config.jwt_auth_fail_limit { @@ -70,7 +95,7 @@ async fn test_signer_jwt_rate_limit() -> Result<()> { } // Run another request - this should fail due to rate limiting now - let jwt = create_jwt(&module_id, JWT_SECRET)?; + let jwt = create_jwt(&module_id, &mod_cfg.jwt_secret, GET_PUBKEYS_PATH, None)?; let response = client.get(&url).bearer_auth(&jwt).send().await?; assert!(response.status() == StatusCode::TOO_MANY_REQUESTS); @@ -85,65 +110,101 @@ async fn test_signer_jwt_rate_limit() -> Result<()> { Ok(()) } -// Starts the signer moduler server on a separate task and returns its -// configuration -async fn start_server(port: u16) -> Result { +#[tokio::test] +async fn test_signer_revoked_jwt_fail() -> Result<()> { setup_test_env(); - let chain = Chain::Hoodi; - - // Mock JWT secrets + let admin_secret = ADMIN_SECRET.to_string(); let module_id = ModuleId(JWT_MODULE.to_string()); - let mut jwts = HashMap::new(); - jwts.insert(module_id.clone(), JWT_SECRET.to_string()); - - // Create a signer config - let loader = SignerLoader::ValidatorsDir { - keys_path: "data/keystores/keys".into(), - secrets_path: "data/keystores/secrets".into(), - format: ValidatorKeysFormat::Lighthouse, - }; - let mut 
config = get_signer_config(loader); - config.port = port; - config.jwt_auth_fail_limit = 3; // Set a low fail limit for testing - config.jwt_auth_fail_timeout_seconds = 3; // Set a short timeout for testing - let start_config = get_start_signer_config(config, chain, jwts); - - // Run the Signer - let server_handle = tokio::spawn(SigningService::run(start_config.clone())); - - // Make sure the server is running - tokio::time::sleep(Duration::from_millis(100)).await; - if server_handle.is_finished() { - return Err(eyre::eyre!( - "Signer service failed to start: {}", - server_handle.await.unwrap_err() - )); - } - Ok(start_config) + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20400, &mod_cfgs, admin_secret.clone(), false).await?; + + // Run as many pubkeys requests as the fail limit + let jwt = create_jwt(&module_id, JWT_SECRET, GET_PUBKEYS_PATH, None)?; + let client = reqwest::Client::new(); + + // At first, test module should be allowed to request pubkeys + let url = format!("http://{}{}", start_config.endpoint, GET_PUBKEYS_PATH); + let response = client.get(&url).bearer_auth(&jwt).send().await?; + assert!(response.status() == StatusCode::OK); + + let revoke_body = RevokeModuleRequest { module_id: ModuleId(JWT_MODULE.to_string()) }; + let body_bytes = serde_json::to_vec(&revoke_body)?; + let admin_jwt = create_admin_jwt(admin_secret, REVOKE_MODULE_PATH, Some(&body_bytes))?; + + let revoke_url = format!("http://{}{}", start_config.endpoint, REVOKE_MODULE_PATH); + let response = + client.post(&revoke_url).json(&revoke_body).bearer_auth(&admin_jwt).send().await?; + assert!(response.status() == StatusCode::OK); + + // After revoke, test module shouldn't be allowed anymore + let response = client.get(&url).bearer_auth(&jwt).send().await?; + assert!(response.status() == StatusCode::UNAUTHORIZED); + + Ok(()) } -// Verifies that the pubkeys returned by the server match the pubkeys in the -// test data -async fn 
verify_pubkeys(response: Response) -> Result<()> { - // Verify the expected pubkeys are returned +#[tokio::test] +async fn test_signer_only_admin_can_revoke() -> Result<()> { + setup_test_env(); + let admin_secret = ADMIN_SECRET.to_string(); + let module_id = ModuleId(JWT_MODULE.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20500, &mod_cfgs, admin_secret.clone(), false).await?; + + let revoke_body = RevokeModuleRequest { module_id: ModuleId(JWT_MODULE.to_string()) }; + let body_bytes = serde_json::to_vec(&revoke_body)?; + + // Run as many pubkeys requests as the fail limit + let jwt = create_jwt(&module_id, JWT_SECRET, REVOKE_MODULE_PATH, Some(&body_bytes))?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, REVOKE_MODULE_PATH); + + // Module JWT shouldn't be able to revoke modules + let response = client.post(&url).json(&revoke_body).bearer_auth(&jwt).send().await?; + assert!(response.status() == StatusCode::UNAUTHORIZED); + + // Admin should be able to revoke modules + let admin_jwt = create_admin_jwt(admin_secret, REVOKE_MODULE_PATH, Some(&body_bytes))?; + let response = client.post(&url).json(&revoke_body).bearer_auth(&admin_jwt).send().await?; assert!(response.status() == StatusCode::OK); - let pubkey_json = response.json::().await?; - assert_eq!(pubkey_json.keys.len(), 2); - let expected_pubkeys = vec![ - bls_pubkey_from_hex( - "883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4", - )?, - bls_pubkey_from_hex( - "b3a22e4a673ac7a153ab5b3c17a4dbef55f7e47210b20c0cbb0e66df5b36bb49ef808577610b034172e955d2312a61b9", - )?, - ]; - for expected in expected_pubkeys { - assert!( - pubkey_json.keys.iter().any(|k| k.consensus == expected), - "Expected pubkey not found: {:?}", - expected - ); - info!("Server returned expected pubkey: {:?}", expected); + + Ok(()) +} + +#[tokio::test] +async fn test_signer_admin_jwt_rate_limit() 
-> Result<()> { + setup_test_env(); + let admin_secret = ADMIN_SECRET.to_string(); + let module_id = ModuleId(JWT_MODULE.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20510, &mod_cfgs, admin_secret.clone(), false).await?; + + let revoke_body = RevokeModuleRequest { module_id: ModuleId(JWT_MODULE.to_string()) }; + let body_bytes = serde_json::to_vec(&revoke_body)?; + + // Run as many pubkeys requests as the fail limit + let jwt = create_jwt(&module_id, JWT_SECRET, REVOKE_MODULE_PATH, Some(&body_bytes))?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, REVOKE_MODULE_PATH); + + // Module JWT shouldn't be able to revoke modules + for _ in 0..start_config.jwt_auth_fail_limit { + let response = client.post(&url).json(&revoke_body).bearer_auth(&jwt).send().await?; + assert!(response.status() == StatusCode::UNAUTHORIZED); } + + // Run another request - this should fail due to rate limiting now + let admin_jwt = create_admin_jwt(admin_secret, REVOKE_MODULE_PATH, Some(&body_bytes))?; + let response = client.post(&url).json(&revoke_body).bearer_auth(&admin_jwt).send().await?; + assert!(response.status() == StatusCode::TOO_MANY_REQUESTS); + + // Wait for the rate limit timeout + tokio::time::sleep(Duration::from_secs(start_config.jwt_auth_fail_timeout_seconds as u64)) + .await; + + // Now the next request should succeed + let response = client.post(&url).json(&revoke_body).bearer_auth(&admin_jwt).send().await?; + assert!(response.status() == StatusCode::OK); + Ok(()) } diff --git a/tests/tests/signer_jwt_auth_cleanup.rs b/tests/tests/signer_jwt_auth_cleanup.rs new file mode 100644 index 00000000..d6fde2a4 --- /dev/null +++ b/tests/tests/signer_jwt_auth_cleanup.rs @@ -0,0 +1,70 @@ +use std::{collections::HashMap, time::Duration}; + +use alloy::primitives::b256; +use cb_common::{ + commit::constants::GET_PUBKEYS_PATH, + config::{ModuleSigningConfig, 
load_module_signing_configs}, + types::ModuleId, + utils::create_jwt, +}; +use cb_tests::{ + signer_service::start_server, + utils::{self}, +}; +use eyre::Result; +use reqwest::StatusCode; + +const JWT_MODULE: &str = "test-module"; +const JWT_SECRET: &str = "test-jwt-secret"; +const ADMIN_SECRET: &str = "test-admin-secret"; + +async fn create_mod_signing_configs() -> HashMap { + let mut cfg = + utils::get_commit_boost_config(utils::get_pbs_static_config(utils::get_pbs_config(0))); + + let module_id = ModuleId(JWT_MODULE.to_string()); + let signing_id = b256!("0101010101010101010101010101010101010101010101010101010101010101"); + + cfg.modules = Some(vec![utils::create_module_config(module_id.clone(), signing_id)]); + + let jwts = HashMap::from([(module_id.clone(), JWT_SECRET.to_string())]); + + load_module_signing_configs(&cfg, &jwts).unwrap() +} + +#[tokio::test] +#[tracing_test::traced_test] +async fn test_signer_jwt_fail_cleanup() -> Result<()> { + // setup_test_env() isn't used because we want to capture logs with tracing_test + let module_id = ModuleId(JWT_MODULE.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20102, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let mod_cfg = mod_cfgs.get(&module_id).expect("JWT config for test module not found"); + + // Run as many pubkeys requests as the fail limit + let jwt = create_jwt(&module_id, "incorrect secret", GET_PUBKEYS_PATH, None)?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, GET_PUBKEYS_PATH); + for _ in 0..start_config.jwt_auth_fail_limit { + let response = client.get(&url).bearer_auth(&jwt).send().await?; + assert!(response.status() == StatusCode::UNAUTHORIZED); + } + + // Run another request - this should fail due to rate limiting now + let jwt = create_jwt(&module_id, &mod_cfg.jwt_secret, GET_PUBKEYS_PATH, None)?; + let response = client.get(&url).bearer_auth(&jwt).send().await?; + 
assert!(response.status() == StatusCode::TOO_MANY_REQUESTS); + + // Wait until the cleanup task should have run properly, takes a while for the + // timing to work out + tokio::time::sleep(Duration::from_secs( + (start_config.jwt_auth_fail_timeout_seconds * 3) as u64, + )) + .await; + + // Make sure the cleanup message was logged - it's all internal state so without + // refactoring or exposing it, this is the easiest way to check if it triggered + assert!(logs_contain("Cleaned up 1 old JWT auth failure entries")); + + Ok(()) +} diff --git a/tests/tests/signer_request_sig.rs b/tests/tests/signer_request_sig.rs new file mode 100644 index 00000000..78efbf9e --- /dev/null +++ b/tests/tests/signer_request_sig.rs @@ -0,0 +1,197 @@ +use std::collections::HashMap; + +use alloy::primitives::{b256, hex}; +use cb_common::{ + commit::{ + constants::REQUEST_SIGNATURE_BLS_PATH, request::SignConsensusRequest, + response::BlsSignResponse, + }, + config::{ModuleSigningConfig, load_module_signing_configs}, + types::{BlsPublicKey, BlsSignature, Chain, ModuleId}, + utils::create_jwt, +}; +use cb_tests::{ + signer_service::start_server, + utils::{self, setup_test_env}, +}; +use eyre::Result; +use reqwest::StatusCode; + +const MODULE_ID_1: &str = "test-module"; +const MODULE_ID_2: &str = "another-module"; +const PUBKEY_1: [u8; 48] = hex!( + "883827193f7627cd04e621e1e8d56498362a52b2a30c9a1c72036eb935c4278dee23d38a24d2f7dda62689886f0c39f4" +); +const ADMIN_SECRET: &str = "test-admin-secret"; + +async fn create_mod_signing_configs() -> HashMap { + let mut cfg = + utils::get_commit_boost_config(utils::get_pbs_static_config(utils::get_pbs_config(0))); + + let module_id_1 = ModuleId(MODULE_ID_1.to_string()); + let signing_id_1 = b256!("0x6a33a23ef26a4836979edff86c493a69b26ccf0b4a16491a815a13787657431b"); + let module_id_2 = ModuleId(MODULE_ID_2.to_string()); + let signing_id_2 = b256!("0x61fe00135d7b4912a8c63ada215ac2e62326e6e7b30f49a29fcf9779d7ad800d"); + + cfg.modules = Some(vec![ + 
utils::create_module_config(module_id_1.clone(), signing_id_1), + utils::create_module_config(module_id_2.clone(), signing_id_2), + ]); + + let jwts = HashMap::from([ + (module_id_1.clone(), "supersecret".to_string()), + (module_id_2.clone(), "anothersecret".to_string()), + ]); + + load_module_signing_configs(&cfg, &jwts).unwrap() +} + +/// Makes sure the signer service signs requests correctly, using the module's +/// signing ID +#[tokio::test] +async fn test_signer_sign_request_good() -> Result<()> { + setup_test_env(); + let module_id = ModuleId(MODULE_ID_1.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20200, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let jwt_config = mod_cfgs.get(&module_id).expect("JWT config for test module not found"); + + // Send a signing request + let object_root = b256!("0x0123456789012345678901234567890123456789012345678901234567890123"); + let nonce: u64 = 101; + let pubkey = BlsPublicKey::deserialize(&PUBKEY_1).unwrap(); + let request = SignConsensusRequest { pubkey: pubkey.clone(), object_root, nonce }; + let payload_bytes = serde_json::to_vec(&request)?; + let jwt = create_jwt( + &module_id, + &jwt_config.jwt_secret, + REQUEST_SIGNATURE_BLS_PATH, + Some(&payload_bytes), + )?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, REQUEST_SIGNATURE_BLS_PATH); + let response = client.post(&url).json(&request).bearer_auth(&jwt).send().await?; + + // Verify the response is successful + assert!(response.status() == StatusCode::OK); + + // Verify the signature is returned + let sig_response = response.json::().await?; + let expected = BlsSignResponse::new( + pubkey, + object_root, + mod_cfgs.get(&module_id).unwrap().signing_id, + nonce, + Chain::Hoodi.id(), + 
BlsSignature::deserialize(&hex!("0xb653034a6da0e516cb999d6bbcd5ddd8dde9695322a94aefcd3049e6235e0f4f63b13d81ddcd80d4e1e698c3f88c3b440ae696650ccef2f22329afb4ffecec85a34523e25920ceced54c5bc31168174a3b352977750c222c1c25f72672467e5c")).unwrap()); + assert_eq!(sig_response, expected, "Signature response does not match expected value"); + + Ok(()) +} + +/// Makes sure the signer service returns a signature that is different for each +/// module +#[tokio::test] +async fn test_signer_sign_request_different_module() -> Result<()> { + setup_test_env(); + let module_id = ModuleId(MODULE_ID_2.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20201, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let jwt_config = mod_cfgs.get(&module_id).expect("JWT config for 2nd test module not found"); + + // Send a signing request + let object_root = b256!("0x0123456789012345678901234567890123456789012345678901234567890123"); + let nonce: u64 = 101; + let pubkey = BlsPublicKey::deserialize(&PUBKEY_1).unwrap(); + let request = SignConsensusRequest { pubkey: pubkey.clone(), object_root, nonce }; + let payload_bytes = serde_json::to_vec(&request)?; + let jwt = create_jwt( + &module_id, + &jwt_config.jwt_secret, + REQUEST_SIGNATURE_BLS_PATH, + Some(&payload_bytes), + )?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, REQUEST_SIGNATURE_BLS_PATH); + let response = client.post(&url).json(&request).bearer_auth(&jwt).send().await?; + + // Verify the response is successful + assert!(response.status() == StatusCode::OK); + + // Verify the signature is returned + let sig_response = response.json::().await?; + assert_eq!(sig_response.pubkey, pubkey, "Public key does not match expected value"); + assert_eq!(sig_response.object_root, object_root, "Object root does not match expected value"); + assert_eq!( + sig_response.module_signing_id, + mod_cfgs.get(&module_id).unwrap().signing_id, + "Module 
signing ID does not match expected value" + ); + assert_ne!( + sig_response.signature, BlsSignature::deserialize(&hex!("0xb653034a6da0e516cb999d6bbcd5ddd8dde9695322a94aefcd3049e6235e0f4f63b13d81ddcd80d4e1e698c3f88c3b440ae696650ccef2f22329afb4ffecec85a34523e25920ceced54c5bc31168174a3b352977750c222c1c25f72672467e5c")).unwrap(), + "Signature matches the reference signature, which should not happen" + ); + + Ok(()) +} + +/// Makes sure the signer service does not allow requests for JWTs that do +/// not match the JWT hash +#[tokio::test] +async fn test_signer_sign_request_incorrect_hash() -> Result<()> { + setup_test_env(); + let module_id = ModuleId(MODULE_ID_2.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20202, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let jwt_config = mod_cfgs.get(&module_id).expect("JWT config for 2nd test module not found"); + + // Send a signing request + let fake_object_root = + b256!("0xabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcd"); + let nonce: u64 = 101; + let pubkey = BlsPublicKey::deserialize(&PUBKEY_1).unwrap(); + let fake_request = + SignConsensusRequest { pubkey: pubkey.clone(), object_root: fake_object_root, nonce }; + let fake_payload_bytes = serde_json::to_vec(&fake_request)?; + let true_object_root = + b256!("0x0123456789012345678901234567890123456789012345678901234567890123"); + let true_request = SignConsensusRequest { pubkey, object_root: true_object_root, nonce }; + let jwt = create_jwt( + &module_id, + &jwt_config.jwt_secret, + REQUEST_SIGNATURE_BLS_PATH, + Some(&fake_payload_bytes), + )?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, REQUEST_SIGNATURE_BLS_PATH); + let response = client.post(&url).json(&true_request).bearer_auth(&jwt).send().await?; + + // Verify that authorization failed + assert!(response.status() == StatusCode::UNAUTHORIZED); + Ok(()) +} + +/// Makes sure the signer 
service does not allow signer requests for JWTs that +/// do not include a payload hash +#[tokio::test] +async fn test_signer_sign_request_missing_hash() -> Result<()> { + setup_test_env(); + let module_id = ModuleId(MODULE_ID_2.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20203, &mod_cfgs, ADMIN_SECRET.to_string(), false).await?; + let jwt_config = mod_cfgs.get(&module_id).expect("JWT config for 2nd test module not found"); + + // Send a signing request + let nonce: u64 = 101; + let pubkey = BlsPublicKey::deserialize(&PUBKEY_1).unwrap(); + let object_root = b256!("0x0123456789012345678901234567890123456789012345678901234567890123"); + let request = SignConsensusRequest { pubkey, object_root, nonce }; + let jwt = create_jwt(&module_id, &jwt_config.jwt_secret, REQUEST_SIGNATURE_BLS_PATH, None)?; + let client = reqwest::Client::new(); + let url = format!("http://{}{}", start_config.endpoint, REQUEST_SIGNATURE_BLS_PATH); + let response = client.post(&url).json(&request).bearer_auth(&jwt).send().await?; + + // Verify that authorization failed + assert!(response.status() == StatusCode::UNAUTHORIZED); + Ok(()) +} diff --git a/tests/tests/signer_tls.rs b/tests/tests/signer_tls.rs new file mode 100644 index 00000000..2df98d73 --- /dev/null +++ b/tests/tests/signer_tls.rs @@ -0,0 +1,58 @@ +use std::collections::HashMap; + +use alloy::primitives::b256; +use cb_common::{ + commit::constants::GET_PUBKEYS_PATH, + config::{ModuleSigningConfig, load_module_signing_configs}, + types::ModuleId, + utils::create_jwt, +}; +use cb_tests::{ + signer_service::{start_server, verify_pubkeys}, + utils::{self, setup_test_env}, +}; +use eyre::{Result, bail}; +use reqwest::Certificate; + +const JWT_MODULE: &str = "test-module"; +const JWT_SECRET: &str = "test-jwt-secret"; +const ADMIN_SECRET: &str = "test-admin-secret"; + +async fn create_mod_signing_configs() -> HashMap { + let mut cfg = + 
utils::get_commit_boost_config(utils::get_pbs_static_config(utils::get_pbs_config(0))); + + let module_id = ModuleId(JWT_MODULE.to_string()); + let signing_id = b256!("0101010101010101010101010101010101010101010101010101010101010101"); + + cfg.modules = Some(vec![utils::create_module_config(module_id.clone(), signing_id)]); + + let jwts = HashMap::from([(module_id.clone(), JWT_SECRET.to_string())]); + + load_module_signing_configs(&cfg, &jwts).unwrap() +} + +#[tokio::test] +async fn test_signer_tls() -> Result<()> { + setup_test_env(); + let module_id = ModuleId(JWT_MODULE.to_string()); + let mod_cfgs = create_mod_signing_configs().await; + let start_config = start_server(20100, &mod_cfgs, ADMIN_SECRET.to_string(), true).await?; + let jwt_config = mod_cfgs.get(&module_id).expect("JWT config for test module not found"); + + // Run a pubkeys request + let jwt = create_jwt(&module_id, &jwt_config.jwt_secret, GET_PUBKEYS_PATH, None)?; + let cert = match start_config.tls_certificates { + Some(ref certificates) => &certificates.0, + None => bail!("TLS certificates not found in start config"), + }; + let client = + reqwest::Client::builder().add_root_certificate(Certificate::from_pem(cert)?).build()?; + let url = format!("https://{}{}", start_config.endpoint, GET_PUBKEYS_PATH); + let response = client.get(&url).bearer_auth(&jwt).send().await?; + + // Verify the expected pubkeys are returned + verify_pubkeys(response).await?; + + Ok(()) +}