diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4b52074856..c9cc3ca70a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,12 +38,13 @@ jobs: test-default: name: Test Default + if: false needs: [runner] runs-on: ${{ matrix.os }} - concurrency: ${{ needs.runner.outputs.concurrency-group2 }} + concurrency: ${{ needs.runner.outputs.concurrency-group1 }} strategy: matrix: - os: ${{ fromJSON(needs.runner.outputs.runner-matrix2) }} + os: ${{ fromJSON(needs.runner.outputs.runner-matrix1) }} steps: - uses: actions/checkout@v4 @@ -52,12 +53,12 @@ jobs: echo "HOME=/home/CI" >> $GITHUB_ENV - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 - name: Setup golang uses: actions/setup-go@v3 with: cache: false - go-version: ~1.21 + go-version: ~1.22 # Go cache for building geth-utils - name: Go cache uses: actions/cache@v3 @@ -83,10 +84,10 @@ jobs: name: Test Scroll needs: [runner] runs-on: ${{ matrix.os }} - concurrency: ${{ needs.runner.outputs.concurrency-group1 }} + concurrency: ${{ needs.runner.outputs.concurrency-group2 }} strategy: matrix: - os: ${{ fromJSON(needs.runner.outputs.runner-matrix1) }} + os: ${{ fromJSON(needs.runner.outputs.runner-matrix2) }} steps: - uses: actions/checkout@v4 @@ -95,12 +96,12 @@ jobs: echo "HOME=/home/CI" >> $GITHUB_ENV - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 - name: Setup golang uses: actions/setup-go@v3 with: cache: false - go-version: ~1.21 + go-version: ~1.22 # Go cache for building geth-utils - name: Go cache uses: actions/cache@v3 @@ -146,7 +147,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 - name: Add target run: rustup target add ${{ matrix.target }} # Go cache for building geth-utils @@ -183,7 +184,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 # Go cache for building geth-utils - name: Go cache uses: actions/cache@v3 @@ -216,7 +217,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 # Go cache for building geth-utils - name: Go cache uses: actions/cache@v3 @@ -253,7 +254,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 components: rustfmt # Go cache for building geth-utils - name: Go cache diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index c1c9c7597f..b879ef70fe 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -61,7 +61,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 - name: Setup golang uses: actions/setup-go@v3 with: diff --git a/.github/workflows/lints.yml b/.github/workflows/lints.yml index 08c7d3fa51..1e2345ca12 100644 --- a/.github/workflows/lints.yml +++ b/.github/workflows/lints.yml @@ -43,7 +43,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 components: clippy # Go cache for building geth-utils - name: Go cache diff --git a/.github/workflows/test-features.yml b/.github/workflows/test-features.yml index 
7f55372f01..f80f1f29f2 100644 --- a/.github/workflows/test-features.yml +++ b/.github/workflows/test-features.yml @@ -42,7 +42,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: nightly-2023-12-03 + toolchain: nightly-2024-01-25 # Go cache for building geth-utils - name: Go cache diff --git a/Cargo.lock b/Cargo.lock index 1a9340b506..bad06407b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -44,7 +44,7 @@ version = "0.13.0" dependencies = [ "ark-std 0.3.0", "bitstream-io", - "c-kzg", + "c-kzg 1.0.2", "csv", "ctor", "encoder", @@ -61,8 +61,8 @@ dependencies = [ "num-bigint", "once_cell", "rand", - "revm-precompile", - "revm-primitives", + "revm-precompile 7.0.0", + "revm-primitives 4.0.0", "serde", "serde_json", "snark-verifier", @@ -99,6 +99,23 @@ version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +[[package]] +name = "alloy-primitives" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0628ec0ba5b98b3370bb6be17b12f23bfce8ee4ad83823325a20546d9b03b78" +dependencies = [ + "alloy-rlp", + "bytes", + "cfg-if 1.0.0", + "const-hex", + "derive_more", + "hex-literal", + "itoa", + "ruint", + "tiny-keccak", +] + [[package]] name = "alloy-primitives" version = "0.7.4" @@ -127,10 +144,22 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d58d9f5da7b40e9bfff0b7e7816700be4019db97d4b6359fe7f94a9e22e42ac" dependencies = [ + "alloy-rlp-derive", "arrayvec", "bytes", ] +[[package]] +name = "alloy-rlp-derive" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -461,6 +490,29 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.66.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b84e06fc203107bfbad243f4aba2af864eb7db3b1cf46ea0a023b0b433d2a7" +dependencies = [ + "bitflags 2.5.0", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.60", + "which", +] + [[package]] name = "bit-set" version = "0.5.3" @@ -615,7 +667,7 @@ dependencies = [ "pretty_assertions", "rand", "rayon", - "revm-precompile", + "revm-precompile 7.0.0", "serde", "serde_json", "strum 0.25.0", @@ -666,6 +718,21 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "c-kzg" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac926d808fb72fe09ebf471a091d6d72918876ccf0b4989766093d2d0d24a0ef" +dependencies = [ + "bindgen", + "blst", + "cc", + "glob", + "hex", + "libc", + "serde", +] + [[package]] name = "c-kzg" version = "1.0.2" @@ -722,6 +789,15 @@ dependencies = [ "libc", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "0.1.10" @@ -780,6 +856,17 @@ dependencies = [ "zkevm-circuits", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "clap" version = "4.5.4" @@ -1422,8 +1509,8 @@ dependencies = [ "num-bigint", "poseidon-base", "regex", - "revm-precompile", - "revm-primitives", + "revm-precompile 7.0.0", + "revm-primitives 4.0.0", "serde", "serde_json", "serde_with", @@ -2074,7 +2161,7 @@ dependencies = [ [[package]] name = "halo2-base" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#817cace374a9f4b2eca682b1cc36f143255ea25f" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#5e3a1c94d069c168febc4586fe6734e6b31b21bb" dependencies = [ "ff", "halo2_proofs", @@ -2089,7 +2176,7 @@ dependencies = [ [[package]] name = "halo2-ecc" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#817cace374a9f4b2eca682b1cc36f143255ea25f" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#5e3a1c94d069c168febc4586fe6734e6b31b21bb" dependencies = [ "ff", "group", @@ -2164,7 +2251,7 @@ dependencies = [ [[package]] name = "halo2_proofs" version = "1.1.0" -source = "git+https://github.com/scroll-tech/halo2.git?branch=v1.1#6c5e9eec5088dc8b4a23edcdd1eaf8705b5fc7dc" +source = "git+https://github.com/scroll-tech/halo2.git?branch=v1.1#e5ddf67e5ae16be38d6368ed355c7c41906272ab" dependencies = [ "ark-std 0.3.0", "blake2b_simd", @@ -2651,12 +2738,28 @@ dependencies = [ "spin 0.5.2", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +[[package]] +name = "libloading" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +dependencies = [ + "cfg-if 1.0.0", + "windows-targets 0.52.5", +] + [[package]] name = "libm" version = "0.2.8" @@ -2760,6 +2863,12 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.7.2" @@ -2832,6 +2941,16 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + [[package]] name = "num" version = "0.4.2" @@ -3142,6 +3261,12 @@ dependencies = [ "hmac", ] +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + [[package]] name = "percent-encoding" version = "2.3.1" @@ -3482,6 +3607,7 @@ dependencies = [ "num-bigint", "rand", "rand_xorshift", + "revm 3.5.0", "serde", "serde_derive", 
"serde_json", @@ -3489,6 +3615,7 @@ dependencies = [ "sha2", "snark-verifier", "snark-verifier-sdk", + "thiserror", "zkevm-circuits", ] @@ -3694,32 +3821,85 @@ dependencies = [ "sha3 0.10.8", ] +[[package]] +name = "revm" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f4ca8ae0345104523b4af1a8a7ea97cfa1865cdb7a7c25d23c1a18d9b48598" +dependencies = [ + "auto_impl", + "revm-interpreter", + "revm-precompile 2.2.0", +] + +[[package]] +name = "revm-interpreter" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f959cafdf64a7f89b014fa73dc2325001cf654b3d9400260b212d19a2ebe3da0" +dependencies = [ + "revm-primitives 1.3.0", +] + +[[package]] +name = "revm-precompile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d360a88223d85709d2e95d4609eb1e19c649c47e28954bfabae5e92bb37e83e" +dependencies = [ + "c-kzg 0.1.1", + "k256", + "num", + "once_cell", + "revm-primitives 1.3.0", + "ripemd", + "secp256k1 0.27.0", + "sha2", + "substrate-bn", +] + [[package]] name = "revm-precompile" version = "7.0.0" source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#30613ed430505b7c11f335d265dc561e485c94bf" dependencies = [ "aurora-engine-modexp", - "c-kzg", + "c-kzg 1.0.2", "k256", "once_cell", - "revm-primitives", + "revm-primitives 4.0.0", "ripemd", "secp256k1 0.29.0", "sha2", "substrate-bn", ] +[[package]] +name = "revm-primitives" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51187b852d9e458816a2e19c81f1dd6c924077e1a8fccd16e4f044f865f299d7" +dependencies = [ + "alloy-primitives 0.4.2", + "alloy-rlp", + "auto_impl", + "bitflags 2.5.0", + "bitvec", + "enumn", + "hashbrown 0.14.3", + "hex", +] + [[package]] name = "revm-primitives" version = "4.0.0" source = "git+https://github.com/scroll-tech/revm?branch=scroll-evm-executor/v36#30613ed430505b7c11f335d265dc561e485c94bf" dependencies = [ - "alloy-primitives", + "alloy-primitives 0.7.4", "auto_impl", "bitflags 2.5.0", "bitvec", - "c-kzg", + "c-kzg 1.0.2", "cfg-if 1.0.0", "derive_more", "dyn-clone", @@ -4058,6 +4238,15 @@ dependencies = [ "secp256k1-sys 0.6.1", ] +[[package]] +name = "secp256k1" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25996b82292a7a57ed3508f052cfff8640d38d32018784acd714758b43da9c8f" +dependencies = [ + "secp256k1-sys 0.8.1", +] + [[package]] name = "secp256k1" version = "0.29.0" @@ -4077,6 +4266,15 @@ dependencies = [ "cc", ] +[[package]] +name = "secp256k1-sys" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a129b9e9efbfb223753b9163c4ab3b13cff7fd9c7f010fbac25ab4099fa07e" +dependencies = [ + "cc", +] + [[package]] name = "secp256k1-sys" version = "0.10.0" @@ -4272,6 +4470,12 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signature" version = "2.2.0" @@ -4306,7 +4510,7 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "snark-verifier" version = "0.1.0" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=v0.1#58c46b74c73156b9e09dc27617369d2acfb4461b" +source = 
"git+https://github.com/scroll-tech/snark-verifier?branch=feat/hybrid-snark-agg#0d7c3ae2ea12b864fe8e361c1a2662039ac6c626" dependencies = [ "bytes", "ethereum-types", @@ -4319,7 +4523,7 @@ dependencies = [ "num-traits", "poseidon", "rand", - "revm", + "revm 2.3.1", "rlp", "rustc-hash", "serde", @@ -4329,7 +4533,7 @@ dependencies = [ [[package]] name = "snark-verifier-sdk" version = "0.0.1" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=v0.1#58c46b74c73156b9e09dc27617369d2acfb4461b" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=feat/hybrid-snark-agg#0d7c3ae2ea12b864fe8e361c1a2662039ac6c626" dependencies = [ "bincode", "ethereum-types", @@ -5181,6 +5385,18 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "winapi" version = "0.3.9" diff --git a/Cargo.toml b/Cargo.toml index 58740501c1..0b74f33ad3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,8 +56,8 @@ serde = {version = "1.0", features = ["derive"] } serde_json = "1.0" serde_stacker = "0.1" sha3 = "0.10" -snark-verifier = { git = "https://github.com/scroll-tech/snark-verifier", branch = "v0.1" } -snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "v0.1", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] } +snark-verifier = { git = "https://github.com/scroll-tech/snark-verifier", branch = "feat/hybrid-snark-agg" } +snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "feat/hybrid-snark-agg", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] } strum = "0.25" strum_macros = "0.25" subtle = "2.4" diff --git a/aggregator/src/aggregation.rs b/aggregator/src/aggregation.rs index 25f7ddafec..948ece9cf7 100644 --- a/aggregator/src/aggregation.rs +++ b/aggregator/src/aggregation.rs @@ -25,3 +25,43 @@ pub(crate) use rlc::RlcConfig; pub use circuit::BatchCircuit; pub use config::BatchCircuitConfig; +use halo2_base::halo2_proofs::halo2curves::bn256::{Fr, G1Affine}; +use snark_verifier::Protocol; + +/// Alias for a list of G1 points. +pub type PreprocessedPolyCommits = Vec; +/// Alias for the transcript's initial state. +pub type TranscriptInitState = Fr; + +/// Alias for the fixed part of the protocol which consists of the commitments to the preprocessed +/// polynomials and the initial state of the transcript. +#[derive(Clone)] +pub struct FixedProtocol { + /// The commitments to the preprocessed polynomials. + pub preprocessed: PreprocessedPolyCommits, + /// The initial state of the transcript. 
+ pub init_state: TranscriptInitState, +} + +impl From<Protocol<G1Affine>> for FixedProtocol { + fn from(protocol: Protocol<G1Affine>) -> Self { + Self { + preprocessed: protocol.preprocessed, + init_state: protocol + .transcript_initial_state + .expect("protocol transcript init state None"), + } + } +} + +impl From<&Protocol<G1Affine>> for FixedProtocol { + fn from(protocol: &Protocol<G1Affine>) -> Self { + Self { + preprocessed: protocol.preprocessed.clone(), + init_state: protocol + .transcript_initial_state + .clone() + .expect("protocol transcript init state None"), + } + } +} diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 47d03c41eb..32dc427164 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -2,16 +2,22 @@ use crate::{ aggregation::decoder::WORKED_EXAMPLE, blob::BatchData, witgen::{zstd_encode, MultiBlockProcessResult}, - LOG_DEGREE, PI_CHAIN_ID, PI_CURRENT_BATCH_HASH, PI_CURRENT_STATE_ROOT, - PI_CURRENT_WITHDRAW_ROOT, PI_PARENT_BATCH_HASH, PI_PARENT_STATE_ROOT, + FixedProtocol, LOG_DEGREE, MAX_AGG_SNARKS, PI_CHAIN_ID, PI_CURRENT_BATCH_HASH, + PI_CURRENT_STATE_ROOT, PI_CURRENT_WITHDRAW_ROOT, PI_PARENT_BATCH_HASH, PI_PARENT_STATE_ROOT, }; use ark_std::{end_timer, start_timer}; -use halo2_base::{Context, ContextParams}; +use halo2_base::{ + gates::{GateInstructions, RangeInstructions}, + Context, ContextParams, + QuantumCell::Existing, +}; +use halo2_ecc::fields::FieldChip; #[cfg(not(feature = "disable_proof_aggregation"))] use halo2_ecc::{ecc::EccChip, fields::fp::FpConfig}; use halo2_proofs::{ + arithmetic::Field, circuit::{Layouter, SimpleFloorPlanner, Value}, halo2curves::bn256::{Bn256, Fr, G1Affine}, plonk::{Circuit, ConstraintSystem, Error, Selector}, @@ -24,7 +30,9 @@ use std::rc::Rc; use std::{env, fs::File}; #[cfg(not(feature = "disable_proof_aggregation"))] -use snark_verifier::loader::halo2::{halo2_ecc::halo2_base::AssignedValue, Halo2Loader}; +use snark_verifier::loader::halo2::{ + halo2_ecc::halo2_base::AssignedValue, Halo2Loader, IntegerInstructions, +}; use snark_verifier::pcs::kzg::KzgSuccinctVerifyingKey; #[cfg(not(feature = "disable_proof_aggregation"))] use snark_verifier::{ @@ -32,7 +40,7 @@ use snark_verifier::{ pcs::kzg::{Bdfg21, Kzg}, }; #[cfg(not(feature = "disable_proof_aggregation"))] -use snark_verifier_sdk::{aggregate, flatten_accumulator}; +use snark_verifier_sdk::{aggregate_hybrid, flatten_accumulator}; use snark_verifier_sdk::{CircuitExt, Snark, SnarkWitness}; use zkevm_circuits::util::Challenges; @@ -68,14 +76,21 @@ pub struct BatchCircuit<const N_SNARKS: usize> { // batch hash circuit for which the snarks are generated // the chunks in this batch are also padded already pub batch_hash: BatchHash<N_SNARKS>, + + /// The SNARK protocol from the halo2-based inner circuit route. + pub halo2_protocol: FixedProtocol, + /// The SNARK protocol from the sp1-based inner circuit route.
+ pub sp1_protocol: FixedProtocol, +} impl<const N_SNARKS: usize> BatchCircuit<N_SNARKS> { - pub fn new( + pub fn new<P: Into<FixedProtocol>>( params: &ParamsKZG<Bn256>, snarks_with_padding: &[Snark], rng: impl Rng + Send, batch_hash: BatchHash<N_SNARKS>, + halo2_protocol: P, + sp1_protocol: P, ) -> Result<Self, snark_verifier::Error> { let timer = start_timer!(|| "generate aggregation circuit"); @@ -133,6 +148,8 @@ impl<const N_SNARKS: usize> BatchCircuit<N_SNARKS> { flattened_instances, as_proof: Value::known(as_proof), batch_hash, + halo2_protocol: halo2_protocol.into(), + sp1_protocol: sp1_protocol.into(), }) } @@ -268,15 +285,17 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { // - new accumulator // log::debug!("aggregation: chunk aggregation"); - let (assigned_aggregation_instances, acc) = aggregate::<Kzg<Bn256, Bdfg21>>( + let ( + assigned_aggregation_instances, + acc, + preprocessed_poly_sets, + transcript_init_states, + ) = aggregate_hybrid::<Kzg<Bn256, Bdfg21>>( &self.svk, &loader, &self.snarks_with_padding, self.as_proof(), ); - for (i, e) in assigned_aggregation_instances[0].iter().enumerate() { - log::trace!("{}-th instance: {:?}", i, e.value) - } // extract the following cells for later constraints // - the accumulators @@ -290,12 +309,130 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .iter() .flat_map(|instance_column| instance_column.iter().skip(ACC_LEN)), ); + for (i, e) in assigned_aggregation_instances[0].iter().enumerate() { + log::trace!("{}-th instance: {:?}", i, e.value) + } - loader - .ctx_mut() - .print_stats(&["snark aggregation [chunks -> batch]"]); + loader.ctx_mut().print_stats(&["snark aggregation"]); let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); + + // We must ensure that the commitments to preprocessed polynomial and initial + // state of transcripts for every SNARK that is being aggregated belongs to the + // fixed set of values expected. + // + // First we load the constants. + log::info!("populating constants"); + let mut preprocessed_polys_halo2 = Vec::with_capacity(7); + let mut preprocessed_polys_sp1 = Vec::with_capacity(7); + for (i, &preprocessed_poly) in + self.halo2_protocol.preprocessed.iter().enumerate() + { + log::debug!("load const {i}"); + preprocessed_polys_halo2.push( + config + .ecc_chip() + .assign_constant_point(&mut ctx, preprocessed_poly), + ); + log::debug!("load const {i} OK"); + } + for (i, &preprocessed_poly) in self.sp1_protocol.preprocessed.iter().enumerate() + { + log::debug!("load const (sp1) {i}"); + preprocessed_polys_sp1.push( + config + .ecc_chip() + .assign_constant_point(&mut ctx, preprocessed_poly), + ); + log::debug!("load const (sp1) {i} OK"); + } + let transcript_init_state_halo2 = config + .ecc_chip() + .field_chip() + .range() + .gate() + .assign_constant(&mut ctx, self.halo2_protocol.init_state) + .expect("IntegerInstructions::assign_constant infallible"); + log::debug!("load transcript OK"); + let transcript_init_state_sp1 = config + .ecc_chip() + .field_chip() + .range() + .gate() + .assign_constant(&mut ctx, self.sp1_protocol.init_state) + .expect("IntegerInstructions::assign_constant infallible"); + log::info!("populating constants OK"); + + // Commitments to the preprocessed polynomials.
+ // + // check_1: halo2-route + // check_2: sp1-route + // + // OR(check_1, check_2) == 1 + let mut route_check = Vec::with_capacity(MAX_AGG_SNARKS); + for preprocessed_polys in preprocessed_poly_sets.iter() { + let mut preprocessed_check_1 = + config.flex_gate().load_constant(&mut ctx, Fr::ONE); + let mut preprocessed_check_2 = + config.flex_gate().load_constant(&mut ctx, Fr::ONE); + for ((commitment, comm_halo2), comm_sp1) in preprocessed_polys + .iter() + .zip_eq(preprocessed_polys_halo2.iter()) + .zip_eq(preprocessed_polys_sp1.iter()) + { + let check_1 = + config + .ecc_chip() + .is_equal(&mut ctx, &commitment, comm_halo2); + let check_2 = + config.ecc_chip().is_equal(&mut ctx, &commitment, comm_sp1); + preprocessed_check_1 = config.flex_gate().and( + &mut ctx, + Existing(preprocessed_check_1), + Existing(check_1), + ); + preprocessed_check_2 = config.flex_gate().and( + &mut ctx, + Existing(preprocessed_check_2), + Existing(check_2), + ); + } + route_check.push(preprocessed_check_1); + let preprocessed_check = config.flex_gate().or( + &mut ctx, + Existing(preprocessed_check_1), + Existing(preprocessed_check_2), + ); + config + .flex_gate() + .assert_is_const(&mut ctx, &preprocessed_check, Fr::ONE); + } + + // Transcript initial state. + // + // If the SNARK belongs to halo2-route, the initial state is the halo2-initial + // state. Otherwise sp1-initial state. + for (transcript_init_state, &route) in + transcript_init_states.iter().zip_eq(route_check.iter()) + { + let transcript_init_state = transcript_init_state + .expect("SNARK should have an initial state for transcript"); + let init_state_expected = config.flex_gate().select( + &mut ctx, + Existing(transcript_init_state_halo2), + Existing(transcript_init_state_sp1), + Existing(route), + ); + GateInstructions::assert_equal( + config.flex_gate(), + &mut ctx, + Existing(transcript_init_state), + Existing(init_state_expected), + ); + } + + ctx.print_stats(&["protocol check"]); + log::debug!("batching: assigning barycentric"); let barycentric = config.barycentric.assign( &mut ctx, diff --git a/aggregator/src/tests/aggregation.rs b/aggregator/src/tests/aggregation.rs index 345dc8de90..1a83f5577b 100644 --- a/aggregator/src/tests/aggregation.rs +++ b/aggregator/src/tests/aggregation.rs @@ -181,6 +181,7 @@ fn build_new_batch_circuit( }) .collect_vec() }; + let snark_protocol = real_snarks[0].protocol.clone(); // ========================== // padded chunks @@ -198,6 +199,8 @@ fn build_new_batch_circuit( [real_snarks, padded_snarks].concat().as_ref(), rng, batch_hash, + &snark_protocol, + &snark_protocol, ) .unwrap() } @@ -266,6 +269,8 @@ fn build_batch_circuit_skip_encoding() -> BatchCircuit() -> BatchCircuit OpcodeId::SELFDESTRUCT, "CHAINID" => OpcodeId::CHAINID, - "opcode 0x48 not defined" => OpcodeId::BASEFEE, "BASEFEE" => OpcodeId::BASEFEE, "BLOBHASH" => OpcodeId::INVALID(0x49), "BLOBBASEFEE" => OpcodeId::INVALID(0x4a), - "opcode 0x5c not defined" => OpcodeId::TLOAD, "TLOAD" => OpcodeId::TLOAD, - "opcode 0x5d not defined" => OpcodeId::TSTORE, "TSTORE" => OpcodeId::TSTORE, _ => { // Parse an invalid opcode value as reported by geth diff --git a/geth-utils/l2geth/go.mod b/geth-utils/l2geth/go.mod index 421cd196eb..433a32f7ec 100644 --- a/geth-utils/l2geth/go.mod +++ b/geth-utils/l2geth/go.mod @@ -1,20 +1,22 @@ module l2gethutil -go 1.21 +go 1.22 + +toolchain go1.23.2 require ( github.com/imdario/mergo v0.3.16 - github.com/scroll-tech/go-ethereum v1.10.14-0.20240621133406-517e5b4b0764 + github.com/scroll-tech/go-ethereum 
v1.10.14-0.20241001202020-2ed7456a4bc0 ) require ( github.com/VictoriaMetrics/fastcache v1.12.1 // indirect - github.com/bits-and-blooms/bitset v1.13.0 // indirect + github.com/bits-and-blooms/bitset v1.14.3 // indirect github.com/btcsuite/btcd v0.20.1-beta // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/consensys/bavard v0.1.13 // indirect - github.com/consensys/gnark-crypto v0.12.1 // indirect - github.com/crate-crypto/go-kzg-4844 v1.0.0 // indirect + github.com/consensys/bavard v0.1.22 // indirect + github.com/consensys/gnark-crypto v0.14.0 // indirect + github.com/crate-crypto/go-kzg-4844 v1.1.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/deckarep/golang-set v1.8.0 // indirect github.com/edsrzf/mmap-go v1.0.0 // indirect @@ -30,9 +32,9 @@ require ( github.com/gorilla/websocket v1.5.0 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/holiman/bloomfilter/v2 v2.0.3 // indirect - github.com/holiman/uint256 v1.2.4 // indirect + github.com/holiman/uint256 v1.3.1 // indirect github.com/huin/goupnp v1.0.2 // indirect - github.com/iden3/go-iden3-crypto v0.0.16 // indirect + github.com/iden3/go-iden3-crypto v0.0.17 // indirect github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect @@ -46,16 +48,16 @@ require ( github.com/scroll-tech/zktrie v0.8.4 // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4 // indirect - github.com/supranational/blst v0.3.12 // indirect + github.com/supranational/blst v0.3.13 // indirect github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect github.com/tklauser/go-sysconf v0.3.14 // indirect - github.com/tklauser/numcpus v0.8.0 // indirect + github.com/tklauser/numcpus v0.9.0 // indirect github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect - golang.org/x/crypto v0.24.0 // indirect - golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.21.0 // indirect - golang.org/x/text v0.16.0 // indirect + golang.org/x/crypto v0.28.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.19.0 // indirect gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect rsc.io/tmplfunc v0.0.3 // indirect ) diff --git a/geth-utils/l2geth/go.sum b/geth-utils/l2geth/go.sum index bb2bbc25ef..909daa9bd4 100644 --- a/geth-utils/l2geth/go.sum +++ b/geth-utils/l2geth/go.sum @@ -10,6 +10,8 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/bits-and-blooms/bitset v1.13.0 h1:bAQ9OPNFYbGHV6Nez0tmNI0RiEu7/hxlYJRUA0wFAVE= github.com/bits-and-blooms/bitset v1.13.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.14.3 h1:Gd2c8lSNf9pKXom5JtD7AaKO8o7fGQ2LtFj1436qilA= +github.com/bits-and-blooms/bitset v1.14.3/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= @@ -29,10 +31,16 @@ 
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5P github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= +github.com/consensys/bavard v0.1.22 h1:Uw2CGvbXSZWhqK59X0VG/zOjpTFuOMcPLStrp1ihI0A= +github.com/consensys/bavard v0.1.22/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/gnark-crypto v0.12.1 h1:lHH39WuuFgVHONRl3J0LRBtuYdQTumFSDtJF7HpyG8M= github.com/consensys/gnark-crypto v0.12.1/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= +github.com/consensys/gnark-crypto v0.14.0 h1:DDBdl4HaBtdQsq/wfMwJvZNE80sHidrK3Nfrefatm0E= +github.com/consensys/gnark-crypto v0.14.0/go.mod h1:CU4UijNPsHawiVGNxe9co07FkzCeWHHrb1li/n1XoU0= github.com/crate-crypto/go-kzg-4844 v1.0.0 h1:TsSgHwrkTKecKJ4kadtHi4b3xHW5dCFUDFnUp1TsawI= github.com/crate-crypto/go-kzg-4844 v1.0.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc= +github.com/crate-crypto/go-kzg-4844 v1.1.0 h1:EN/u9k2TF6OWSHrCCDBBU6GLNMq88OspHHlMnHfoyU4= +github.com/crate-crypto/go-kzg-4844 v1.1.0/go.mod h1:JolLjpSff1tCCJKaJx4psrlEdlXuJEC996PL3tTAFks= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -94,6 +102,8 @@ github.com/holiman/bloomfilter/v2 v2.0.3 h1:73e0e/V0tCydx14a0SCYS/EWCxgwLZ18CZcZ github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iURXE7ZOP9L9hSkA= github.com/holiman/uint256 v1.2.4 h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= +github.com/holiman/uint256 v1.3.1 h1:JfTzmih28bittyHM8z360dCjIA9dbPIBlcTI6lmctQs= +github.com/holiman/uint256 v1.3.1/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huin/goupnp v1.0.2 h1:RfGLP+h3mvisuWEyybxNq5Eft3NWhHLPeUN72kpKZoI= github.com/huin/goupnp v1.0.2/go.mod h1:0dxJBVBHqTMjIUMkESDTNgOOx/Mw5wYIfyFmdzSamkM= @@ -101,6 +111,8 @@ github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3 github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/iden3/go-iden3-crypto v0.0.16 h1:zN867xiz6HgErXVIV/6WyteGcOukE9gybYTorBMEdsk= github.com/iden3/go-iden3-crypto v0.0.16/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E= +github.com/iden3/go-iden3-crypto v0.0.17 h1:NdkceRLJo/pI4UpcjVah4lN/a3yzxRUGXqxbWcYh9mY= +github.com/iden3/go-iden3-crypto v0.0.17/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E= github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458 h1:6OvNmYgJyexcZ3pYbTI9jWx5tHo1Dee/tWbLMfPe2TA= @@ -114,6 +126,7 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxv github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/leanovate/gopter v0.2.9 
h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= +github.com/leanovate/gopter v0.2.11 h1:vRjThO1EKPb/1NsDXuDrzldR28RLkBflWYcU9CvzWu4= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= @@ -179,6 +192,8 @@ github.com/scroll-tech/go-ethereum v1.10.14-0.20240603142403-9ec83a509ac7 h1:z+l github.com/scroll-tech/go-ethereum v1.10.14-0.20240603142403-9ec83a509ac7/go.mod h1:UyUop/xDNmbxIIA38L8bjdG3nNbos2azGOqrfqTVgD4= github.com/scroll-tech/go-ethereum v1.10.14-0.20240621133406-517e5b4b0764 h1:lzOZjEuAcrHwDGdTTqn06Ylo9qk5tRkMFVV32VKZiBY= github.com/scroll-tech/go-ethereum v1.10.14-0.20240621133406-517e5b4b0764/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= +github.com/scroll-tech/go-ethereum v1.10.14-0.20241001202020-2ed7456a4bc0 h1:+l2Xru9yBH+TGoltTdShtzhFU18oNWdXb4J4BI4aZVI= +github.com/scroll-tech/go-ethereum v1.10.14-0.20241001202020-2ed7456a4bc0/go.mod h1:xRDJvaNUe7lCU2fB+AqyS7gahar+dfJPrUJplfXF4dw= github.com/scroll-tech/zktrie v0.7.1 h1:NrmZNjuBzsbrKePqdHDG+t2cXnimbtezPAFS0+L9ElE= github.com/scroll-tech/zktrie v0.7.1/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/scroll-tech/zktrie v0.8.2 h1:UMuIfA+jdgWMLmTgTL64Emo+zzMOdcnH0+eYdDcshxQ= @@ -202,6 +217,8 @@ github.com/supranational/blst v0.3.11 h1:LyU6FolezeWAhvQk0k6O/d49jqgO52MSDDfYgbe github.com/supranational/blst v0.3.11/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= github.com/supranational/blst v0.3.12 h1:Vfas2U2CFHhniv2QkUm2OVa1+pGTdqtpqm9NnhUUbZ8= github.com/supranational/blst v0.3.12/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= +github.com/supranational/blst v0.3.13 h1:AYeSxdOMacwu7FBmpfloBz5pbFXDmJL33RuwnKtmTjk= +github.com/supranational/blst v0.3.13/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= @@ -212,6 +229,8 @@ github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+F github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/tklauser/numcpus v0.8.0 h1:Mx4Wwe/FjZLeQsK/6kt2EOepwwSl7SmJrK5bV/dXYgY= github.com/tklauser/numcpus v0.8.0/go.mod h1:ZJZlAY+dmR4eut8epnzf0u/VwodKmryxR8txiloSqBE= +github.com/tklauser/numcpus v0.9.0 h1:lmyCHtANi8aRUgkckBgoDk1nHCux3n2cgkJLXdQGPDo= +github.com/tklauser/numcpus v0.9.0/go.mod h1:SN6Nq1O3VychhC1npsWostA+oW+VOQTxZrS604NSRyI= github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef h1:wHSqTBrZW24CsNJDfeh9Ex6Pm0Rcpc7qrgKBiL44vF4= github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -230,6 +249,8 @@ golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= golang.org/x/crypto v0.24.0/go.mod 
h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -251,6 +272,8 @@ golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -280,6 +303,8 @@ golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= @@ -289,6 +314,7 @@ golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= diff --git a/go.work b/go.work index 8e850659a2..9024fb14e0 100644 --- a/go.work +++ b/go.work @@ -1,4 +1,6 @@ -go 1.21 +go 1.22 + +toolchain go1.23.2 use ./geth-utils/l1geth diff --git a/go.work.sum b/go.work.sum index 3c4f5090af..be94857770 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,3 +1,9 @@ +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/firestore 
v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= github.com/Azure/azure-pipeline-go v0.2.2 h1:6oiIS9yaG6XCCzhgAgKFfIWyo4LLCiDhZot6ltoThhY= github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc= github.com/Azure/azure-sdk-for-go/sdk/azcore v0.21.1 h1:qoVeMsc9/fh/yhxVaA0obYjVH/oI/ihrOoMwsLS9KSA= @@ -28,6 +34,10 @@ github.com/aead/siphash v1.0.1 h1:FwHfE/T45KPKYuuSAKyyvE+oPWcaQ+CUmFW0bPlM+kg= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aws/aws-sdk-go-v2 v1.2.0 h1:BS+UYpbsElC82gB+2E2jiCBg36i8HlubTB/dO/moQ9c= github.com/aws/aws-sdk-go-v2 v1.21.2 h1:+LXZ0sgo8quN9UOKXXzAWRT3FWd4NxeXWOZom9pE7GA= github.com/aws/aws-sdk-go-v2 v1.21.2/go.mod h1:ErQhvNuEMhJjweavOYhxVkn2RUx7kQXVATHrjKtxIpM= @@ -65,9 +75,12 @@ github.com/aws/smithy-go v1.15.0 h1:PS/durmlzvAFpQHDs4wi4sNNP9ExsqZh6IlfdHXgKK8= github.com/aws/smithy-go v1.15.0/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/beorn7/perks v1.0.0 h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bits-and-blooms/bitset v1.5.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= github.com/bits-and-blooms/bitset v1.7.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= github.com/bits-and-blooms/bitset v1.10.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.14.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f h1:bAs4lUbRJpnnkd9VhRV3jjAVU7DJVjMaK+IsvSeZvFo= github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d h1:yJzD/yFppdVCf6ApMkVy8cUxV0XrxdP9rVf6D87/Mng= @@ -86,6 +99,8 @@ github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMn github.com/cloudflare/cloudflare-go v0.14.0 h1:gFqGlGl/5f9UGXAaKapCGUfaTCgRKKnzu2VvzMZlOFA= github.com/cloudflare/cloudflare-go v0.79.0 h1:ErwCYDjFCYppDJlDJ/5WhsSmzegAUe2+K9qgFyQDg3M= github.com/cloudflare/cloudflare-go v0.79.0/go.mod h1:gkHQf9xEubaQPEuerBuoinR9P8bf8a05Lq0X6WKy1Oc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cockroachdb/errors v1.8.1 h1:A5+txlVZfOqFBDa4mGz2bUWSp0aHElvHX2bKkdbQu+Y= github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f h1:o/kfcElHqOiXqcou5a3rIlMc7oJbMQkeLk0VQJ7zgqY= github.com/cockroachdb/pebble 
v0.0.0-20230928194634-aa077af62593 h1:aPEJyR4rPBvDmeyi+l/FS/VtA00IWvjeFvjen1m1l1A= @@ -96,8 +111,13 @@ github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcju github.com/consensys/gnark-crypto v0.4.1-0.20210426202927-39ac3d4b3f1f h1:C43yEtQ6NIf4ftFXD/V55gnGFgPbMQobd//YlnLjUJ8= github.com/consensys/gnark-crypto v0.9.1-0.20230105202408-1a7a29904a7c h1:llSLg4o9EgH3SrXky+Q5BqEYqV76NGKo07K5Ps2pIKo= github.com/consensys/gnark-crypto v0.10.0/go.mod h1:Iq/P3HHl0ElSjsg2E1gsMwhAyxnxoKK5nVyZKd+/KhU= +github.com/consensys/gnark-crypto v0.13.0/go.mod h1:wKqwsieaKPThcFkHe0d0zMsbHEUWFmZcG7KBCse210o= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/crate-crypto/go-ipa v0.0.0-20220523130400-f11357ae11c7 h1:6IrxszG5G+O7zhtkWxq6+unVvnrm1fqV2Pe+T95DUzw= github.com/crate-crypto/go-ipa v0.0.0-20230601170251-1830d0757c80 h1:DuBDHVjgGMPki7bAyh91+3cF1Vh34sAEdH8JQgbc2R0= github.com/crate-crypto/go-ipa v0.0.0-20230601170251-1830d0757c80/go.mod h1:gzbVz57IDJgQ9rLQwfSk696JGWof8ftznEL9GoAv3NI= @@ -131,8 +151,12 @@ github.com/dop251/goja v0.0.0-20230806174421-c933cf95e127/go.mod h1:QMWlm50DNe14 github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/ethereum/go-ethereum v1.10.26/go.mod h1:EYFyF19u3ezGLD4RqOkLq+ZCXzYbLoNDdZlMt7kyKFg= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= @@ -166,6 +190,7 @@ github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5Nq github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= github.com/gogo/protobuf v1.1.1 h1:72R+M5VuhED/KujmZVcIquuo8mBgX4oVda//DQb3PXo= github.com/gogo/status v1.1.0/go.mod 
h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY1WM= @@ -173,20 +198,32 @@ github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq github.com/golang-jwt/jwt/v4 v4.3.0 h1:kHL1vqdqWNfATmA0FNMdmZNMyZI1U6O31X4rlIPoBog= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.1.1-0.20200604201612-c04b05f3adfa h1:Q75Upo5UN4JbPFURXZ8nLKYUvF85dyFRop/vQ0Rv+64= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE= github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE2YxKWtnnQls6rQjjW5oV7qg2U= github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg= github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gotestyourself/gotestyourself v1.4.0 h1:CDSlSIuRL/Fsc72Ln5lMybtrCvSRDddsHsDRG/nP7Rg= @@ -195,15 +232,34 @@ github.com/graph-gophers/graphql-go v0.0.0-20201113091052-beb923fada29 h1:sezaKh github.com/graph-gophers/graphql-go v0.0.0-20201113091052-beb923fada29/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc= github.com/graph-gophers/graphql-go v1.3.0 h1:Eb9x/q6MFpCLz7jBCiP/WTxjSDrYLR1QY41SORZyNJ0= 
github.com/graph-gophers/graphql-go v1.3.0/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/guptarohit/asciigraph v0.5.5/go.mod h1:dYl5wwK4gNsnFf9Zp+l06rFiDZ5YtXM6x7SRWZ3KGag= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-bexpr v0.1.10 h1:9kuI5PFotCboP3dkDYFr/wi0gg0QVbSNz5oFRpxn4uE= github.com/hashicorp/go-bexpr v0.1.10/go.mod h1:oxlubA2vC/gFVfX1A6JGp7ls7uCDlfJn732ehYYg+g0= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA= github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/holiman/billy v0.0.0-20230718173358-1c7e68d277a7 h1:3JQNjnMRil1yD0IfZKHF9GxxWKDJGj8I0IqOUol//sw= github.com/holiman/billy v0.0.0-20230718173358-1c7e68d277a7/go.mod h1:5GuXa7vkL8u9FkFuWdVvfR5ix8hRB7DbOAaYULamFpc= github.com/holiman/billy v0.0.0-20240216141850-2abb0c79d3c4/go.mod h1:5GuXa7vkL8u9FkFuWdVvfR5ix8hRB7DbOAaYULamFpc= @@ -215,6 +271,7 @@ github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639 h1:mV02weK github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod 
h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/influxdata/influxdb v1.8.3 h1:WEypI1BQFTT4teLM+1qkEcvUi0dAvopAI/ir0vAiBg8= github.com/influxdata/influxdb v1.8.3/go.mod h1:JugdFhsvvI8gadxOI6noqNeeBHvWNTbfYGtiAn+2jhI= github.com/influxdata/influxdb-client-go/v2 v2.4.0 h1:HGBfZYStlx3Kqvsv1h2pJixbCl/jhnFtxpKFAv9Tu5k= @@ -236,6 +293,7 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfC github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jrick/logrotate v1.0.0 h1:lQ1bL/n9mBNeIXoTUoYRlK4dHuNJVofX9oWqBtPnSzI= github.com/json-iterator/go v1.1.6 h1:MrUvLMLTMxbqFJ9kzlvat/rYZqZnW3u4wkLzWTaFwKs= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0 h1:TDTW5Yz1mjftljbcKqRcrYhd4XeOoI98t+9HbQbYf7g= github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U= github.com/karalabe/hid v1.0.1-0.20240306101548-573246063e52/go.mod h1:qk1sX/IBgppQNcGCRoj90u6EGC056EBoIc1oEjCWla8= @@ -255,6 +313,7 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23 h1:FOOIBWrEkLgmlgGfM github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515 h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= @@ -262,12 +321,15 @@ github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4F github.com/labstack/echo/v4 v4.9.0/go.mod h1:xkCDAdFCIf8jsFQ5NnbK7oqaF/yU1A1X20Ltm0OvSks= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= +github.com/leanovate/gopter v0.2.11/go.mod h1:aK3tzZP/C+p1m3SPRE4SYZFGP7jjkuSI4f7Xvpt0S9c= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailgun/raymond/v2 v2.0.46/go.mod h1:lsgvL50kgt1ylcFJYZiULi5fjPBkkhNfj4KA0W54Z18= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= @@ -278,6 +340,7 @@ github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxec github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= 
github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d h1:oNAwILwmgWKFpuU+dXvI6dl9jG2mAWAZLX3r9s0PPiw= github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= @@ -290,7 +353,15 @@ github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/ github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/microcosm-cc/bluemonday v1.0.21/go.mod h1:ytNkv4RrDrLJ2pqlsSI46O6IVXmZOBBD4SaJyDwwTkM= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/pointerstructure v1.2.0 h1:O+i9nHnXS3l/9Wu7r4NrEdwA2VFTicjUEN1uBnDo34A= @@ -303,14 +374,20 @@ github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hz github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416 h1:shk/vn9oCoOTmwcouEdwIeOtOGA/ELRUw/GwvxwfT+0= github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E= +github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= +github.com/neelance/sourcemap v0.0.0-20200213170602-2833bce08e4c/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/onsi/ginkgo/v2 v2.0.0 h1:CcuG/HvWNkkaqCUpJifQY8z7qEMBJya6aLPx6ftGyjQ= github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7 h1:oYW+YCJ1pachXTQmzR3rNLYGGz4g/UgFcjb28p/viDM= 
github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7/go.mod h1:CRroGNssyjTd/qIG2FyxByd2S8JEAZXBl4qUrZf8GS0= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v1.0.0 h1:vrDKnkGzuGvhNAL56c7DBz29ZL+KxnoR0x7enabFceM= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90 h1:S/YWwWx/RA8rT8tKFRuGUZhuA90OyIBpPCXkcbwU8DE= @@ -323,31 +400,53 @@ github.com/protolambda/messagediff v1.4.0/go.mod h1:LboJp0EwIbJsePYpzh5Op/9G1/4m github.com/protolambda/zrnt v0.32.2/go.mod h1:A0fezkp9Tt3GBLATSPIbuY4ywYESyAuc/FFmPKg8Lqs= github.com/protolambda/ztyp v0.2.2/go.mod h1:9bYgKGqg3wJqT9ac1gI2hnVb0STQq7p/1lapqrqY1dU= github.com/prysmaticlabs/gohashtree v0.0.1-alpha.0.20220714111606-acbb2962fb48/go.mod h1:4pWaT30XoEx1j8KNJf3TV+E3mQkaufn7mf+jRNb/Fuk= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g= github.com/scroll-tech/go-ethereum v1.10.14-0.20231108100028-cb76ecd42bf7 h1:xDtuJk3CjD46kHw87Xe9o/1PcvTVgNZYoT2vGgRmO5s= github.com/scroll-tech/go-ethereum v1.10.14-0.20231108100028-cb76ecd42bf7/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= github.com/sirupsen/logrus v1.2.0 h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v1.8.1/go.mod 
h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= github.com/spf13/cobra v1.5.0 h1:X+jTBEBqF0bHN+9cSMgmfuvv2VHJ9ezmFNf9Y/XstYU= github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJoUM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/supranational/blst v0.3.8-0.20220526154634-513d2456b344 h1:m+8fKfQwCAy1QjzINvKe/pYtLjo2dl59x2w9YSEJxuY= github.com/tdewolff/minify/v2 v2.12.4/go.mod h1:h+SRvSIX3kwgwTFOpSckvSxgax3uy8kZTSF1Ojrr3bk= github.com/tdewolff/parse/v2 v2.6.4/go.mod h1:woz0cgbLwFdtbjJu8PIKxhW05KplTFQkOdX78o+Jgrs= @@ -366,31 +465,59 @@ github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRT github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0= github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/automaxprocs 
v1.5.2 h1:2LxUOGiR3O6tw8ui5sZa2LAaHnsviZdVOUZw4fvbnME= go.uber.org/automaxprocs v1.5.2/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0 h1:b9gGHsz9/HhJ3HF5DHQytPpuwocVTChQJK3AvoLRD5I= golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= 
+golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos= golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= @@ -398,17 +525,38 @@ golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/perf v0.0.0-20230113213139-801c7ef9e5c5/go.mod h1:UBKtEnL8aqnd+0JHqZ+2qoMDwtuy6cYhhKNoHLBiTQc= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201101102859-da207088b7d1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -422,11 +570,16 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= @@ -437,37 +590,81 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod 
h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220922220347-f3bd1da661af h1:Yx9k8YCG3dvF87UAn2tu2HQLf2dt/eR1bXxpLMWeH+Y= golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e h1:4nW4NLDYnU28ojHaHO8OVxFHk/aQ33U01a9cjED+pzE= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.2.0 h1:G6AHpWxTMGY1KyEYoAQ5WTtIekUUvDNjan3ugu60JvE= golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df h1:5Pf6pFKu98ODmgnpvkJ3kFUOQGGLIzLIkbzUHp47618= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod 
h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20230227214838-9b19f0bdc514/go.mod h1:TvhZT5f700eVlTNwND1xoEZQeWTB2RY/65kplwl/bFA= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= @@ -475,6 +672,7 @@ gopkg.in/olebedev/go-duktape.v3 v3.0.0-20200619000410-60c24ae608a6 h1:a6cXbcDDUk gopkg.in/olebedev/go-duktape.v3 v3.0.0-20200619000410-60c24ae608a6/go.mod h1:uAJfkITjFhyEEuUfm7bsmCZRbW5WRq8s9EY8HZ6hCns= 
gopkg.in/urfave/cli.v1 v1.20.0 h1:NdAVW6RYxDif9DhDHaAortIu956m2c0v+09AZBPTbE0= gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 987b86ef2c..304774784a 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -34,7 +34,9 @@ serde.workspace = true serde_derive = "1.0" serde_json = { workspace = true, features = ["unbounded_depth"] } serde_stacker.workspace = true +thiserror = "1.0" sha2 ="0.10.2" +revm = { version = "3.5.0", default-features = false, features = ["std"] } [features] default = ["scroll"] diff --git a/prover/src/aggregator.rs b/prover/src/aggregator.rs deleted file mode 100644 index 0a5a63f9e3..0000000000 --- a/prover/src/aggregator.rs +++ /dev/null @@ -1,6 +0,0 @@ -mod prover; -mod verifier; - -pub use self::prover::{check_chunk_hashes, Prover}; -pub use aggregator::{eip4844, BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS}; -pub use verifier::Verifier; diff --git a/prover/src/aggregator/error.rs b/prover/src/aggregator/error.rs new file mode 100644 index 0000000000..808f9ce60d --- /dev/null +++ b/prover/src/aggregator/error.rs @@ -0,0 +1,43 @@ +/// Errors encountered in the proof generation pipeline for batch and bundle proving. +#[derive(thiserror::Error, Debug)] +pub enum BatchProverError { + /// Represents a mismatch in the verifying key at the specified proof layer. + #[error("verifying key mismatch: layer={0}, expected={1}, found={2}")] + VerifyingKeyMismatch(crate::config::LayerId, String, String), + /// Verifying key for the specified layer was not found in the prover. + #[error("verifying key not found: layer={0}, expected={1}")] + VerifyingKeyNotFound(crate::config::LayerId, String), + /// Sanity check failure indicating that the [`Snark`][snark_verifier_sdk::Snark] + /// [`protocol`][snark_verifier::Protocol] did not match the expected protocols. + #[error("SNARK protocol mismatch: index={0}, expected={1}, found={2}")] + ChunkProtocolMismatch(usize, String, String), + /// Indicates that after generating an EVM verifier contract, the proof itself could not be + /// verified successfully, implying that this sanity check failed. + #[error("EVM verifier contract could not verify proof")] + SanityEVMVerifier, + /// Error indicating that the verification of batch proof failed. + #[error("proof verification failure")] + Verification, + /// Error indicating that the verifier contract's deployment code is not found. + #[error("EVM verifier deployment code not found!")] + VerifierCodeMissing, + /// Error indicating that in the final [`BundleProof`][crate::BundleProofV2] the number of + /// instances found does not match the number of instances expected. + #[error("number of instances in bundle proof mismatch! expected={0}, got={1}")] + PublicInputsMismatch(usize, usize), + /// This variant represents other errors. 
+ #[error("custom: {0}")] + Custom(String), +} + +impl From for BatchProverError { + fn from(value: String) -> Self { + Self::Custom(value) + } +} + +impl From for BatchProverError { + fn from(value: anyhow::Error) -> Self { + Self::Custom(value.to_string()) + } +} diff --git a/prover/src/aggregator/mod.rs b/prover/src/aggregator/mod.rs new file mode 100644 index 0000000000..67e9577fd8 --- /dev/null +++ b/prover/src/aggregator/mod.rs @@ -0,0 +1,20 @@ +mod error; +pub use error::BatchProverError; + +mod prover; +pub use prover::{check_chunk_hashes, Prover}; + +mod recursion; +pub use recursion::RecursionTask; + +mod verifier; +pub use verifier::Verifier; + +/// Re-export some types from the [`aggregator`] crate. +pub use aggregator::{eip4844, BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS}; + +/// Alias for convenience. +pub type BatchProver<'a> = Prover<'a>; + +/// Alias for convenience. +pub type BatchVerifier<'a> = Verifier<'a>; diff --git a/prover/src/aggregator/prover.rs b/prover/src/aggregator/prover.rs index fcfe4831ee..b557862a9f 100644 --- a/prover/src/aggregator/prover.rs +++ b/prover/src/aggregator/prover.rs @@ -1,46 +1,82 @@ -use std::{collections::BTreeMap, env, iter::repeat}; +use std::env; use aggregator::{ eip4844::decode_blob, BatchData, BatchHash, BatchHeader, ChunkInfo, MAX_AGG_SNARKS, }; -use anyhow::{bail, Result}; use eth_types::H256; -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; use sha2::{Digest, Sha256}; use snark_verifier_sdk::Snark; use crate::{ + aggregator::BatchProverError, common, config::LayerId, - consts::{BATCH_KECCAK_ROW, BATCH_VK_FILENAME, BUNDLE_VK_FILENAME, CHUNK_PROTOCOL_FILENAME}, - io::{force_to_read, try_to_read}, - proof::BundleProof, + consts::{ + BATCH_KECCAK_ROW, BATCH_VK_FILENAME, BUNDLE_VK_FILENAME, FD_HALO2_CHUNK_PROTOCOL, + FD_SP1_CHUNK_PROTOCOL, + }, types::BundleProvingTask, - BatchProof, BatchProvingTask, ChunkProof, + utils::{force_to_read, try_to_read}, + BatchProofV2, BatchProofV2Metadata, BatchProvingTask, BundleProofV2, ChunkKind, ChunkProof, + ParamsMap, ProverError, }; +/// Prover capable of generating [`BatchProof`] and [`BundleProof`]. #[derive(Debug)] pub struct Prover<'params> { - // Make it public for testing with inner functions (unnecessary for FFI). + /// Encapsulating the common prover. pub prover_impl: common::Prover<'params>, - pub chunk_protocol: Vec, + /// The SNARK [`protocol`][snark_verifier::Protocol] for the halo2-based route, i.e. where + /// the inner SNARK is generated using the [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit]. + halo2_protocol: Vec, + /// The SNARK [`protocol`][snark_verifier::Protocol] for the sp1-based route, i.e. where the + /// inner proof is an Sp1 compressed proof, later SNARKified using a halo2-backend. + sp1_protocol: Vec, + /// The verifying key for [`Layer-4`][LayerId::Layer4] in the proof generation pipeline, i.e. + /// the [`CompressionCircuit`][aggregator::CompressionCircuit] SNARK on top of the + /// [`BatchCircuit`][aggregator::BatchCircuit] SNARK. + /// + /// This is an optional field, as it is generated on-the-fly for dev-mode, while the verifying + /// key is expected in production environments. + /// + /// The verifying key is specified in its raw byte-encoded format. raw_vk_batch: Option>, + /// The verifying key for [`Layer-6`][LayerId::Layer6] in the proof generation pipeline, i.e. 
+ /// the [`CompressionCircuit`][aggregator::CompressionCircuit] SNARK on top of the + /// [`RecursionCircuit`][aggregator::RecursionCircuit] SNARK. + /// + /// This is an optional field, as it is generated on-the-fly for dev-mode, while the verifying + /// key is expected in production environments. + /// + /// The verifying key is specified in its raw byte-encoded format. raw_vk_bundle: Option<Vec<u8>>, } impl<'params> Prover<'params> { - pub fn from_params_and_assets( - params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, - assets_dir: &str, - ) -> Self { + /// Construct batch prover given a map of degree to KZG setup parameters and a path to the + /// assets directory. + /// + /// Panics if the SNARK [`protocols`][snark_verifier::Protocol] for both [`chunk proof variants`][crate::proof::ChunkKind] + /// are not found in the assets directory. + pub fn from_params_and_assets(params_map: &'params ParamsMap, assets_dir: &str) -> Self { + // Set the number of rows in the keccak-circuit's config. The value is eventually read + // to configure the keccak config at runtime. log::debug!("set env KECCAK_ROWS={}", BATCH_KECCAK_ROW.to_string()); env::set_var("KECCAK_ROWS", BATCH_KECCAK_ROW.to_string()); + // Construct the inner common prover. let prover_impl = common::Prover::from_params_map(params_map); - let chunk_protocol = force_to_read(assets_dir, &CHUNK_PROTOCOL_FILENAME); + // The SNARK protocols for both variants of the Layer-2 SNARK must be available in the + // assets directory before setting up the batch prover. The SNARK protocols are + // specifically for the halo2-route and sp1-route of generating chunk proofs. + let halo2_protocol = force_to_read(assets_dir, &FD_HALO2_CHUNK_PROTOCOL); + let sp1_protocol = force_to_read(assets_dir, &FD_SP1_CHUNK_PROTOCOL); + + // Try to read the verifying key for both Layer-4 and Layer-6 compression circuits. let raw_vk_batch = try_to_read(assets_dir, &BATCH_VK_FILENAME); let raw_vk_bundle = try_to_read(assets_dir, &BUNDLE_VK_FILENAME); + if raw_vk_batch.is_none() { log::warn!( "batch-prover: {} doesn't exist in {}", @@ -58,122 +94,201 @@ impl<'params> Prover<'params> { Self { prover_impl, - chunk_protocol, + halo2_protocol, + sp1_protocol, raw_vk_batch, raw_vk_bundle, } } - // Return true if chunk proofs are valid (same protocol), false otherwise. - pub fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProof]) -> bool { - chunk_proofs.iter().enumerate().all(|(i, proof)| { - let result = proof.protocol == self.chunk_protocol; - if !result { - log::error!( - "Non-match protocol of chunk-proof index-{}: expected = {:x}, actual = {:x}", - i, - Sha256::digest(&self.chunk_protocol), - Sha256::digest(&proof.protocol), - ); - } - - result - }) - } - + /// Returns the optional verifying key for [`Layer-4`][LayerId::Layer4] in byte-encoded form. pub fn get_batch_vk(&self) -> Option<Vec<u8>> { self.prover_impl .raw_vk(LayerId::Layer4.id()) .or_else(|| self.raw_vk_batch.clone()) } + /// Returns the optional verifying key for [`Layer-6`][LayerId::Layer6] in byte-encoded form. pub fn get_bundle_vk(&self) -> Option<Vec<u8>> { self.prover_impl .raw_vk(LayerId::Layer6.id()) .or_else(|| self.raw_vk_bundle.clone()) } - // Return the batch proof for a BatchProvingTask. - // TODO: should we rename the method name to `load_or_gen_batch_proof`? + /// Generate [`BatchProof`] given a [`BatchProvingTask`].
+ /// + /// The [`Layer-2`][LayerId::Layer2] SNARKs representing chunk proofs are aggregated using the + /// [`Layer-3`][LayerId::Layer3] [`BatchCircuit`][aggregator::BatchCircuit] and this SNARK is + /// then compressed using the [`Layer-4`][LayerId::Layer4] + /// [`CompressionCircuit`][aggregator::CompressionCircuit]. + /// + /// Returns early if a batch proof with a matching proof identifier is found on disk in the + /// provided output directory. pub fn gen_batch_proof( &mut self, batch: BatchProvingTask, name: Option<&str>, output_dir: Option<&str>, - ) -> Result<BatchProof> { + ) -> Result<BatchProofV2, ProverError> { + // Denotes the identifier for this batch proving task. Eventually a generated proof is + // cached to disk using this identifier. let name = name.map_or_else(|| batch.identifier(), |name| name.to_string()); - log::info!("gen_batch_proof with identifier {name}"); + // Return early if the batch proof was found on disk. if let Some(output_dir) = output_dir { - if let Ok(batch_proof) = BatchProof::from_json_file(output_dir, &name) { - log::info!("batch proof loaded from {output_dir}"); + if let Ok(batch_proof) = BatchProofV2::from_json(output_dir, &name) { return Ok(batch_proof); } } + // Load from disk or generate the layer-3 SNARK using the batch circuit. let (layer3_snark, batch_hash) = - self.load_or_gen_last_agg_snark::<MAX_AGG_SNARKS>(&name, batch, output_dir)?; - - // Load or generate batch compression thin proof (layer-4). - let layer4_snark = self.prover_impl.load_or_gen_comp_snark( - &name, - LayerId::Layer4.id(), - true, - LayerId::Layer4.degree(), - layer3_snark, - output_dir, - )?; + self.load_or_gen_last_agg_snark::<MAX_AGG_SNARKS>(batch, &name, output_dir)?; + + // Load from disk or generate the layer-4 SNARK using thin compression circuit. + let layer4_snark = self + .prover_impl + .load_or_gen_comp_snark( + &name, + LayerId::Layer4.id(), + true, + LayerId::Layer4.degree(), + layer3_snark, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; log::info!("Got batch compression thin proof (layer-4): {name}"); - self.check_batch_vk(); + // Sanity check on the layer-4 verifying key. + self.check_batch_vk()?; + // Get the proving key for layer-4. let pk = self.prover_impl.pk(LayerId::Layer4.id()); - let batch_proof = BatchProof::new(layer4_snark, pk, batch_hash)?; + + // Build a wrapper around the layer-4 SNARK, aka batch proof. + let batch_proof_metadata = BatchProofV2Metadata::new(&layer4_snark, batch_hash)?; + let batch_proof = BatchProofV2::new(layer4_snark, pk, batch_proof_metadata)?; + + // If an output directory was provided, write the generated batch proof and layer-4 + // verifying key to disk. if let Some(output_dir) = output_dir { - batch_proof.dump_vk(output_dir, "agg")?; batch_proof.dump(output_dir, &name)?; - log::debug!("batch proof dumped to {output_dir}"); } Ok(batch_proof) } - // Generate layer3 snark. - // Then it could be used to generate a layer4 proof. - pub fn load_or_gen_last_agg_snark( + /// Generate [`BundleProof`] given a [`BundleProvingTask`]. + /// + /// The bundle proving task consists of a list of [`Layer-4`][LayerId::Layer4] + /// [`BatchProofs`][BatchProof] representing the batches being bundled. + /// + /// The [`RecursionCircuit`][aggregator::RecursionCircuit] recursively proves the correctness + /// of all those batch proofs.
+ pub fn gen_bundle_proof( &mut self, - name: &str, - batch: BatchProvingTask, + bundle: BundleProvingTask, + name: Option<&str>, output_dir: Option<&str>, - ) -> Result<(Snark, H256)> { - let real_chunk_count = batch.chunk_proofs.len(); - assert!((1..=MAX_AGG_SNARKS).contains(&real_chunk_count)); + ) -> Result<BundleProofV2, ProverError> { + // Denotes the identifier for this bundle proving task. Eventually a generated proof is + // written to disk using this name. + let name = name.map_or_else(|| bundle.identifier(), |name| name.to_string()); + + // Collect the layer-4 SNARKs from the batch proofs. + let bundle_snarks = bundle + .batch_proofs + .iter() + .map(|proof| proof.into()) + .collect::<Vec<_>>(); - if !self.check_protocol_of_chunks(&batch.chunk_proofs) { - bail!("non-match-chunk-protocol: {name}"); + // Load from disk or generate a layer-5 Recursive Circuit SNARK. + let layer5_snark = self + .prover_impl + .load_or_gen_recursion_snark( + &name, + LayerId::Layer5.id(), + LayerId::Layer5.degree(), + &bundle_snarks, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; + + // Load from disk or generate a layer-6 Compression Circuit SNARK. Since we use a Keccak + // hasher for the proof transcript at layer-6, the output proof is EVM-verifiable. + let layer6_proof = self + .prover_impl + .load_or_gen_comp_evm_proof( + &name, + LayerId::Layer6.id(), + true, + LayerId::Layer6.degree(), + layer5_snark, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; + + // Sanity check for the layer-6 verifying key. + self.check_bundle_vk()?; + + // Wrap the layer-6 proof into the wrapper Bundle Proof. + let bundle_proof = BundleProofV2::new_from_raw( + &layer6_proof.proof.proof, + &layer6_proof.proof.instances, + &layer6_proof.proof.vk, + )?; + + // If an output directory was provided, write the bundle proof to disk. + if let Some(output_dir) = output_dir { + bundle_proof.dump(output_dir, "recursion")?; } - let mut chunk_hashes: Vec<_> = batch + + Ok(bundle_proof) + } + + /// Generate the [`Layer-3`][LayerId::Layer3] SNARK using the [`BatchCircuit`][aggregator::BatchCircuit]. + /// + /// Returns early if the SNARK was located on disk. + fn load_or_gen_last_agg_snark( + &mut self, + batch: BatchProvingTask, + name: &str, + output_dir: Option<&str>, + ) -> Result<(Snark, H256), ProverError> { + // Early return with an error if the number of SNARKs to aggregate is not within limits. + let num_chunks = batch.chunk_proofs.len(); + if !(1..=MAX_AGG_SNARKS).contains(&num_chunks) { + return Err(BatchProverError::Custom(format!( + "1 <= num_chunks <= MAX_AGG_SNARKS, found={num_chunks}" + )) + .into()); + } + + // Sanity check on the chunk proof's SNARK protocols. + self.check_protocol_of_chunks(&batch.chunk_proofs)?; + + // Split chunk info and snarks from the batch proving task. + let (mut chunk_infos, mut layer2_snarks): (Vec<_>, Vec<_>) = batch .chunk_proofs .iter()
- chunk_hashes.extend(repeat(padding_chunk_hash).take(MAX_AGG_SNARKS - real_chunk_count)); - layer2_snarks.extend(repeat(padding_snark).take(MAX_AGG_SNARKS - real_chunk_count)); + .map(|proof| (proof.chunk_info.clone(), proof.to_snark())) + .unzip(); + + // Pad the SNARKs with the last SNARK until we have MAX_AGG_SNARKS number of SNARKs. + if num_chunks < MAX_AGG_SNARKS { + let padding_chunk_info = { + let mut last_chunk = chunk_infos.last().expect("num_chunks > 0").clone(); + last_chunk.is_padding = true; + last_chunk + }; + let padding_snark = layer2_snarks.last().expect("num_chunks > 0").clone(); + + // Extend to MAX_AGG_SNARKS for both chunk infos and layer-2 snarks. + chunk_infos.resize(MAX_AGG_SNARKS, padding_chunk_info); + layer2_snarks.resize(MAX_AGG_SNARKS, padding_snark); } - // Load or generate aggregation snark (layer-3). + // Reconstruct the batch header. let batch_header = BatchHeader::construct_from_chunks( batch.batch_header.version, batch.batch_header.batch_index, @@ -181,160 +296,187 @@ impl<'params> Prover<'params> { batch.batch_header.total_l1_message_popped, batch.batch_header.parent_batch_hash, batch.batch_header.last_block_timestamp, - &chunk_hashes, + &chunk_infos, &batch.blob_bytes, ); + let batch_hash = batch_header.batch_hash(); - // sanity check between: - // - BatchHeader supplied from infra - // - BatchHeader re-constructed by circuits + // Sanity checks between the Batch Header supplied vs reconstructed. // - // for the fields data_hash, z, y, blob_versioned_hash. - assert_eq!( - batch_header.data_hash, batch.batch_header.data_hash, - "BatchHeader(sanity) mismatch data_hash expected={}, got={}", - batch.batch_header.data_hash, batch_header.data_hash - ); - assert_eq!( - batch_header.blob_data_proof[0], batch.batch_header.blob_data_proof[0], - "BatchHeader(sanity) mismatch blob data proof (z) expected={}, got={}", - batch.batch_header.blob_data_proof[0], batch_header.blob_data_proof[0], - ); - assert_eq!( - batch_header.blob_data_proof[1], batch.batch_header.blob_data_proof[1], - "BatchHeader(sanity) mismatch blob data proof (y) expected={}, got={}", - batch.batch_header.blob_data_proof[1], batch_header.blob_data_proof[1], - ); - assert_eq!( - batch_header.blob_versioned_hash, batch.batch_header.blob_versioned_hash, - "BatchHeader(sanity) mismatch blob versioned hash expected={}, got={}", - batch.batch_header.blob_versioned_hash, batch_header.blob_versioned_hash, - ); + // Batch's data_hash field must match. + if batch_header.data_hash != batch.batch_header.data_hash { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) data_hash mismatch! expected={}, got={}", + batch.batch_header.data_hash, batch_header.data_hash + )) + .into()); + } + // Batch's random challenge point (z) must match. + if batch_header.blob_data_proof[0] != batch.batch_header.blob_data_proof[0] { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) random challenge (z) mismatch! expected={}, got={}", + batch.batch_header.blob_data_proof[0], batch_header.blob_data_proof[0], + )) + .into()); + } + // Batch's evaluation at z, i.e. y, must match. + if batch_header.blob_data_proof[1] != batch.batch_header.blob_data_proof[1] { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) evaluation (y) mismatch! expected={}, got={}", + batch.batch_header.blob_data_proof[1], batch_header.blob_data_proof[1], + )) + .into()); + } + // The versioned hash of the blob that encodes the batch must match. 
+ if batch_header.blob_versioned_hash != batch.batch_header.blob_versioned_hash { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) blob versioned_hash mismatch! expected={}, got={}", + batch.batch_header.blob_versioned_hash, batch_header.blob_versioned_hash, + )) + .into()); + } - let batch_hash = batch_header.batch_hash(); + // Build relevant types that are used for batch circuit witness assignments. let batch_info: BatchHash = - BatchHash::construct(&chunk_hashes, batch_header, &batch.blob_bytes); + BatchHash::construct(&chunk_infos, batch_header, &batch.blob_bytes); let batch_data: BatchData = BatchData::from(&batch_info); - // sanity check: - // - conditionally decoded blob should match batch data. + // Sanity check: validate that conditionally decoded blob should match batch data. let batch_bytes = batch_data.get_batch_data_bytes(); - let decoded_blob_bytes = decode_blob(&batch.blob_bytes)?; - assert_eq!( - batch_bytes, decoded_blob_bytes, - "BatchProvingTask(sanity) mismatch batch bytes and decoded blob bytes", - ); + let decoded_blob_bytes = + decode_blob(&batch.blob_bytes).map_err(|e| BatchProverError::Custom(e.to_string()))?; + if batch_bytes != decoded_blob_bytes { + return Err(BatchProverError::Custom(format!( + "BatchProvingTask(sanity) decoded blob bytes do not match batch bytes! len(expected)={}, len(got)={}", + decoded_blob_bytes.len(), + batch_bytes.len(), + )).into()); + } - let layer3_snark = self.prover_impl.load_or_gen_agg_snark( - name, - LayerId::Layer3.id(), - LayerId::Layer3.degree(), - batch_info, - &layer2_snarks, - output_dir, - )?; - log::info!("Got aggregation snark (layer-3): {name}"); + // Load from disk or generate the layer-3 SNARK using the batch circuit. + let layer3_snark = self + .prover_impl + .load_or_gen_agg_snark( + name, + LayerId::Layer3.id(), + LayerId::Layer3.degree(), + batch_info, + &self.halo2_protocol, + &self.sp1_protocol, + &layer2_snarks, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; Ok((layer3_snark, batch_hash)) } - // Given a bundle proving task that consists of a list of batch proofs for all intermediate - // batches, bundles them into a single bundle proof using the RecursionCircuit, effectively - // proving the validity of all those batches. - pub fn gen_bundle_proof( - &mut self, - bundle: BundleProvingTask, - name: Option<&str>, - output_dir: Option<&str>, - ) -> Result { - let name = name.map_or_else(|| bundle.identifier(), |name| name.to_string()); - - let bundle_snarks = bundle - .batch_proofs - .iter() - .map(|proof| proof.into()) - .collect::>(); - - let layer5_snark = self.prover_impl.load_or_gen_recursion_snark( - &name, - LayerId::Layer5.id(), - LayerId::Layer5.degree(), - &bundle_snarks, - output_dir, - )?; - - let layer6_evm_proof = self.prover_impl.load_or_gen_comp_evm_proof( - &name, - LayerId::Layer6.id(), - true, - LayerId::Layer6.degree(), - layer5_snark, - output_dir, - )?; - - self.check_bundle_vk(); - - let bundle_proof: BundleProof = layer6_evm_proof.proof.into(); - if let Some(output_dir) = output_dir { - bundle_proof.dump(output_dir, "recursion")?; + /// Sanity check: validate that the SNARK [`protocol`][snark_verifier::Protocol] for the SNARKs + /// being aggregated by the [`BatchCircuit`][aggregator::BatchCircuit] match the expected SNARK + /// protocols conditional to the chunk proof generation route utilised, i.e. halo2 or sp1. 
+ fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProof]) -> Result<(), ProverError> { + for (i, proof) in chunk_proofs.iter().enumerate() { + let expected = match proof.chunk_kind { + ChunkKind::Halo2 => &self.halo2_protocol, + ChunkKind::Sp1 => &self.sp1_protocol, + }; + if proof.protocol.ne(expected) { + let expected_digest = format!("{:x}", Sha256::digest(expected)); + let found_digest = format!("{:x}", Sha256::digest(&proof.protocol)); + log::error!( + "BatchProver: SNARK protocol mismatch! index={}, expected={}, found={}", + i, + expected_digest, + found_digest, + ); + return Err(BatchProverError::ChunkProtocolMismatch( + i, + expected_digest, + found_digest, + ) + .into()); + } } - Ok(bundle_proof) + Ok(()) } - /// Check vk generated is same with vk loaded from assets - fn check_batch_vk(&self) { - if self.raw_vk_batch.is_some() { - let gen_vk = self - .prover_impl - .raw_vk(LayerId::Layer4.id()) - .unwrap_or_default(); - if gen_vk.is_empty() { - log::warn!("no gen_vk found, skip check_vk"); - return; - } - let init_vk = self.raw_vk_batch.clone().unwrap_or_default(); - if gen_vk != init_vk { - log::error!( - "batch-prover: generated VK is different with init one - gen_vk = {}, init_vk = {}", - base64::encode(gen_vk), - base64::encode(init_vk), - ); + /// Sanity check for the [`VerifyinKey`][halo2_proofs::plonk::VerifyingKey] used to generate + /// Layer-4 SNARK that is wrapped inside the [`BatchProof`]. The prover generated VK is + /// expected to match the VK used to initialise the prover. + fn check_batch_vk(&self) -> Result<(), ProverError> { + let layer = LayerId::Layer4; + if let Some(expected_vk) = self.raw_vk_batch.as_ref() { + let base64_exp_vk = base64::encode(expected_vk); + if let Some(generated_vk) = self.prover_impl.raw_vk(layer.id()).as_ref() { + let base64_gen_vk = base64::encode(generated_vk); + if generated_vk.ne(expected_vk) { + log::error!( + "BatchProver: {:?} VK mismatch! found={}, expected={}", + layer, + base64_gen_vk, + base64_exp_vk, + ); + return Err(BatchProverError::VerifyingKeyMismatch( + layer, + base64_gen_vk, + base64_exp_vk, + ) + .into()); + } + } else { + return Err(BatchProverError::VerifyingKeyNotFound(layer, base64_exp_vk).into()); } } + + Ok(()) } - /// Check vk generated is same with vk loaded from assets - fn check_bundle_vk(&self) { - if self.raw_vk_bundle.is_some() { - let gen_vk = self - .prover_impl - .raw_vk(LayerId::Layer6.id()) - .unwrap_or_default(); - if gen_vk.is_empty() { - log::warn!("no gen_vk found, skip check_vk"); - return; - } - let init_vk = self.raw_vk_bundle.clone().unwrap_or_default(); - if gen_vk != init_vk { - log::error!( - "bundle-prover: generated VK is different with init one - gen_vk = {}, init_vk = {}", - base64::encode(gen_vk), - base64::encode(init_vk), - ); + /// Sanity check for the [`VerifyinKey`][halo2_proofs::plonk::VerifyingKey] used to generate + /// Layer-6 SNARK that is wrapped inside the [`BundleProof`]. The prover generated VK is + /// expected to match the VK used to initialise the prover. + fn check_bundle_vk(&self) -> Result<(), ProverError> { + let layer = LayerId::Layer6; + if let Some(expected_vk) = self.raw_vk_bundle.as_ref() { + let base64_exp_vk = base64::encode(expected_vk); + if let Some(generated_vk) = self.prover_impl.raw_vk(layer.id()).as_ref() { + let base64_gen_vk = base64::encode(generated_vk); + if generated_vk.ne(expected_vk) { + log::error!( + "BatchProver: {:?} VK mismatch! 
found={}, expected={}", + layer, + base64_gen_vk, + base64_exp_vk, + ); + return Err(BatchProverError::VerifyingKeyMismatch( + layer, + base64_gen_vk, + base64_exp_vk, + ) + .into()); + } + } else { + return Err(BatchProverError::VerifyingKeyNotFound(layer, base64_exp_vk).into()); } } + + Ok(()) } } pub fn check_chunk_hashes( name: &str, chunk_hashes_proofs: &[(ChunkInfo, ChunkProof)], -) -> Result<()> { +) -> anyhow::Result<()> { for (idx, (in_arg, chunk_proof)) in chunk_hashes_proofs.iter().enumerate() { let in_proof = &chunk_proof.chunk_info; - crate::proof::compare_chunk_info(&format!("{name} chunk num {idx}"), in_arg, in_proof)?; + if let Err(e) = + crate::proof::compare_chunk_info(&format!("{name} chunk num {idx}"), in_arg, in_proof) + { + anyhow::bail!(e); + } } + Ok(()) } diff --git a/prover/src/recursion.rs b/prover/src/aggregator/recursion.rs similarity index 91% rename from prover/src/recursion.rs rename to prover/src/aggregator/recursion.rs index 8e1694a56e..897a3125d5 100644 --- a/prover/src/recursion.rs +++ b/prover/src/aggregator/recursion.rs @@ -1,6 +1,5 @@ -use halo2_proofs::halo2curves::bn256::Fr; - use aggregator::{BatchCircuit, StateTransition}; +use halo2_proofs::halo2curves::bn256::Fr; use snark_verifier_sdk::Snark; /// 4 fields for 2 hashes (Hi, Lo) @@ -19,10 +18,7 @@ const NUM_INIT_INSTANCES: usize = ST_INSTANCE; #[derive(Clone, Debug)] pub struct RecursionTask<'a, const N_SNARK: usize> { - /// The [`snarks`][snark] from the [`BatchCircuit`][batch_circuit]. - /// - /// [snark]: snark_verifier_sdk::Snark - /// [batch_circuit]: aggregator::BatchCircuit + /// The [`snarks`][Snark] from the [`BatchCircuit`][aggregator::BatchCircuit]. snarks: &'a [Snark], } diff --git a/prover/src/aggregator/verifier.rs b/prover/src/aggregator/verifier.rs index 0b3bc252f1..711051e76a 100644 --- a/prover/src/aggregator/verifier.rs +++ b/prover/src/aggregator/verifier.rs @@ -1,28 +1,35 @@ -use crate::{ - common, - config::{LAYER4_CONFIG_PATH, LAYER4_DEGREE}, - consts::{batch_vk_filename, DEPLOYMENT_CODE_FILENAME}, - io::{force_to_read, try_to_read}, - proof::BundleProof, -}; +use std::env; + use aggregator::CompressionCircuit; use halo2_proofs::{ halo2curves::bn256::{Bn256, G1Affine}, plonk::VerifyingKey, poly::kzg::commitment::ParamsKZG, }; -use snark_verifier_sdk::verify_evm_calldata; -use snark_verifier_sdk::Snark; -use std::{collections::BTreeMap, env}; +use crate::{ + common, + config::{LAYER4_CONFIG_PATH, LAYER4_DEGREE}, + consts::{batch_vk_filename, DEPLOYMENT_CODE_FILENAME}, + utils::{deploy_and_call, force_to_read, try_to_read}, + BatchProofV2, BatchProverError, BundleProofV2, ParamsMap, ProverError, +}; + +/// Verifier capable of verifying both [`BatchProof`][crate::BatchProof] and [`BundleProof`]. #[derive(Debug)] pub struct Verifier<'params> { - // Make it public for testing with inner functions (unnecessary for FFI). + /// Encapsulate the common verifier. pub inner: common::Verifier<'params, CompressionCircuit>, + /// The EVM deployment code for the verifier contract. + /// + /// This field is optional as it is not set in dev-mode. It is expected in production + /// environments where we already have the verifier contract's deployment code available. In + /// dev-mode or E2E testing, we generate the deployment code on-the-fly. deployment_code: Option>, } impl<'params> Verifier<'params> { + /// Construct a new batch verifier. 
pub fn new( params: &'params ParamsKZG, vk: VerifyingKey, @@ -36,33 +43,54 @@ impl<'params> Verifier<'params> { } } - pub fn from_params_and_assets( - params_map: &'params BTreeMap>, - assets_dir: &str, - ) -> Self { + /// Instantiate a new batch verifier given a map of degree to KZG setup parameters and a + /// directory to find assets. + /// + /// Panics if the verifying key is not found in the assets directory. + pub fn from_params_and_assets(params_map: &'params ParamsMap, assets_dir: &str) -> Self { + // Read verifying key in the assets directory. let raw_vk = force_to_read(assets_dir, &batch_vk_filename()); + + // Try to read the bytecode to deploy the verifier contract. let deployment_code = try_to_read(assets_dir, &DEPLOYMENT_CODE_FILENAME); + // The Layer-4 compressioe circuit is configured with the shape as per + // [`LAYER4_CONFIG_PATH`]. env::set_var("COMPRESSION_CONFIG", &*LAYER4_CONFIG_PATH); - let params = params_map.get(&*LAYER4_DEGREE).expect("should be loaded"); - let inner = common::Verifier::from_params(params, &raw_vk); + + let params = params_map.get(&*LAYER4_DEGREE).expect(&format!( + "KZG params don't contain degree={:?}", + LAYER4_DEGREE + )); Self { - inner, + inner: common::Verifier::from_params(params, &raw_vk), deployment_code, } } - pub fn verify_batch_proof(&self, snark: impl Into) -> bool { - self.inner.verify_snark(snark.into()) + /// Verify a [`Layer-4`][crate::config::LayerId::Layer4] [`CompressionCircuit`] [`Snark`]. + pub fn verify_batch_proof(&self, batch_proof: &BatchProofV2) -> Result<(), ProverError> { + let snark = batch_proof.try_into()?; + if self.inner.verify_snark(snark) { + Ok(()) + } else { + Err(BatchProverError::Verification.into()) + } } - pub fn verify_bundle_proof(&self, bundle_proof: BundleProof) -> bool { - if let Some(deployment_code) = self.deployment_code.clone() { - verify_evm_calldata(deployment_code, bundle_proof.calldata()) + /// Verify a [`Layer-6`][crate::config::LayerId::Layer6] EVM-verifiable + /// [`Proof`][crate::proof::EvmProof], aka [`BundleProof`]. + /// + /// Returns `false` if the verifier contract's deployment bytecode is not set. Otherwise + /// deploys the contract and verifies the proof utilising an [`EVM Executor`][revm]. + pub fn verify_bundle_proof(&self, bundle_proof: &BundleProofV2) -> Result<(), ProverError> { + if let Some(code) = self.deployment_code.as_ref() { + deploy_and_call(code.to_vec(), bundle_proof.calldata()) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; + Ok(()) } else { - log::warn!("No deployment_code found for EVM verifier"); - false + Err(BatchProverError::VerifierCodeMissing.into()) } } } diff --git a/prover/src/common.rs b/prover/src/common.rs deleted file mode 100644 index 5499b16b5b..0000000000 --- a/prover/src/common.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod prover; -mod verifier; - -pub use self::{prover::Prover, verifier::Verifier}; -pub use aggregator::{ChunkInfo, CompressionCircuit}; diff --git a/prover/src/common/mod.rs b/prover/src/common/mod.rs new file mode 100644 index 0000000000..00b9f04627 --- /dev/null +++ b/prover/src/common/mod.rs @@ -0,0 +1,8 @@ +mod prover; +pub use prover::Prover; + +mod verifier; +pub use verifier::Verifier; + +// Re-export from the aggregator crate. 
+pub use aggregator::{ChunkInfo, CompressionCircuit}; diff --git a/prover/src/common/prover/aggregation.rs b/prover/src/common/prover/aggregation.rs index 4d4ca2bc1b..815f687607 100644 --- a/prover/src/common/prover/aggregation.rs +++ b/prover/src/common/prover/aggregation.rs @@ -1,60 +1,85 @@ -use super::Prover; -use crate::{ - config::layer_config_path, - io::{load_snark, write_snark}, - utils::gen_rng, -}; +use std::{env, path::Path}; + use aggregator::{BatchCircuit, BatchHash}; use anyhow::{anyhow, Result}; +use halo2_proofs::halo2curves::bn256::G1Affine; use rand::Rng; use snark_verifier_sdk::Snark; -use std::env; -impl<'params> Prover<'params> { - pub fn gen_agg_snark( +use crate::{ + config::layer_config_path, + utils::{gen_rng, read_json_deep, write_json}, +}; + +impl<'params> super::Prover<'params> { + pub fn load_or_gen_agg_snark( &mut self, + name: &str, id: &str, degree: u32, - mut rng: impl Rng + Send, batch_info: BatchHash, + halo2_protocol: &[u8], + sp1_protocol: &[u8], previous_snarks: &[Snark], + output_dir: Option<&str>, ) -> Result { - env::set_var("AGGREGATION_CONFIG", layer_config_path(id)); + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("aggregation_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } - let circuit: BatchCircuit = - BatchCircuit::new(self.params(degree), previous_snarks, &mut rng, batch_info) - .map_err(|err| anyhow!("Failed to construct aggregation circuit: {err:?}"))?; + // Generate the layer-3 SNARK. + let rng = gen_rng(); + let snark = self.gen_agg_snark( + id, + degree, + rng, + batch_info, + halo2_protocol, + sp1_protocol, + previous_snarks, + )?; - self.gen_snark(id, degree, &mut rng, circuit, "gen_agg_snark") + // Write to disk if an output directory is provided. 
+ if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("aggregation_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) } - pub fn load_or_gen_agg_snark( + fn gen_agg_snark( &mut self, - name: &str, id: &str, degree: u32, + mut rng: impl Rng + Send, batch_info: BatchHash, + halo2_protocol: &[u8], + sp1_protocol: &[u8], previous_snarks: &[Snark], - output_dir: Option<&str>, ) -> Result { - let file_path = format!( - "{}/aggregation_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_agg_snark(id, degree, rng, batch_info, previous_snarks); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } + env::set_var("AGGREGATION_CONFIG", layer_config_path(id)); + + let halo2_protocol = + serde_json::from_slice::>(halo2_protocol)?; + let sp1_protocol = + serde_json::from_slice::>(sp1_protocol)?; + + let circuit: BatchCircuit = BatchCircuit::new( + self.params(degree), + previous_snarks, + &mut rng, + batch_info, + halo2_protocol, + sp1_protocol, + ) + .map_err(|err| anyhow!("Failed to construct aggregation circuit: {err:?}"))?; + + self.gen_snark(id, degree, &mut rng, circuit, "gen_agg_snark") } } diff --git a/prover/src/common/prover/compression.rs b/prover/src/common/prover/compression.rs index 80078f84d1..3614b066a8 100644 --- a/prover/src/common/prover/compression.rs +++ b/prover/src/common/prover/compression.rs @@ -1,59 +1,60 @@ -use super::Prover; -use crate::{ - config::layer_config_path, - io::{load_snark, write_snark}, - utils::gen_rng, -}; +use std::{env, path::Path}; + use aggregator::CompressionCircuit; use anyhow::{anyhow, Result}; use rand::Rng; use snark_verifier_sdk::Snark; -use std::env; -impl<'params> Prover<'params> { - pub fn gen_comp_snark( +use crate::{ + config::layer_config_path, + utils::{gen_rng, read_json_deep, write_json}, +}; + +impl<'params> super::Prover<'params> { + pub fn load_or_gen_comp_snark( &mut self, + name: &str, id: &str, has_accumulator: bool, degree: u32, - mut rng: impl Rng + Send, prev_snark: Snark, + output_dir: Option<&str>, ) -> Result { - env::set_var("COMPRESSION_CONFIG", layer_config_path(id)); + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("compression_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } - let circuit = - CompressionCircuit::new(self.params(degree), prev_snark, has_accumulator, &mut rng) - .map_err(|err| anyhow!("Failed to construct compression circuit: {err:?}"))?; - self.gen_snark(id, degree, &mut rng, circuit, "gen_comp_snark") + // Generate the compression SNARK. + let rng = gen_rng(); + let snark = self.gen_comp_snark(id, has_accumulator, degree, rng, prev_snark)?; + + // Write to disk if an output directory is provided. 
+ if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("compression_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) } - pub fn load_or_gen_comp_snark( + fn gen_comp_snark( &mut self, - name: &str, id: &str, has_accumulator: bool, degree: u32, + mut rng: impl Rng + Send, prev_snark: Snark, - output_dir: Option<&str>, ) -> Result { - let file_path = format!( - "{}/compression_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_comp_snark(id, has_accumulator, degree, rng, prev_snark); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } + env::set_var("COMPRESSION_CONFIG", layer_config_path(id)); + + let circuit = + CompressionCircuit::new(self.params(degree), prev_snark, has_accumulator, &mut rng) + .map_err(|err| anyhow!("Failed to construct compression circuit: {err:?}"))?; + self.gen_snark(id, degree, &mut rng, circuit, "gen_comp_snark") } } diff --git a/prover/src/common/prover/evm.rs b/prover/src/common/prover/evm.rs index c019898092..13330ddcc4 100644 --- a/prover/src/common/prover/evm.rs +++ b/prover/src/common/prover/evm.rs @@ -1,9 +1,3 @@ -use super::Prover; -use crate::{ - config::layer_config_path, - utils::{gen_rng, read_env_var}, - EvmProof, -}; use aggregator::CompressionCircuit; use anyhow::{anyhow, Result}; use halo2_proofs::halo2curves::bn256::Fr; @@ -11,7 +5,13 @@ use rand::Rng; use snark_verifier_sdk::{gen_evm_proof_shplonk, CircuitExt, Snark}; use std::env; -impl<'params> Prover<'params> { +use crate::{ + config::layer_config_path, + utils::{gen_evm_verifier, gen_rng, read_env_var}, + EvmProof, +}; + +impl<'params> super::Prover<'params> { pub fn load_or_gen_comp_evm_proof( &mut self, name: &str, @@ -68,7 +68,7 @@ impl<'params> Prover<'params> { let evm_proof = EvmProof::new(proof, &instances, num_instance, Some(pk))?; if read_env_var("SCROLL_PROVER_DUMP_YUL", false) { - crate::evm::gen_evm_verifier::(params, pk.get_vk(), &evm_proof, output_dir); + gen_evm_verifier::(params, pk.get_vk(), &evm_proof, output_dir)?; } Ok(evm_proof) diff --git a/prover/src/common/prover/inner.rs b/prover/src/common/prover/inner.rs index ae87ea5746..44e706600d 100644 --- a/prover/src/common/prover/inner.rs +++ b/prover/src/common/prover/inner.rs @@ -1,17 +1,47 @@ -use super::Prover; -use crate::{ - config::INNER_DEGREE, - io::{load_snark, write_snark}, - utils::{gen_rng, metric_of_witness_block}, - zkevm::circuit::{SuperCircuit, TargetCircuit}, -}; +use std::path::Path; + use anyhow::Result; use rand::Rng; use snark_verifier_sdk::{gen_snark_shplonk, Snark}; use zkevm_circuits::evm_circuit::witness::Block; -impl<'params> Prover<'params> { - pub fn gen_inner_snark( +use crate::{ + config::INNER_DEGREE, + utils::{gen_rng, metric_of_witness_block, read_json_deep, write_json}, + zkevm::circuit::{SuperCircuit, TargetCircuit}, +}; + +impl<'params> super::Prover<'params> { + pub fn load_or_gen_inner_snark( + &mut self, + name: &str, + id: &str, + witness_block: &Block, + output_dir: Option<&str>, + ) -> Result { + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. 
+ if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("inner_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } + + // Generate the inner SNARK. + let rng = gen_rng(); + let snark = self.gen_inner_snark::(id, rng, witness_block)?; + + // Write to disk if an output directory is provided. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("inner_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) + } + + fn gen_inner_snark( &mut self, id: &str, mut rng: impl Rng + Send, @@ -37,32 +67,4 @@ impl<'params> Prover<'params> { Ok(snark) } - - pub fn load_or_gen_inner_snark( - &mut self, - name: &str, - id: &str, - witness_block: &Block, - output_dir: Option<&str>, - ) -> Result { - let file_path = format!( - "{}/inner_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_inner_snark::(id, rng, witness_block); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } - } } diff --git a/prover/src/common/prover.rs b/prover/src/common/prover/mod.rs similarity index 86% rename from prover/src/common/prover.rs rename to prover/src/common/prover/mod.rs index b3df0582e9..a478cab1db 100644 --- a/prover/src/common/prover.rs +++ b/prover/src/common/prover/mod.rs @@ -1,10 +1,15 @@ -use crate::utils::{load_params, param_path_for_degree}; +use std::collections::{BTreeMap, BTreeSet, HashMap}; + use halo2_proofs::{ - halo2curves::bn256::{Bn256, G1Affine}, + halo2curves::bn256::G1Affine, plonk::ProvingKey, poly::{commitment::Params, kzg::commitment::ParamsKZG}, }; -use std::collections::{BTreeMap, BTreeSet, HashMap}; + +use crate::{ + utils::{load_params, param_path_for_degree}, + ParamsMap, +}; mod aggregation; mod chunk; @@ -18,20 +23,20 @@ mod utils; #[derive(Debug)] pub struct Prover<'params> { // degree -> params (use BTreeMap to find proper degree for params downsize) - pub params_map: &'params BTreeMap>, + pub params_map: &'params ParamsMap, // Cached id -> pk pk_map: HashMap>, } impl<'params> Prover<'params> { - pub fn from_params_map(params_map: &'params BTreeMap>) -> Self { + pub fn from_params_map(params_map: &'params ParamsMap) -> Self { Self { params_map, pk_map: HashMap::new(), } } - pub fn load_params_map(params_dir: &str, degrees: &[u32]) -> BTreeMap> { + pub fn load_params_map(params_dir: &str, degrees: &[u32]) -> ParamsMap { let degrees = BTreeSet::from_iter(degrees); let max_degree = **degrees.last().unwrap(); diff --git a/prover/src/common/prover/recursion.rs b/prover/src/common/prover/recursion.rs index 168b5641ca..2fa6bb13b0 100644 --- a/prover/src/common/prover/recursion.rs +++ b/prover/src/common/prover/recursion.rs @@ -1,4 +1,4 @@ -use std::env; +use std::{env, path::Path}; use aggregator::{initial_recursion_snark, RecursionCircuit, StateTransition, MAX_AGG_SNARKS}; use anyhow::Result; @@ -6,16 +6,43 @@ use rand::Rng; use snark_verifier_sdk::{gen_snark_shplonk, Snark}; use crate::{ + aggregator::RecursionTask, config::layer_config_path, - io::{load_snark, write_snark}, - recursion::RecursionTask, - utils::gen_rng, + utils::{gen_rng, read_json_deep, write_json}, }; -use super::Prover; +impl<'params> super::Prover<'params> { + pub fn load_or_gen_recursion_snark( + &mut self, + name: &str, + id: &str, + degree: u32, + batch_snarks: 
&[Snark], + output_dir: Option<&str>, + ) -> Result { + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("recursion_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } + + // Generate the layer-5 recursion SNARK. + let rng = gen_rng(); + let snark = self.gen_recursion_snark(id, degree, rng, batch_snarks)?; + + // Write to disk if an output directory is provided. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("recursion_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) + } -impl<'params> Prover<'params> { - pub fn gen_recursion_snark( + fn gen_recursion_snark( &mut self, id: &str, degree: u32, @@ -78,33 +105,4 @@ impl<'params> Prover<'params> { Ok(cur_snark) } - - pub fn load_or_gen_recursion_snark( - &mut self, - name: &str, - id: &str, - degree: u32, - batch_snarks: &[Snark], - output_dir: Option<&str>, - ) -> Result { - let file_path = format!( - "{}/recursion_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_recursion_snark(id, degree, rng, batch_snarks); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } - } } diff --git a/prover/src/common/prover/utils.rs b/prover/src/common/prover/utils.rs index 8bf8363cb3..f2b529248d 100644 --- a/prover/src/common/prover/utils.rs +++ b/prover/src/common/prover/utils.rs @@ -1,5 +1,3 @@ -use super::Prover; -use crate::io::serialize_vk; use anyhow::Result; use halo2_proofs::{ halo2curves::bn256::{Bn256, Fr, G1Affine}, @@ -9,7 +7,9 @@ use halo2_proofs::{ use rand::Rng; use snark_verifier_sdk::{gen_snark_shplonk, CircuitExt, Snark}; -impl<'params> Prover<'params> { +use crate::utils::serialize_vk; + +impl<'params> super::Prover<'params> { pub fn gen_snark>( &mut self, id: &str, diff --git a/prover/src/common/verifier/evm.rs b/prover/src/common/verifier/evm.rs index 783eea2d00..82ece6996e 100644 --- a/prover/src/common/verifier/evm.rs +++ b/prover/src/common/verifier/evm.rs @@ -1,10 +1,12 @@ -use super::Verifier; -use crate::EvmProof; use halo2_proofs::halo2curves::bn256::Fr; use snark_verifier_sdk::CircuitExt; -impl<'params, C: CircuitExt> Verifier<'params, C> { - pub fn gen_evm_verifier(&self, evm_proof: &EvmProof, output_dir: Option<&str>) { - crate::evm::gen_evm_verifier::(self.params, &self.vk, evm_proof, output_dir) +impl<'params, C: CircuitExt> super::Verifier<'params, C> { + pub fn gen_evm_verifier( + &self, + evm_proof: &crate::EvmProof, + output_dir: Option<&str>, + ) -> Result<(), crate::ProverError> { + crate::gen_evm_verifier::(self.params, &self.vk, evm_proof, output_dir) } } diff --git a/prover/src/common/verifier.rs b/prover/src/common/verifier/mod.rs similarity index 96% rename from prover/src/common/verifier.rs rename to prover/src/common/verifier/mod.rs index 053d6fccc6..65f4455923 100644 --- a/prover/src/common/verifier.rs +++ b/prover/src/common/verifier/mod.rs @@ -1,4 +1,3 @@ -use crate::io::deserialize_vk; use halo2_proofs::{ halo2curves::bn256::{Bn256, Fr, G1Affine}, plonk::VerifyingKey, @@ -7,6 +6,8 @@ use halo2_proofs::{ use snark_verifier_sdk::{verify_snark_shplonk, CircuitExt, Snark}; use std::marker::PhantomData; +use 
crate::utils::deserialize_vk; + mod evm; mod utils; diff --git a/prover/src/common/verifier/utils.rs b/prover/src/common/verifier/utils.rs index b5883feec1..44c639901a 100644 --- a/prover/src/common/verifier/utils.rs +++ b/prover/src/common/verifier/utils.rs @@ -1,4 +1,3 @@ -use super::Verifier; use halo2_proofs::{ halo2curves::bn256::{Bn256, Fr, G1Affine}, plonk::VerifyingKey, @@ -6,7 +5,7 @@ use halo2_proofs::{ }; use snark_verifier_sdk::CircuitExt; -impl<'params, C: CircuitExt> Verifier<'params, C> { +impl<'params, C: CircuitExt> super::Verifier<'params, C> { pub fn params(&self) -> &ParamsKZG { self.params } diff --git a/prover/src/config.rs b/prover/src/config.rs index 8db379416f..73cd14c21f 100644 --- a/prover/src/config.rs +++ b/prover/src/config.rs @@ -1,34 +1,79 @@ +use std::{ + collections::HashSet, + fmt, + fs::File, + path::{Path, PathBuf}, + sync::LazyLock, +}; + use crate::utils::read_env_var; -use aggregator::ConfigParams; -use std::{collections::HashSet, fmt, fs::File, path::Path, sync::LazyLock}; +/// Degree (k) used for the inner circuit, i.e. +/// [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit]. pub static INNER_DEGREE: LazyLock = LazyLock::new(|| read_env_var("SCROLL_PROVER_INNER_DEGREE", 20)); -pub static ASSETS_DIR: LazyLock = - LazyLock::new(|| read_env_var("SCROLL_PROVER_ASSETS_DIR", "configs".to_string())); +/// Name of the directory to find asset files on disk. +pub static ASSETS_DIR: LazyLock = + LazyLock::new(|| read_env_var("SCROLL_PROVER_ASSETS_DIR", PathBuf::from("configs"))); -pub static LAYER1_CONFIG_PATH: LazyLock = +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-1`][LayerId::Layer1] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER1_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer1.config")); -pub static LAYER2_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-2`][LayerId::Layer2] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER2_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer2.config")); -pub static LAYER3_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-3`][LayerId::Layer3] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER3_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer3.config")); -pub static LAYER4_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-4`][LayerId::Layer4] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER4_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer4.config")); -pub static LAYER5_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-5`][LayerId::Layer5] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER5_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer5.config")); -pub static LAYER6_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-6`][LayerId::Layer6] [`Circuit`][halo2_proofs::plonk::Circuit]. 
+pub static LAYER6_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer6.config")); -pub static LAYER1_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER1_CONFIG_PATH)); -pub static LAYER2_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER2_CONFIG_PATH)); -pub static LAYER3_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER3_CONFIG_PATH)); -pub static LAYER4_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER4_CONFIG_PATH)); -pub static LAYER5_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER5_CONFIG_PATH)); -pub static LAYER6_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER6_CONFIG_PATH)); +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-1`][LayerId::Layer1]. +pub static LAYER1_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER1_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-2`][LayerId::Layer2]. +pub static LAYER2_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER2_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-3`][LayerId::Layer3]. +pub static LAYER3_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER3_CONFIG_PATH)); -pub static ZKEVM_DEGREES: LazyLock> = LazyLock::new(|| { +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-4`][LayerId::Layer4]. +pub static LAYER4_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER4_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-5`][LayerId::Layer5]. +pub static LAYER5_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER5_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-6`][LayerId::Layer6]. +pub static LAYER6_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER6_CONFIG_PATH)); + +/// The list of degrees for Inner, Layer-1 and Layer-2, i.e. the proof generation [`layers`][LayerId] +/// covered by the [`ChunkProver`][crate::ChunkProver]. +pub static CHUNK_PROVER_DEGREES: LazyLock> = LazyLock::new(|| { Vec::from_iter(HashSet::from([ *INNER_DEGREE, *LAYER1_DEGREE, @@ -36,7 +81,9 @@ pub static ZKEVM_DEGREES: LazyLock> = LazyLock::new(|| { ])) }); -pub static AGG_DEGREES: LazyLock> = LazyLock::new(|| { +/// The list of degrees for Layer-3, Layer-4, Layer-5 and Layer-6, i.e. the proof generation [`layers`][LayerId] +/// covered by the [`BatchProver`][crate::BatchProver]. +pub static BATCH_PROVER_DEGREES: LazyLock> = LazyLock::new(|| { Vec::from_iter(HashSet::from([ *LAYER3_DEGREE, *LAYER4_DEGREE, @@ -45,6 +92,7 @@ pub static AGG_DEGREES: LazyLock> = LazyLock::new(|| { ])) }); +/// The various proof layers in the proof generation pipeline. #[derive(Clone, Copy, Debug)] pub enum LayerId { /// Super (inner) circuit layer @@ -70,6 +118,7 @@ impl fmt::Display for LayerId { } impl LayerId { + /// Returns the identifier by layer. pub fn id(&self) -> &str { match self { Self::Inner => "inner", @@ -82,6 +131,7 @@ impl LayerId { } } + /// The degree (k) for the [`Circuit`][halo2_proofs::plonk::Circuit] by layer. pub fn degree(&self) -> u32 { match self { Self::Inner => *INNER_DEGREE, @@ -94,43 +144,57 @@ impl LayerId { } } - pub fn config_path(&self) -> &str { + /// The path to the [`Config Parameters`][aggregator::ConfigParams] used to configure the shape + /// of the [`Circuit`][halo2_proofs::plonk::Circuit]. 
+ pub fn config_path(&self) -> PathBuf { match self { - Self::Layer1 => &LAYER1_CONFIG_PATH, - Self::Layer2 => &LAYER2_CONFIG_PATH, - Self::Layer3 => &LAYER3_CONFIG_PATH, - Self::Layer4 => &LAYER4_CONFIG_PATH, - Self::Layer5 => &LAYER5_CONFIG_PATH, - Self::Layer6 => &LAYER6_CONFIG_PATH, + Self::Layer1 => LAYER1_CONFIG_PATH.to_path_buf(), + Self::Layer2 => LAYER2_CONFIG_PATH.to_path_buf(), + Self::Layer3 => LAYER3_CONFIG_PATH.to_path_buf(), + Self::Layer4 => LAYER4_CONFIG_PATH.to_path_buf(), + Self::Layer5 => LAYER5_CONFIG_PATH.to_path_buf(), + Self::Layer6 => LAYER6_CONFIG_PATH.to_path_buf(), Self::Inner => unreachable!("No config file for super (inner) circuit"), } } -} -pub fn asset_file_path(filename: &str) -> String { - Path::new(&*ASSETS_DIR) - .join(filename) - .to_string_lossy() - .into_owned() + /// Whether or not the [`Snark`][snark_verifier_sdk::Snark] generated at this layer has an + /// accumulator. + /// + /// Every SNARK layer on top of the [`innermost layer`][LayerId::Inner] has an accumulator. + pub fn accumulator(&self) -> bool { + if let Self::Inner = self { + false + } else { + true + } + } } -pub fn layer_config_path(id: &str) -> &str { +/// Returns the path to the [`Config Parameters`][aggregator::ConfigParams] that configure the +/// shape of the [`Circuit`][halo2_proofs::plonk::Circuit] given the [`id`][LayerId::id] of the +/// layer. +pub fn layer_config_path(id: &str) -> PathBuf { match id { - "layer1" => &LAYER1_CONFIG_PATH, - "layer2" => &LAYER2_CONFIG_PATH, - "layer3" => &LAYER3_CONFIG_PATH, - "layer4" => &LAYER4_CONFIG_PATH, - "layer5" => &LAYER5_CONFIG_PATH, - "layer6" => &LAYER6_CONFIG_PATH, + "layer1" => LAYER1_CONFIG_PATH.to_path_buf(), + "layer2" => LAYER2_CONFIG_PATH.to_path_buf(), + "layer3" => LAYER3_CONFIG_PATH.to_path_buf(), + "layer4" => LAYER4_CONFIG_PATH.to_path_buf(), + "layer5" => LAYER5_CONFIG_PATH.to_path_buf(), + "layer6" => LAYER6_CONFIG_PATH.to_path_buf(), _ => panic!("Wrong id-{id} to get layer config path"), } } -fn layer_degree(config_file: &str) -> u32 { - let f = File::open(config_file).unwrap_or_else(|_| panic!("Failed to open {config_file}")); +fn asset_file_path(filename: &str) -> PathBuf { + ASSETS_DIR.join(filename) +} + +fn layer_degree + fmt::Debug>(path: P) -> u32 { + let f = File::open(&path).unwrap_or_else(|_| panic!("Failed to open {path:?}")); - let params: ConfigParams = - serde_json::from_reader(f).unwrap_or_else(|_| panic!("Failed to parse {config_file}")); + let params = serde_json::from_reader::<_, aggregator::ConfigParams>(f) + .unwrap_or_else(|_| panic!("Failed to parse {path:?}")); params.degree } diff --git a/prover/src/consts.rs b/prover/src/consts.rs index 978594092d..e9c7af048a 100644 --- a/prover/src/consts.rs +++ b/prover/src/consts.rs @@ -1,6 +1,7 @@ -use crate::utils::read_env_var; use std::sync::LazyLock; +use crate::utils::read_env_var; + // TODO: is it a good design to use LazyLock? Why not read env var each time? pub fn bundle_vk_filename() -> String { @@ -13,8 +14,24 @@ pub fn chunk_vk_filename() -> String { read_env_var("CHUNK_VK_FILENAME", "vk_chunk.vkey".to_string()) } -pub static CHUNK_PROTOCOL_FILENAME: LazyLock = - LazyLock::new(|| read_env_var("CHUNK_PROTOCOL_FILENAME", "chunk.protocol".to_string())); +/// The file descriptor for the JSON serialised SNARK [`protocol`][protocol] that +/// defines the [`CompressionCircuit`][compr_circuit] SNARK that uses halo2-based +/// [`SuperCircuit`][super_circuit]. 
+/// +/// [protocol]: snark_verifier::Protocol +/// [compr_circuit]: aggregator::CompressionCircuit +/// [super_circuit]: zkevm_circuits::super_circuit::SuperCircuit +pub static FD_HALO2_CHUNK_PROTOCOL: LazyLock = + LazyLock::new(|| read_env_var("HALO2_CHUNK_PROTOCOL", "chunk_halo2.protocol".to_string())); + +/// The file descriptor for the JSON serialised SNARK [`protocol`][protocol] that +/// defines the [`CompressionCircuit`][compr_circuit] SNARK that uses sp1-based +/// STARK that is SNARKified using a halo2-backend. +/// +/// [protocol]: snark_verifier::Protocol +/// [compr_circuit]: aggregator::CompressionCircuit +pub static FD_SP1_CHUNK_PROTOCOL: LazyLock = + LazyLock::new(|| read_env_var("SP1_CHUNK_PROTOCOL", "chunk_sp1.protocol".to_string())); pub static CHUNK_VK_FILENAME: LazyLock = LazyLock::new(chunk_vk_filename); pub static BATCH_VK_FILENAME: LazyLock = LazyLock::new(batch_vk_filename); diff --git a/prover/src/error.rs b/prover/src/error.rs new file mode 100644 index 0000000000..5be7545621 --- /dev/null +++ b/prover/src/error.rs @@ -0,0 +1,47 @@ +use std::path::PathBuf; + +use crate::{BatchProverError, ChunkProverError}; + +/// Represents error variants possibly encountered during the proof generation process. +#[derive(Debug, thiserror::Error)] +pub enum ProverError { + /// Error occurred while doing i/o operations. + #[error(transparent)] + Io(#[from] std::io::Error), + /// Error encountered while reading from or writing to files. + #[error("error during read/write! path={path}, e={source}")] + IoReadWrite { + /// The path we tried to read from or write to. + path: PathBuf, + /// The source error. + source: std::io::Error, + }, + /// Error occurred while doing serde operations. + #[error(transparent)] + Serde(#[from] serde_json::Error), + /// Error encountered during JSON serde. + #[error("error during read/write json! path={path}, e={source}")] + JsonReadWrite { + /// The path of the file we tried to serialize/deserialize. + path: PathBuf, + /// The source error. + source: serde_json::Error, + }, + /// Error encountered while reading variable from the process environment. + #[error("error while reading env var! key={key}, e={source}")] + EnvVar { + /// The key tried to be read. + key: String, + /// The source error. + source: std::env::VarError, + }, + /// Error propagated in the [`ChunkProver`][crate::ChunkProver] pipeline. + #[error(transparent)] + ChunkProverError(#[from] ChunkProverError), + /// Error propagated from the [`BatchProver`][crate::BatchProver] pipeline. + #[error(transparent)] + BatchProverError(#[from] BatchProverError), + /// Other errors. + #[error("custom error: {0}")] + Custom(String), +} diff --git a/prover/src/evm.rs b/prover/src/evm.rs deleted file mode 100644 index b87541a042..0000000000 --- a/prover/src/evm.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::{io::write_file, EvmProof}; -use halo2_proofs::{ - halo2curves::bn256::{Bn256, Fr, G1Affine}, - plonk::VerifyingKey, - poly::kzg::commitment::ParamsKZG, -}; -use snark_verifier::pcs::kzg::{Bdfg21, Kzg}; -use snark_verifier_sdk::CircuitExt; -use std::{path::PathBuf, str::FromStr}; - -/// Dump YUL and binary bytecode(use `solc` in PATH) to output_dir. -/// Panic if error encountered. -pub fn gen_evm_verifier>( - params: &ParamsKZG, - vk: &VerifyingKey, - evm_proof: &EvmProof, - output_dir: Option<&str>, -) { - let yul_file_path = output_dir.map(|dir| { - let mut path = PathBuf::from_str(dir).unwrap(); - path.push("evm_verifier.yul"); - path - }); - - // Generate deployment code and dump YUL file. 
- let deployment_code = snark_verifier_sdk::gen_evm_verifier::>( - params, - vk, - evm_proof.num_instance.clone(), - yul_file_path.as_deref(), - ); - - if let Some(dir) = output_dir { - // Dump bytecode. - let mut dir = PathBuf::from_str(dir).unwrap(); - write_file(&mut dir, "evm_verifier.bin", &deployment_code); - } - - let success = evm_proof.proof.evm_verify(deployment_code); - assert!(success); -} diff --git a/prover/src/inner.rs b/prover/src/inner.rs deleted file mode 100644 index 7b8f21b530..0000000000 --- a/prover/src/inner.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod prover; -mod verifier; - -pub use self::prover::Prover; -pub use verifier::Verifier; diff --git a/prover/src/inner/mod.rs b/prover/src/inner/mod.rs new file mode 100644 index 0000000000..8c95b64445 --- /dev/null +++ b/prover/src/inner/mod.rs @@ -0,0 +1,5 @@ +mod prover; +pub use prover::Prover as InnerProver; + +mod verifier; +pub use verifier::Verifier as InnerVerifier; diff --git a/prover/src/inner/prover/mock.rs b/prover/src/inner/prover/mock.rs index e023b06f4f..e02f9589a0 100644 --- a/prover/src/inner/prover/mock.rs +++ b/prover/src/inner/prover/mock.rs @@ -1,16 +1,16 @@ -use super::Prover; -use crate::{ - config::INNER_DEGREE, - utils::metric_of_witness_block, - zkevm::circuit::{block_traces_to_witness_block, TargetCircuit}, -}; use anyhow::bail; use eth_types::l2_types::BlockTrace; use halo2_proofs::{dev::MockProver, halo2curves::bn256::Fr}; use snark_verifier_sdk::CircuitExt; use zkevm_circuits::witness::Block; -impl<'params, C: TargetCircuit> Prover<'params, C> { +use crate::{ + config::INNER_DEGREE, + utils::metric_of_witness_block, + zkevm::circuit::{block_traces_to_witness_block, TargetCircuit}, +}; + +impl<'params, C: TargetCircuit> super::Prover<'params, C> { pub fn mock_prove_target_circuit(block_trace: BlockTrace) -> anyhow::Result<()> { Self::mock_prove_target_circuit_chunk(vec![block_trace]) } diff --git a/prover/src/inner/prover.rs b/prover/src/inner/prover/mod.rs similarity index 94% rename from prover/src/inner/prover.rs rename to prover/src/inner/prover/mod.rs index 2d9471df18..c8f9bd96d6 100644 --- a/prover/src/inner/prover.rs +++ b/prover/src/inner/prover/mod.rs @@ -1,15 +1,16 @@ +use anyhow::Result; +use eth_types::l2_types::BlockTrace; +use snark_verifier_sdk::Snark; +use std::marker::PhantomData; + use crate::{ common, config::INNER_DEGREE, - io::serialize_vk, - utils::{chunk_trace_to_witness_block, gen_rng}, - zkevm::circuit::TargetCircuit, + utils::gen_rng, + utils::serialize_vk, + zkevm::circuit::{chunk_trace_to_witness_block, TargetCircuit}, Proof, }; -use anyhow::Result; -use eth_types::l2_types::BlockTrace; -use snark_verifier_sdk::Snark; -use std::marker::PhantomData; mod mock; diff --git a/prover/src/inner/verifier.rs b/prover/src/inner/verifier.rs index 0b3a5be138..98bd65988a 100644 --- a/prover/src/inner/verifier.rs +++ b/prover/src/inner/verifier.rs @@ -1,9 +1,10 @@ use std::collections::BTreeMap; -use crate::{common, config::INNER_DEGREE, io::deserialize_vk, zkevm::circuit::TargetCircuit}; use halo2_proofs::{halo2curves::bn256::Bn256, plonk::keygen_vk, poly::kzg::commitment::ParamsKZG}; use snark_verifier_sdk::Snark; +use crate::{common, config::INNER_DEGREE, utils::deserialize_vk, zkevm::circuit::TargetCircuit}; + #[derive(Debug)] pub struct Verifier<'params, C: TargetCircuit> { // Make it public for testing with inner functions (unnecessary for FFI). 
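Editorial note, not part of the patch: the reworked aggregation, compression, inner and recursion provers above all follow the same load-or-generate flow: look for a JSON-serialised SNARK on disk keyed by id and name, return it if found, and otherwise generate it and persist it when an output directory is given. The helper below is a minimal, self-contained sketch of that pattern; the name load_or_gen, the direct use of serde_json and the generic signature are assumptions for illustration only (the crate itself goes through its read_json_deep/write_json utilities).

use std::{fs::File, path::Path};

use serde::{de::DeserializeOwned, Serialize};

// Hypothetical helper mirroring the load_or_gen_*_snark methods in this patch: probe the
// on-disk cache first, otherwise generate the artifact and optionally persist it.
fn load_or_gen<T, F>(output_dir: Option<&str>, filename: &str, generate: F) -> anyhow::Result<T>
where
    T: Serialize + DeserializeOwned,
    F: FnOnce() -> anyhow::Result<T>,
{
    // If an output directory is provided and a cached artifact with the same identifier is
    // found on disk, return early.
    if let Some(dir) = output_dir {
        let path = Path::new(dir).join(filename);
        if let Ok(fd) = File::open(&path) {
            if let Ok(cached) = serde_json::from_reader(fd) {
                return Ok(cached);
            }
        }
    }

    // Otherwise generate the artifact.
    let value = generate()?;

    // Write to disk if an output directory is provided.
    if let Some(dir) = output_dir {
        let fd = File::create(Path::new(dir).join(filename))?;
        serde_json::to_writer(fd, &value)?;
    }

    Ok(value)
}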
diff --git a/prover/src/io.rs b/prover/src/io.rs deleted file mode 100644 index c800124430..0000000000 --- a/prover/src/io.rs +++ /dev/null @@ -1,154 +0,0 @@ -use anyhow; -use halo2_proofs::{ - halo2curves::bn256::{Fr, G1Affine}, - plonk::{Circuit, VerifyingKey}, - SerdeFormat, -}; -use snark_verifier::util::arithmetic::PrimeField; -use snark_verifier_sdk::Snark; -use std::{ - fs::File, - io::{Cursor, Read, Write}, - path::{Path, PathBuf}, -}; - -pub fn from_json_file<'de, P: serde::Deserialize<'de>>(file_path: &str) -> anyhow::Result
<P>
{ - if !Path::new(&file_path).exists() { - anyhow::bail!("File {file_path} doesn't exist"); - } - - let fd = File::open(file_path)?; - let mut deserializer = serde_json::Deserializer::from_reader(fd); - deserializer.disable_recursion_limit(); - let deserializer = serde_stacker::Deserializer::new(&mut deserializer); - - Ok(serde::Deserialize::deserialize(deserializer)?) -} - -pub fn serialize_fr(f: &Fr) -> Vec { - f.to_bytes().to_vec() -} - -pub fn deserialize_fr(buf: Vec) -> Fr { - Fr::from_repr(buf.try_into().unwrap()).unwrap() -} -pub fn serialize_fr_vec(v: &[Fr]) -> Vec> { - v.iter().map(serialize_fr).collect() -} -pub fn deserialize_fr_vec(l2_buf: Vec>) -> Vec { - l2_buf.into_iter().map(deserialize_fr).collect() -} - -pub fn serialize_fr_matrix(m: &[Vec]) -> Vec>> { - m.iter().map(|v| serialize_fr_vec(v.as_slice())).collect() -} - -pub fn deserialize_fr_matrix(l3_buf: Vec>>) -> Vec> { - l3_buf.into_iter().map(deserialize_fr_vec).collect() -} - -pub fn serialize_instance(instance: &[Vec]) -> Vec { - let instances_for_serde = serialize_fr_matrix(instance); - - serde_json::to_vec(&instances_for_serde).unwrap() -} - -pub fn read_all(filename: &str) -> Vec { - let mut buf = vec![]; - let mut fd = std::fs::File::open(filename).unwrap(); - fd.read_to_end(&mut buf).unwrap(); - buf -} - -pub fn read_file(folder: &mut PathBuf, filename: &str) -> Vec { - let mut buf = vec![]; - - folder.push(filename); - let mut fd = std::fs::File::open(folder.as_path()).unwrap(); - folder.pop(); - - fd.read_to_end(&mut buf).unwrap(); - buf -} - -pub fn try_to_read(dir: &str, filename: &str) -> Option> { - let mut path = PathBuf::from(dir); - path.push(filename); - - if path.exists() { - Some(read_all(&path.to_string_lossy())) - } else { - None - } -} - -pub fn force_to_read(dir: &str, filename: &str) -> Vec { - try_to_read(dir, filename).unwrap_or_else(|| panic!("File {filename} must exist in {dir}")) -} - -pub fn write_file(folder: &mut PathBuf, filename: &str, buf: &[u8]) { - folder.push(filename); - let mut fd = std::fs::File::create(folder.as_path()).unwrap(); - folder.pop(); - - fd.write_all(buf).unwrap(); -} - -pub fn serialize_vk(vk: &VerifyingKey) -> Vec { - let mut result = Vec::::new(); - vk.write(&mut result, SerdeFormat::Processed).unwrap(); - result -} - -pub fn deserialize_vk>(raw_vk: &[u8]) -> VerifyingKey { - VerifyingKey::::read::<_, C>(&mut Cursor::new(raw_vk), SerdeFormat::Processed) - .unwrap_or_else(|_| panic!("failed to deserialize vk with len {}", raw_vk.len())) -} - -pub fn write_snark(file_path: &str, snark: &Snark) { - log::debug!("write_snark to {file_path}"); - let mut fd = std::fs::File::create(file_path).unwrap(); - serde_json::to_writer(&mut fd, snark).unwrap(); - log::debug!("write_snark to {file_path} done"); -} - -pub fn load_snark(file_path: &str) -> anyhow::Result> { - if !Path::new(file_path).exists() { - return Ok(None); - } - - let fd = File::open(file_path)?; - let mut deserializer = serde_json::Deserializer::from_reader(fd); - deserializer.disable_recursion_limit(); - let deserializer = serde_stacker::Deserializer::new(&mut deserializer); - let snark = serde::Deserialize::deserialize(deserializer)?; - Ok(Some(snark)) -} - -pub fn load_instances(buf: &[u8]) -> Vec>> { - let instances: Vec>>> = serde_json::from_reader(buf).unwrap(); - instances - .into_iter() - .map(|l1| { - l1.into_iter() - .map(|l2| { - l2.into_iter() - .map(|buf| Fr::from_bytes(&buf.try_into().unwrap()).unwrap()) - .collect() - }) - .collect() - }) - .collect() -} - -#[ignore] -#[test] -fn 
test_block_trace_convert() { - let trace_v1: eth_types::l2_types::BlockTrace = - from_json_file("src/testdata/trace_v1_5224657.json").expect("should load"); - let trace_v2: eth_types::l2_types::BlockTraceV2 = trace_v1.into(); - let mut fd = std::fs::File::create("src/testdata/trace_v2_5224657.json").unwrap(); - serde_json::to_writer_pretty(&mut fd, &trace_v2).unwrap(); - // then we can use this command to compare the traces: - // vimdiff <(jq -S "del(.executionResults)|del(.txStorageTraces)" src/testdata/trace_v1_5224657.json) <(jq -S . src/testdata/trace_v2_5224657.json) -} diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 64a9a14da2..98aec9c9c2 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -1,33 +1,88 @@ #![feature(lazy_cell)] -/// Meaning of each circuit: -/// inner: first layer EVM super circuit -/// layer1: compression circuit of "inner" -/// layer2: comppresion circuit of "layer1" -/// layer3: batch circuit. Proving many "layer2" circuits, plus blob/kzg handling. -/// layer4: compression circuit of "layer3". Final layer circuit currently. -/// -// TODO: don't always use "pub mod". -// We need to define which types and methods should be public carefully. -pub mod aggregator; -pub mod common; -pub mod config; -pub mod consts; -mod evm; -pub mod inner; -pub mod io; -pub mod proof; -pub mod recursion; -pub mod test; -pub mod types; -pub mod utils; -pub mod zkevm; - -pub use aggregator::{check_chunk_hashes, BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS}; -pub use common::{ChunkInfo, CompressionCircuit}; +//! This crate exposes an interface to setup provers and verifiers for Scroll's proof generation +//! pipeline. +//! +//! Scroll's proof generation pipeline implements a [layered][config::LayerId] approach where +//! [`SNARK(s)`][snark_verifier_sdk::Snark] from a layer is (are) used in the subsequent layer. +//! +//! A high-level abstraction has been implemented with the notion of: +//! - Block: Consists of a list of txs +//! - Chunk: Composed of a list of contiguous Blocks +//! - Batch: Composed of a list of contiguous Chunks +//! - Bundle: Composed of a list of contiguous Batches +//! +//! The proof generation pipeline starts at the `Chunk` level where the inner proof can be +//! generated either via the halo2-route or the sp1-route, aka [`ChunkKind`]. +//! +//! The pipeline for the halo2-route is described below: +//! 1. [`Inner`][config::LayerId::Inner] layer: SNARK generated by halo2-based +//! [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit] +//! 2. [`Layer-1`][config::LayerId::Layer1]: Inner SNARK compressed by the [`CompressionCircuit`] +//! configured with a wide shape, i.e. higher advices for a lower degree +//! 3. [`Layer-2`][config::LayerId::Layer2]: Layer-1 SNARK compressed by the `CompressionCircuit` +//! with a thin shape, i.e. higher degree for lower advices +//! +//! The pipeline for the sp1-route is described below: +//! 1. [`Inner`][config::LayerId::Inner] layer: Sp1 compressed proof generated via the Sp1 Prover. +//! 2. [`Layer-1`][config::LayerId::Layer1]: Inner STARK is SNARKified using a halo2-backend. +//! 3. [`Layer-2`][config::LayerId::Layer2]: Layer-1 SNARK compressed by the `CompressionCircuit` +//! with a thin shape, i.e. higher degree for lower advices +//! +//! For both of the above described branches, we continue the pipeline with: +//! 4. [`Layer-3`][config::LayerId::Layer3]: List of Layer-2 SNARKs aggregated using the +//! [`BatchCircuit`] +//! 5. 
[`Layer-4`][config::LayerId::Layer4]: Layer-3 SNARK compressed by the `CompressionCircuit` +//! 6. [`Layer-5`][config::LayerId::Layer5]: Layer-4 SNARKs are recursively aggregated using the +//! [`RecursionCircuit`] +//! 7. [`Layer-6`][config::LayerId::Layer6]: Layer-5 SNARK is compressed by the +//! `CompressionCircuit` with a thin shape, while using Keccak hasher as the transcript digest +//! to allow verification of Layer-6 proof in EVM. + +mod aggregator; +pub use aggregator::{ + check_chunk_hashes, eip4844, BatchData, BatchHash, BatchHeader, BatchProver, BatchProverError, + BatchVerifier, RecursionTask, MAX_AGG_SNARKS, +}; + +mod common; +pub use common::{ChunkInfo, CompressionCircuit, Prover, Verifier}; + +mod config; +pub use config::*; + +mod consts; +pub use consts::*; + +mod error; +pub use error::*; + +mod proof; +pub use proof::*; + +mod test; +pub use test::{batch_prove, bundle_prove, chunk_prove, inner_prove}; + +mod types; +pub use types::{BatchProvingTask, BundleProvingTask, ChunkProvingTask, WitnessBlock}; + +mod utils; +pub use utils::*; + +mod zkevm; +pub use zkevm::{ChunkProver, ChunkProverError, ChunkVerifier, CircuitCapacityChecker}; + +/// Re-export the eth-types crate. pub use eth_types; -pub use eth_types::l2_types::BlockTrace; -pub use proof::{BatchProof, BundleProof, ChunkProof, EvmProof, Proof}; + +/// Re-export some types from snark-verifier-sdk. pub use snark_verifier_sdk::{CircuitExt, Snark}; -pub use types::{BatchProvingTask, BundleProvingTask, ChunkProvingTask, WitnessBlock}; + +/// Re-export the zkevm-circuits crate. pub use zkevm_circuits; + +use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::collections::BTreeMap; + +/// Alias for convenience. +pub type ParamsMap = BTreeMap>; diff --git a/prover/src/proof/batch.rs b/prover/src/proof/batch.rs index e0f4aec4f8..bceacf5dc7 100644 --- a/prover/src/proof/batch.rs +++ b/prover/src/proof/batch.rs @@ -1,18 +1,22 @@ -use super::{dump_as_json, dump_vk, from_json_file, Proof}; -use crate::types::base64; +use std::path::Path; + use anyhow::Result; -use eth_types::H256; +use eth_types::{base64, H256}; use halo2_proofs::{halo2curves::bn256::G1Affine, plonk::ProvingKey}; use serde_derive::{Deserialize, Serialize}; use snark_verifier::Protocol; use snark_verifier_sdk::Snark; +use crate::utils::read_json_deep; + +use super::{dump_as_json, dump_vk, InnerProof}; + #[derive(Clone, Debug, Deserialize, Serialize)] pub struct BatchProof { #[serde(with = "base64")] pub protocol: Vec, #[serde(flatten)] - proof: Proof, + proof: InnerProof, pub batch_hash: H256, } @@ -32,7 +36,7 @@ impl From<&BatchProof> for Snark { impl BatchProof { pub fn new(snark: Snark, pk: Option<&ProvingKey>, batch_hash: H256) -> Result { let protocol = serde_json::to_vec(&snark.protocol)?; - let proof = Proof::new(snark.proof, &snark.instances, pk); + let proof = InnerProof::new(snark.proof, &snark.instances, pk); Ok(Self { protocol, @@ -42,7 +46,8 @@ impl BatchProof { } pub fn from_json_file(dir: &str, name: &str) -> Result { - from_json_file(dir, &dump_filename(name)) + let file_path = Path::new(dir).join(dump_filename(name)); + Ok(read_json_deep(&file_path)?) 
} pub fn dump_vk(&self, dir: &str, name: &str) -> Result<()> { @@ -50,7 +55,7 @@ impl BatchProof { if self.proof.vk.is_empty() { log::warn!("batch proof vk is empty, skip dumping"); } else { - dump_vk(dir, &filename, &self.proof.vk) + dump_vk(dir, &filename, &self.proof.vk)?; } Ok(()) } diff --git a/prover/src/proof/bundle.rs b/prover/src/proof/bundle.rs index 52fa290db9..599edb0e17 100644 --- a/prover/src/proof/bundle.rs +++ b/prover/src/proof/bundle.rs @@ -1,8 +1,10 @@ -use super::{dump_as_json, dump_data, dump_vk, serialize_instance}; -use crate::{utils::short_git_version, Proof}; use anyhow::Result; use serde_derive::{Deserialize, Serialize}; +use crate::utils::short_git_version; + +use super::{dump_as_json, dump_data, dump_vk, serialize_instance, InnerProof}; + // 3 limbs per field element, 4 field elements const ACC_LEN: usize = 12; @@ -15,7 +17,6 @@ const ACC_LEN: usize = 12; // - chain id // - (hi, lo) pending withdraw root // - bundle count - const PI_LEN: usize = 13; const ACC_BYTES: usize = ACC_LEN * 32; @@ -24,11 +25,11 @@ const PI_BYTES: usize = PI_LEN * 32; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct BundleProof { #[serde(flatten)] - on_chain_proof: Proof, + on_chain_proof: InnerProof, } -impl From for BundleProof { - fn from(proof: Proof) -> Self { +impl From for BundleProof { + fn from(proof: InnerProof) -> Self { let instances = proof.instances(); assert_eq!(instances.len(), 1); assert_eq!(instances[0].len(), ACC_LEN + PI_LEN); @@ -46,7 +47,7 @@ impl From for BundleProof { let instances = serialize_instance(&instances[0][ACC_LEN..]); Self { - on_chain_proof: Proof { + on_chain_proof: InnerProof { proof, instances, vk, @@ -76,21 +77,23 @@ impl BundleProof { dir, &format!("pi_{filename}.data"), &self.on_chain_proof.instances, - ); + )?; dump_data( dir, &format!("proof_{filename}.data"), &self.on_chain_proof.proof, - ); + )?; + + dump_vk(dir, &filename, &self.on_chain_proof.vk)?; - dump_vk(dir, &filename, &self.on_chain_proof.vk); + dump_as_json(dir, &filename, &self)?; - dump_as_json(dir, &filename, &self) + Ok(()) } // Recover a `Proof` which follows halo2 semantic of "proof" and "instance", // where "accumulators" are instance instead of proof, not like "onchain proof". - pub fn proof_to_verify(self) -> Proof { + pub fn proof_to_verify(self) -> InnerProof { // raw.proof is accumulator + proof assert!(self.on_chain_proof.proof.len() > ACC_BYTES); // raw.instances is PI @@ -103,9 +106,9 @@ impl BundleProof { instances.extend(self.on_chain_proof.instances); let vk = self.on_chain_proof.vk; - let git_version = Some(short_git_version()); + let git_version = short_git_version(); - Proof { + InnerProof { proof, instances, vk, diff --git a/prover/src/proof/chunk.rs b/prover/src/proof/chunk.rs index 8d760725d7..bc69f873cf 100644 --- a/prover/src/proof/chunk.rs +++ b/prover/src/proof/chunk.rs @@ -1,19 +1,39 @@ -use super::{dump_as_json, dump_data, dump_vk, from_json_file, Proof}; -use crate::{types::base64, zkevm::SubCircuitRowUsage}; +use std::path::Path; + use aggregator::ChunkInfo; -use anyhow::{bail, Result}; +use eth_types::base64; use halo2_proofs::{halo2curves::bn256::G1Affine, plonk::ProvingKey}; use serde_derive::{Deserialize, Serialize}; use snark_verifier::Protocol; use snark_verifier_sdk::Snark; +use crate::{utils::read_json_deep, zkevm::SubCircuitRowUsage}; + +use super::{dump_as_json, dump_data, dump_vk, InnerProof}; + +/// The innermost SNARK belongs to the following variants. 
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] +pub enum ChunkKind { + /// halo2-based SuperCircuit. + Halo2, + /// sp1-based STARK with a halo2-backend. + Sp1, +} + +impl Default for ChunkKind { + fn default() -> Self { + Self::Halo2 + } +} + #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ChunkProof { #[serde(with = "base64")] pub protocol: Vec, #[serde(flatten)] - pub proof: Proof, + pub proof: InnerProof, pub chunk_info: ChunkInfo, + pub chunk_kind: ChunkKind, #[serde(default)] pub row_usages: Vec, } @@ -21,33 +41,34 @@ pub struct ChunkProof { macro_rules! compare_field { ($desc:expr, $field:ident, $lhs:ident, $rhs:ident) => { if $lhs.$field != $rhs.$field { - bail!( + return Err(format!( "{} chunk different {}: {} != {}", $desc, stringify!($field), $lhs.$field, $rhs.$field - ); + )); } }; } /// Check chunk info is consistent with chunk info embedded inside proof -pub fn compare_chunk_info(name: &str, lhs: &ChunkInfo, rhs: &ChunkInfo) -> Result<()> { +pub fn compare_chunk_info(name: &str, lhs: &ChunkInfo, rhs: &ChunkInfo) -> Result<(), String> { compare_field!(name, chain_id, lhs, rhs); compare_field!(name, prev_state_root, lhs, rhs); compare_field!(name, post_state_root, lhs, rhs); compare_field!(name, withdraw_root, lhs, rhs); compare_field!(name, data_hash, lhs, rhs); if lhs.tx_bytes != rhs.tx_bytes { - bail!( + return Err(format!( "{} chunk different {}: {} != {}", name, "tx_bytes", hex::encode(&lhs.tx_bytes), hex::encode(&rhs.tx_bytes) - ); + )); } + Ok(()) } @@ -56,40 +77,44 @@ impl ChunkProof { snark: Snark, pk: Option<&ProvingKey>, chunk_info: ChunkInfo, + chunk_kind: ChunkKind, row_usages: Vec, - ) -> Result { + ) -> anyhow::Result { let protocol = serde_json::to_vec(&snark.protocol)?; - let proof = Proof::new(snark.proof, &snark.instances, pk); + let proof = InnerProof::new(snark.proof, &snark.instances, pk); Ok(Self { protocol, proof, chunk_info, + chunk_kind, row_usages, }) } - pub fn from_json_file(dir: &str, name: &str) -> Result { - from_json_file(dir, &dump_filename(name)) + pub fn from_json_file(dir: &str, name: &str) -> anyhow::Result { + let path = Path::new(dir).join(dump_filename(name)); + Ok(read_json_deep(&path)?) } - pub fn dump(&self, dir: &str, name: &str) -> Result<()> { + pub fn dump(&self, dir: &str, name: &str) -> anyhow::Result<()> { let filename = dump_filename(name); // Dump vk and protocol. 
- dump_vk(dir, &filename, &self.proof.vk); - dump_data(dir, &format!("chunk_{filename}.protocol"), &self.protocol); + dump_vk(dir, &filename, &self.proof.vk)?; + dump_data(dir, &format!("chunk_{filename}.protocol"), &self.protocol)?; + dump_as_json(dir, &filename, &self)?; - dump_as_json(dir, &filename, &self) + Ok(()) } - pub fn to_snark(self) -> Snark { + pub fn to_snark(&self) -> Snark { let instances = self.proof.instances(); let protocol = serde_json::from_slice::>(&self.protocol).unwrap(); Snark { protocol, - proof: self.proof.proof, + proof: self.proof.proof.clone(), instances, } } diff --git a/prover/src/proof/evm.rs b/prover/src/proof/evm.rs index 92f17204ec..0b4fc04131 100644 --- a/prover/src/proof/evm.rs +++ b/prover/src/proof/evm.rs @@ -1,4 +1,5 @@ -use super::{dump_as_json, dump_vk, from_json_file, Proof}; +use std::path::Path; + use anyhow::Result; use halo2_proofs::{ halo2curves::bn256::{Fr, G1Affine}, @@ -6,9 +7,13 @@ use halo2_proofs::{ }; use serde_derive::{Deserialize, Serialize}; +use crate::utils::read_json_deep; + +use super::{dump_as_json, dump_vk, InnerProof}; + #[derive(Clone, Debug, Deserialize, Serialize)] pub struct EvmProof { - pub proof: Proof, + pub proof: InnerProof, pub num_instance: Vec, } @@ -19,7 +24,7 @@ impl EvmProof { num_instance: Vec, pk: Option<&ProvingKey>, ) -> Result { - let proof = Proof::new(proof, instances, pk); + let proof = InnerProof::new(proof, instances, pk); Ok(Self { proof, @@ -28,14 +33,17 @@ impl EvmProof { } pub fn from_json_file(dir: &str, name: &str) -> Result { - from_json_file(dir, &dump_filename(name)) + let path = Path::new(dir).join(dump_filename(name)); + Ok(read_json_deep(&path)?) } pub fn dump(&self, dir: &str, name: &str) -> Result<()> { let filename = dump_filename(name); - dump_vk(dir, &filename, &self.proof.vk); - dump_as_json(dir, &filename, &self) + dump_vk(dir, &filename, &self.proof.vk)?; + dump_as_json(dir, &filename, &self)?; + + Ok(()) } } diff --git a/prover/src/proof.rs b/prover/src/proof/mod.rs similarity index 56% rename from prover/src/proof.rs rename to prover/src/proof/mod.rs index 5e662794df..a8e19955c1 100644 --- a/prover/src/proof.rs +++ b/prover/src/proof/mod.rs @@ -1,43 +1,60 @@ -use crate::{ - io::{deserialize_fr, deserialize_vk, serialize_fr, serialize_vk, write_file}, - types::base64, - utils::short_git_version, -}; +use std::{fs::File, path::Path}; + use anyhow::Result; +use eth_types::base64; use halo2_proofs::{ halo2curves::bn256::{Fr, G1Affine}, plonk::{Circuit, ProvingKey, VerifyingKey}, }; use serde_derive::{Deserialize, Serialize}; -use snark_verifier_sdk::{verify_evm_proof, Snark}; -use std::{fs::File, path::PathBuf}; +use snark_verifier_sdk::Snark; + +use crate::utils::{ + deploy_and_call, deserialize_fr, deserialize_vk, serialize_fr, serialize_vk, short_git_version, + write, +}; mod batch; +pub use batch::BatchProof; + mod bundle; +pub use bundle::BundleProof; + mod chunk; -mod evm; +pub use chunk::{compare_chunk_info, ChunkKind, ChunkProof}; -pub use batch::BatchProof; -pub use bundle::BundleProof; -pub use chunk::{compare_chunk_info, ChunkProof}; +mod evm; pub use evm::EvmProof; +mod proof_v2; +pub use proof_v2::*; + +/// Proof extracted from [`Snark`]. #[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct Proof { +pub struct InnerProof { + /// The raw bytes of the proof in the [`Snark`]. + /// + /// Serialized using base64 format in order to not bloat the JSON-encoded proof dump. 
#[serde(with = "base64")] - proof: Vec, + pub proof: Vec, + /// The public values, aka instances of this [`Snark`]. #[serde(with = "base64")] - instances: Vec, + pub instances: Vec, + /// The raw bytes of the [`VerifyingKey`] of the [`Circuit`] used to generate the [`Snark`]. #[serde(with = "base64")] - vk: Vec, - pub git_version: Option, + pub vk: Vec, + /// The git ref of the codebase. + /// + /// Generally useful for debug reasons to know the exact commit using which this proof was + /// generated. + pub git_version: String, } -impl Proof { +impl InnerProof { pub fn new(proof: Vec, instances: &[Vec], pk: Option<&ProvingKey>) -> Self { let instances = serialize_instances(instances); let vk = pk.map_or_else(Vec::new, |pk| serialize_vk(pk.get_vk())); - let git_version = Some(short_git_version()); + let git_version = short_git_version(); Self { proof, @@ -47,16 +64,12 @@ impl Proof { } } - pub fn from_json_file(dir: &str, filename: &str) -> Result { - from_json_file(dir, filename) - } - pub fn from_snark(snark: Snark, vk: Vec) -> Self { let proof = snark.proof; let instances = serialize_instances(&snark.instances); - let git_version = Some(short_git_version()); + let git_version = short_git_version(); - Proof { + Self { proof, instances, vk, @@ -65,13 +78,17 @@ impl Proof { } pub fn dump(&self, dir: &str, filename: &str) -> Result<()> { - dump_vk(dir, filename, &self.vk); + dump_vk(dir, filename, &self.vk)?; + dump_as_json(dir, filename, &self)?; - dump_as_json(dir, filename, &self) + Ok(()) } pub fn evm_verify(&self, deployment_code: Vec) -> bool { - verify_evm_proof(deployment_code, self.instances(), self.proof().to_vec()) + let instances = self.instances(); + let proof = self.proof().to_vec(); + let calldata = snark_verifier::loader::evm::encode_calldata(&instances, &proof); + deploy_and_call(deployment_code, calldata).is_ok() } pub fn instances(&self) -> Vec> { @@ -97,25 +114,20 @@ impl Proof { } } -pub fn dump_as_json(dir: &str, filename: &str, proof: &P) -> Result<()> { - // Write full proof as json. +pub fn dump_as_json(dir: &str, filename: &str, proof: &T) -> Result<()> { let mut fd = File::create(dump_proof_path(dir, filename))?; serde_json::to_writer(&mut fd, proof)?; Ok(()) } -pub fn dump_data(dir: &str, filename: &str, data: &[u8]) { - write_file(&mut PathBuf::from(dir), filename, data); -} - -pub fn dump_vk(dir: &str, filename: &str, raw_vk: &[u8]) { - dump_data(dir, &format!("vk_{filename}.vkey"), raw_vk); +pub fn dump_data(dir: &str, filename: &str, data: &[u8]) -> Result<()> { + let path = Path::new(dir).join(filename); + Ok(write(&path, data)?) } -pub fn from_json_file<'de, P: serde::Deserialize<'de>>(dir: &str, filename: &str) -> Result

{ - let file_path = dump_proof_path(dir, filename); - crate::io::from_json_file(&file_path) +pub fn dump_vk(dir: &str, filename: &str, raw_vk: &[u8]) -> Result<()> { + dump_data(dir, &format!("vk_{filename}.vkey"), raw_vk) } fn dump_proof_path(dir: &str, filename: &str) -> String { diff --git a/prover/src/proof/proof_v2.rs b/prover/src/proof/proof_v2.rs new file mode 100644 index 0000000000..a7082452c4 --- /dev/null +++ b/prover/src/proof/proof_v2.rs @@ -0,0 +1,332 @@ +use std::path::{Path, PathBuf}; + +use aggregator::ChunkInfo; +use eth_types::{base64, H256}; +use halo2_proofs::{ + halo2curves::bn256::{Fr, G1Affine}, + plonk::ProvingKey, +}; +use serde_derive::{Deserialize, Serialize}; +use snark_verifier::Protocol; +use snark_verifier_sdk::Snark; + +use crate::{ + deserialize_fr, read_json_deep, serialize_vk, short_git_version, write, write_json, + zkevm::RowUsage, BatchProverError, ChunkKind, ProverError, +}; + +use super::serialize_instances; + +/// Proof generated at certain checkpoints in the proof generation pipeline. +/// +/// Variants of [`ProofV2`] are [`ChunkProofV2`], [`BatchProofV2`] and [`BundleProofV2`], that are +/// the output of proof generation at [`Layer-2`][crate::LayerId::Layer2], [`Layer-4`][crate::LayerId::Layer4] +/// and [`Layer-6`][crate::LayerId::Layer6] respectively. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ProofV2 { + /// The inner data that differs between chunk proofs, batch proofs and bundle proofs. + #[serde(flatten)] + pub inner: Inner, + /// The raw bytes of the proof in the [`Snark`]. + /// + /// Serialized using base64 format in order to not bloat the JSON-encoded proof dump. + #[serde(with = "base64")] + pub proof: Vec, + /// The public values, aka instances of this [`Snark`]. + #[serde(with = "base64")] + pub instances: Vec, + /// The raw bytes of the [`VerifyingKey`] of the [`Circuit`] used to generate the [`Snark`]. + #[serde(with = "base64")] + pub vk: Vec, + /// The git ref of the codebase. + /// + /// Generally useful for debug reasons to know the exact commit using which this proof was + /// generated. + pub git_version: String, +} + +impl TryFrom<&ProofV2> for Snark { + type Error = ProverError; + + fn try_from(value: &ProofV2) -> Result { + let protocol = value + .inner + .protocol() + .ok_or(ProverError::Custom(String::from( + "protocol either not found or cannot be deserialized successfully", + )))?; + + let instances = value.deserialize_instances(); + + let proof = value.proof.to_vec(); + + Ok(Self { + protocol, + proof, + instances, + }) + } +} + +impl ProofV2 { + /// Construct a new proof given the inner metadata, proving key and the + /// [`Snark`][snark_verifier_sdk::Snark]. + pub fn new( + snark: Snark, + proving_key: Option<&ProvingKey>, + inner: Inner, + ) -> Result { + let instances = serialize_instances(&snark.instances); + let vk = proving_key.map_or_else(Vec::new, |pk| serialize_vk(pk.get_vk())); + + Ok(Self { + inner, + proof: snark.proof, + instances, + vk, + git_version: short_git_version(), + }) + } + + /// Read and deserialize the proof. + pub fn from_json>(dir: P, suffix: &str) -> Result { + let path = Self::path_proof(dir, suffix); + Ok(read_json_deep(path)?) + } + + /// Serialize the proof and other peripheral data, before dumping in the provided directory. + pub fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError> { + // Dump the verifying key. + write(Self::path_vk(&dir, &suffix), &self.vk)?; + + // Dump the proof itself. 
+ write_json(Self::path_proof(&dir, &suffix), &self)?; + + // Dump any other data for the inner data. + self.inner.dump(&dir, &suffix)?; + + Ok(()) + } + + /// Deserialize public values in the native scalar field. + fn deserialize_instances(&self) -> Vec<Vec<Fr>> { + vec![self + .instances + .chunks(32) + .map(|bytes| deserialize_fr(bytes.iter().rev().cloned().collect())) + .collect::<Vec<Fr>>()] + } + + /// Path to the JSON-encoded proof in the directory. + fn path_proof<P: AsRef<Path>>(dir: P, suffix: &str) -> PathBuf { + Inner::path_proof(dir, suffix) + } + + /// Path to the encoded [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] in the directory. + fn path_vk<P: AsRef<Path>>(dir: P, suffix: &str) -> PathBuf { + Inner::path_vk(dir, suffix) + } +} + +pub trait Proof: Clone + std::fmt::Debug + serde::Serialize { + /// Name of the proof layer. + const NAME: &'static str; + + /// <dir>
/proof_{NAME}_{suffix}.json + fn path_proof>(dir: P, suffix: &str) -> PathBuf { + dir.as_ref() + .join(format!("proof_{}_{}.json", Self::NAME, suffix)) + } + + /// /vk_{NAME}_{suffix}.vkey + fn path_vk>(dir: P, suffix: &str) -> PathBuf { + dir.as_ref() + .join(format!("vk_{}_{}.vkey", Self::NAME, suffix)) + } + + /// /protocol_{NAME}_{suffix}.protocol + fn path_protocol>(dir: P, suffix: &str) -> PathBuf { + dir.as_ref() + .join(format!("protocol_{}_{}.protocol", Self::NAME, suffix,)) + } + + /// Returns the SNARK protocol, if any in the metadata. + fn protocol(&self) -> Option>; + + /// Dump relevant fields from the proof metadata in the provided directory. + fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError>; +} + +/// Alias for convenience. +pub type ChunkProofV2 = ProofV2; + +/// Alias for convenience. +pub type BatchProofV2 = ProofV2; + +/// Alias for convenience. +pub type BundleProofV2 = ProofV2; + +/// The number of scalar field elements used to encode the KZG accumulator. +/// +/// The accumulator is essentially an `(lhs, rhs)` pair of [`G1Affine`] points, where each +/// `G1Affine` point comprises of 2 base field elements `(x, y)`. But since each base field +/// element is split into 3 limbs each, where each limb is our native scalar [`Fr`], in total we +/// have 12 scalar field elements to represent this accumulator. +const ACCUMULATOR_LEN: usize = 12; + +/// Each scalar field [`Fr`] element is encoded using 32 bytes. +const ACCUMULATOR_BYTES: usize = ACCUMULATOR_LEN * 32; + +/// The public input (excluding the accumulator) for the outermost +/// [`Layer-6`][crate::LayerId::Layer6] circuit is basically the public input carried forward from +/// the `Layer-5` [`RecursionCircuit`][aggregator::RecursionCircuit]. +/// +/// They are the following: +/// - Fr: Preprocessed Digest +/// - Fr: Recursion Round +/// - (Fr, Fr): Pre State Root (finalized) +/// - (Fr, Fr): Pre Batch Hash (finalized) +/// - (Fr, Fr): Post State Root (pending finalization) +/// - (Fr, Fr): Post Batch Hash (pending finalization) +/// - Fr: Chain ID +/// - (Fr, Fr): Post Withdraw Root (pending finalization) +/// +/// In total these are 13 scalar field elements. +const PUBLIC_INPUT_LEN: usize = 13; + +/// Each scalar field [`Fr`] element is encoded using 32 bytes. +const PUBLIC_INPUT_BYTES: usize = PUBLIC_INPUT_LEN * 32; + +impl BundleProofV2 { + /// Construct a new proof given raw proof and instance values. Generally to be used in the case + /// of final EVM proof using the [`gen_evm_verifier`][snark_verifier_sdk::gen_evm_verifier] + /// method. + pub fn new_from_raw(proof: &[u8], instances: &[u8], vk: &[u8]) -> Result { + // Sanity check on the number of public input bytes. + let expected_len = ACCUMULATOR_BYTES + PUBLIC_INPUT_BYTES; + let got_len = instances.len(); + if got_len != expected_len { + return Err(BatchProverError::PublicInputsMismatch(expected_len, got_len).into()); + } + + Ok(Self { + inner: BundleProofV2Metadata::default(), + proof: proof.to_vec(), + instances: instances.to_vec(), + vk: vk.to_vec(), + git_version: short_git_version(), + }) + } + + /// Encode the calldata for the proof verification transaction to be made on-chain. 
+ /// + /// [ public_input_bytes | accumulator_bytes | proof ] + pub fn calldata(&self) -> Vec { + std::iter::empty() + .chain(self.instances[ACCUMULATOR_BYTES..].iter()) + .chain(self.instances[0..ACCUMULATOR_BYTES].iter()) + .chain(self.proof.iter()) + .cloned() + .collect::>() + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ChunkProofV2Metadata { + /// The [`Protocol`][snark_verifier::Protocol] for the SNARK construction for the chunk proof. + #[serde(with = "base64")] + protocol: Vec, + /// The chunk proof can be for either the halo2 or sp1 routes. + chunk_kind: ChunkKind, + /// The EVM execution traces as a result of executing all txs in the chunk. + chunk_info: ChunkInfo, + /// Optional [Circuit-Capacity Checker][ccc] row usage statistics from the halo2-route. + /// + /// Is `None` for the sp1-route. + /// + /// [ccc]: crate::zkevm::CircuitCapacityChecker + row_usage: Option, +} + +impl ChunkProofV2Metadata { + /// Construct new chunk proof metadata. + pub fn new( + snark: &Snark, + chunk_kind: ChunkKind, + chunk_info: ChunkInfo, + row_usage: Option, + ) -> Result { + let protocol = serde_json::to_vec(&snark.protocol)?; + + Ok(Self { + protocol, + chunk_kind, + chunk_info, + row_usage, + }) + } +} + +impl Proof for ChunkProofV2Metadata { + const NAME: &'static str = "chunk"; + + fn protocol(&self) -> Option> { + serde_json::from_slice(&self.protocol).ok() + } + + fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError> { + write(Self::path_protocol(&dir, &suffix), &self.protocol)?; + + Ok(()) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BatchProofV2Metadata { + /// The [`Protocol`][snark_verifier::Protocol] for the SNARK construction for the chunk proof. + #[serde(with = "base64")] + protocol: Vec, + /// The hash of [`BatchHeader`][aggregator::BatchHeader] of the batch. + pub batch_hash: H256, +} + +impl BatchProofV2Metadata { + /// Create new batch proof metadata. 
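// Worked example of the instance layout enforced by `BundleProofV2::new_from_raw` above and
// the byte re-ordering performed by `calldata()`. The constants mirror `ACCUMULATOR_BYTES`
// and `PUBLIC_INPUT_BYTES`; the zeroed buffers are placeholders, not real proof data.
fn bundle_calldata_sketch() {
    const ACCUMULATOR_BYTES: usize = 12 * 32; // 12 accumulator limbs, 32 bytes each = 384
    const PUBLIC_INPUT_BYTES: usize = 13 * 32; // 13 public-input scalars, 32 bytes each = 416

    // `new_from_raw` expects exactly 384 + 416 = 800 instance bytes, laid out as
    // [ accumulator (384 bytes) | public inputs (416 bytes) ].
    let instances = vec![0u8; ACCUMULATOR_BYTES + PUBLIC_INPUT_BYTES];
    let proof = vec![0u8; 128];
    assert_eq!(instances.len(), 800);

    // `calldata()` swaps the two instance segments and appends the proof bytes:
    // [ public inputs | accumulator | proof ].
    let calldata: Vec<u8> = instances[ACCUMULATOR_BYTES..]
        .iter()
        .chain(instances[..ACCUMULATOR_BYTES].iter())
        .chain(proof.iter())
        .cloned()
        .collect();
    assert_eq!(calldata.len(), 800 + 128);
}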
+ pub fn new(snark: &Snark, batch_hash: H256) -> Result { + let protocol = serde_json::to_vec(&snark.protocol)?; + + Ok(Self { + protocol, + batch_hash, + }) + } +} + +impl Proof for BatchProofV2Metadata { + const NAME: &'static str = "batch"; + + fn protocol(&self) -> Option> { + serde_json::from_slice(&self.protocol).ok() + } + + fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError> { + write(Self::path_protocol(&dir, &suffix), &self.protocol)?; + + Ok(()) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, Default)] +pub struct BundleProofV2Metadata; + +impl Proof for BundleProofV2Metadata { + const NAME: &'static str = "bundle"; + + fn protocol(&self) -> Option> { + None + } + + fn dump>(&self, _dir: P, _suffix: &str) -> Result<(), ProverError> { + Ok(()) + } +} diff --git a/prover/src/test/batch.rs b/prover/src/test/batch.rs index 15ecf06248..7e76f6e181 100644 --- a/prover/src/test/batch.rs +++ b/prover/src/test/batch.rs @@ -1,22 +1,17 @@ -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::sync::{LazyLock, Mutex}; use crate::{ aggregator::{Prover, Verifier}, - config::{LayerId, AGG_DEGREES}, + config::{LayerId, BATCH_PROVER_DEGREES}, consts::DEPLOYMENT_CODE_FILENAME, - io::force_to_read, types::BundleProvingTask, - utils::read_env_var, - BatchProvingTask, -}; -use std::{ - collections::BTreeMap, - sync::{LazyLock, Mutex}, + utils::{force_to_read, read_env_var}, + BatchProvingTask, ParamsMap, }; -static PARAMS_MAP: LazyLock>> = LazyLock::new(|| { +static PARAMS_MAP: LazyLock = LazyLock::new(|| { let params_dir = read_env_var("SCROLL_PROVER_PARAMS_DIR", "./test_params".to_string()); - crate::common::Prover::load_params_map(¶ms_dir, &AGG_DEGREES) + crate::common::Prover::load_params_map(¶ms_dir, &BATCH_PROVER_DEGREES) }); static BATCH_PROVER: LazyLock> = LazyLock::new(|| { @@ -55,7 +50,7 @@ pub fn batch_prove(test: &str, batch: BatchProvingTask) { verifier }; let verified = verifier.verify_batch_proof(&proof); - assert!(verified, "{test}: failed to verify batch proof"); + assert!(verified.is_ok(), "{test}: failed to verify batch proof"); log::info!("{test}: batch-prove END"); } @@ -88,8 +83,8 @@ pub fn bundle_prove(test: &str, bundle: BundleProvingTask) { verifier }; - let verified = verifier.verify_bundle_proof(proof); - assert!(verified, "{test}: failed to verify bundle proof"); + let verified = verifier.verify_bundle_proof(&proof); + assert!(verified.is_ok(), "{test}: failed to verify bundle proof"); log::info!("{test}: bundle-prove END"); } diff --git a/prover/src/test/chunk.rs b/prover/src/test/chunk.rs index 4d7c29d1e4..bf468a6987 100644 --- a/prover/src/test/chunk.rs +++ b/prover/src/test/chunk.rs @@ -1,19 +1,15 @@ -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::sync::{LazyLock, Mutex}; use crate::{ - config::ZKEVM_DEGREES, + config::CHUNK_PROVER_DEGREES, utils::read_env_var, zkevm::{Prover, Verifier}, - ChunkProof, ChunkProvingTask, -}; -use std::{ - collections::BTreeMap, - sync::{LazyLock, Mutex}, + ChunkProofV2, ChunkProvingTask, ParamsMap, }; -static PARAMS_MAP: LazyLock>> = LazyLock::new(|| { +static PARAMS_MAP: LazyLock = LazyLock::new(|| { let params_dir = read_env_var("SCROLL_PROVER_PARAMS_DIR", "./test_params".to_string()); - crate::common::Prover::load_params_map(¶ms_dir, &ZKEVM_DEGREES) + crate::common::Prover::load_params_map(¶ms_dir, &CHUNK_PROVER_DEGREES) }); static CHUNK_PROVER: LazyLock> = LazyLock::new(|| { @@ -24,13 +20,13 @@ static CHUNK_PROVER: LazyLock> = 
LazyLock::new(|| { Mutex::new(prover) }); -pub fn chunk_prove(desc: &str, chunk: ChunkProvingTask) -> ChunkProof { +pub fn chunk_prove(desc: &str, chunk: ChunkProvingTask) -> ChunkProofV2 { log::info!("{desc}: chunk-prove BEGIN"); let mut prover = CHUNK_PROVER.lock().expect("poisoned chunk-prover"); let proof = prover - .gen_chunk_proof(chunk, None, None, None) + .gen_halo2_chunk_proof(chunk, None, None, None) .unwrap_or_else(|err| panic!("{desc}: failed to generate chunk snark: {err}")); log::info!("{desc}: generated chunk proof"); @@ -41,8 +37,8 @@ pub fn chunk_prove(desc: &str, chunk: ChunkProvingTask) -> ChunkProof { verifier }; - let verified = verifier.verify_chunk_proof(proof.clone()); - assert!(verified, "{desc}: failed to verify chunk snark"); + let verified = verifier.verify_chunk_proof(&proof); + assert!(verified.is_ok(), "{desc}: failed to verify chunk snark"); log::info!("{desc}: chunk-prove END"); diff --git a/prover/src/test/inner.rs b/prover/src/test/inner.rs index fe6e90d5df..ea249cb219 100644 --- a/prover/src/test/inner.rs +++ b/prover/src/test/inner.rs @@ -1,18 +1,14 @@ -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::sync::{LazyLock, Mutex}; use crate::{ common::{Prover, Verifier}, config::{LayerId, INNER_DEGREE}, - utils::{gen_rng, read_env_var}, + utils::read_env_var, zkevm::circuit::{SuperCircuit, TargetCircuit}, - WitnessBlock, -}; -use std::{ - collections::BTreeMap, - sync::{LazyLock, Mutex}, + ParamsMap, WitnessBlock, }; -static PARAMS_MAP: LazyLock>> = LazyLock::new(|| { +static PARAMS_MAP: LazyLock = LazyLock::new(|| { let params_dir = read_env_var("SCROLL_PROVER_PARAMS_DIR", "./test_params".to_string()); crate::common::Prover::load_params_map(¶ms_dir, &[*INNER_DEGREE]) }); @@ -29,9 +25,8 @@ pub fn inner_prove(test: &str, witness_block: &WitnessBlock) { let mut prover = INNER_PROVER.lock().expect("poisoned inner-prover"); - let rng = gen_rng(); let snark = prover - .gen_inner_snark::("inner", rng, witness_block) + .load_or_gen_inner_snark("", "inner", witness_block, None) .unwrap_or_else(|err| panic!("{test}: failed to generate inner snark: {err}")); log::info!("{test}: generated inner snark"); diff --git a/prover/src/test.rs b/prover/src/test/mod.rs similarity index 99% rename from prover/src/test.rs rename to prover/src/test/mod.rs index 982dba2a11..2f120d6e2d 100644 --- a/prover/src/test.rs +++ b/prover/src/test/mod.rs @@ -1,7 +1,8 @@ mod batch; -mod chunk; -mod inner; - pub use batch::{batch_prove, bundle_prove}; + +mod chunk; pub use chunk::chunk_prove; + +mod inner; pub use inner::inner_prove; diff --git a/prover/src/types.rs b/prover/src/types.rs index bf661f9cc2..29592ec155 100644 --- a/prover/src/types.rs +++ b/prover/src/types.rs @@ -1,56 +1,84 @@ use aggregator::{BatchHeader, ChunkInfo, MAX_AGG_SNARKS}; -use eth_types::l2_types::BlockTrace; +use eth_types::{base64, l2_types::BlockTrace}; use serde::{Deserialize, Serialize}; use zkevm_circuits::evm_circuit::witness::Block; +use crate::{BatchProof, ChunkProof}; + +/// Alias for convenience. pub type WitnessBlock = Block; +/// Helper type to deserialize JSON-encoded RPC result for [`BlockTrace`]. #[derive(Deserialize, Serialize, Default, Debug, Clone)] pub struct BlockTraceJsonRpcResult { + /// The value of the "result" key. pub result: BlockTrace, } -pub use eth_types::base64; - -use crate::{BatchProof, ChunkProof}; +/// Defines a proving task for chunk proof generation. 
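// Sketch of the fallback parse that `get_block_trace_from_file` (later in this patch) applies
// when reading a trace file: try a bare `BlockTrace` first, then the JSON-RPC envelope whose
// "result" field holds the trace. `bytes` is a placeholder for the file contents.
fn parse_trace_sketch(bytes: &[u8]) -> BlockTrace {
    serde_json::from_slice::<BlockTrace>(bytes).unwrap_or_else(|_| {
        serde_json::from_slice::<BlockTraceJsonRpcResult>(bytes)
            .expect("neither a bare BlockTrace nor a JSON-RPC result")
            .result
    })
}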
#[derive(Debug, Clone, Deserialize, Serialize)] pub struct ChunkProvingTask { - /// Prover can check `chunk_info` is consistent with block traces + /// Optional chunk data encapsulated within the proving task. + /// + /// As part of a sanity check, the prover reconstructs the chunk data using the EVM execution + /// traces from all blocks in the chunk and compares against the supplied chunk data. pub chunk_info: Option, + /// The EVM execution traces for all blocks in the chunk. pub block_traces: Vec, } impl ChunkProvingTask { - pub fn from(block_traces: Vec) -> Self { + /// Create a new chunk proving task given the chunk trace. + pub fn new(block_traces: Vec) -> Self { Self { block_traces, chunk_info: None, } } + + /// Returns true if there are no block traces in the chunk. pub fn is_empty(&self) -> bool { self.block_traces.is_empty() } - /// Used for cache/load proof from disk + + /// An identifier for the chunk. It is the block number of the first block in the chunk. + /// + /// This is used as a file descriptor to save to (load from) disk in order to avoid proof + /// generation if the same proof/SNARK is already found on disk. pub fn identifier(&self) -> String { self.block_traces .first() .map_or(0, |trace: &BlockTrace| { - trace.header.number.expect("block num").low_u64() + trace + .header + .number + .expect("block number should be present") + .low_u64() }) .to_string() } } +/// Defines a proving task for batch proof generation. #[derive(Debug, Clone, Deserialize, Serialize)] pub struct BatchProvingTask { + /// Chunk proofs for the contiguous list of chunks within the batch. pub chunk_proofs: Vec, + /// The [`BatchHeader`], as computed on-chain for this batch. + /// + /// Ref: https://github.com/scroll-tech/scroll-contracts/blob/2ac4f3f7e090d7127db4b13b3627cb3ce2d762bc/src/libraries/codec/BatchHeaderV3Codec.sol pub batch_header: BatchHeader, + /// The bytes encoding the batch data that will finally be published on-chain in the form of an + /// EIP-4844 blob. #[serde(with = "base64")] pub blob_bytes: Vec, } impl BatchProvingTask { - /// Used for cache/load proof from disk + /// An identifier for the batch. It is the public input hash of the last chunk in the batch. + /// + /// This is used as a file descriptor to save to (load from) disk in order to avoid proof + /// generation if the same proof/SNARK is already found on disk. pub fn identifier(&self) -> String { self.chunk_proofs .last() @@ -62,12 +90,18 @@ impl BatchProvingTask { } } +/// Defines a proving task for bundle proof generation. #[derive(Debug, Clone, Deserialize, Serialize)] pub struct BundleProvingTask { + /// The [`BatchProofs`][BatchProof] for the contiguous list of batches to be bundled together. pub batch_proofs: Vec, } impl BundleProvingTask { + /// An identifier for the bundle. It is the batch hash of the last batch in the bundle. + /// + /// This is used as a file descriptor to save to (load from) disk in order to avoid proof + /// generation if the same proof/SNARK is already found on disk. 
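// Illustration of how a task identifier becomes the on-disk cache key via the
// `Proof::path_proof`/`path_vk`/`path_protocol` naming scheme introduced in this patch.
// The directory and block number are made-up placeholders.
fn cache_paths_sketch() {
    // A chunk whose first block has number 1234 gets the identifier "1234" ...
    let chunk_id = String::from("1234");

    // ... so a chunk proof dumped under `./proofs` is cached at / reloaded from:
    let proof_path = format!("./proofs/proof_chunk_{chunk_id}.json");
    let vk_path = format!("./proofs/vk_chunk_{chunk_id}.vkey");
    let protocol_path = format!("./proofs/protocol_chunk_{chunk_id}.protocol");

    // Batch and bundle proofs use NAME = "batch" / "bundle" with their own identifiers
    // (last chunk's public-input hash, last batch's hash) as the suffix.
    assert!(proof_path.ends_with("proof_chunk_1234.json"));
    let _ = (vk_path, protocol_path);
}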
pub fn identifier(&self) -> String { self.batch_proofs.last().unwrap().batch_hash.to_string() } diff --git a/prover/src/utils/evm.rs b/prover/src/utils/evm.rs new file mode 100644 index 0000000000..4e1b091a90 --- /dev/null +++ b/prover/src/utils/evm.rs @@ -0,0 +1,103 @@ +use std::path::Path; + +use halo2_proofs::{ + halo2curves::bn256::{Bn256, Fr, G1Affine}, + plonk::VerifyingKey, + poly::kzg::commitment::ParamsKZG, +}; +use revm::{ + primitives::{CreateScheme, ExecutionResult, Output, TransactTo, TxEnv}, + InMemoryDB, EVM, +}; +use snark_verifier::pcs::kzg::{Bdfg21, Kzg}; +use snark_verifier_sdk::CircuitExt; + +use crate::{utils::write, BatchProverError, EvmProof, ProverError}; + +/// Dump YUL and binary bytecode(use `solc` in PATH) to output_dir. +/// +/// Panics if the verifier contract cannot successfully verify the [`EvmProof`]. +pub fn gen_evm_verifier>( + params: &ParamsKZG, + vk: &VerifyingKey, + evm_proof: &EvmProof, + output_dir: Option<&str>, +) -> Result<(), ProverError> { + // Generate deployment code and dump YUL file. + let deployment_code = snark_verifier_sdk::gen_evm_verifier::>( + params, + vk, + evm_proof.num_instance.clone(), + None, + ); + + // Write the contract binary if an output directory was specified. + if let Some(dir) = output_dir { + let path = Path::new(dir).join("evm_verifier.bin"); + write(&path, &deployment_code)?; + } + + if evm_proof.proof.evm_verify(deployment_code) { + Ok(()) + } else { + Err(ProverError::BatchProverError( + BatchProverError::SanityEVMVerifier, + )) + } +} + +/// Deploy contract and then call with calldata. +/// +/// Returns the gas used to verify proof. +pub fn deploy_and_call(deployment_code: Vec, calldata: Vec) -> Result { + let mut evm = EVM { + env: Default::default(), + db: Some(InMemoryDB::default()), + }; + + evm.env.tx = TxEnv { + gas_limit: u64::MAX, + transact_to: TransactTo::Create(CreateScheme::Create), + data: deployment_code.into(), + ..Default::default() + }; + + let result = evm.transact_commit().unwrap(); + let contract = match result { + ExecutionResult::Success { + output: Output::Create(_, Some(contract)), + .. + } => contract, + ExecutionResult::Revert { gas_used, output } => { + return Err(format!( + "Contract deployment transaction reverts with gas_used {gas_used} and output {:#x}", + output + )) + } + ExecutionResult::Halt { reason, gas_used } => return Err(format!( + "Contract deployment transaction halts unexpectedly with gas_used {gas_used} and reason {:?}", + reason + )), + _ => unreachable!(), + }; + + evm.env.tx = TxEnv { + gas_limit: u64::MAX, + transact_to: TransactTo::Call(contract), + data: calldata.into(), + ..Default::default() + }; + + let result = evm.transact_commit().unwrap(); + match result { + ExecutionResult::Success { gas_used, .. 
} => Ok(gas_used), + ExecutionResult::Revert { gas_used, output } => Err(format!( + "Contract call transaction reverts with gas_used {gas_used} and output {:#x}", + output + )), + ExecutionResult::Halt { reason, gas_used } => Err(format!( + "Contract call transaction halts unexpectedly with gas_used {gas_used} and reason {:?}", + reason + )), + } +} diff --git a/prover/src/utils/io.rs b/prover/src/utils/io.rs new file mode 100644 index 0000000000..4107017b2d --- /dev/null +++ b/prover/src/utils/io.rs @@ -0,0 +1,137 @@ +use std::{ + fs, + io::{Cursor, Write}, + path::{Path, PathBuf}, +}; + +use halo2_proofs::{ + halo2curves::bn256::{Fr, G1Affine}, + plonk::{Circuit, VerifyingKey}, + SerdeFormat, +}; +use serde::{ + de::{Deserialize, DeserializeOwned}, + Serialize, +}; +use snark_verifier::util::arithmetic::PrimeField; + +use crate::ProverError; + +pub fn serialize_fr(f: &Fr) -> Vec { + f.to_bytes().to_vec() +} + +pub fn deserialize_fr(buf: Vec) -> Fr { + Fr::from_repr(buf.try_into().unwrap()).unwrap() +} +pub fn serialize_fr_vec(v: &[Fr]) -> Vec> { + v.iter().map(serialize_fr).collect() +} +pub fn deserialize_fr_vec(l2_buf: Vec>) -> Vec { + l2_buf.into_iter().map(deserialize_fr).collect() +} + +pub fn serialize_fr_matrix(m: &[Vec]) -> Vec>> { + m.iter().map(|v| serialize_fr_vec(v.as_slice())).collect() +} + +pub fn deserialize_fr_matrix(l3_buf: Vec>>) -> Vec> { + l3_buf.into_iter().map(deserialize_fr_vec).collect() +} + +pub fn serialize_instance(instance: &[Vec]) -> Vec { + let instances_for_serde = serialize_fr_matrix(instance); + + serde_json::to_vec(&instances_for_serde).unwrap() +} + +pub fn try_to_read(dir: &str, filename: &str) -> Option> { + let mut path = PathBuf::from(dir); + path.push(filename); + + if path.exists() { + self::read(&path).ok() + } else { + None + } +} + +pub fn force_to_read(dir: &str, filename: &str) -> Vec { + try_to_read(dir, filename).unwrap_or_else(|| panic!("File {filename} must exist in {dir}")) +} + +pub fn write_file(folder: &mut PathBuf, filename: &str, buf: &[u8]) { + folder.push(filename); + let mut fd = std::fs::File::create(folder.as_path()).unwrap(); + folder.pop(); + + fd.write_all(buf).unwrap(); +} + +pub fn serialize_vk(vk: &VerifyingKey) -> Vec { + let mut result = Vec::::new(); + vk.write(&mut result, SerdeFormat::Processed).unwrap(); + result +} + +pub fn deserialize_vk>(raw_vk: &[u8]) -> VerifyingKey { + VerifyingKey::::read::<_, C>(&mut Cursor::new(raw_vk), SerdeFormat::Processed) + .unwrap_or_else(|_| panic!("failed to deserialize vk with len {}", raw_vk.len())) +} + +/// Read bytes from a file. +pub fn read>(path: P) -> Result, ProverError> { + let path = path.as_ref(); + fs::read(path).map_err(|source| ProverError::IoReadWrite { + source, + path: path.into(), + }) +} + +/// Wrapper to read JSON file. +pub fn read_json, T: DeserializeOwned>(path: P) -> Result { + let path = path.as_ref(); + let bytes = read(path)?; + serde_json::from_slice(&bytes).map_err(|source| ProverError::JsonReadWrite { + source, + path: path.to_path_buf(), + }) +} + +/// Wrapper to read JSON that might be deeply nested. +pub fn read_json_deep, T: DeserializeOwned>(path: P) -> Result { + let fd = fs::File::open(path)?; + let mut deserializer = serde_json::Deserializer::from_reader(fd); + deserializer.disable_recursion_limit(); + let deserializer = serde_stacker::Deserializer::new(&mut deserializer); + Ok(Deserialize::deserialize(deserializer)?) +} + +/// Try to read bytes from a file. 
+/// +/// Returns an optional value, which is `None` in case of an i/o error encountered. +pub fn try_read>(path: P) -> Option> { + self::read(path).ok() +} + +/// Read bytes from a file. +/// +/// Panics if any i/o error encountered. +pub fn force_read + std::fmt::Debug>(path: P) -> Vec { + self::read(path.as_ref()).expect(&format!("no file found! path={path:?}")) +} + +/// Wrapper functionality to write bytes to a file. +pub fn write>(path: P, data: &[u8]) -> Result<(), ProverError> { + let path = path.as_ref(); + fs::write(path, data).map_err(|source| ProverError::IoReadWrite { + source, + path: path.into(), + }) +} + +/// Serialize the provided type to JSON format and write to the given path. +pub fn write_json, T: Serialize>(path: P, value: &T) -> Result<(), ProverError> { + let mut writer = fs::File::create(path)?; + Ok(serde_json::to_writer(&mut writer, value)?) +} diff --git a/prover/src/utils.rs b/prover/src/utils/mod.rs similarity index 80% rename from prover/src/utils.rs rename to prover/src/utils/mod.rs index 534016376f..eff19a0468 100644 --- a/prover/src/utils.rs +++ b/prover/src/utils/mod.rs @@ -1,8 +1,11 @@ -#![allow(deprecated)] -use crate::{ - types::BlockTraceJsonRpcResult, - zkevm::circuit::{block_traces_to_witness_block, print_chunk_stats}, +use std::{ + fs::{self, metadata, File}, + io::{BufReader, Read}, + path::{Path, PathBuf}, + str::FromStr, + sync::Once, }; + use anyhow::{bail, Result}; use chrono::Utc; use eth_types::l2_types::BlockTrace; @@ -19,18 +22,20 @@ use log4rs::{ use rand::{Rng, SeedableRng}; use rand_xorshift::XorShiftRng; use std::fmt::Debug; -use std::{ - fs::{self, metadata, File}, - io::{BufReader, Read}, - path::{Path, PathBuf}, - str::FromStr, - sync::Once, -}; use zkevm_circuits::evm_circuit::witness::Block; +use crate::types::BlockTraceJsonRpcResult; + +mod evm; +pub use evm::*; + +mod io; +pub use io::*; + pub static LOGGER: Once = Once::new(); pub const DEFAULT_SERDE_FORMAT: SerdeFormat = SerdeFormat::RawBytesUnchecked; + pub const GIT_VERSION: &str = git_version!(args = ["--abbrev=7", "--always"]); pub const PARAMS_G2_SECRET_POWER: &str = "(Fq2 { c0: 0x17944351223333f260ddc3b4af45191b856689eda9eab5cbcddbbe570ce860d2, c1: 0x186282957db913abd99f91db59fe69922e95040603ef44c0bd7aa3adeef8f5ac }, Fq2 { c0: 0x297772d34bc9aa8ae56162486363ffe417b02dc7e8c207fc2cc20203e67a02ad, c1: 0x298adc7396bd3865cbf6d6df91bae406694e6d2215baa893bdeadb63052895f4 })"; @@ -83,42 +88,13 @@ pub fn load_params( Ok(p) } -#[deprecated] -fn post_process_tx_storage_proof(trace: &mut BlockTrace) { - // fill intrinsicStorageProofs into tx storage proof - let addrs = vec![ - *bus_mapping::l2_predeployed::message_queue::ADDRESS, - *bus_mapping::l2_predeployed::l1_gas_price_oracle::ADDRESS, - ]; - for tx_storage_trace in &mut trace.tx_storage_trace { - if let Some(proof) = tx_storage_trace.proofs.as_mut() { - for addr in &addrs { - proof.insert( - *addr, - trace - .storage_trace - .proofs - .as_ref() - .map(|p| p[addr].clone()) - .unwrap(), - ); - } - } - for addr in &addrs { - tx_storage_trace - .storage_proofs - .insert(*addr, trace.storage_trace.storage_proofs[addr].clone()); - } - } -} - /// get a block-result from file pub fn get_block_trace_from_file>(path: P) -> BlockTrace { let mut buffer = Vec::new(); let mut f = File::open(&path).unwrap(); f.read_to_end(&mut buffer).unwrap(); - let mut trace = serde_json::from_slice::(&buffer).unwrap_or_else(|e1| { + serde_json::from_slice::(&buffer).unwrap_or_else(|e1| { serde_json::from_slice::(&buffer) .map_err(|e2| { panic!( @@ -130,9 
+106,7 @@ pub fn get_block_trace_from_file>(path: P) -> BlockTrace { }) .unwrap() .result - }); - post_process_tx_storage_proof(&mut trace); - trace + }) } pub fn read_env_var(var_name: &'static str, default: T) -> T { @@ -158,14 +132,6 @@ pub fn metric_of_witness_block(block: &Block) -> ChunkMetric { } } -pub fn chunk_trace_to_witness_block(chunk_trace: Vec) -> Result { - if chunk_trace.is_empty() { - bail!("Empty chunk trace"); - } - print_chunk_stats(&chunk_trace); - block_traces_to_witness_block(chunk_trace) -} - // Return the output dir. pub fn init_env_and_log(id: &str) -> String { dotenvy::dotenv().ok(); diff --git a/prover/src/zkevm.rs b/prover/src/zkevm.rs deleted file mode 100644 index 6a53ee7bbc..0000000000 --- a/prover/src/zkevm.rs +++ /dev/null @@ -1,17 +0,0 @@ -#[cfg(feature = "scroll")] -mod capacity_checker; -pub mod circuit; -mod prover; -mod verifier; - -pub use self::prover::Prover; -#[cfg(feature = "scroll")] -pub use capacity_checker::{CircuitCapacityChecker, RowUsage}; -use serde::{Deserialize, Serialize}; -pub use verifier::Verifier; - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct SubCircuitRowUsage { - pub name: String, - pub row_number: usize, -} diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index 4536f1b7f9..733beef41d 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -1,5 +1,4 @@ -use super::circuit::{calculate_row_usage_of_witness_block, finalize_builder}; -use bus_mapping::circuit_input_builder::{self, CircuitInputBuilder}; +use bus_mapping::circuit_input_builder::{Blocks, CircuitInputBuilder}; use eth_types::{ l2_types::BlockTrace, state_db::{CodeDB, StateDB}, @@ -14,7 +13,13 @@ use zkevm_circuits::{ super_circuit::params::{get_sub_circuit_limit_and_confidence, get_super_circuit_params}, }; -pub use super::SubCircuitRowUsage; +use super::circuit::{calculate_row_usage_of_witness_block, finalize_builder}; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct SubCircuitRowUsage { + pub name: String, + pub row_number: usize, +} #[derive(Debug, Clone, Deserialize, Serialize)] pub struct RowUsage { @@ -137,10 +142,7 @@ impl CircuitCapacityChecker { self.acc_row_usage.clone() } } - pub fn estimate_circuit_capacity( - &mut self, - trace: BlockTrace, - ) -> Result { + pub fn estimate_circuit_capacity(&mut self, trace: BlockTrace) -> anyhow::Result { let (mut estimate_builder, codedb_prev) = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() { // here we create a new builder for another (sealed) witness block @@ -148,8 +150,7 @@ impl CircuitCapacityChecker { // the previous one and do not use zktrie state, // notice the prev_root in current builder may be not invalid (since the state has // changed but we may not update it in light mode) - let mut builder_block = - circuit_input_builder::Blocks::init(trace.chain_id, get_super_circuit_params()); + let mut builder_block = Blocks::init(trace.chain_id, get_super_circuit_params()); builder_block.start_l1_queue_index = trace.start_l1_queue_index; builder_block.prev_state_root = mpt_state .as_ref() diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 3c55ba6068..1460986e5a 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -1,6 +1,4 @@ -use crate::zkevm::SubCircuitRowUsage; -use anyhow::{bail, Result}; -use bus_mapping::circuit_input_builder::CircuitInputBuilder; +use 
bus_mapping::{circuit_input_builder::CircuitInputBuilder, Error as CircuitBuilderError}; use eth_types::{l2_types::BlockTrace, ToWord}; use itertools::Itertools; use mpt_zktrie::state::ZkTrieHash; @@ -10,14 +8,20 @@ use zkevm_circuits::{ witness::block_convert, }; +use crate::zkevm::{ChunkProverError, SubCircuitRowUsage}; + +/// Returns the row-usage for all sub-circuits in the process of applying the entire witness block +/// to the super circuit. pub fn calculate_row_usage_of_witness_block( witness_block: &Block, -) -> Result> { +) -> Result, ChunkProverError> { let rows = ScrollSuperCircuit::min_num_rows_block_subcircuits(witness_block); // Check whether we need to "estimate" poseidon sub circuit row usage if witness_block.mpt_updates.smt_traces.is_empty() { - bail!("ligth mode no longer supported"); + return Err(ChunkProverError::Custom( + "light mode no longer supported".to_string(), + )); } let first_block_num = witness_block.first_block_number(); @@ -34,45 +38,80 @@ pub fn calculate_row_usage_of_witness_block( .sum::(), rows, ); - let row_usage_details: Vec = rows + + Ok(rows .into_iter() .map(|x| SubCircuitRowUsage { name: x.name, row_number: x.row_num_real, }) - .collect_vec(); - Ok(row_usage_details) + .collect_vec()) } -pub fn print_chunk_stats(block_traces: &[BlockTrace]) { - let num_blocks = block_traces.len(); - let num_txs = block_traces - .iter() - .map(|b| b.transactions.len()) - .sum::(); - let total_tx_len = block_traces - .iter() - .flat_map(|b| b.transactions.iter().map(|t| t.data.len())) - .sum::(); - log::info!( - "check capacity of block traces, num_block {}, num_tx {}, tx total len {}", - num_blocks, - num_txs, - total_tx_len - ); +/// Generate a dummy witness block to eventually generate proving key and verifying key for the +/// target circuit without going through the expensive process of actual witness assignment. +pub fn dummy_witness_block() -> Block { + let dummy_chain_id = 0; + zkevm_circuits::witness::dummy_witness_block(dummy_chain_id) } -pub fn dummy_witness_block() -> Result { - log::debug!("generate dummy witness block"); - let dummy_chain_id = 0; - let witness_block = zkevm_circuits::witness::dummy_witness_block(dummy_chain_id); - log::debug!("generate dummy witness block done"); +/// Build a witness block from block traces for all blocks in the chunk. +pub fn chunk_trace_to_witness_block( + chunk_trace: Vec, +) -> Result { + if chunk_trace.is_empty() { + return Err(ChunkProverError::Custom("Empty chunk trace".to_string())); + } + print_chunk_stats(&chunk_trace); + block_traces_to_witness_block(chunk_trace) +} + +/// Finalize building and return witness block +pub fn finalize_builder(builder: &mut CircuitInputBuilder) -> Result { + builder.finalize_building()?; + + log::debug!("converting builder.block to witness block"); + + let mut witness_block = block_convert(&builder.block, &builder.code_db)?; + log::debug!( + "witness_block built with circuits_params {:?}", + witness_block.circuits_params + ); + + if let Some(state) = &mut builder.mpt_init_state { + if *state.root() != [0u8; 32] { + log::debug!("apply_mpt_updates"); + witness_block.apply_mpt_updates_and_update_mpt_state(state); + log::debug!("apply_mpt_updates done"); + } else { + // Empty state root means circuit capacity checking, or dummy witness block for key gen? 
+ log::info!("empty state root, skip apply_mpt_updates"); + } + + let root_after = witness_block.post_state_root().to_word(); + log::debug!( + "finish replay trie updates, root {}, root after {:#x?}", + hex::encode(state.root()), + root_after, + ); + // switch state to new root + let mut new_root_hash = ZkTrieHash::default(); + root_after.to_big_endian(&mut new_root_hash); + assert!(state.switch_to(new_root_hash)); + } + Ok(witness_block) } -pub fn block_traces_to_witness_block(block_traces: Vec) -> Result { +/// Build a witness block from block traces for all blocks in the chunk. +/// +/// Kind of a duplication of [`self::chunk_trace_to_witness_block`], so should eventually be +/// deprecated. +fn block_traces_to_witness_block(block_traces: Vec) -> Result { if block_traces.is_empty() { - bail!("use dummy_witness_block instead"); + return Err(ChunkProverError::Custom( + "empty block traces! hint: use dummy_witness_block instead".to_string(), + )); } let block_num = block_traces.len(); let total_tx_num = block_traces @@ -80,12 +119,12 @@ pub fn block_traces_to_witness_block(block_traces: Vec) -> Result(); if total_tx_num > MAX_TXS { - bail!( + return Err(ChunkProverError::Custom(format!( "tx num overflow {}, block range {} to {}", total_tx_num, block_traces[0].header.number.unwrap(), block_traces[block_num - 1].header.number.unwrap() - ); + ))); } log::info!( "block_traces_to_witness_block, block num {}, tx num {}", @@ -116,39 +155,20 @@ pub fn block_traces_to_witness_block(block_traces: Vec) -> Result Result { - builder.finalize_building()?; - - log::debug!("converting builder.block to witness block"); - - let mut witness_block = block_convert(&builder.block, &builder.code_db)?; - log::debug!( - "witness_block built with circuits_params {:?}", - witness_block.circuits_params +fn print_chunk_stats(block_traces: &[BlockTrace]) { + let num_blocks = block_traces.len(); + let num_txs = block_traces + .iter() + .map(|b| b.transactions.len()) + .sum::(); + let total_tx_len = block_traces + .iter() + .flat_map(|b| b.transactions.iter().map(|t| t.data.len())) + .sum::(); + log::info!( + "check capacity of block traces, num_block {}, num_tx {}, tx total len {}", + num_blocks, + num_txs, + total_tx_len ); - - if let Some(state) = &mut builder.mpt_init_state { - if *state.root() != [0u8; 32] { - log::debug!("apply_mpt_updates"); - witness_block.apply_mpt_updates_and_update_mpt_state(state); - log::debug!("apply_mpt_updates done"); - } else { - // Empty state root means circuit capacity checking, or dummy witness block for key gen? 
- log::info!("empty state root, skip apply_mpt_updates"); - } - - let root_after = witness_block.post_state_root().to_word(); - log::debug!( - "finish replay trie updates, root {}, root after {:#x?}", - hex::encode(state.root()), - root_after, - ); - // switch state to new root - let mut new_root_hash = ZkTrieHash::default(); - root_after.to_big_endian(&mut new_root_hash); - assert!(state.switch_to(new_root_hash)); - } - - Ok(witness_block) } diff --git a/prover/src/zkevm/circuit.rs b/prover/src/zkevm/circuit/mod.rs similarity index 69% rename from prover/src/zkevm/circuit.rs rename to prover/src/zkevm/circuit/mod.rs index 33c0caed88..3dad52aa58 100644 --- a/prover/src/zkevm/circuit.rs +++ b/prover/src/zkevm/circuit/mod.rs @@ -1,34 +1,29 @@ -use builder::dummy_witness_block; use halo2_proofs::halo2curves::bn256::Fr; use snark_verifier_sdk::CircuitExt; use zkevm_circuits::{super_circuit::params::ScrollSuperCircuit, util::SubCircuit, witness}; mod builder; -pub use self::builder::{ - block_traces_to_witness_block, calculate_row_usage_of_witness_block, finalize_builder, - print_chunk_stats, +pub use builder::{ + calculate_row_usage_of_witness_block, chunk_trace_to_witness_block, finalize_builder, }; -pub use zkevm_circuits::super_circuit::params::{MAX_CALLDATA, MAX_INNER_BLOCKS, MAX_TXS}; - /// A target circuit trait is a wrapper of inner circuit, with convenient APIs for building /// circuits from traces. pub trait TargetCircuit { /// The actual inner circuit that implements Circuit trait. type Inner: CircuitExt + SubCircuit; - /// Generate a dummy circuit with an empty trace. - /// This is useful for generating vk and pk. + /// Generate a dummy circuit with an empty trace. This is useful for generating vk and pk. fn dummy_inner_circuit() -> anyhow::Result where Self: Sized, { - let witness_block = dummy_witness_block()?; + let witness_block = builder::dummy_witness_block(); let circuit = Self::from_witness_block(&witness_block)?; Ok(circuit) } - /// Build the inner circuit and the instances from the witness block + /// Build the inner circuit and the instances from the witness block. fn from_witness_block(witness_block: &witness::Block) -> anyhow::Result where Self: Sized, diff --git a/prover/src/zkevm/error.rs b/prover/src/zkevm/error.rs new file mode 100644 index 0000000000..a970145d08 --- /dev/null +++ b/prover/src/zkevm/error.rs @@ -0,0 +1,39 @@ +/// Various errors potentially encountered during proof generation. +#[derive(thiserror::Error, Debug)] +pub enum ChunkProverError { + /// Indicates that the halo2-based [`SuperCircuit`][super_circ] does not have sufficient + /// capacity to populate block traces from all the blocks in the chunk. The error encapsulates + /// the [`RowUsage`][row_usage] observed from populating the chunk. + /// + /// [super_circ]: zkevm_circuits::super_circuit::SuperCircuit + /// [row_usage]: crate::zkevm::RowUsage + #[error("halo2 circuit-capacity exceeded")] + CircuitCapacityOverflow(crate::zkevm::RowUsage), + /// Represents an error propagated from the [`bus_mapping`] crate. + #[error(transparent)] + CircuitBuilder(#[from] bus_mapping::Error), + /// Represents the [`halo2 error`][halo2_error] being propagated. + /// + /// [halo2_error]: halo2_proofs::plonk::Error + #[error(transparent)] + Halo2(#[from] halo2_proofs::plonk::Error), + /// Error indicating that the verifying key found post proof generation does not match the + /// expected verifying key. 
+ #[error("verifying key mismatch: found={0}, expected={1}")] + VerifyingKeyMismatch(String, String), + /// Error indicating that no verifying key was found post proof generation. + #[error("verifying key not found: expected={0}")] + VerifyingKeyNotFound(String), + /// Error indicating that proof verification failed. + #[error("proof verification failure")] + Verification, + /// Represents all other custom errors. + #[error("custom error: {0}")] + Custom(String), +} + +impl From for ChunkProverError { + fn from(value: String) -> Self { + Self::Custom(value) + } +} diff --git a/prover/src/zkevm/mod.rs b/prover/src/zkevm/mod.rs new file mode 100644 index 0000000000..f1069bf660 --- /dev/null +++ b/prover/src/zkevm/mod.rs @@ -0,0 +1,21 @@ +#[cfg(feature = "scroll")] +mod capacity_checker; +#[cfg(feature = "scroll")] +pub use capacity_checker::{CircuitCapacityChecker, RowUsage, SubCircuitRowUsage}; + +pub mod circuit; + +mod error; +pub use error::ChunkProverError; + +mod prover; +pub use prover::Prover; + +mod verifier; +pub use verifier::Verifier; + +/// Alias for convenience. +pub type ChunkProver<'a> = Prover<'a>; + +/// Alias for convenience. +pub type ChunkVerifier<'a> = Verifier<'a>; diff --git a/prover/src/zkevm/prover.rs b/prover/src/zkevm/prover.rs index 4d9f71b8b4..1556da1a9e 100644 --- a/prover/src/zkevm/prover.rs +++ b/prover/src/zkevm/prover.rs @@ -1,33 +1,55 @@ use std::collections::BTreeMap; -use crate::{ - common, config::LayerId, consts::CHUNK_VK_FILENAME, io::try_to_read, proof::compare_chunk_info, - types::ChunkProvingTask, utils::chunk_trace_to_witness_block, - zkevm::circuit::calculate_row_usage_of_witness_block, ChunkProof, -}; use aggregator::ChunkInfo; -use anyhow::Result; use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use snark_verifier_sdk::Snark; + +use crate::{ + common, + config::LayerId, + consts::CHUNK_VK_FILENAME, + proof::compare_chunk_info, + types::ChunkProvingTask, + utils::try_to_read, + zkevm::{ + circuit::{calculate_row_usage_of_witness_block, chunk_trace_to_witness_block}, + ChunkProverError, ChunkVerifier, RowUsage, + }, + ChunkKind, ChunkProofV2, ChunkProofV2Metadata, ProverError, +}; +/// Prover responsible for generating [`chunk proofs`][ChunkProof]. #[derive(Debug)] pub struct Prover<'params> { - // Make it public for testing with inner functions (unnecessary for FFI). + /// Encapsulates the common prover. pub prover_impl: common::Prover<'params>, - verifier: Option>, + /// The chunk proof verifier. + /// + /// The verifier is optional in dev-scenarios where the verifier is generated on-the-fly. For + /// production environments, we already have the verifying key available. + verifier: Option>, + /// The [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] in its raw bytes form, as read from + /// disk. For the same reasons as the [Self::verifier] field, this too is optional. raw_vk: Option>, } impl<'params> Prover<'params> { + /// Construct a chunk prover given a map of degree to KZG setup params and a path to a + /// directory to find stored assets. pub fn from_params_and_assets( params_map: &'params BTreeMap>, assets_dir: &str, ) -> Self { + // Try to read the verifying key from disk, but don't panic if not found. + let raw_vk = try_to_read(assets_dir, &CHUNK_VK_FILENAME); + + // Build the inner prover. let prover_impl = common::Prover::from_params_map(params_map); - let raw_vk = try_to_read(assets_dir, &CHUNK_VK_FILENAME); + // Build an optional verifier if the verifying key has been located on disk. 
let verifier = if raw_vk.is_none() { log::warn!( - "zkevm-prover: {} doesn't exist in {}", + "ChunkProver setup without verifying key (dev mode): {} doesn't exist in {}", *CHUNK_VK_FILENAME, assets_dir ); @@ -38,114 +60,203 @@ impl<'params> Prover<'params> { assets_dir, )) }; + Self { prover_impl, - raw_vk, verifier, + raw_vk, } } + /// Returns the optional [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] in its raw form. pub fn get_vk(&self) -> Option> { self.prover_impl .raw_vk(LayerId::Layer2.id()) .or_else(|| self.raw_vk.clone()) } - /// Generate proof for a chunk. This method usually takes ~10minutes. - /// Meaning of each parameter: - /// output_dir: - /// If `output_dir` is not none, the dir will be used to save/load proof or intermediate results. - /// If proof or intermediate results can be loaded from `output_dir`, - /// then they will not be computed again. - /// If `output_dir` is not none, computed intermediate results and proof will be written - /// into this dir. - /// chunk_identifier: - /// used to distinguish different chunk files located in output_dir. - /// If it is not set, default value(first block number of this chuk) will be used. - /// id: - /// TODO(zzhang). clean this. I think it can only be None or Some(0)... - pub fn gen_chunk_proof( + /// Generate a proof for a chunk via the halo2-route, i.e. the inner SNARK is generated using the + /// halo2-based [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit]. + pub fn gen_halo2_chunk_proof( &mut self, chunk: ChunkProvingTask, - chunk_identifier: Option<&str>, + chunk_id: Option<&str>, inner_id: Option<&str>, output_dir: Option<&str>, - ) -> Result { + ) -> Result { + // Panic if the chunk is empty, i.e. no traces were found. assert!(!chunk.is_empty()); - let chunk_identifier = - chunk_identifier.map_or_else(|| chunk.identifier(), |name| name.to_string()); - - let chunk_proof = match output_dir - .and_then(|output_dir| ChunkProof::from_json_file(output_dir, &chunk_identifier).ok()) - { - Some(proof) => Ok(proof), - None => { - let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?; - let row_usage = calculate_row_usage_of_witness_block(&witness_block)?; - log::info!("Got witness block"); - - let chunk_info = ChunkInfo::from_witness_block(&witness_block, false); - if let Some(chunk_info_input) = chunk.chunk_info.as_ref() { - compare_chunk_info( - &format!("gen_chunk_proof {chunk_identifier:?}"), - &chunk_info, - chunk_info_input, - )?; - } - let snark = self.prover_impl.load_or_gen_final_chunk_snark( - &chunk_identifier, - &witness_block, - inner_id, - output_dir, - )?; - - self.check_vk(); - - let result = ChunkProof::new( - snark, - self.prover_impl.pk(LayerId::Layer2.id()), - chunk_info, - row_usage, - ); - - if let (Some(output_dir), Ok(proof)) = (output_dir, &result) { - proof.dump(output_dir, &chunk_identifier)?; - } + // The chunk identifier is either the specified identifier or we calculate it on-the-fly. + let chunk_id = chunk_id.map_or_else(|| chunk.identifier(), |name| name.to_string()); - result + // Try to locate a cached chunk proof for the same identifier. + if let Some(dir) = output_dir.as_ref() { + if let Ok(chunk_proof) = ChunkProofV2::from_json(dir, &chunk_id) { + return Ok(chunk_proof); } - }?; + } + + // Generate the proof if proof was not found in cache. + // + // Construct the chunk as witness and check circuit capacity for the halo2-based super + // circuit. 
+ let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?; + let sub_circuit_row_usages = calculate_row_usage_of_witness_block(&witness_block)?; + let row_usage = RowUsage::from_row_usage_details(sub_circuit_row_usages.clone()); + + // If the circuit-capacity checker (ccc) overflows, early-return with appropriate + // error. + if !row_usage.is_ok { + return Err(ChunkProverError::CircuitCapacityOverflow(row_usage).into()); + } + + // Build the chunk information required by the inner circuit for SNARK generation. + let chunk_info_reconstructed = ChunkInfo::from_witness_block(&witness_block, false); + // Sanity check: if chunk information was already provided, make sure it exactly + // matches the chunk information reconstructed from the block traces of the chunk. + if let Some(chunk_info_provided) = chunk.chunk_info.as_ref() { + compare_chunk_info( + &format!("gen_halo2_chunk_proof {chunk_id:?}"), + &chunk_info_reconstructed, + chunk_info_provided, + ) + .map_err(|e| ChunkProverError::Custom(e))?; + } + + // Generate the final Layer-2 SNARK. + let snark = self + .prover_impl + .load_or_gen_final_chunk_snark(&chunk_id, &witness_block, inner_id, output_dir) + .map_err(|e| ChunkProverError::Custom(e.to_string()))?; + + // Sanity check on the verifying key used at Layer-2. + self.check_vk()?; + + // Construct the chunk proof. + let chunk_proof_metadata = ChunkProofV2Metadata::new( + &snark, + ChunkKind::Halo2, + chunk_info_reconstructed, + Some(row_usage), + )?; + let chunk_proof = ChunkProofV2::new( + snark, + self.prover_impl.pk(LayerId::Layer2.id()), + chunk_proof_metadata, + )?; + + // If the output directory was provided, write the proof to disk. + if let Some(output_dir) = output_dir { + chunk_proof.dump(output_dir, &chunk_id)?; + } + + // If the verifier was set, i.e. production environments, we also do a sanity verification + // of the proof that was generated above. if let Some(verifier) = &self.verifier { - if !verifier.verify_chunk_proof(chunk_proof.clone()) { - anyhow::bail!("chunk prover cannot generate valid proof"); - } - log::info!("verify_chunk_proof done"); + verifier.verify_chunk_proof(&chunk_proof)?; } Ok(chunk_proof) } - /// Check vk generated is same with vk loaded from assets - fn check_vk(&self) { - if self.raw_vk.is_some() { - let gen_vk = self - .prover_impl - .raw_vk(LayerId::Layer2.id()) - .unwrap_or_default(); - if gen_vk.is_empty() { - log::warn!("no gen_vk found, skip check_vk"); - return; - } - let init_vk = self.raw_vk.clone().unwrap_or_default(); - if gen_vk != init_vk { - log::error!( - "zkevm-prover: generated VK is different with init one - gen_vk = {}, init_vk = {}", - base64::encode(gen_vk), - base64::encode(init_vk), - ); + /// Generates a chunk proof by compressing the provided SNARK. The generated proof uses the + /// [`CompressionCircuit`][aggregator::CompressionCircuit] to compress the supplied + /// [`SNARK`][snark_verifier_sdk::Snark] only once using thin-compression parameters. + /// + /// The [`ChunkProof`] represents the Layer-2 proof in Scroll's proving pipeline and the + /// generated SNARK can then be used as inputs to the [`BatchCircuit`][aggregator::BatchCircuit]. + /// + /// This method should be used iff the input SNARK was generated from a halo2-backend for Sp1. + /// In order to construct a chunk proof via the halo2-based + /// [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit], please use [`gen_chunk_proof`][Self::gen_chunk_proof]. 
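// Sketch of how a caller picks between the two chunk-proof routes added here, assuming the
// surrounding crate items (`Prover`, `ChunkProvingTask`, `ChunkProofV2`, `ProverError`) and
// `snark_verifier_sdk::Snark` are in scope. `out_dir` is a placeholder.
fn route_selection_sketch(
    prover: &mut Prover<'_>,
    chunk: ChunkProvingTask,
    sp1_snark: Option<Snark>,
    out_dir: &str,
) -> Result<ChunkProofV2, ProverError> {
    match sp1_snark {
        // The chunk was already proven by an sp1 halo2-backend: only compress that SNARK.
        Some(snark) => prover.gen_sp1_chunk_proof(snark, chunk, None, Some(out_dir)),
        // Otherwise prove the chunk end-to-end with the halo2 SuperCircuit.
        None => prover.gen_halo2_chunk_proof(chunk, None, None, Some(out_dir)),
    }
}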
+ pub fn gen_sp1_chunk_proof( + &mut self, + inner_snark: Snark, + chunk: ChunkProvingTask, + chunk_id: Option<&str>, + output_dir: Option<&str>, + ) -> Result { + // Panic if the chunk is empty, i.e. no traces were found. + assert!(!chunk.is_empty()); + + // The chunk identifier is either the specified identifier or we calculate it on-the-fly. + let chunk_id = chunk_id.map_or_else(|| chunk.identifier(), |name| name.to_string()); + + // Generate a Layer-2 compression SNARK for the provided inner SNARK. + let snark = self + .prover_impl + .load_or_gen_comp_snark( + &chunk_id, + LayerId::Layer2.id(), + true, + LayerId::Layer2.degree(), + inner_snark, + output_dir, + ) + .map_err(|e| ChunkProverError::Custom(e.to_string()))?; + + // Sanity check on the verifying key used at Layer-2. + self.check_vk()?; + + // We reconstruct some metadata to be attached with the chunk proof. + let chunk_info = chunk.chunk_info.unwrap_or({ + let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?; + ChunkInfo::from_witness_block(&witness_block, false) + }); + + // Construct a chunk proof. + // + // Note that the `row_usage` has been set to an empty vector, because in the sp1-route we + // don't have the notion of rows being allocated to sub-circuits, as in the case of the + // halo2-route. + let chunk_proof_metadata = + ChunkProofV2Metadata::new(&snark, ChunkKind::Sp1, chunk_info, None)?; + let chunk_proof = ChunkProofV2::new( + snark, + self.prover_impl.pk(LayerId::Layer2.id()), + chunk_proof_metadata, + )?; + + // If the output directory was provided, write the proof to disk. + if let Some(output_dir) = output_dir { + chunk_proof.dump(output_dir, &chunk_id)?; + } + + // If the verifier was set, i.e. production environments, we also do a sanity verification + // of the proof that was generated above. + if let Some(verifier) = &self.verifier { + verifier.verify_chunk_proof(&chunk_proof)?; + } + + Ok(chunk_proof) + } + + /// Sanity check for the [`VerifyinKey`][halo2_proofs::plonk::VerifyingKey] used to generate + /// Layer-2 SNARK that is wrapped inside the [`ChunkProof`]. The prover generated VK is + /// expected to match the VK used to initialise the prover. + fn check_vk(&self) -> Result<(), ChunkProverError> { + if let Some(expected_vk) = self.raw_vk.as_ref() { + let base64_exp_vk = base64::encode(expected_vk); + if let Some(generated_vk) = self.prover_impl.raw_vk(LayerId::Layer2.id()).as_ref() { + let base64_gen_vk = base64::encode(generated_vk); + if generated_vk.ne(expected_vk) { + log::error!( + "ChunkProver: VK mismatch! 
found={}, expected={}", + base64_gen_vk, + base64_exp_vk, + ); + return Err(ChunkProverError::VerifyingKeyMismatch( + base64_gen_vk, + base64_exp_vk, + )); + } + } else { + return Err(ChunkProverError::VerifyingKeyNotFound(base64_exp_vk)); } } + + Ok(()) } } diff --git a/prover/src/zkevm/verifier.rs b/prover/src/zkevm/verifier.rs index f382cd1199..c3c766a579 100644 --- a/prover/src/zkevm/verifier.rs +++ b/prover/src/zkevm/verifier.rs @@ -1,21 +1,24 @@ -use crate::{ - common, - config::{LAYER2_CONFIG_PATH, LAYER2_DEGREE}, - consts::chunk_vk_filename, - io::force_to_read, - ChunkProof, -}; +use std::env; + use aggregator::CompressionCircuit; use halo2_proofs::{ halo2curves::bn256::{Bn256, G1Affine}, plonk::VerifyingKey, poly::kzg::commitment::ParamsKZG, }; -use std::{collections::BTreeMap, env}; +use crate::{ + common, + config::{LAYER2_CONFIG_PATH, LAYER2_DEGREE}, + consts::chunk_vk_filename, + utils::force_to_read, + ChunkProofV2, ChunkProverError, ParamsMap, ProverError, +}; + +/// Verifier capable of verifying a [`ChunkProof`]. #[derive(Debug)] pub struct Verifier<'params> { - // Make it public for testing with inner functions (unnecessary for FFI). + /// Encapsulates the common verifier. pub inner: common::Verifier<'params, CompressionCircuit>, } @@ -26,22 +29,42 @@ impl<'params> From> for Verifier<' } impl<'params> Verifier<'params> { + /// Construct a new Verifier given the KZG parameters and a Verifying Key. pub fn new(params: &'params ParamsKZG, vk: VerifyingKey) -> Self { common::Verifier::new(params, vk).into() } - pub fn from_params_and_assets( - params_map: &'params BTreeMap>, - assets_dir: &str, - ) -> Self { + /// Construct a new Verifier given the path to an assets directory where the [`VerifyingKey`] + /// is stored on disk. This method accepts a map of degree to the KZG parameters for that + /// degree, and picks the appropriate parameters based on the degree of the + /// [`Layer-2`][crate::config::LayerId::Layer2] [`CompressionCircuit`]. + /// + /// Panics if the verifying key cannot be located in the given assets directory. + pub fn from_params_and_assets(params_map: &'params ParamsMap, assets_dir: &str) -> Self { + // Read the verifying key or panic. let raw_vk = force_to_read(assets_dir, &chunk_vk_filename()); + + // The Layer-2 compression circuit is configured with the shape as per + // [`LAYER2_CONFIG_PATH`]. env::set_var("COMPRESSION_CONFIG", &*LAYER2_CONFIG_PATH); - let params = params_map.get(&*LAYER2_DEGREE).expect("should be loaded"); - let verifier = common::Verifier::from_params(params, &raw_vk); - verifier.into() + + let params = params_map.get(&*LAYER2_DEGREE).expect(&format!( + "KZG params don't contain degree={:?}", + LAYER2_DEGREE + )); + + Self { + inner: common::Verifier::from_params(params, &raw_vk), + } } - pub fn verify_chunk_proof(&self, proof: ChunkProof) -> bool { - self.inner.verify_snark(proof.to_snark()) + /// Verify a chunk proof. Returns true if the verification is successful. 
+ pub fn verify_chunk_proof(&self, proof: &ChunkProofV2) -> Result<(), ProverError> { + let snark = proof.try_into()?; + if self.inner.verify_snark(snark) { + Ok(()) + } else { + Err(ChunkProverError::Verification.into()) + } } } diff --git a/rust-toolchain b/rust-toolchain deleted file mode 100644 index 27c108be5c..0000000000 --- a/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -nightly-2023-12-03 diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000000..584353ad1c --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly-2024-01-25" diff --git a/testool/src/main.rs b/testool/src/main.rs index f91de184f9..eb15e5c0e9 100644 --- a/testool/src/main.rs +++ b/testool/src/main.rs @@ -1,5 +1,4 @@ #![feature(lazy_cell)] - /// Execute the bytecode from an empty state and run the EVM and State circuits mod abi; mod compiler;
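// End-to-end sketch of the chunk proving flow after this patch, mirroring the updated
// `prover/src/test/chunk.rs`. Directory names are placeholders and the crate's items
// (`common`, `zkevm`, `CHUNK_PROVER_DEGREES`, `ChunkProvingTask`, `ChunkProofV2`,
// `ProverError`, `BlockTrace`) are assumed to be in scope.
fn chunk_flow_sketch(block_traces: Vec<BlockTrace>) -> Result<(), ProverError> {
    // KZG params are loaded once, keyed by circuit degree.
    let params_map = common::Prover::load_params_map("./test_params", &CHUNK_PROVER_DEGREES);

    // The chunk prover optionally picks up the verifying key from the assets directory.
    let mut prover = zkevm::Prover::from_params_and_assets(&params_map, "./test_assets");

    // A proving task is just the chunk's block traces; `chunk_info` is reconstructed internally.
    let chunk = ChunkProvingTask::new(block_traces);
    let proof: ChunkProofV2 = prover.gen_halo2_chunk_proof(chunk, None, None, Some("./proofs"))?;

    // Verification now returns a `Result` instead of a `bool`.
    let verifier = zkevm::Verifier::from_params_and_assets(&params_map, "./test_assets");
    verifier.verify_chunk_proof(&proof)
}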