This repository was archived by the owner on Apr 18, 2025. It is now read-only.

Commit 19a7110

[feat] implement Aggregator

1 parent a595c55

33 files changed: +2303 −481 lines

aggregator/Cargo.toml
+12 −11

@@ -6,22 +6,23 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-eth-types = { path = "../eth-types" }
-zkevm-circuits = { path = "../zkevm-circuits" }
-ethers-core = "0.17.0"
-rand = "0.8"
+eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" }
+zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" }
+
+
 ark-std = "0.4.0"
-log = "0.4"
 env_logger = "0.10.0"
+ethers-core = "0.17.0"
+log = "0.4"
+itertools = "0.10.3"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+rand = "0.8"
 
 halo2_proofs = { git = "https://github.com/privacy-scaling-explorations/halo2.git", tag = "v2023_02_02" }
-snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "halo2-ecc-snark-verifier-0323" }
-
-[dev-dependencies]
-halo2-base = { git = "https://github.com/scroll-tech/halo2-lib", branch = "halo2-ecc-snark-verifier-0323", default-features=false, features=["halo2-pse","display"] }
 snark-verifier = { git = "https://github.com/scroll-tech/snark-verifier", branch = "halo2-ecc-snark-verifier-0323" }
+snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "halo2-ecc-snark-verifier-0323" }
 
 [features]
 default = []
-# default = [ "ark-std/print-trace" ]
+print-trace = [ "ark-std/print-trace" ]
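With this change, the `ark-std` timers used by the new code (the `start_timer!`/`end_timer!` macros) are enabled through a named feature instead of by editing the manifest, e.g. (assuming the package is named `aggregator`):

cargo test -p aggregator --features print-trace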

aggregator/README.md
+26

Proof Aggregation
-----

![Architecture](./figures/architecture.png)

This repo does proof aggregation for zkEVM proofs.

## zkEVM circuit

A zkEVM circuit generates a ZK proof for a chunk of blocks. It takes 64 field elements as its public input, consisting of
- the chunk's data hash digest
- the chunk's public input hash digest

Each hash digest is decomposed into 32 bytes, and each byte is cast to a field element (see the sketch below).

For ease of testing, this repo implements a `MockCircuit` which has the same public input API as a zkEVM circuit.
16+
## Compression circuit
17+
A compression circuit takes in a snark proof and generates a new (potentially small) snark proof.
18+
It re-expose the same public inputs as the original circuit.
19+
20+
## Aggregation circuit
21+
An aggregation circuit takes in a batch of proofs, each for a chunk of blocks.
22+
It generates a single proof asserting the validity of all the proofs.
23+
It also performs public input aggregation, i.e., reducing the 64 public elements per proof into a fixed number of elements:
24+
- 12 elements from accumulators
25+
- 132 elements from the hashes
26+
See [public input aggregation](./src/proof_aggregation/public_input_aggregation.rs) for the details of public input aggregation.

aggregator/configs/aggregation.config
+1

{"strategy":"Simple","degree":23,"num_advice":[30],"num_lookup_advice":[1],"num_fixed":1,"lookup_bits":20,"limb_bits":88,"num_limbs":3}

+1

{"strategy":"Simple","degree":23,"num_advice":[3],"num_lookup_advice":[1],"num_fixed":1,"lookup_bits":20,"limb_bits":88,"num_limbs":3}

+1

{"strategy":"Simple","degree":23,"num_advice":[18],"num_lookup_advice":[1],"num_fixed":1,"lookup_bits":20,"limb_bits":88,"num_limbs":3}

aggregator/src/public_input_aggregation/chunk.rs renamed to aggregator/src/chunk.rs
+10 −4

@@ -3,7 +3,7 @@
 use eth_types::H256;
 use ethers_core::utils::keccak256;
 
-#[derive(Default, Debug, Clone)]
+#[derive(Default, Debug, Clone, Copy)]
 /// A chunk is a set of continuous blocks.
 /// A ChunkHash consists of 4 hashes, representing the changes incurred by this chunk of blocks:
 /// - state root before this chunk
@@ -47,14 +47,20 @@ impl ChunkHash {
     /// Public input hash for a given chunk is defined as
     /// keccak( chain id || prev state root || post state root || withdraw root || data hash )
     pub fn public_input_hash(&self) -> H256 {
-        let preimage = [
+        let preimage = self.extract_hash_preimage();
+        keccak256::<&[u8]>(preimage.as_ref()).into()
+    }
+
+    /// Extract the preimage for the hash:
+    /// chain id || prev state root || post state root || withdraw root || data hash
+    pub fn extract_hash_preimage(&self) -> Vec<u8> {
+        [
             self.chain_id.to_le_bytes().as_ref(),
             self.prev_state_root.as_bytes(),
             self.post_state_root.as_bytes(),
             self.withdraw_root.as_bytes(),
             self.data_hash.as_bytes(),
         ]
-        .concat();
-        keccak256::<&[u8]>(preimage.as_ref()).into()
+        .concat()
     }
 }
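The refactor keeps `public_input_hash` equivalent to hashing the extracted preimage directly; a minimal sanity check (assuming some `chunk: ChunkHash` is in scope):

use eth_types::H256;
use ethers_core::utils::keccak256;

let preimage = chunk.extract_hash_preimage();
let expected: H256 = keccak256::<&[u8]>(preimage.as_ref()).into();
assert_eq!(chunk.public_input_hash(), expected);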

aggregator/src/core.rs
+285

use ark_std::{end_timer, start_timer};
use eth_types::Field;
use halo2_proofs::{
    circuit::{AssignedCell, Layouter, Value},
    plonk::Error,
};
use halo2_proofs::{
    halo2curves::bn256::{Bn256, G1Affine},
    poly::{commitment::ParamsProver, kzg::commitment::ParamsKZG},
};
use rand::Rng;
use snark_verifier::{
    pcs::{
        kzg::{Bdfg21, Kzg, KzgAccumulator, KzgAs},
        AccumulationSchemeProver,
    },
    verifier::PlonkVerifier,
};
use snark_verifier_sdk::{
    halo2::{aggregation::Shplonk, PoseidonTranscript, POSEIDON_SPEC},
    NativeLoader, Snark,
};
use zkevm_circuits::{
    keccak_circuit::{keccak_packed_multi::multi_keccak, KeccakCircuitConfig},
    table::LookupTable,
    util::Challenges,
};

use crate::{
    util::{assert_equal, capacity, get_indices},
    LOG_DEGREE,
};

/// Input the hash input bytes,
/// assign the circuit for the hash function,
/// return cells of the hash inputs and digests.
#[allow(clippy::type_complexity)]
pub(crate) fn assign_batch_hashes<F: Field>(
    config: &KeccakCircuitConfig<F>,
    layouter: &mut impl Layouter<F>,
    challenges: Challenges<Value<F>>,
    preimages: &[Vec<u8>],
) -> Result<
    (
        Vec<Vec<AssignedCell<F, F>>>, // input cells
        Vec<Vec<AssignedCell<F, F>>>, // digest cells
    ),
    Error,
> {
    let mut is_first_time = true;
    let num_rows = 1 << LOG_DEGREE;

    let timer = start_timer!(|| ("multi keccak").to_string());
    let witness = multi_keccak(preimages, challenges, capacity(num_rows))?;
    end_timer!(timer);

    // extract the indices of the rows in which the preimage and digest cells lie
    let (preimage_indices, digest_indices) = get_indices(preimages);
    let mut preimage_indices_iter = preimage_indices.iter();
    let mut digest_indices_iter = digest_indices.iter();

    let mut hash_input_cells = vec![];
    let mut hash_output_cells = vec![];

    let mut cur_preimage_index = preimage_indices_iter.next();
    let mut cur_digest_index = digest_indices_iter.next();

    layouter.assign_region(
        || "assign keccak rows",
        |mut region| {
72+
is_first_time = false;
73+
let offset = witness.len() - 1;
74+
config.set_row(&mut region, offset, &witness[offset])?;
75+
return Ok(());
76+
}
77+
// ====================================================
78+
// Step 1. Extract the hash cells
79+
// ====================================================
80+
let mut current_hash_input_cells = vec![];
81+
let mut current_hash_output_cells = vec![];
82+
83+
let timer = start_timer!(|| "assign row");
84+
for (offset, keccak_row) in witness.iter().enumerate() {
85+
let row = config.set_row(&mut region, offset, keccak_row)?;
86+
87+
if cur_preimage_index.is_some() && *cur_preimage_index.unwrap() == offset {
88+
current_hash_input_cells.push(row[6].clone());
89+
cur_preimage_index = preimage_indices_iter.next();
90+
}
91+
if cur_digest_index.is_some() && *cur_digest_index.unwrap() == offset {
92+
current_hash_output_cells.push(row.last().unwrap().clone());
93+
cur_digest_index = digest_indices_iter.next();
94+
}
95+
96+
// we reset the current hash when it is finalized
97+
// note that length == 0 indicate that the hash is a padding
98+
// so we simply skip it
99+
if keccak_row.is_final && keccak_row.length != 0 {
100+
hash_input_cells.push(current_hash_input_cells);
101+
hash_output_cells.push(current_hash_output_cells);
102+
current_hash_input_cells = vec![];
103+
current_hash_output_cells = vec![];
104+
}
105+
}
106+
end_timer!(timer);
107+
108+
// sanity: we have same number of hash input and output
109+
let hash_num = hash_input_cells.len();
110+
let num_chunks = hash_num - 2;
111+
assert_eq!(hash_num, preimages.len());
112+
assert_eq!(hash_num, hash_output_cells.len());
113+
114+
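            // Layout reminder: each pi-hash preimage packs
            //   bytes [0, 4)     chain id
            //   bytes [4, 36)    prev state root
            //   bytes [36, 68)   post state root
            //   bytes [68, 100)  withdraw root
            //   bytes [100, 132) data hash
            // hash_input_cells[0] holds the batch pi hash preimage,
            // hash_input_cells[1] the batch data hash preimage, and
            // hash_input_cells[2..] the per-chunk pi hash preimages.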
            // ====================================================
            // Step 2. Constrain the relations between hash preimages and digests
            // ====================================================
            //
            // 2.1 batch_data_hash digest is reused for the public input hash
            //
            // the public input hash is built as
            //   keccak(
            //      chain_id ||
            //      chunk[0].prev_state_root ||
            //      chunk[k-1].post_state_root ||
            //      chunk[k-1].withdraw_root ||
            //      batch_data_hash )
            for i in 0..4 {
                for j in 0..8 {
                    // sanity check
                    assert_equal(
                        &hash_input_cells[0][i * 8 + j + 100],
                        &hash_output_cells[1][(3 - i) * 8 + j],
                    );
                    region.constrain_equal(
                        // preimage and digest have different endianness
                        hash_input_cells[0][i * 8 + j + 100].cell(),
                        hash_output_cells[1][(3 - i) * 8 + j].cell(),
                    )?;
                }
            }

            // 2.2 batch_pi_hash uses the same roots as chunk_pi_hash
            //
            // batch_pi_hash =
            //   keccak(
            //      chain_id ||
            //      chunk[0].prev_state_root ||
            //      chunk[k-1].post_state_root ||
            //      chunk[k-1].withdraw_root ||
            //      batch_data_hash )
            //
            // chunk[i].piHash =
            //   keccak(
            //      chain id ||
            //      chunk[i].prevStateRoot ||
            //      chunk[i].postStateRoot ||
            //      chunk[i].withdrawRoot ||
            //      chunk[i].dataHash )
            for i in 0..32 {
                // 2.2.1 chunk[0].prev_state_root
                // sanity check
                assert_equal(&hash_input_cells[0][i + 4], &hash_input_cells[2][i + 4]);
                region.constrain_equal(
                    hash_input_cells[0][i + 4].cell(),
                    hash_input_cells[2][i + 4].cell(),
                )?;
                // 2.2.2 chunk[k-1].post_state_root
                // sanity check
                assert_equal(
                    &hash_input_cells[0][i + 36],
                    &hash_input_cells[hash_num - 1][i + 36],
                );
                region.constrain_equal(
                    hash_input_cells[0][i + 36].cell(),
                    hash_input_cells[hash_num - 1][i + 36].cell(),
                )?;
                // 2.2.3 chunk[k-1].withdraw_root
                assert_equal(
                    &hash_input_cells[0][i + 68],
                    &hash_input_cells[hash_num - 1][i + 68],
                );
                region.constrain_equal(
                    hash_input_cells[0][i + 68].cell(),
                    hash_input_cells[hash_num - 1][i + 68].cell(),
                )?;
            }

            // 2.3 the same dataHash is used for batchDataHash and chunk[i].piHash
            //
            // batchDataHash = keccak(chunk[0].dataHash || ... || chunk[k-1].dataHash)
            //
            // chunk[i].piHash =
            //   keccak(
            //      chain id ||
            //      chunk[i].prevStateRoot ||
            //      chunk[i].postStateRoot ||
            //      chunk[i].withdrawRoot ||
            //      chunk[i].dataHash )
            for (i, chunk) in hash_input_cells[1].chunks(32).enumerate().take(num_chunks) {
                for (j, cell) in chunk.iter().enumerate() {
                    // sanity check
                    assert_equal(cell, &hash_input_cells[2 + i][j + 100]);
                    region.constrain_equal(cell.cell(), hash_input_cells[2 + i][j + 100].cell())?;
                }
            }

            // 2.4 chunks are continuous: they are linked via the state roots
            for i in 0..num_chunks - 1 {
                for j in 0..32 {
                    // sanity check
                    assert_equal(
                        &hash_input_cells[i + 3][4 + j],
                        &hash_input_cells[i + 2][36 + j],
                    );
                    region.constrain_equal(
                        // chunk[i+1].prevStateRoot
                        hash_input_cells[i + 3][4 + j].cell(),
                        // chunk[i].postStateRoot
                        hash_input_cells[i + 2][36 + j].cell(),
                    )?;
                }
            }

            // 2.5 assert all hashes use the same chain id
            for i in 0..num_chunks {
                for j in 0..4 {
                    // sanity check
                    assert_equal(&hash_input_cells[0][j], &hash_input_cells[i + 2][j]);
                    region.constrain_equal(
                        // batch pi hash's chain id
                        hash_input_cells[0][j].cell(),
                        // chunk[i]'s chain id
                        hash_input_cells[i + 2][j].cell(),
                    )?;
                }
            }

            config.keccak_table.annotate_columns_in_region(&mut region);
            config.annotate_circuit(&mut region);
            Ok(())
        },
    )?;

    Ok((hash_input_cells, hash_output_cells))
}

/// Subroutine for the witness generation.
/// Extracts the accumulators and proof from the previous snarks.
/// Uses SHPLONK for accumulation.
pub(crate) fn extract_accumulators_and_proof(
    params: &ParamsKZG<Bn256>,
    snarks: &[Snark],
    rng: impl Rng + Send,
) -> (KzgAccumulator<G1Affine, NativeLoader>, Vec<u8>) {
    let svk = params.get_g()[0].into();

    let mut transcript_read =
        PoseidonTranscript::<NativeLoader, &[u8]>::from_spec(&[], POSEIDON_SPEC.clone());
    let accumulators = snarks
        .iter()
        .flat_map(|snark| {
            transcript_read.new_stream(snark.proof.as_slice());
            let proof = Shplonk::read_proof(
                &svk,
                &snark.protocol,
                &snark.instances,
                &mut transcript_read,
            );
            Shplonk::succinct_verify(&svk, &snark.protocol, &snark.instances, &proof)
        })
        .collect::<Vec<_>>();

    let mut transcript_write =
        PoseidonTranscript::<NativeLoader, Vec<u8>>::from_spec(vec![], POSEIDON_SPEC.clone());
    // We always use SHPLONK as the accumulation scheme when aggregating proofs
    let accumulator =
        KzgAs::<Kzg<Bn256, Bdfg21>>::create_proof::<PoseidonTranscript<NativeLoader, Vec<u8>>, _>(
            &Default::default(),
            &accumulators,
            &mut transcript_write,
            rng,
        )
        .unwrap();
    (accumulator, transcript_write.finalize())
}
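For context, a minimal sketch of how this subroutine might be driven during witness generation (`params`, `snarks`, and `rng` are assumed to be supplied by the caller; the actual call sites are elsewhere in this commit):

// `accumulator` becomes witness data for the aggregation circuit, while the
// returned bytes are the accumulation-scheme proof whose verification is
// deferred to the circuit.
let (accumulator, as_proof) = extract_accumulators_and_proof(&params, &snarks, rng);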
