Skip to content
This repository was archived by the owner on Apr 18, 2025. It is now read-only.

Commit c01fcfe

Browse files
committed
[feat] parameterize hard coded constants
1 parent 3dbdc4c commit c01fcfe

File tree

7 files changed

+70
-51
lines changed

7 files changed

+70
-51
lines changed

aggregator/src/core.rs

+34-33
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,8 @@ use zkevm_circuits::{
2727

2828
use crate::{
2929
util::{assert_equal, capacity, get_indices},
30-
CHAIN_ID_LEN, LOG_DEGREE,
30+
CHAIN_ID_LEN, CHUNK_DATA_HASH_INDEX, LOG_DEGREE, POST_STATE_ROOT_INDEX, PREV_STATE_ROOT_INDEX,
31+
WITHDRAW_ROOT_INDEX,
3132
};
3233

3334
/// Input the hash input bytes,
@@ -50,18 +51,18 @@ pub(crate) fn assign_batch_hashes<F: Field>(
5051
let num_rows = 1 << LOG_DEGREE;
5152

5253
let timer = start_timer!(|| ("multi keccak").to_string());
53-
// wenqing: preimages consists of the following parts
54+
// preimages consist of the following parts
5455
// (1) batchPiHash preimage =
5556
// (chain_id ||
5657
// chunk[0].prev_state_root ||
5758
// chunk[k-1].post_state_root ||
5859
// chunk[k-1].withdraw_root ||
5960
// batch_data_hash)
60-
// (2) batchDataHash preimage =
61+
// (2) batchDataHash preimage =
6162
// (chunk[0].dataHash || ... || chunk[k-1].dataHash)
6263
// (3) chunk[i].piHash preimage =
6364
// (chain id ||
64-
// chunk[i].prevStateRoot || chunk[i].postStateRoot ||
65+
// chunk[i].prevStateRoot || chunk[i].postStateRoot ||
6566
// chunk[i].withdrawRoot || chunk[i].datahash)
6667
// each part of the preimage is mapped to image by Keccak256
6768
let witness = multi_keccak(preimages, challenges, capacity(num_rows))?;
@@ -98,12 +99,12 @@ pub(crate) fn assign_batch_hashes<F: Field>(
9899
let row = config.set_row(&mut region, offset, keccak_row)?;
99100

100101
if cur_preimage_index.is_some() && *cur_preimage_index.unwrap() == offset {
101-
// wenqing: 7-th column is Keccak input in Keccak circuit
102+
// 7-th column is Keccak input in Keccak circuit
102103
current_hash_input_cells.push(row[6].clone());
103104
cur_preimage_index = preimage_indices_iter.next();
104105
}
105106
if cur_digest_index.is_some() && *cur_digest_index.unwrap() == offset {
106-
// wenqing: last column is Keccak output in Keccak circuit
107+
// last column is Keccak output in Keccak circuit
107108
current_hash_output_cells.push(row.last().unwrap().clone());
108109
cur_digest_index = digest_indices_iter.next();
109110
}
@@ -142,14 +143,14 @@ pub(crate) fn assign_batch_hashes<F: Field>(
142143
for i in 0..4 {
143144
for j in 0..8 {
144145
// sanity check
145-
// wenqing: 96 + CHAIN_ID_LEN is the byte position for batch_data_hash
146+
// CHUNK_DATA_HASH_INDEX is the byte position for batch_data_hash
146147
assert_equal(
147-
&hash_input_cells[0][i * 8 + j + 96 + CHAIN_ID_LEN],
148+
&hash_input_cells[0][i * 8 + j + CHUNK_DATA_HASH_INDEX],
148149
&hash_output_cells[1][(3 - i) * 8 + j],
149150
);
150151
region.constrain_equal(
151152
// preimage and digest has different endianness
152-
hash_input_cells[0][i * 8 + j + 96 + CHAIN_ID_LEN].cell(),
153+
hash_input_cells[0][i * 8 + j + CHUNK_DATA_HASH_INDEX].cell(),
153154
hash_output_cells[1][(3 - i) * 8 + j].cell(),
154155
)?;
155156
}
@@ -172,39 +173,39 @@ pub(crate) fn assign_batch_hashes<F: Field>(
172173
// chunk[i].postStateRoot ||
173174
// chunk[i].withdrawRoot ||
174175
// chunk[i].datahash)
175-
// wenqing: CHAIN_ID_LEN,
176-
// CHAIN_ID_LEN+32,
177-
// CHAIN_ID_LEN+64 used below are byte positions for
178-
// prev_state_root, post_state_root, withdraw_root
176+
//
177+
// PREV_STATE_ROOT_INDEX, POST_STATE_ROOT_INDEX, WITHDRAW_ROOT_INDEX
178+
// used below are byte positions for
179+
// prev_state_root, post_state_root, withdraw_root
179180
for i in 0..32 {
180181
// 2.2.1 chunk[0].prev_state_root
181182
// sanity check
182183
assert_equal(
183-
&hash_input_cells[0][i + CHAIN_ID_LEN],
184-
&hash_input_cells[2][i + CHAIN_ID_LEN],
184+
&hash_input_cells[0][i + PREV_STATE_ROOT_INDEX],
185+
&hash_input_cells[2][i + PREV_STATE_ROOT_INDEX],
185186
);
186187
region.constrain_equal(
187-
hash_input_cells[0][i + CHAIN_ID_LEN].cell(),
188-
hash_input_cells[2][i + CHAIN_ID_LEN].cell(),
188+
hash_input_cells[0][i + PREV_STATE_ROOT_INDEX].cell(),
189+
hash_input_cells[2][i + PREV_STATE_ROOT_INDEX].cell(),
189190
)?;
190191
// 2.2.2 chunk[k-1].post_state_root
191192
// sanity check
192193
assert_equal(
193-
&hash_input_cells[0][i + CHAIN_ID_LEN + 32],
194-
&hash_input_cells[hash_num - 1][i + CHAIN_ID_LEN + 32],
194+
&hash_input_cells[0][i + POST_STATE_ROOT_INDEX],
195+
&hash_input_cells[hash_num - 1][i + POST_STATE_ROOT_INDEX],
195196
);
196197
region.constrain_equal(
197-
hash_input_cells[0][i + CHAIN_ID_LEN + 32].cell(),
198-
hash_input_cells[hash_num - 1][i + CHAIN_ID_LEN + 32].cell(),
198+
hash_input_cells[0][i + POST_STATE_ROOT_INDEX].cell(),
199+
hash_input_cells[hash_num - 1][i + POST_STATE_ROOT_INDEX].cell(),
199200
)?;
200201
// 2.2.3 chunk[k-1].withdraw_root
201202
assert_equal(
202-
&hash_input_cells[0][i + CHAIN_ID_LEN + 64],
203-
&hash_input_cells[hash_num - 1][i + CHAIN_ID_LEN + 64],
203+
&hash_input_cells[0][i + WITHDRAW_ROOT_INDEX],
204+
&hash_input_cells[hash_num - 1][i + WITHDRAW_ROOT_INDEX],
204205
);
205206
region.constrain_equal(
206-
hash_input_cells[0][i + CHAIN_ID_LEN + 64].cell(),
207-
hash_input_cells[hash_num - 1][i + CHAIN_ID_LEN + 64].cell(),
207+
hash_input_cells[0][i + WITHDRAW_ROOT_INDEX].cell(),
208+
hash_input_cells[hash_num - 1][i + WITHDRAW_ROOT_INDEX].cell(),
208209
)?;
209210
}
210211

@@ -222,10 +223,10 @@ pub(crate) fn assign_batch_hashes<F: Field>(
222223
for (i, chunk) in hash_input_cells[1].chunks(32).enumerate().take(num_chunks) {
223224
for (j, cell) in chunk.iter().enumerate() {
224225
// sanity check
225-
assert_equal(cell, &hash_input_cells[2 + i][j + CHAIN_ID_LEN + 96]);
226+
assert_equal(cell, &hash_input_cells[2 + i][j + CHUNK_DATA_HASH_INDEX]);
226227
region.constrain_equal(
227228
cell.cell(),
228-
hash_input_cells[2 + i][j + CHAIN_ID_LEN + 96].cell(),
229+
hash_input_cells[2 + i][j + CHUNK_DATA_HASH_INDEX].cell(),
229230
)?;
230231
}
231232
}
@@ -235,14 +236,14 @@ pub(crate) fn assign_batch_hashes<F: Field>(
235236
for j in 0..32 {
236237
// sanity check
237238
assert_equal(
238-
&hash_input_cells[i + 3][CHAIN_ID_LEN + j],
239-
&hash_input_cells[i + 2][CHAIN_ID_LEN + 32 + j],
239+
&hash_input_cells[i + 3][PREV_STATE_ROOT_INDEX + j],
240+
&hash_input_cells[i + 2][POST_STATE_ROOT_INDEX + j],
240241
);
241242
region.constrain_equal(
242243
// chunk[i+1].prevStateRoot
243-
hash_input_cells[i + 3][CHAIN_ID_LEN + j].cell(),
244+
hash_input_cells[i + 3][PREV_STATE_ROOT_INDEX + j].cell(),
244245
// chunk[i].postStateRoot
245-
hash_input_cells[i + 2][CHAIN_ID_LEN + 32 + j].cell(),
246+
hash_input_cells[i + 2][POST_STATE_ROOT_INDEX + j].cell(),
246247
)?;
247248
}
248249
}
@@ -292,7 +293,7 @@ pub(crate) fn extract_accumulators_and_proof(
292293
&snark.instances,
293294
&mut transcript_read,
294295
);
295-
// wenqing: each accumulator has (lhs, rhs) based on Shplonk
296+
// each accumulator has (lhs, rhs) based on Shplonk
296297
// lhs and rhs are EC points
297298
Shplonk::succinct_verify(&svk, &snark.protocol, &snark.instances, &proof)
298299
})
@@ -302,7 +303,7 @@ pub(crate) fn extract_accumulators_and_proof(
302303
PoseidonTranscript::<NativeLoader, Vec<u8>>::from_spec(vec![], POSEIDON_SPEC.clone());
303304
// We always use SHPLONK for accumulation scheme when aggregating proofs
304305
let accumulator =
305-
// wenqing: core step
306+
// core step
306307
// KzgAs does KZG accumulation scheme based on given accumulators and random number (for adding blinding)
307308
// accumulated ec_pt = ec_pt_1 * 1 + ec_pt_2 * r + ... + ec_pt_n * r^{n-1}
308309
// ec_pt can be lhs and rhs

aggregator/src/proof_aggregation/circuit.rs

+9-8
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,8 @@ use crate::{
2727
core::{assign_batch_hashes, extract_accumulators_and_proof},
2828
param::{ConfigParams, BITS, LIMBS},
2929
proof_aggregation::config::AggregationConfig,
30-
BatchHashCircuit, ChunkHash, CHAIN_ID_LEN,
30+
BatchHashCircuit, ChunkHash, CHAIN_ID_LEN, POST_STATE_ROOT_INDEX, PREV_STATE_ROOT_INDEX,
31+
WITHDRAW_ROOT_INDEX,
3132
};
3233

3334
/// Aggregation circuit that does not re-expose any public inputs from aggregated snarks
@@ -68,7 +69,7 @@ impl AggregationCircuit {
6869
let snark_hash_bytes = &snark.instances[0];
6970

7071
for i in 0..32 {
71-
// wenqing: for each snark,
72+
// for each snark,
7273
// first 12 elements are accumulator
7374
// next 32 elements are data hash (44=12+32)
7475
// next 32 elements are public_input_hash
@@ -88,7 +89,7 @@ impl AggregationCircuit {
8889
// extract the accumulators and proofs
8990
let svk = params.get_g()[0].into();
9091

91-
// wenqing: this aggregates MULTIPLE snarks
92+
// this aggregates MULTIPLE snarks
9293
// (instead of ONE as in proof compression)
9394
let (accumulator, as_proof) = extract_accumulators_and_proof(params, snarks, rng);
9495
let KzgAccumulator::<G1Affine, NativeLoader> { lhs, rhs } = accumulator;
@@ -336,19 +337,19 @@ impl Circuit<Fr> for AggregationCircuit {
336337
for i in 0..32 {
337338
// first_chunk_prev_state_root
338339
layouter.constrain_instance(
339-
hash_input_cells[2][CHAIN_ID_LEN + i].cell(),
340+
hash_input_cells[2][PREV_STATE_ROOT_INDEX + i].cell(),
340341
config.instance,
341342
i + acc_len,
342343
)?;
343344
// last_chunk_post_state_root
344345
layouter.constrain_instance(
345-
hash_input_cells.last().unwrap()[CHAIN_ID_LEN + 32 + i].cell(),
346+
hash_input_cells.last().unwrap()[POST_STATE_ROOT_INDEX + i].cell(),
346347
config.instance,
347348
i + 32 + acc_len,
348349
)?;
349350
// last_chunk_withdraw_root
350351
layouter.constrain_instance(
351-
hash_input_cells.last().unwrap()[CHAIN_ID_LEN + 64 + i].cell(),
352+
hash_input_cells.last().unwrap()[WITHDRAW_ROOT_INDEX + i].cell(),
352353
config.instance,
353354
i + 64 + acc_len,
354355
)?;
@@ -357,7 +358,7 @@ impl Circuit<Fr> for AggregationCircuit {
357358
for i in 0..4 {
358359
for j in 0..8 {
359360
// digest in circuit has a different endianness
360-
// wenqing: 96 is the byte position for batch data hash
361+
// 96 is the byte position for batch data hash
361362
layouter.constrain_instance(
362363
hash_output_cells[0][(3 - i) * 8 + j].cell(),
363364
config.instance,
@@ -366,7 +367,7 @@ impl Circuit<Fr> for AggregationCircuit {
366367
}
367368
}
368369
// last 8 inputs are the chain id
369-
// wenqing: chain_id is put at last here
370+
// chain_id is put at last here
370371
for i in 0..CHAIN_ID_LEN {
371372
layouter.constrain_instance(
372373
hash_input_cells[0][i].cell(),

aggregator/src/proof_aggregation/config.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ pub struct AggregationConfig {
3535
/// chunk\[k-1\].withdraw_root ||
3636
/// batch_data_hash ||
3737
/// chain_id
38-
/// wenqing: chain_id is put at last here for instance
38+
/// chain_id is put at last here for instance
3939
pub instance: Column<Instance>,
4040
}
4141

aggregator/src/proof_aggregation/public_input_aggregation.rs

+14
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,22 @@ pub use config::{BatchCircuitConfig, BatchCircuitConfigArgs};
6666
// TODO(ZZ): update to the right degree
6767
pub(crate) const LOG_DEGREE: u32 = 19;
6868

69+
// ================================
70+
// indices for hash bytes
71+
//
72+
// the preimages are arranged as
73+
// - chain_id: 8 bytes
74+
// - prev_state_root 32 bytes
75+
// - post_state_root 32 bytes
76+
// - withdraw_root 32 bytes
77+
// - chunk_data_hash 32 bytes
78+
//
6979
// A chain_id is u64 and uses 8 bytes
7080
pub(crate) const CHAIN_ID_LEN: usize = 8;
81+
pub(crate) const PREV_STATE_ROOT_INDEX: usize = 8;
82+
pub(crate) const POST_STATE_ROOT_INDEX: usize = 40;
83+
pub(crate) const WITHDRAW_ROOT_INDEX: usize = 72;
84+
pub(crate) const CHUNK_DATA_HASH_INDEX: usize = 104;
7185

7286
// Each round requires (NUM_ROUNDS+1) * DEFAULT_KECCAK_ROWS = 300 rows.
7387
// This library is hard coded for this parameter.

aggregator/src/proof_aggregation/public_input_aggregation/circuit.rs

+7-4
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,10 @@ use halo2_proofs::{
99

1010
use zkevm_circuits::util::{Challenges, SubCircuitConfig};
1111

12-
use crate::{core::assign_batch_hashes, BatchHash, ChunkHash, CHAIN_ID_LEN};
12+
use crate::{
13+
core::assign_batch_hashes, BatchHash, ChunkHash, CHAIN_ID_LEN, POST_STATE_ROOT_INDEX,
14+
PREV_STATE_ROOT_INDEX, WITHDRAW_ROOT_INDEX,
15+
};
1316

1417
use super::config::{BatchCircuitConfig, BatchCircuitConfigArgs};
1518

@@ -184,19 +187,19 @@ impl<F: Field> Circuit<F> for BatchHashCircuit<F> {
184187
for i in 0..32 {
185188
// first_chunk_prev_state_root
186189
layouter.constrain_instance(
187-
hash_input_cells[2][CHAIN_ID_LEN + i].cell(),
190+
hash_input_cells[2][PREV_STATE_ROOT_INDEX + i].cell(),
188191
config.hash_digest_column,
189192
i,
190193
)?;
191194
// last_chunk_post_state_root
192195
layouter.constrain_instance(
193-
hash_input_cells.last().unwrap()[CHAIN_ID_LEN + 32 + i].cell(),
196+
hash_input_cells.last().unwrap()[POST_STATE_ROOT_INDEX + i].cell(),
194197
config.hash_digest_column,
195198
i + 32,
196199
)?;
197200
// last_chunk_withdraw_root
198201
layouter.constrain_instance(
199-
hash_input_cells.last().unwrap()[CHAIN_ID_LEN + 64 + i].cell(),
202+
hash_input_cells.last().unwrap()[WITHDRAW_ROOT_INDEX + i].cell(),
200203
config.hash_digest_column,
201204
i + 64,
202205
)?;

aggregator/src/proof_compression/circuit.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ impl Circuit<Fr> for CompressionCircuit {
8484
)
8585
.unwrap();
8686

87-
// wenqing: circuit configuration is built from config with given num columns etc
87+
// circuit configuration is built from config with given num columns etc
8888
// can be wide or thin circuit
8989
Self::Config::configure(meta, params)
9090
}
@@ -168,9 +168,9 @@ impl CompressionCircuit {
168168
) -> Self {
169169
let svk = params.get_g()[0].into();
170170

171-
// wenqing: for the proof compression, only ONE snark is under accumulation
171+
// for the proof compression, only ONE snark is under accumulation
172172
// it is turned into an accumulator via KzgAs accumulation scheme
173-
// in case not first time:
173+
// in case not first time:
174174
// (old_accumulator, public inputs) -> (new_accumulator, public inputs)
175175
let (accumulator, as_proof) = extract_accumulators_and_proof(params, &[snark.clone()], rng);
176176

aggregator/src/util.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -30,11 +30,11 @@ pub(crate) fn get_indices(preimages: &[Vec<u8>]) -> (Vec<usize>, Vec<usize>) {
3030
let mut round_ctr = 0;
3131

3232
for preimage in preimages.iter() {
33-
// wenqing: 136 = 17 * 8 is the size in bits of each
33+
// 136 = 17 * 8 is the size in bytes of each
3434
// input chunk that can be processed by Keccak circuit using absorb
3535
// each chunk of size 136 needs 300 Keccak circuit rows to prove
3636
// which consists of 12 Keccak rows for each of 24 + 1 Keccak circuit rounds
37-
// digest only happens at the end of the last input chunk with
37+
// digest only happens at the end of the last input chunk with
3838
// 4 Keccak circuit rounds, so 48 Keccak rows, and 300 - 48 = 256
3939
let num_rounds = 1 + preimage.len() / 136;
4040
let mut preimage_padded = preimage.clone();

0 commit comments

Comments
 (0)