use ark_std::{end_timer, start_timer};
use eth_types::Field;
use halo2_proofs::{
    circuit::{AssignedCell, Layouter, Value},
    halo2curves::bn256::{Bn256, G1Affine},
    plonk::Error,
    poly::{commitment::ParamsProver, kzg::commitment::ParamsKZG},
};
use rand::Rng;
use snark_verifier::{
    pcs::{
        kzg::{Bdfg21, Kzg, KzgAccumulator, KzgAs},
        AccumulationSchemeProver,
    },
    verifier::PlonkVerifier,
};
use snark_verifier_sdk::{
    halo2::{aggregation::Shplonk, PoseidonTranscript, POSEIDON_SPEC},
    NativeLoader, Snark,
};
use zkevm_circuits::{
    keccak_circuit::{keccak_packed_multi::multi_keccak, KeccakCircuitConfig},
    table::LookupTable,
    util::Challenges,
};

use crate::{
    util::{assert_equal, capacity, get_indices},
    LOG_DEGREE,
};

/// Takes the hash input bytes,
/// assigns the keccak circuit for the hash function,
/// and returns the cells holding the hash inputs and digests.
#[allow(clippy::type_complexity)]
pub(crate) fn assign_batch_hashes<F: Field>(
    config: &KeccakCircuitConfig<F>,
    layouter: &mut impl Layouter<F>,
    challenges: Challenges<Value<F>>,
    preimages: &[Vec<u8>],
) -> Result<
    (
        Vec<Vec<AssignedCell<F, F>>>, // input cells
        Vec<Vec<AssignedCell<F, F>>>, // digest cells
    ),
    Error,
> {
    let mut is_first_time = true;
    let num_rows = 1 << LOG_DEGREE;

    let timer = start_timer!(|| "multi keccak");
    let witness = multi_keccak(preimages, challenges, capacity(num_rows))?;
    end_timer!(timer);

    // extract the indices of the rows in which the preimage and digest cells lie
    let (preimage_indices, digest_indices) = get_indices(preimages);
    let mut preimage_indices_iter = preimage_indices.iter();
    let mut digest_indices_iter = digest_indices.iter();

    let mut hash_input_cells = vec![];
    let mut hash_output_cells = vec![];

    let mut cur_preimage_index = preimage_indices_iter.next();
    let mut cur_digest_index = digest_indices_iter.next();

    layouter.assign_region(
        || "assign keccak rows",
        |mut region| {
            // The layouter may run this closure twice: a first pass to
            // determine the region's shape and a second to assign values. On
            // the first pass we only assign the last row, which fixes the
            // region size, and return early.
            if is_first_time {
                is_first_time = false;
                let offset = witness.len() - 1;
                config.set_row(&mut region, offset, &witness[offset])?;
                return Ok(());
            }
            // ====================================================
            // Step 1. Extract the hash cells
            // ====================================================
            let mut current_hash_input_cells = vec![];
            let mut current_hash_output_cells = vec![];

            let timer = start_timer!(|| "assign row");
            for (offset, keccak_row) in witness.iter().enumerate() {
                let row = config.set_row(&mut region, offset, keccak_row)?;

                if cur_preimage_index == Some(&offset) {
                    current_hash_input_cells.push(row[6].clone());
                    cur_preimage_index = preimage_indices_iter.next();
                }
                if cur_digest_index == Some(&offset) {
                    current_hash_output_cells.push(row.last().unwrap().clone());
                    cur_digest_index = digest_indices_iter.next();
                }

                // once a hash is finalized, flush the current cells and start
                // collecting the next hash; note that length == 0 indicates a
                // padding hash, which we simply skip
                if keccak_row.is_final && keccak_row.length != 0 {
                    hash_input_cells.push(current_hash_input_cells);
                    hash_output_cells.push(current_hash_output_cells);
                    current_hash_input_cells = vec![];
                    current_hash_output_cells = vec![];
                }
            }
            end_timer!(timer);

            // sanity check: the numbers of hash inputs and outputs both match
            // the number of preimages
            let hash_num = hash_input_cells.len();
            let num_chunks = hash_num - 2;
            assert_eq!(hash_num, preimages.len());
            assert_eq!(hash_num, hash_output_cells.len());

            // ====================================================
            // Step 2. Constrain the relations between hash preimages and digests
            // ====================================================
            //
            // 2.1 batch_data_hash digest is reused for the public input hash
            //
            // the public input hash is built as
            //  keccak(
            //      chain_id ||
            //      chunk[0].prev_state_root ||
            //      chunk[k-1].post_state_root ||
            //      chunk[k-1].withdraw_root ||
            //      batch_data_hash )
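            //
            // For reference, the byte layout of this preimage, inferred from
            // the cell indices used below:
            //   bytes   0..4   : chain_id
            //   bytes   4..36  : chunk[0].prev_state_root
            //   bytes  36..68  : chunk[k-1].post_state_root
            //   bytes  68..100 : chunk[k-1].withdraw_root
            //   bytes 100..132 : batch_data_hash
            // The digest cells come out as four 8-byte words in reversed word
            // order, hence preimage byte `i * 8 + j + 100` is matched against
            // digest cell `(3 - i) * 8 + j`.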
            for i in 0..4 {
                for j in 0..8 {
                    // sanity check
                    assert_equal(
                        &hash_input_cells[0][i * 8 + j + 100],
                        &hash_output_cells[1][(3 - i) * 8 + j],
                    );
                    region.constrain_equal(
                        // preimage and digest have different endianness
                        hash_input_cells[0][i * 8 + j + 100].cell(),
                        hash_output_cells[1][(3 - i) * 8 + j].cell(),
                    )?;
                }
            }

            // 2.2 batch_pi_hash uses the same roots as chunk_pi_hash
            //
            // batch_pi_hash =
            //   keccak(
            //      chain_id ||
            //      chunk[0].prev_state_root ||
            //      chunk[k-1].post_state_root ||
            //      chunk[k-1].withdraw_root ||
            //      batch_data_hash )
            //
            // chunk[i].pi_hash =
            //   keccak(
            //      chain_id ||
            //      chunk[i].prev_state_root ||
            //      chunk[i].post_state_root ||
            //      chunk[i].withdraw_root ||
            //      chunk[i].data_hash)
            for i in 0..32 {
                // 2.2.1 chunk[0].prev_state_root
                // sanity check
                assert_equal(&hash_input_cells[0][i + 4], &hash_input_cells[2][i + 4]);
                region.constrain_equal(
                    hash_input_cells[0][i + 4].cell(),
                    hash_input_cells[2][i + 4].cell(),
                )?;
                // 2.2.2 chunk[k-1].post_state_root
                // sanity check
                assert_equal(
                    &hash_input_cells[0][i + 36],
                    &hash_input_cells[hash_num - 1][i + 36],
                );
                region.constrain_equal(
                    hash_input_cells[0][i + 36].cell(),
                    hash_input_cells[hash_num - 1][i + 36].cell(),
                )?;
                // 2.2.3 chunk[k-1].withdraw_root
                // sanity check
                assert_equal(
                    &hash_input_cells[0][i + 68],
                    &hash_input_cells[hash_num - 1][i + 68],
                );
                region.constrain_equal(
                    hash_input_cells[0][i + 68].cell(),
                    hash_input_cells[hash_num - 1][i + 68].cell(),
                )?;
            }

            // 2.3 the same data_hash is used for batch_data_hash and chunk[i].pi_hash
            //
            // batch_data_hash = keccak(chunk[0].data_hash || ... || chunk[k-1].data_hash)
            //
            // chunk[i].pi_hash =
            //   keccak(
            //      chain_id ||
            //      chunk[i].prev_state_root ||
            //      chunk[i].post_state_root ||
            //      chunk[i].withdraw_root ||
            //      chunk[i].data_hash)
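            //
            // Concretely (inferred from the indices below): bytes
            // 32*i..32*(i+1) of the batch_data_hash preimage must equal bytes
            // 100..132 of chunk[i].pi_hash's preimage, i.e. chunk[i].data_hash.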
            for (i, chunk) in hash_input_cells[1].chunks(32).enumerate().take(num_chunks) {
                for (j, cell) in chunk.iter().enumerate() {
                    // sanity check
                    assert_equal(cell, &hash_input_cells[2 + i][j + 100]);
                    region.constrain_equal(cell.cell(), hash_input_cells[2 + i][j + 100].cell())?;
                }
            }

            // 2.4 chunks are continuous: they are linked via the state roots
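            //
            // Concretely (inferred from the indices below):
            // chunk[i+1].prev_state_root, bytes 4..36 of the preimage at
            // hash_input_cells[i + 3], must equal chunk[i].post_state_root,
            // bytes 36..68 of the preimage at hash_input_cells[i + 2].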
            for i in 0..num_chunks - 1 {
                for j in 0..32 {
                    // sanity check
                    assert_equal(
                        &hash_input_cells[i + 3][4 + j],
                        &hash_input_cells[i + 2][36 + j],
                    );
                    region.constrain_equal(
                        // chunk[i+1].prev_state_root
                        hash_input_cells[i + 3][4 + j].cell(),
                        // chunk[i].post_state_root
                        hash_input_cells[i + 2][36 + j].cell(),
                    )?;
                }
            }

            // 2.5 assert that all hashes use the same chain_id
            for i in 0..num_chunks {
                for j in 0..4 {
                    // sanity check
                    assert_equal(&hash_input_cells[0][j], &hash_input_cells[i + 2][j]);
                    region.constrain_equal(
                        // batch_pi_hash's chain_id
                        hash_input_cells[0][j].cell(),
                        // chunk[i].pi_hash's chain_id
                        hash_input_cells[i + 2][j].cell(),
                    )?;
                }
            }

            config.keccak_table.annotate_columns_in_region(&mut region);
            config.annotate_circuit(&mut region);
            Ok(())
        },
    )?;

    Ok((hash_input_cells, hash_output_cells))
}

/// Subroutine for witness generation:
/// extracts the accumulators and proof from the previous snarks.
/// Uses SHPLONK for accumulation.
pub(crate) fn extract_accumulators_and_proof(
    params: &ParamsKZG<Bn256>,
    snarks: &[Snark],
    rng: impl Rng + Send,
) -> (KzgAccumulator<G1Affine, NativeLoader>, Vec<u8>) {
    // the succinct verifying key is the first element of the SRS, i.e. the g1 generator
    let svk = params.get_g()[0].into();

    let mut transcript_read =
        PoseidonTranscript::<NativeLoader, &[u8]>::from_spec(&[], POSEIDON_SPEC.clone());
    // succinctly verify each snark and collect the resulting KZG accumulators
    let accumulators = snarks
        .iter()
        .flat_map(|snark| {
            transcript_read.new_stream(snark.proof.as_slice());
            let proof = Shplonk::read_proof(
                &svk,
                &snark.protocol,
                &snark.instances,
                &mut transcript_read,
            );
            Shplonk::succinct_verify(&svk, &snark.protocol, &snark.instances, &proof)
        })
        .collect::<Vec<_>>();

    let mut transcript_write =
        PoseidonTranscript::<NativeLoader, Vec<u8>>::from_spec(vec![], POSEIDON_SPEC.clone());
    // We always use SHPLONK as the accumulation scheme when aggregating proofs:
    // fold all accumulators into one and write the folding proof to the transcript
    let accumulator =
        KzgAs::<Kzg<Bn256, Bdfg21>>::create_proof::<PoseidonTranscript<NativeLoader, Vec<u8>>, _>(
            &Default::default(),
            &accumulators,
            &mut transcript_write,
            rng,
        )
        .unwrap();
    (accumulator, transcript_write.finalize())
}
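
// A minimal usage sketch (hypothetical setup; assumes `params` and `snarks`
// were produced elsewhere with matching KZG parameters):
//
//   let (accumulator, as_proof) =
//       extract_accumulators_and_proof(&params, &snarks, rand::rngs::OsRng);
//   // `accumulator` is the folded KZG accumulator to expose to the
//   // aggregation circuit; `as_proof` is the accumulation transcript.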