diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs
index d41936d7d0..c8a4fbbaba 100644
--- a/aggregator/src/aggregation/circuit.rs
+++ b/aggregator/src/aggregation/circuit.rs
@@ -506,21 +506,20 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
                 "original and recovered bytes mismatch"
             );

-            // batch_circuit_debug
-            // let decoder_exports = config.decoder_config.assign(
-            //     &mut layouter,
-            //     &batch_bytes,
-            //     &encoded_batch_bytes,
-            //     witness_rows,
-            //     decoded_literals,
-            //     fse_aux_tables,
-            //     block_info_arr,
-            //     sequence_info_arr,
-            //     address_table_arr,
-            //     sequence_exec_info_arr,
-            //     &challenges,
-            //     LOG_DEGREE, // TODO: configure k for batch circuit instead of hard-coded here.
-            // )?;
+            let decoder_exports = config.decoder_config.assign(
+                &mut layouter,
+                &batch_bytes,
+                &encoded_batch_bytes,
+                witness_rows,
+                decoded_literals,
+                fse_aux_tables,
+                block_info_arr,
+                sequence_info_arr,
+                address_table_arr,
+                sequence_exec_info_arr,
+                &challenges,
+                LOG_DEGREE, // TODO: configure k for batch circuit instead of hard-coded here.
+            )?;

             layouter.assign_region(
                 || "consistency checks",
@@ -569,27 +568,26 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
                         region.constrain_equal(c.cell(), ec.cell())?;
                     }

-                    // batch_circuit_debug
-                    // // equate rlc (from blob data) with decoder's encoded_rlc
-                    // region.constrain_equal(
-                    //     blob_data_exports.bytes_rlc.cell(),
-                    //     decoder_exports.encoded_rlc.cell(),
-                    // )?;
-                    // // equate len(blob_bytes) with decoder's encoded_len
-                    // region.constrain_equal(
-                    //     blob_data_exports.bytes_len.cell(),
-                    //     decoder_exports.encoded_len.cell(),
-                    // )?;
-                    // // equate rlc (from batch data) with decoder's decoded_rlc
-                    // region.constrain_equal(
-                    //     batch_data_exports.bytes_rlc.cell(),
-                    //     decoder_exports.decoded_rlc.cell(),
-                    // )?;
-                    // // equate len(batch_data) with decoder's decoded_len
-                    // region.constrain_equal(
-                    //     batch_data_exports.batch_data_len.cell(),
-                    //     decoder_exports.decoded_len.cell(),
-                    // )?;
+                    // equate rlc (from blob data) with decoder's encoded_rlc
+                    region.constrain_equal(
+                        blob_data_exports.bytes_rlc.cell(),
+                        decoder_exports.encoded_rlc.cell(),
+                    )?;
+                    // equate len(blob_bytes) with decoder's encoded_len
+                    region.constrain_equal(
+                        blob_data_exports.bytes_len.cell(),
+                        decoder_exports.encoded_len.cell(),
+                    )?;
+                    // equate rlc (from batch data) with decoder's decoded_rlc
+                    region.constrain_equal(
+                        batch_data_exports.bytes_rlc.cell(),
+                        decoder_exports.decoded_rlc.cell(),
+                    )?;
+                    // equate len(batch_data) with decoder's decoded_len
+                    region.constrain_equal(
+                        batch_data_exports.batch_data_len.cell(),
+                        decoder_exports.decoded_len.cell(),
+                    )?;

                     Ok(())
                 },
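
Review note: the block restored above equates the blob subcircuit's (bytes_rlc, bytes_len) with the decoder's (encoded_rlc, encoded_len), and the batch subcircuit's (bytes_rlc, batch_data_len) with (decoded_rlc, decoded_len), so the zstd decoder provably consumes exactly the blob bytes and emits exactly the batch bytes. A minimal out-of-circuit sketch of that invariant, assuming nothing about halo2 (field arithmetic stands in as wrapping u128 math, and the challenge value is invented):

// Sketch: two byte streams agree iff their (rlc, len) pairs agree for a
// random challenge r -- the four constrain_equal calls enforce exactly this.
fn rlc(bytes: &[u8], r: u128) -> u128 {
    bytes
        .iter()
        .fold(0u128, |acc, &b| acc.wrapping_mul(r).wrapping_add(b as u128))
}

fn main() {
    let r = 0x1234_5678_9abc_def0_u128; // stand-in for the keccak challenge
    let blob_bytes = b"compressed batch payload";
    let decoder_encoded = b"compressed batch payload"; // decoder's claimed input
    assert_eq!(rlc(blob_bytes, r), rlc(decoder_encoded, r)); // bytes_rlc == encoded_rlc
    assert_eq!(blob_bytes.len(), decoder_encoded.len()); // bytes_len == encoded_len
}

In-circuit, soundness comes from the challenge being drawn only after the byte columns are committed, so a malicious prover cannot choose bytes that collide at a known r.
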
diff --git a/aggregator/src/aggregation/config.rs b/aggregator/src/aggregation/config.rs
index dc850df1b7..0d744ec060 100644
--- a/aggregator/src/aggregation/config.rs
+++ b/aggregator/src/aggregation/config.rs
@@ -38,9 +38,8 @@ pub struct BatchCircuitConfig<const N_SNARKS: usize> {
     pub blob_data_config: BlobDataConfig<N_SNARKS>,
     /// The batch data's config.
    pub batch_data_config: BatchDataConfig<N_SNARKS>,
-    // batch_circuit_debug
-    // /// The zstd decoder's config.
-    // pub decoder_config: DecoderConfig<1024, 512>,
+    /// The zstd decoder's config.
+    pub decoder_config: DecoderConfig<1024, 512>,
     /// Config to do the barycentric evaluation on blob polynomial.
     pub barycentric: BarycentricEvaluationConfig,
     /// Instance for public input; stores
@@ -131,30 +130,29 @@ impl<const N_SNARKS: usize> BatchCircuitConfig<N_SNARKS> {
         );

         // Zstd decoder.
-        // batch_circuit_debug
-        // let pow_rand_table = PowOfRandTable::construct(meta, &challenges_expr);
-
-        // let pow2_table = Pow2Table::construct(meta);
-        // let range8 = RangeTable::construct(meta);
-        // let range16 = RangeTable::construct(meta);
-        // let range512 = RangeTable::construct(meta);
-        // let range_block_len = RangeTable::construct(meta);
-        // let bitwise_op_table = BitwiseOpTable::construct(meta);
-
-        // let decoder_config = DecoderConfig::configure(
-        //     meta,
-        //     &challenges_expr,
-        //     DecoderConfigArgs {
-        //         pow_rand_table,
-        //         pow2_table,
-        //         u8_table,
-        //         range8,
-        //         range16,
-        //         range512,
-        //         range_block_len,
-        //         bitwise_op_table,
-        //     },
-        // );
+        let pow_rand_table = PowOfRandTable::construct(meta, &challenges_expr);
+
+        let pow2_table = Pow2Table::construct(meta);
+        let range8 = RangeTable::construct(meta);
+        let range16 = RangeTable::construct(meta);
+        let range512 = RangeTable::construct(meta);
+        let range_block_len = RangeTable::construct(meta);
+        let bitwise_op_table = BitwiseOpTable::construct(meta);
+
+        let decoder_config = DecoderConfig::configure(
+            meta,
+            &challenges_expr,
+            DecoderConfigArgs {
+                pow_rand_table,
+                pow2_table,
+                u8_table,
+                range8,
+                range16,
+                range512,
+                range_block_len,
+                bitwise_op_table,
+            },
+        );

         // Instance column stores public input column
         // the public instance for this circuit consists of
@@ -179,8 +177,7 @@ impl<const N_SNARKS: usize> BatchCircuitConfig<N_SNARKS> {
             instance,
             barycentric,
             batch_data_config,
-            // batch_circuit_debug
-            // decoder_config,
+            decoder_config,
         }
     }
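
Review note: the restored configure code builds the shared lookup tables once in the parent config and threads them into DecoderConfig through a single DecoderConfigArgs bundle, so the fixed table columns are shared rather than duplicated per subconfig. A schematic sketch of that args-struct wiring pattern, with every type and name here invented (this is not the halo2 API):

// Invented stand-ins illustrating the "construct tables once, pass them via
// an args struct" pattern that DecoderConfigArgs uses above.
struct RangeTable<const N: usize>;

impl<const N: usize> RangeTable<N> {
    fn construct() -> Self {
        RangeTable
    }
    // In-circuit this would be a fixed-column lookup; here, a plain check.
    fn contains(&self, v: usize) -> bool {
        v < N
    }
}

struct DecoderArgs {
    range8: RangeTable<8>,
    range512: RangeTable<512>,
}

fn configure(args: DecoderArgs) -> DecoderArgs {
    args // a real configure() would register lookups against these tables
}

fn main() {
    let decoder = configure(DecoderArgs {
        range8: RangeTable::construct(),
        range512: RangeTable::construct(),
    });
    assert!(decoder.range512.contains(300) && !decoder.range8.contains(9));
}
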
diff --git a/aggregator/src/batch.rs b/aggregator/src/batch.rs
index 1fba182c76..bcfebe39d2 100644
--- a/aggregator/src/batch.rs
+++ b/aggregator/src/batch.rs
@@ -14,7 +14,7 @@ use crate::{
 /// Batch header provides additional fields from the context (within recursion)
 /// for constructing the preimage of the batch hash.
 #[derive(Default, Debug, Clone, Copy, Serialize, Deserialize)]
-pub struct BatchHeader {
+pub struct BatchHeader<const N_SNARKS: usize> {
     /// the batch version
     pub version: u8,
     /// the index of the batch
@@ -35,7 +35,65 @@ pub struct BatchHeader {
     pub blob_data_proof: [H256; 2],
 }

-impl BatchHeader {
+impl<const N_SNARKS: usize> BatchHeader<N_SNARKS> {
+    /// Constructs the correct batch header from chunks data and context variables
+    pub fn construct_from_chunks(
+        version: u8,
+        batch_index: u64,
+        l1_message_popped: u64,
+        total_l1_message_popped: u64,
+        parent_batch_hash: H256,
+        last_block_timestamp: u64,
+        chunks: &[ChunkInfo],
+    ) -> Self {
+        assert_ne!(chunks.len(), 0);
+        assert!(chunks.len() <= N_SNARKS);
+
+        let mut chunks_with_padding = chunks.to_vec();
+        if chunks.len() < N_SNARKS {
+            let last_chunk = chunks.last().unwrap();
+            let mut padding_chunk = last_chunk.clone();
+            padding_chunk.is_padding = true;
+            chunks_with_padding
+                .extend(std::iter::repeat(padding_chunk).take(N_SNARKS - chunks.len()));
+        }
+
+        let number_of_valid_chunks = match chunks_with_padding
+            .iter()
+            .enumerate()
+            .find(|(_index, chunk)| chunk.is_padding)
+        {
+            Some((index, _)) => index,
+            None => N_SNARKS,
+        };
+
+        let batch_data_hash_preimage = chunks_with_padding
+            .iter()
+            .take(number_of_valid_chunks)
+            .flat_map(|chunk_info| chunk_info.data_hash.0.iter())
+            .cloned()
+            .collect::<Vec<_>>();
+        let batch_data_hash = keccak256(batch_data_hash_preimage);
+
+        let batch_data = BatchData::<N_SNARKS>::new(number_of_valid_chunks, &chunks_with_padding);
+        let point_evaluation_assignments = PointEvaluationAssignments::from(&batch_data);
+
+        Self {
+            version,
+            batch_index,
+            l1_message_popped,
+            total_l1_message_popped,
+            parent_batch_hash,
+            last_block_timestamp,
+            data_hash: batch_data_hash.into(),
+            blob_versioned_hash: batch_data.get_versioned_hash(),
+            blob_data_proof: [
+                H256::from_slice(&point_evaluation_assignments.challenge.to_be_bytes()),
+                H256::from_slice(&point_evaluation_assignments.evaluation.to_be_bytes()),
+            ],
+        }
+    }
+
     /// Returns the batch hash as per BatchHeaderV3.
     pub fn batch_hash(&self) -> H256 {
         // the current batch hash is build as
@@ -107,12 +165,15 @@ pub struct BatchHash<const N_SNARKS: usize> {
     /// The 4844 versioned hash for the blob.
     pub(crate) versioned_hash: H256,
     /// The context batch header
-    pub(crate) batch_header: BatchHeader,
+    pub(crate) batch_header: BatchHeader<N_SNARKS>,
 }

 impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
     /// Build Batch hash from an ordered list of chunks. Will pad if needed
-    pub fn construct_with_unpadded(chunks: &[ChunkInfo], batch_header: BatchHeader) -> Self {
+    pub fn construct_with_unpadded(
+        chunks: &[ChunkInfo],
+        batch_header: BatchHeader<N_SNARKS>,
+    ) -> Self {
         assert_ne!(chunks.len(), 0);
         assert!(chunks.len() <= N_SNARKS);
         let mut chunks_with_padding = chunks.to_vec();
@@ -132,15 +193,16 @@ impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
     }

     /// Build Batch hash from an ordered list of #N_SNARKS of chunks.
-    pub fn construct(chunks_with_padding: &[ChunkInfo], batch_header: BatchHeader) -> Self {
+    pub fn construct(
+        chunks_with_padding: &[ChunkInfo],
+        batch_header: BatchHeader<N_SNARKS>,
+    ) -> Self {
         assert_eq!(
             chunks_with_padding.len(),
             N_SNARKS,
             "input chunk slice does not match N_SNARKS"
         );

-        let mut export_batch_header = batch_header;
-
         let number_of_valid_chunks = match chunks_with_padding
             .iter()
             .enumerate()
@@ -209,24 +271,34 @@ impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
             .collect::<Vec<_>>();
         let batch_data_hash = keccak256(preimage);

-        // Update export value
-        export_batch_header.data_hash = batch_data_hash.into();
+        assert_eq!(
+            batch_header.data_hash,
+            H256::from_slice(&batch_data_hash),
+            "Expect provided BatchHeader's data_hash field to be correct"
+        );

         let batch_data = BatchData::<N_SNARKS>::new(number_of_valid_chunks, chunks_with_padding);
         let point_evaluation_assignments = PointEvaluationAssignments::from(&batch_data);

-        // Update export value
-        export_batch_header.blob_data_proof[0] =
-            H256::from_slice(&point_evaluation_assignments.challenge.to_be_bytes());
-        export_batch_header.blob_data_proof[1] =
-            H256::from_slice(&point_evaluation_assignments.evaluation.to_be_bytes());
+        assert_eq!(
+            batch_header.blob_data_proof[0],
+            H256::from_slice(&point_evaluation_assignments.challenge.to_be_bytes()),
+            "Expect provided BatchHeader's blob_data_proof field 0 to be correct"
+        );
+        assert_eq!(
+            batch_header.blob_data_proof[1],
+            H256::from_slice(&point_evaluation_assignments.evaluation.to_be_bytes()),
+            "Expect provided BatchHeader's blob_data_proof field 1 to be correct"
+        );

         let versioned_hash = batch_data.get_versioned_hash();

-        // Update export value
-        export_batch_header.blob_versioned_hash = versioned_hash;
+        assert_eq!(
+            batch_header.blob_versioned_hash, versioned_hash,
+            "Expect provided BatchHeader's blob_versioned_hash field to be correct"
+        );

-        let current_batch_hash = export_batch_header.batch_hash();
+        let current_batch_hash = batch_header.batch_hash();

         log::info!(
             "batch hash {:?}, datahash {}, z {}, y {}, versioned hash {:x}",
@@ -248,7 +320,7 @@ impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
             number_of_valid_chunks,
             point_evaluation_assignments,
             versioned_hash,
-            batch_header: export_batch_header,
+            batch_header,
         }
     }
@@ -378,7 +450,7 @@ impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
     }

     /// ...
-    pub fn batch_header(&self) -> BatchHeader {
+    pub fn batch_header(&self) -> BatchHeader<N_SNARKS> {
         self.batch_header
     }
 }
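
Review note: construct_from_chunks pads the chunk list to exactly N_SNARKS entries by repeating the last real chunk with is_padding set, then treats everything before the first padding entry as valid. A standalone sketch of just that padding/counting logic (ChunkInfo reduced to a toy struct; this mirrors the diff, not the real type):

// Toy mirror of the padding logic added in BatchHeader::construct_from_chunks.
#[derive(Clone, Debug, PartialEq)]
struct ChunkInfo {
    data_hash: [u8; 32],
    is_padding: bool,
}

fn pad_chunks<const N_SNARKS: usize>(chunks: &[ChunkInfo]) -> (Vec<ChunkInfo>, usize) {
    assert!(!chunks.is_empty() && chunks.len() <= N_SNARKS);
    let mut padded = chunks.to_vec();
    if chunks.len() < N_SNARKS {
        // Repeat the last real chunk, flagged as padding.
        let mut padding = chunks.last().unwrap().clone();
        padding.is_padding = true;
        padded.extend(std::iter::repeat(padding).take(N_SNARKS - chunks.len()));
    }
    // Valid chunks are everything before the first padding entry.
    let valid = padded.iter().position(|c| c.is_padding).unwrap_or(N_SNARKS);
    (padded, valid)
}

fn main() {
    let chunks = vec![
        ChunkInfo { data_hash: [1; 32], is_padding: false },
        ChunkInfo { data_hash: [2; 32], is_padding: false },
    ];
    let (padded, valid) = pad_chunks::<4>(&chunks);
    assert_eq!((padded.len(), valid), (4, 2));
    assert_eq!(padded[3].data_hash, [2; 32]); // padding repeats the last chunk
}

Only the first `valid` data hashes feed the keccak preimage, so padding chunks never influence data_hash; that is what lets BatchHash::construct assert the header's derived fields instead of mutating them.
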
diff --git a/prover/src/aggregator/prover.rs b/prover/src/aggregator/prover.rs
index ee24c2ce7c..9e27e17e77 100644
--- a/prover/src/aggregator/prover.rs
+++ b/prover/src/aggregator/prover.rs
@@ -9,6 +9,7 @@ use crate::{
 };
 use aggregator::{BatchHash, BatchHeader, ChunkInfo, MAX_AGG_SNARKS};
 use anyhow::{bail, Result};
+use eth_types::H256;
 use sha2::{Digest, Sha256};
 use snark_verifier_sdk::Snark;
 use std::{env, iter::repeat};
@@ -83,7 +84,7 @@ impl Prover {
     ) -> Result<BatchProof> {
         let name = name.map_or_else(|| batch.identifier(), |name| name.to_string());

-        let (layer3_snark, batch_header) =
+        let (layer3_snark, batch_hash) =
             self.load_or_gen_last_agg_snark::<MAX_AGG_SNARKS>(&name, batch, output_dir)?;

         // Load or generate final compression thin EVM proof (layer-4).
@@ -100,7 +101,7 @@ impl Prover {
         self.check_batch_vk();

         let pk = self.prover_impl.pk(LayerId::Layer4.id());
-        let batch_proof = BatchProof::new(layer4_snark, pk, batch_header)?;
+        let batch_proof = BatchProof::new(layer4_snark, pk, batch_hash)?;
         if let Some(output_dir) = output_dir {
             batch_proof.dump(output_dir, "agg")?;
         }
@@ -115,7 +116,7 @@ impl Prover {
         name: &str,
         batch: BatchProvingTask,
         output_dir: Option<&str>,
-    ) -> Result<(Snark, BatchHeader)> {
+    ) -> Result<(Snark, H256)> {
         let real_chunk_count = batch.chunk_proofs.len();
         assert!((1..=MAX_AGG_SNARKS).contains(&real_chunk_count));

@@ -144,9 +145,17 @@ impl Prover {
         }

         // Load or generate aggregation snark (layer-3).
-        let batch_info: BatchHash<N_SNARKS> =
-            BatchHash::construct(&chunk_hashes, batch.batch_header);
-        let batch_header = batch_info.batch_header();
+        let batch_header = BatchHeader::construct_from_chunks(
+            batch.version,
+            batch.batch_index,
+            batch.l1_message_popped,
+            batch.total_l1_message_popped,
+            batch.parent_batch_hash,
+            batch.last_block_timestamp,
+            &chunk_hashes,
+        );
+        let batch_hash = batch_header.batch_hash();
+        let batch_info: BatchHash<N_SNARKS> = BatchHash::construct(&chunk_hashes, batch_header);
         let layer3_snark = self.prover_impl.load_or_gen_agg_snark(
             name,
             LayerId::Layer3.id(),
@@ -157,7 +166,7 @@ impl Prover {
         )?;
         log::info!("Got aggregation snark (layer-3): {name}");

-        Ok((layer3_snark, batch_header))
+        Ok((layer3_snark, batch_hash))
     }

     // Given a bundle proving task that consists of a list of batch proofs for all intermediate
@@ -175,7 +184,7 @@ impl Prover {
             .batch_proofs
             .clone()
             .into_iter()
-            .map(|proof|proof.into())
+            .map(|proof| proof.into())
             .collect::<Vec<_>>();

         let layer5_snark = self.prover_impl.load_or_gen_recursion_snark(
@@ -197,7 +206,7 @@ impl Prover {
         self.check_bundle_vk();

-        let bundle_proof : BundleProof = layer6_evm_proof.proof.into();
+        let bundle_proof: BundleProof = layer6_evm_proof.proof.into();
         if let Some(output_dir) = output_dir {
             bundle_proof.dump(output_dir, "recursion")?;
         }
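
Review note: the return type narrows from (Snark, BatchHeader) to (Snark, H256) because layer-4 only needs the digest for BatchProof, while the full header is consumed by BatchHash::construct as witness context. A toy sketch of that interface narrowing, with all names and the hashing invented:

// Invented sketch: compute the digest before the header is moved into the
// witness builder, and surface only the digest to the caller.
#[derive(Clone, Copy)]
struct Header {
    index: u64,
    timestamp: u64,
}

type H256 = [u8; 32];

fn digest(h: &Header) -> H256 {
    // stand-in for keccak over the serialized header fields
    let mut out = [0u8; 32];
    out[..8].copy_from_slice(&h.index.to_be_bytes());
    out[8..16].copy_from_slice(&h.timestamp.to_be_bytes());
    out
}

fn prove_layer3(header: Header) -> (Vec<u8>, H256) {
    let batch_hash = digest(&header); // kept for the proof payload
    let witness = vec![header.index]; // stand-in for BatchHash::construct(header)
    (witness.iter().flat_map(|x| x.to_be_bytes()).collect(), batch_hash)
}

fn main() {
    let (snark, batch_hash) = prove_layer3(Header { index: 42, timestamp: 1_718_000_000 });
    assert_eq!(snark.len(), 8);
    assert_eq!(&batch_hash[..8], &42u64.to_be_bytes());
}
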
diff --git a/prover/src/aggregator/verifier.rs b/prover/src/aggregator/verifier.rs
index a6f4a117bb..b4c55bba48 100644
--- a/prover/src/aggregator/verifier.rs
+++ b/prover/src/aggregator/verifier.rs
@@ -4,9 +4,7 @@ use crate::{
     consts::{batch_vk_filename, DEPLOYMENT_CODE_FILENAME},
     io::{force_to_read, try_to_read},
     proof::BundleProof,
-    BatchProof,
 };
-use snark_verifier_sdk::Snark;
 use aggregator::CompressionCircuit;
 use halo2_proofs::{
     halo2curves::bn256::{Bn256, G1Affine},
@@ -14,6 +12,7 @@ use halo2_proofs::{
     poly::kzg::commitment::ParamsKZG,
 };
 use snark_verifier_sdk::verify_evm_calldata;
+use snark_verifier_sdk::Snark;
 use std::env;

 #[derive(Debug)]
diff --git a/prover/src/proof/batch.rs b/prover/src/proof/batch.rs
index 4215987217..ffb963bfa2 100644
--- a/prover/src/proof/batch.rs
+++ b/prover/src/proof/batch.rs
@@ -1,7 +1,7 @@
 use super::{dump_as_json, dump_vk, from_json_file, Proof};
 use crate::types::base64;
-use aggregator::BatchHeader;
 use anyhow::Result;
+use eth_types::H256;
 use halo2_proofs::{halo2curves::bn256::G1Affine, plonk::ProvingKey};
 use serde_derive::{Deserialize, Serialize};
 use snark_verifier::Protocol;
@@ -13,7 +13,7 @@ pub struct BatchProof {
     pub protocol: Vec<u8>,
     #[serde(flatten)]
     proof: Proof,
-    pub batch_header: BatchHeader,
+    pub batch_hash: H256,
 }

 impl From<BatchProof> for Snark {
@@ -25,23 +25,19 @@ impl From<BatchProof> for Snark {
             protocol,
             proof: value.proof.proof,
             instances,
-        } 
+        }
     }
 }

 impl BatchProof {
-    pub fn new(
-        snark: Snark,
-        pk: Option<&ProvingKey<G1Affine>>,
-        batch_header: BatchHeader,
-    ) -> Result<Self> {
+    pub fn new(snark: Snark, pk: Option<&ProvingKey<G1Affine>>, batch_hash: H256) -> Result<Self> {
         let protocol = serde_json::to_vec(&snark.protocol)?;
         let proof = Proof::new(snark.proof, &snark.instances, pk);

         Ok(Self {
             protocol,
             proof,
-            batch_header,
+            batch_hash,
         })
     }
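
Review note: since Proof is embedded with #[serde(flatten)], replacing batch_header with batch_hash edits the persisted JSON in place: the proof's fields stay at the top level and only one key changes shape. A small self-contained sketch of that layout, assuming serde (with the derive feature) and serde_json as dependencies; the field types are simplified stand-ins, not the exact on-disk schema:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Proof {
    proof: String,
    instances: String,
}

#[derive(Serialize, Deserialize)]
struct BatchProof {
    protocol: Vec<u8>,
    #[serde(flatten)]
    proof: Proof, // flattened: its fields appear at the top level
    batch_hash: String, // a 0x-prefixed H256 in the real type
}

fn main() {
    let bp = BatchProof {
        protocol: vec![1, 2],
        proof: Proof { proof: "<bytes>".into(), instances: "<bytes>".into() },
        batch_hash: "0xabc".into(), // invented placeholder
    };
    // Prints: {"protocol":[1,2],"proof":"<bytes>","instances":"<bytes>","batch_hash":"0xabc"}
    println!("{}", serde_json::to_string(&bp).unwrap());
}
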
diff --git a/prover/src/proof/bundle.rs b/prover/src/proof/bundle.rs
index 4a33da36d0..52fa290db9 100644
--- a/prover/src/proof/bundle.rs
+++ b/prover/src/proof/bundle.rs
@@ -57,7 +57,6 @@ impl From<Proof> for BundleProof {
 }

 impl BundleProof {
-
     /// Returns the calldata given to YUL verifier.
     /// Format: Accumulator(12x32bytes) || PI(13x32bytes) || Proof
     pub fn calldata(self) -> Vec<u8> {
@@ -73,8 +72,16 @@ impl BundleProof {
     pub fn dump(&self, dir: &str, name: &str) -> Result<()> {
         let filename = format!("bundle_{name}");

-        dump_data(dir, &format!("pi_{filename}.data"), &self.on_chain_proof.instances);
-        dump_data(dir, &format!("proof_{filename}.data"), &self.on_chain_proof.proof);
+        dump_data(
+            dir,
+            &format!("pi_{filename}.data"),
+            &self.on_chain_proof.instances,
+        );
+        dump_data(
+            dir,
+            &format!("proof_{filename}.data"),
+            &self.on_chain_proof.proof,
+        );

         dump_vk(dir, &filename, &self.on_chain_proof.vk);
diff --git a/prover/src/types.rs b/prover/src/types.rs
index 0c5352c4eb..c049a97fc6 100644
--- a/prover/src/types.rs
+++ b/prover/src/types.rs
@@ -1,5 +1,5 @@
-use aggregator::{BatchHeader, ChunkInfo};
-use eth_types::l2_types::BlockTrace;
+use aggregator::ChunkInfo;
+use eth_types::{l2_types::BlockTrace, H256};
 use serde::{Deserialize, Serialize};
 use zkevm_circuits::evm_circuit::witness::Block;

@@ -43,7 +43,12 @@ impl ChunkProvingTask {

 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct BatchProvingTask {
-    pub batch_header: BatchHeader,
+    pub version: u8,
+    pub batch_index: u64,
+    pub l1_message_popped: u64,
+    pub total_l1_message_popped: u64,
+    pub parent_batch_hash: H256,
+    pub last_block_timestamp: u64,
     pub chunk_proofs: Vec<ChunkProof>,
 }

@@ -67,11 +72,6 @@ pub struct BundleProvingTask {
 impl BundleProvingTask {
     pub fn identifier(&self) -> String {
-        self.batch_proofs
-            .last()
-            .unwrap()
-            .batch_header
-            .batch_hash()
-            .to_string()
+        self.batch_proofs.last().unwrap().batch_hash.to_string()
     }
 }
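
Review note: BatchProvingTask now carries the raw header context instead of a prebuilt BatchHeader, so task producers supply six scalar fields and the prover derives the header (and its data_hash, blob_versioned_hash, and blob_data_proof) itself. A self-contained mirror of the reshaped task, with H256 reduced to a byte array and every value invented:

// Toy mirror of the reshaped BatchProvingTask (values invented).
type H256 = [u8; 32];

#[derive(Debug)]
struct BatchProvingTask {
    version: u8,
    batch_index: u64,
    l1_message_popped: u64,
    total_l1_message_popped: u64,
    parent_batch_hash: H256,
    last_block_timestamp: u64,
}

fn main() {
    // Callers pass only header context; BatchHeader::construct_from_chunks
    // fills in the derived fields from the chunk proofs.
    let task = BatchProvingTask {
        version: 3,
        batch_index: 1234,
        l1_message_popped: 7,
        total_l1_message_popped: 10_007,
        parent_batch_hash: [0u8; 32],
        last_block_timestamp: 1_718_000_000,
    };
    assert!(task.l1_message_popped <= task.total_l1_message_popped);
    println!("{task:?}");
}
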