This repository was archived by the owner on Apr 18, 2025. It is now read-only.

Commit 67e2107

add conversion functions

1 parent 1581f5a

4 files changed: +75 -29 lines

aggregator/src/aggregation/circuit.rs

Lines changed: 8 additions & 5 deletions
@@ -1,4 +1,5 @@
 use ark_std::{end_timer, start_timer};
+use eth_types::U256;
 use halo2_proofs::{
     circuit::{Layouter, SimpleFloorPlanner, Value},
     halo2curves::bn256::{Bn256, Fr, G1Affine},
@@ -248,11 +249,13 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
         let barycentric = config.blob_consistency_config.assign_barycentric(
             &mut ctx,
             &self.batch_hash.blob_bytes,
-            self.batch_hash
-                .blob_consistency_witness
-                .challenge()
-                .0
-                .into(),
+            U256::from_big_endian(
+                &self
+                    .batch_hash
+                    .blob_consistency_witness
+                    .challenge()
+                    .to_bytes(),
+            ),
         );

         ctx.print_stats(&["barycentric"]);

aggregator/src/batch.rs

Lines changed: 3 additions & 6 deletions
@@ -85,10 +85,7 @@ impl<const N_SNARKS: usize> BatchHeader<N_SNARKS> {
             last_block_timestamp,
             data_hash: batch_data_hash.into(),
             blob_versioned_hash: blob_consistency_witness.id(),
-            blob_data_proof: [
-                blob_consistency_witness.challenge(),
-                blob_consistency_witness.evaluation(),
-            ],
+            blob_data_proof: blob_consistency_witness.blob_data_proof(),
         }
     }

@@ -282,8 +279,8 @@ impl<const N_SNARKS: usize> BatchHash<N_SNARKS> {
             "batch hash {:?}, datahash {}, z {}, y {}, versioned hash {:x}",
             current_batch_hash,
             hex::encode(batch_data_hash),
-            hex::encode(blob_consistency_witness.challenge().to_fixed_bytes()),
-            hex::encode(blob_consistency_witness.evaluation().to_fixed_bytes()),
+            hex::encode(blob_consistency_witness.challenge().to_bytes()),
+            hex::encode(blob_consistency_witness.evaluation().to_bytes()),
             blob_consistency_witness.id(),
         );

aggregator/src/blob_consistency/eip4844.rs

Lines changed: 48 additions & 18 deletions
@@ -16,10 +16,11 @@ use crate::{
     aggregation::batch_data::N_DATA_BYTES_PER_COEFFICIENT, constants::N_BYTES_U256, BatchData,
     RlcConfig,
 };
-use eth_types::{ToBigEndian, H256, U256};
+use eth_types::{ToBigEndian, ToLittleEndian, H256, U256};
 use ethers_core::k256::sha2::{Digest, Sha256};
 use halo2_base::{gates::range::RangeConfig, Context};
 use halo2_ecc::bigint::CRTInteger;
+use halo2_proofs::halo2curves::bls12_381::Scalar;
 use halo2_proofs::{
     circuit::{AssignedCell, Layouter, Value},
     halo2curves::bn256::Fr,
@@ -165,37 +166,66 @@ impl<const N_SNARKS: usize> BlobConsistencyConfig<N_SNARKS> {

 #[derive(Debug, Clone, Copy, Default)]
 pub struct BlobConsistencyWitness {
-    id: H256,
-    blob_data_proof: [H256; 2],
+    blob_versioned_hash: H256,
+    challenge_digest: H256,
+    evaluation: Scalar,
 }

 impl BlobConsistencyWitness {
     pub fn new<const N_SNARKS: usize>(bytes: &[u8], batch_data: &BatchData<N_SNARKS>) -> Self {
         let coeffs = get_coefficients(bytes);
-        let versioned_hash = get_versioned_hash(&coeffs);
+        let blob_versioned_hash = get_versioned_hash(&coeffs);
         let point_evaluation_assignments =
-            PointEvaluationAssignments::new(&batch_data, bytes, versioned_hash);
-        let blob_data_proof = [
-            point_evaluation_assignments.challenge,
-            point_evaluation_assignments.evaluation,
-        ]
-        .map(|x| H256::from_slice(&x.to_be_bytes()));
-
+            PointEvaluationAssignments::new(&batch_data, bytes, blob_versioned_hash);
         Self {
-            id: versioned_hash,
-            blob_data_proof,
+            blob_versioned_hash,
+            challenge_digest: digest_from_word(point_evaluation_assignments.challenge_digest),
+            evaluation: scalar_from_word(point_evaluation_assignments.evaluation),
         }
     }

     pub fn id(&self) -> H256 {
-        self.id
+        self.blob_versioned_hash
+    }
+
+    pub fn challenge_digest(&self) -> H256 {
+        self.challenge_digest
+    }
+
+    pub fn challenge(&self) -> Scalar {
+        scalar_from_digest(self.challenge_digest)
     }

-    pub fn challenge(&self) -> H256 {
-        self.blob_data_proof[0]
+    pub fn evaluation(&self) -> Scalar {
+        self.evaluation
     }

-    pub fn evaluation(&self) -> H256 {
-        self.blob_data_proof[1]
+    pub fn blob_data_proof(&self) -> [H256; 2] {
+        [self.challenge(), self.evaluation].map(digest_from_scalar)
     }
 }
+
+fn digest_from_word(x: U256) -> H256 {
+    H256::from_slice(&x.to_be_bytes())
+}
+
+fn digest_from_scalar(x: Scalar) -> H256 {
+    let mut bytes = x.to_bytes();
+    bytes.reverse();
+    H256::from_slice(&bytes)
+}
+
+fn scalar_from_word(x: U256) -> Scalar {
+    let (_quotient, remainder) = x.div_mod(*BLS_MODULUS);
+    Scalar::from_bytes(&remainder.to_le_bytes()).expect("non-canonical bytes")
+}
+
+fn scalar_from_digest(x: H256) -> Scalar {
+    scalar_from_word(word_from_digest(x))
+}
+
+fn word_from_digest(x: H256) -> U256 {
+    U256::from_big_endian(&x.to_fixed_bytes())
+}
+
+// word_from scalar would not be used.
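The new helpers bridge two byte-order conventions: the H256 digests and U256 words are treated as big-endian (to_be_bytes, from_big_endian), while Scalar's canonical encoding (to_bytes / from_bytes) is little-endian, which is why digest_from_scalar reverses the byte array and scalar_from_word feeds to_le_bytes into from_bytes. The following is a minimal sketch of those assumptions only, not part of the commit, relying solely on the crates this module already imports:

// Sketch only: the byte-order assumptions behind the conversion helpers above.
use eth_types::{H256, U256};
use halo2_proofs::halo2curves::bls12_381::Scalar;

fn main() {
    // H256/U256 side is big-endian: the value 1 sits in the last byte.
    let mut digest = H256::zero();
    digest.0[31] = 1;
    assert_eq!(U256::from_big_endian(&digest.to_fixed_bytes()), U256::one());

    // Scalar::to_bytes() is little-endian: the value 1 sits in the first byte.
    let scalar_bytes = Scalar::one().to_bytes();
    assert_eq!(scalar_bytes[0], 1u8);

    // Hence digest_from_scalar must reverse the bytes before building an H256.
    let mut be_bytes = scalar_bytes;
    be_bytes.reverse();
    assert_eq!(H256::from_slice(&be_bytes), digest);
}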

aggregator/src/blob_consistency/eip4844/tests.rs

Lines changed: 16 additions & 0 deletions
@@ -628,3 +628,19 @@ fn test_decode_blob() {
         batch_bytes,
     );
 }
+
+use super::*;
+
+#[test]
+fn test_conversions() {
+    let scalar = Scalar::one();
+    let word = U256::one();
+    let mut digest = H256::zero();
+    digest.0[31] = 1;
+
+    assert_eq!(digest_from_word(word), digest);
+    assert_eq!(digest_from_scalar(scalar), digest);
+    assert_eq!(scalar_from_word(word), scalar);
+    assert_eq!(scalar_from_digest(digest), scalar);
+    assert_eq!(word_from_digest(digest), word);
+}
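The commit's test only exercises the value 1, which is already a canonical field element. As a hedged companion sketch (not part of the commit, and assuming BLS_MODULUS and the private helpers are reachable from this tests module through the use super::* above), one could also check the wrap-around that the div_mod in scalar_from_word performs for out-of-range words:

// Hypothetical extra test: scalar_from_word reduces the word modulo BLS_MODULUS,
// so the modulus itself maps to zero and modulus + 1 wraps to one.
#[test]
fn test_scalar_from_word_reduces_modulo_bls() {
    assert_eq!(scalar_from_word(*BLS_MODULUS), Scalar::zero());
    assert_eq!(scalar_from_word(*BLS_MODULUS + U256::one()), Scalar::one());
}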
