
Commit 68c82ee

Add bellatrix-capella proof verification (#603)
* Add fluffy proof verification
* Move proof verification to utility methods
* Add pre-Capella proof verification step
* Add verification and tests for post-merge header proofs
* Better testing
* lint
* laziness
1 parent 7ba54fc commit 68c82ee

File tree

9 files changed: 341 additions, 99 deletions

packages/cli/src/rpc/modules/ultralight.ts

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ export class ultralight {
     const [blockHash, rlpHex] = params
     try {
-      await addRLPSerializedBlock(rlpHex, blockHash, this._history!)
+      await addRLPSerializedBlock(rlpHex, blockHash, this._history!, [])
       this.logger(`Block ${blockHash} added to content DB`)
       return `Block ${blockHash} added to content DB`
     } catch (err: any) {

packages/portalnetwork/src/client/provider.ts

Lines changed: 1 addition & 0 deletions
@@ -92,6 +92,7 @@ export class UltralightProvider extends ethers.JsonRpcProvider {
         toHexString(ethJSBlock.serialize()),
         block.hash,
         this.historyNetwork,
+        [], // I'm too lazy to fix this right now
       )
       const ethersBlock = await ethJsBlockToEthersBlockWithTxs(ethJSBlock, this.provider)
       return ethersBlock

packages/portalnetwork/src/networks/history/history.ts

Lines changed: 16 additions & 58 deletions
@@ -1,5 +1,4 @@
 import { ENR } from '@chainsafe/enr'
-import { ProofType, createProof } from '@chainsafe/persistent-merkle-tree'
 import { Block, BlockHeader } from '@ethereumjs/block'
 import { bytesToInt, hexToBytes } from '@ethereumjs/util'
 import debug from 'debug'
@@ -27,18 +26,12 @@ import {
   EpochAccumulator,
   HistoryNetworkContentType,
   MERGE_BLOCK,
+  SHANGHAI_BLOCK,
   sszReceiptsListType,
 } from './types.js'
-import {
-  blockNumberToGindex,
-  epochIndexByBlocknumber,
-  epochRootByBlocknumber,
-  epochRootByIndex,
-  getContentKey,
-} from './util.js'
+import { getContentKey, verifyPreCapellaHeaderProof, verifyPreMergeHeaderProof } from './util.js'

-import type { BaseNetworkConfig, FindContentMessage, Witnesses } from '../../index.js'
-import type { Proof, SingleProof, SingleProofInput } from '@chainsafe/persistent-merkle-tree'
+import type { BaseNetworkConfig, FindContentMessage } from '../../index.js'
 import type { Debugger } from 'debug'
 export class HistoryNetwork extends BaseNetwork {
   networkId: NetworkId.HistoryNetwork
@@ -136,12 +129,22 @@ export class HistoryNetwork extends BaseNetwork {
       if (proof.value === null) {
         throw new Error('Received block header without proof')
       }
-      // Only check proofs on pre-merge headers
       if (Array.isArray(proof.value)) {
         try {
-          this.verifyInclusionProof(proof.value, contentHash, header.number)
+          verifyPreMergeHeaderProof(proof.value, contentHash, header.number)
         } catch {
-          throw new Error('Received block header with invalid proof')
+          throw new Error('Received pre-merge block header with invalid proof')
+        }
+      }
+    } else {
+      if (header.number < SHANGHAI_BLOCK) {
+        if (proof.value === null) {
+          this.logger('Received post-merge block without proof')
+        }
+        try {
+          verifyPreCapellaHeaderProof(proof.value as any, header.hash())
+        } catch {
+          throw new Error('Received post-merge block header with invalid proof')
         }
       }
     }
@@ -322,51 +325,6 @@
     }
   }

-  public generateInclusionProof = async (blockNumber: bigint): Promise<Witnesses> => {
-    if (blockNumber < MERGE_BLOCK) {
-      try {
-        const epochHash = epochRootByBlocknumber(blockNumber)
-        const epoch = await this.retrieve(
-          getContentKey(HistoryNetworkContentType.EpochAccumulator, epochHash!),
-        )
-        const accumulator = EpochAccumulator.deserialize(hexToBytes(epoch!))
-        const tree = EpochAccumulator.value_toTree(accumulator)
-        const proofInput: SingleProofInput = {
-          type: ProofType.single,
-          gindex: blockNumberToGindex(blockNumber),
-        }
-        const proof = createProof(tree, proofInput) as SingleProof
-        return proof.witnesses
-      } catch (err: any) {
-        throw new Error('Error generating inclusion proof: ' + (err as any).message)
-      }
-    } else {
-      // TODO: Implement inclusion proof generation for post-merge blocks
-      return []
-    }
-  }
-
-  public verifyInclusionProof(
-    witnesses: Uint8Array[],
-    blockHash: string,
-    blockNumber: bigint,
-  ): boolean {
-    if (blockNumber < MERGE_BLOCK) {
-      const target = epochRootByIndex(epochIndexByBlocknumber(blockNumber))
-      const proof: Proof = {
-        type: ProofType.single,
-        gindex: blockNumberToGindex(blockNumber),
-        witnesses,
-        leaf: hexToBytes(blockHash),
-      }
-      EpochAccumulator.createFromProof(proof, target)
-      return true
-    } else {
-      // TODO: Implement verification for post-merge blocks
-      return true
-    }
-  }
-
   public async getStateRoot(blockNumber: bigint) {
     const block = await this.portal.ETH.getBlockByNumber(blockNumber, false)
     if (block === undefined) {

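For context, a minimal sketch of the validation flow this hunk introduces, assuming the constants and verifiers shown in the diff; the wrapper function name validateHeaderProof and the import path are illustrative only and not part of the commit.

// Illustrative sketch (not repository code): routing a decoded
// BlockHeaderWithProof to the right verifier after this change.
import type { BlockHeader } from '@ethereumjs/block'
import {
  MERGE_BLOCK,
  SHANGHAI_BLOCK,
  verifyPreCapellaHeaderProof,
  verifyPreMergeHeaderProof,
} from 'portalnetwork' // assumed package entry point

const validateHeaderProof = (header: BlockHeader, proofValue: unknown, contentHash: string) => {
  if (header.number < MERGE_BLOCK) {
    // Pre-merge: the proof is a witness list checked against the epoch accumulator.
    if (
      !Array.isArray(proofValue) ||
      !verifyPreMergeHeaderProof(proofValue, contentHash, header.number)
    ) {
      throw new Error('Received pre-merge block header with invalid proof')
    }
  } else if (header.number < SHANGHAI_BLOCK) {
    // Bellatrix..Capella: the proof ties the EL block hash to the beacon chain
    // historical_roots via a HistoricalRootsBlockProof.
    if (!verifyPreCapellaHeaderProof(proofValue as any, header.hash())) {
      throw new Error('Received post-merge block header with invalid proof')
    }
  }
  // Post-Shanghai headers are currently accepted without a proof.
}
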
packages/portalnetwork/src/networks/history/util.ts

Lines changed: 97 additions & 4 deletions
@@ -1,10 +1,13 @@
 import { digest } from '@chainsafe/as-sha256'
+import { ProofType, createProof } from '@chainsafe/persistent-merkle-tree'
 import { fromHexString, toHexString } from '@chainsafe/ssz'
 import { Block, BlockHeader } from '@ethereumjs/block'
 import { RLP as rlp } from '@ethereumjs/rlp'
-import { hexToBytes } from '@ethereumjs/util'
+import { equalsBytes, hexToBytes } from '@ethereumjs/util'
+import { ssz } from '@lodestar/types'

 import { historicalEpochs } from './data/epochHashes.js'
+import { historicalRoots } from './data/historicalRoots.js'
 import {
   BlockBodyContentType,
   BlockHeaderWithProof,
@@ -20,6 +23,13 @@ import {

 import type { HistoryNetwork } from './history.js'
 import type { BlockBodyContent, Witnesses } from './types.js'
+import type { Proof, SingleProof, SingleProofInput } from '@chainsafe/persistent-merkle-tree'
+import type {
+  ByteVectorType,
+  UintBigintType,
+  ValueOfFields,
+  VectorCompositeType,
+} from '@chainsafe/ssz'
 import type {
   BlockBytes,
   BlockHeaderBytes,
@@ -159,16 +169,15 @@ export const addRLPSerializedBlock = async (
   rlpHex: string,
   blockHash: string,
   network: HistoryNetwork,
-  witnesses?: Witnesses,
+  witnesses: Witnesses,
 ) => {
   const block = Block.fromRLPSerializedBlock(fromHexString(rlpHex), {
     setHardfork: true,
   })
   const header = block.header
   const headerKey = getContentKey(HistoryNetworkContentType.BlockHeader, hexToBytes(blockHash))
   if (header.number < MERGE_BLOCK) {
-    // Only generate proofs for pre-merge headers
-    const proof: Witnesses = witnesses ?? (await network.generateInclusionProof(header.number))
+    const proof: Witnesses = witnesses
     const headerProof = BlockHeaderWithProof.serialize({
       header: header.serialize(),
       proof: { selector: 1, value: proof },
@@ -230,3 +239,87 @@ export const slotToHistoricalBatchIndex = (slot: bigint) => {
 export const slotToHistoricalBatch = (slot: bigint) => {
   return slot / 8192n
 }
+
+export const verifyPreMergeHeaderProof = (
+  witnesses: Uint8Array[],
+  blockHash: string,
+  blockNumber: bigint,
+): boolean => {
+  try {
+    const target = epochRootByIndex(epochIndexByBlocknumber(blockNumber))
+    const proof: Proof = {
+      type: ProofType.single,
+      gindex: blockNumberToGindex(blockNumber),
+      witnesses,
+      leaf: hexToBytes(blockHash),
+    }
+    EpochAccumulator.createFromProof(proof, target)
+    return true
+  } catch (_err) {
+    return false
+  }
+}
+
+export const verifyPreCapellaHeaderProof = (
+  proof: ValueOfFields<{
+    beaconBlockHeaderProof: VectorCompositeType<ByteVectorType>
+    beaconBlockHeaderRoot: ByteVectorType
+    historicalRootsProof: VectorCompositeType<ByteVectorType>
+    slot: UintBigintType
+  }>,
+  elBlockHash: Uint8Array,
+) => {
+  const batchIndex = slotToHistoricalBatchIndex(proof.slot)
+  const historicalRootsPath = ssz.phase0.HistoricalBatch.getPathInfo([
+    'blockRoots',
+    Number(batchIndex),
+  ])
+  const reconstructedBatch = ssz.phase0.HistoricalBatch.createFromProof({
+    witnesses: proof.historicalRootsProof,
+    type: ProofType.single,
+    gindex: historicalRootsPath.gindex,
+    leaf: proof.beaconBlockHeaderRoot, // This should be the leaf value this proof is verifying
+  })
+  if (
+    !equalsBytes(
+      reconstructedBatch.hashTreeRoot(),
+      hexToBytes(historicalRoots[Number(slotToHistoricalBatch(proof.slot))]),
+    )
+  )
+    return false
+
+  const elBlockHashPath = ssz.bellatrix.BeaconBlock.getPathInfo([
+    'body',
+    'executionPayload',
+    'blockHash',
+  ])
+  const reconstructedBlock = ssz.bellatrix.BeaconBlock.createFromProof({
+    witnesses: proof.beaconBlockHeaderProof,
+    type: ProofType.single,
+    gindex: elBlockHashPath.gindex,
+    leaf: elBlockHash,
+  })
+
+  if (!equalsBytes(reconstructedBlock.hashTreeRoot(), proof.beaconBlockHeaderRoot)) return false
+  return true
+}
+
+export const generatePreMergeHeaderProof = async (
+  blockNumber: bigint,
+  epochAccumulator: Uint8Array,
+): Promise<Witnesses> => {
+  if (blockNumber > MERGE_BLOCK)
+    throw new Error('cannot generate preMerge header for post merge block')
+  try {
+    const accumulator = EpochAccumulator.deserialize(epochAccumulator)
+    const tree = EpochAccumulator.value_toTree(accumulator)
+    const proofInput: SingleProofInput = {
+      type: ProofType.single,
+      gindex: blockNumberToGindex(blockNumber),
+    }
+    const proof = createProof(tree, proofInput) as SingleProof
+    return proof.witnesses
+  } catch (err: any) {
+    throw new Error('Error generating inclusion proof: ' + (err as any).message)
+  }
+}

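A usage sketch for the new utilities, mirroring the integration tests further below: the caller now derives witnesses from a serialized epoch accumulator and passes them to addRLPSerializedBlock explicitly. The helper name seedPreMergeBlock, the loose typing of the network argument, and the 'portalnetwork' import path are assumptions for illustration, not part of the commit.

// Illustrative usage sketch (placeholder values, assumed import path).
import { hexToBytes } from '@ethereumjs/util'
import {
  addRLPSerializedBlock,
  generatePreMergeHeaderProof,
  verifyPreMergeHeaderProof,
} from 'portalnetwork' // assumed package entry point

const seedPreMergeBlock = async (
  rlpHex: string, // RLP-serialized block, hex encoded
  blockHash: string, // 0x-prefixed block hash
  blockNumber: bigint, // pre-merge block number
  epochAccumulatorHex: string, // serialized EpochAccumulator covering this block
  network: any, // HistoryNetwork instance, typed loosely here
) => {
  // Callers are now responsible for producing the witnesses themselves.
  const witnesses = await generatePreMergeHeaderProof(blockNumber, hexToBytes(epochAccumulatorHex))
  // Optional sanity check before storing/gossiping the header.
  if (!verifyPreMergeHeaderProof(witnesses, blockHash, blockNumber)) {
    throw new Error('generated witnesses do not verify against the epoch accumulator')
  }
  await addRLPSerializedBlock(rlpHex, blockHash, network, witnesses)
}
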
packages/portalnetwork/test/integration/integration.spec.ts

Lines changed: 22 additions & 3 deletions
@@ -15,6 +15,7 @@ import {
   PortalNetwork,
   TransportLayer,
   addRLPSerializedBlock,
+  generatePreMergeHeaderProof,
   getContentKey,
   toHexString,
 } from '../../src/index.js'
@@ -105,7 +106,7 @@ it('gossip test', async () => {
     'epoch 25 added',
   )
   for await (const [_idx, testBlock] of testBlocks.entries()) {
-    const proof = await network1.generateInclusionProof(testBlock.header.number)
+    const proof = await generatePreMergeHeaderProof(testBlock.header.number, hexToBytes(epoch25))
     assert.equal(proof.length, 15, 'proof generated for ' + toHexString(testBlock.hash()))
     const headerWith = BlockHeaderWithProof.serialize({
       header: testBlock.header.serialize(),
@@ -190,7 +191,16 @@ it('FindContent', async () => {
     epoch25,
     'epoch 25 added',
   )
-  await addRLPSerializedBlock(testBlockData[29].rlp, testBlockData[29].blockHash, network1)
+  const witnesses = await generatePreMergeHeaderProof(
+    BigInt(testBlockData[29].number),
+    hexToBytes(epoch25),
+  )
+  await addRLPSerializedBlock(
+    testBlockData[29].rlp,
+    testBlockData[29].blockHash,
+    network1,
+    witnesses,
+  )
   await network1.sendPing(network2?.enr!.toENR())

   const res = await network2.sendFindContent(
@@ -256,7 +266,16 @@ it('eth_getBlockByHash', async () => {
     epoch25,
     'epoch 25 added',
   )
-  await addRLPSerializedBlock(testBlockData[29].rlp, testBlockData[29].blockHash, network1)
+  const witnesses = await generatePreMergeHeaderProof(
+    BigInt(testBlockData[29].number),
+    hexToBytes(epoch25),
+  )
+  await addRLPSerializedBlock(
+    testBlockData[29].rlp,
+    testBlockData[29].blockHash,
+    network1,
+    witnesses,
+  )
   await network1.sendPing(network2?.enr!.toENR())

   const retrieved = await network2.portal.ETH.getBlockByHash(testBlockData[29].blockHash, false)

packages/portalnetwork/test/networks/history/headerProof.spec.ts

Lines changed: 26 additions & 1 deletion
@@ -4,7 +4,9 @@ import { BlockHeader } from '@ethereumjs/block'
 import { hexToBytes } from '@ethereumjs/util'
 import { ssz } from '@lodestar/types'
 import { readFileSync } from 'fs'
+import yaml from 'js-yaml'
 import { createRequire } from 'module'
+import { resolve } from 'path'
 import { assert, describe, it } from 'vitest'

 import {
@@ -16,6 +18,7 @@ import {
   blockNumberToLeafIndex,
   slotToHistoricalBatch,
   slotToHistoricalBatchIndex,
+  verifyPreCapellaHeaderProof,
 } from '../../../src/index.js'
 import { historicalRoots } from '../../../src/networks/history/data/historicalRoots.js'

@@ -166,7 +169,6 @@ describe('Bellatrix - Capella header proof tests', () => {
     assert.deepEqual(
       reconstructedBatch.hashTreeRoot(),
       hexToBytes(historicalRoots[Number(slotToHistoricalBatch(postMergeProof.slot))]),
-      // this works because the actual historical epoch is 574 but bigInt division always gives you a floor and our historical_roots array is zero indexed
     )

     const elBlockHashPath = ssz.bellatrix.BeaconBlock.getPathInfo([
@@ -186,4 +188,27 @@ describe('Bellatrix - Capella header proof tests', () => {

     assert.deepEqual(reconstructedBlock.hashTreeRoot(), postMergeProof.beaconBlockHeaderRoot)
   })
+
+  it('should verify a fluffy proof', () => {
+    const testString = readFileSync(resolve(__dirname, './testData/fluffyPostMergeProof.yaml'), {
+      encoding: 'utf-8',
+    })
+    const testVector: {
+      execution_block_header: string
+      beacon_block_proof: string
+      beacon_block_root: string
+      historical_roots_proof: string
+      slot: string
+    } = yaml.load(testString) as any
+    const fluffyProof = HistoricalRootsBlockProof.fromJson({
+      beaconBlockHeaderProof: testVector.beacon_block_proof,
+      historicalRootsProof: testVector.historical_roots_proof,
+      slot: testVector.slot,
+      beaconBlockHeaderRoot: testVector.beacon_block_root,
+      executionBlockHeader: testVector.execution_block_header,
+    })
+    assert.ok(
+      verifyPreCapellaHeaderProof(fluffyProof, hexToBytes(testVector.execution_block_header)),
+    )
+  })
 })
