Add bellatrix-capella proof verification (#603)
* Add Fluffy proof verification

* Move proof verification to utility methods

* Add pre-Capella proof verification step

* Add verification and tests for post-merge header proofs

* Better testing

* Lint

* Laziness
acolytec3 authored Jul 16, 2024
1 parent 7ba54fc commit 68c82ee
Showing 9 changed files with 341 additions and 99 deletions.
2 changes: 1 addition & 1 deletion packages/cli/src/rpc/modules/ultralight.ts
@@ -82,7 +82,7 @@ export class ultralight {

const [blockHash, rlpHex] = params
try {
await addRLPSerializedBlock(rlpHex, blockHash, this._history!)
await addRLPSerializedBlock(rlpHex, blockHash, this._history!, [])
this.logger(`Block ${blockHash} added to content DB`)
return `Block ${blockHash} added to content DB`
} catch (err: any) {
1 change: 1 addition & 0 deletions packages/portalnetwork/src/client/provider.ts
@@ -92,6 +92,7 @@ export class UltralightProvider extends ethers.JsonRpcProvider {
toHexString(ethJSBlock.serialize()),
block.hash,
this.historyNetwork,
[], // I'm too lazy to fix this right now
)
const ethersBlock = await ethJsBlockToEthersBlockWithTxs(ethJSBlock, this.provider)
return ethersBlock
74 changes: 16 additions & 58 deletions packages/portalnetwork/src/networks/history/history.ts
@@ -1,5 +1,4 @@
import { ENR } from '@chainsafe/enr'
import { ProofType, createProof } from '@chainsafe/persistent-merkle-tree'
import { Block, BlockHeader } from '@ethereumjs/block'
import { bytesToInt, hexToBytes } from '@ethereumjs/util'
import debug from 'debug'
@@ -27,18 +26,12 @@ import {
EpochAccumulator,
HistoryNetworkContentType,
MERGE_BLOCK,
SHANGHAI_BLOCK,
sszReceiptsListType,
} from './types.js'
import {
blockNumberToGindex,
epochIndexByBlocknumber,
epochRootByBlocknumber,
epochRootByIndex,
getContentKey,
} from './util.js'
import { getContentKey, verifyPreCapellaHeaderProof, verifyPreMergeHeaderProof } from './util.js'

import type { BaseNetworkConfig, FindContentMessage, Witnesses } from '../../index.js'
import type { Proof, SingleProof, SingleProofInput } from '@chainsafe/persistent-merkle-tree'
import type { BaseNetworkConfig, FindContentMessage } from '../../index.js'
import type { Debugger } from 'debug'
export class HistoryNetwork extends BaseNetwork {
networkId: NetworkId.HistoryNetwork
@@ -136,12 +129,22 @@ export class HistoryNetwork extends BaseNetwork {
if (proof.value === null) {
throw new Error('Received block header without proof')
}
// Only check proofs on pre-merge headers
if (Array.isArray(proof.value)) {
try {
this.verifyInclusionProof(proof.value, contentHash, header.number)
verifyPreMergeHeaderProof(proof.value, contentHash, header.number)
} catch {
throw new Error('Received block header with invalid proof')
throw new Error('Received pre-merge block header with invalid proof')
}
}
} else {
if (header.number < SHANGHAI_BLOCK) {
if (proof.value === null) {
this.logger('Received post-merge block without proof')
}
try {
verifyPreCapellaHeaderProof(proof.value as any, header.hash())
} catch {
throw new Error('Received post-merge block header with invalid proof')
}
}
}
@@ -322,51 +325,6 @@
}
}

public generateInclusionProof = async (blockNumber: bigint): Promise<Witnesses> => {
if (blockNumber < MERGE_BLOCK) {
try {
const epochHash = epochRootByBlocknumber(blockNumber)
const epoch = await this.retrieve(
getContentKey(HistoryNetworkContentType.EpochAccumulator, epochHash!),
)
const accumulator = EpochAccumulator.deserialize(hexToBytes(epoch!))
const tree = EpochAccumulator.value_toTree(accumulator)
const proofInput: SingleProofInput = {
type: ProofType.single,
gindex: blockNumberToGindex(blockNumber),
}
const proof = createProof(tree, proofInput) as SingleProof
return proof.witnesses
} catch (err: any) {
throw new Error('Error generating inclusion proof: ' + (err as any).message)
}
} else {
// TODO: Implement inclusion proof generation for post-merge blocks
return []
}
}

public verifyInclusionProof(
witnesses: Uint8Array[],
blockHash: string,
blockNumber: bigint,
): boolean {
if (blockNumber < MERGE_BLOCK) {
const target = epochRootByIndex(epochIndexByBlocknumber(blockNumber))
const proof: Proof = {
type: ProofType.single,
gindex: blockNumberToGindex(blockNumber),
witnesses,
leaf: hexToBytes(blockHash),
}
EpochAccumulator.createFromProof(proof, target)
return true
} else {
// TODO: Implement verification for post-merge blocks
return true
}
}

public async getStateRoot(blockNumber: bigint) {
const block = await this.portal.ETH.getBlockByNumber(blockNumber, false)
if (block === undefined) {
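Taken together, these history.ts changes make header validation fork-aware: pre-merge headers are checked against the pre-merge accumulator, post-merge but pre-Capella headers against the beacon chain's historical roots, and anything later is accepted unverified for now. What follows is a minimal standalone sketch of that dispatch, not the committed method: it branches on the verifiers' boolean results (as defined in util.ts below) rather than relying on thrown errors, and assumes the `BlockHeaderWithProof` envelope has already been decoded.

```ts
// Sketch only — not the committed method. Fork-aware header-proof dispatch,
// mirroring the logic added above. MERGE_BLOCK, SHANGHAI_BLOCK, and both
// verifiers come from this commit.
import { MERGE_BLOCK, SHANGHAI_BLOCK } from './types.js'
import { verifyPreCapellaHeaderProof, verifyPreMergeHeaderProof } from './util.js'

import type { BlockHeader } from '@ethereumjs/block'

export function validateHeaderProof(
  header: BlockHeader,
  contentHash: string,
  proofValue: unknown,
): void {
  if (header.number < MERGE_BLOCK) {
    // Pre-merge: the proof is a list of epoch-accumulator witnesses
    if (!Array.isArray(proofValue)) {
      throw new Error('Received block header without proof')
    }
    if (!verifyPreMergeHeaderProof(proofValue, contentHash, header.number)) {
      throw new Error('Received pre-merge block header with invalid proof')
    }
  } else if (header.number < SHANGHAI_BLOCK) {
    // Post-merge, pre-Capella: the proof commits to the beacon historical roots
    if (!verifyPreCapellaHeaderProof(proofValue as any, header.hash())) {
      throw new Error('Received post-merge block header with invalid proof')
    }
  }
  // Post-Capella headers are accepted without proof verification in this commit
}
```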
101 changes: 97 additions & 4 deletions packages/portalnetwork/src/networks/history/util.ts
@@ -1,10 +1,13 @@
import { digest } from '@chainsafe/as-sha256'
import { ProofType, createProof } from '@chainsafe/persistent-merkle-tree'
import { fromHexString, toHexString } from '@chainsafe/ssz'
import { Block, BlockHeader } from '@ethereumjs/block'
import { RLP as rlp } from '@ethereumjs/rlp'
import { hexToBytes } from '@ethereumjs/util'
import { equalsBytes, hexToBytes } from '@ethereumjs/util'
import { ssz } from '@lodestar/types'

import { historicalEpochs } from './data/epochHashes.js'
import { historicalRoots } from './data/historicalRoots.js'
import {
BlockBodyContentType,
BlockHeaderWithProof,
@@ -20,6 +23,13 @@ import {

import type { HistoryNetwork } from './history.js'
import type { BlockBodyContent, Witnesses } from './types.js'
import type { Proof, SingleProof, SingleProofInput } from '@chainsafe/persistent-merkle-tree'
import type {
ByteVectorType,
UintBigintType,
ValueOfFields,
VectorCompositeType,
} from '@chainsafe/ssz'
import type {
BlockBytes,
BlockHeaderBytes,
@@ -159,16 +169,15 @@ export const addRLPSerializedBlock = async (
rlpHex: string,
blockHash: string,
network: HistoryNetwork,
witnesses?: Witnesses,
witnesses: Witnesses,
) => {
const block = Block.fromRLPSerializedBlock(fromHexString(rlpHex), {
setHardfork: true,
})
const header = block.header
const headerKey = getContentKey(HistoryNetworkContentType.BlockHeader, hexToBytes(blockHash))
if (header.number < MERGE_BLOCK) {
// Only generate proofs for pre-merge headers
const proof: Witnesses = witnesses ?? (await network.generateInclusionProof(header.number))
const proof: Witnesses = witnesses
const headerProof = BlockHeaderWithProof.serialize({
header: header.serialize(),
proof: { selector: 1, value: proof },
@@ -230,3 +239,87 @@ export const slotToHistoricalBatchIndex = (slot: bigint) => {
export const slotToHistoricalBatch = (slot: bigint) => {
return slot / 8192n
}

export const verifyPreMergeHeaderProof = (
witnesses: Uint8Array[],
blockHash: string,
blockNumber: bigint,
): boolean => {
try {
const target = epochRootByIndex(epochIndexByBlocknumber(blockNumber))
const proof: Proof = {
type: ProofType.single,
gindex: blockNumberToGindex(blockNumber),
witnesses,
leaf: hexToBytes(blockHash),
}
EpochAccumulator.createFromProof(proof, target)
return true
} catch (_err) {
return false
}
}

export const verifyPreCapellaHeaderProof = (
proof: ValueOfFields<{
beaconBlockHeaderProof: VectorCompositeType<ByteVectorType>
beaconBlockHeaderRoot: ByteVectorType
historicalRootsProof: VectorCompositeType<ByteVectorType>
slot: UintBigintType
}>,
elBlockHash: Uint8Array,
) => {
const batchIndex = slotToHistoricalBatchIndex(proof.slot)
const historicalRootsPath = ssz.phase0.HistoricalBatch.getPathInfo([
'blockRoots',
Number(batchIndex),
])
const reconstructedBatch = ssz.phase0.HistoricalBatch.createFromProof({
witnesses: proof.historicalRootsProof,
type: ProofType.single,
gindex: historicalRootsPath.gindex,
leaf: proof.beaconBlockHeaderRoot, // This should be the leaf value this proof is verifying
})
if (
!equalsBytes(
reconstructedBatch.hashTreeRoot(),
hexToBytes(historicalRoots[Number(slotToHistoricalBatch(proof.slot))]),
)
)
return false

const elBlockHashPath = ssz.bellatrix.BeaconBlock.getPathInfo([
'body',
'executionPayload',
'blockHash',
])
const reconstructedBlock = ssz.bellatrix.BeaconBlock.createFromProof({
witnesses: proof.beaconBlockHeaderProof,
type: ProofType.single,
gindex: elBlockHashPath.gindex,
leaf: elBlockHash,
})

if (!equalsBytes(reconstructedBlock.hashTreeRoot(), proof.beaconBlockHeaderRoot)) return false
return true
}

export const generatePreMergeHeaderProof = async (
blockNumber: bigint,
epochAccumulator: Uint8Array,
): Promise<Witnesses> => {
if (blockNumber > MERGE_BLOCK)
throw new Error('cannot generate preMerge header for post merge block')
try {
const accumulator = EpochAccumulator.deserialize(epochAccumulator)
const tree = EpochAccumulator.value_toTree(accumulator)
const proofInput: SingleProofInput = {
type: ProofType.single,
gindex: blockNumberToGindex(blockNumber),
}
const proof = createProof(tree, proofInput) as SingleProof
return proof.witnesses
} catch (err: any) {
throw new Error('Error generating inclusion proof: ' + (err as any).message)
}
}
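Before the integration tests below, a condensed usage sketch of the new utilities; every `declare`d input is a placeholder rather than data from this commit, and the import path assumes the package's index barrel as the tests use it. The pre-Capella verifier relies on the slot arithmetic above: slots are grouped into historical batches of 8192, so, for example, the merge slot 4700013 lands in batch 4700013 / 8192 = 573 (floored), at index 4700013 mod 8192 = 5997 within that batch.

```ts
// Usage sketch under stated assumptions — placeholder inputs, not commit data.
import {
  addRLPSerializedBlock,
  generatePreMergeHeaderProof,
  verifyPreMergeHeaderProof,
} from '../../src/index.js'

declare const blockRlpHex: string // 0x-prefixed RLP of a pre-merge block
declare const blockHash: string // 0x-prefixed hash of that block
declare const blockNumber: bigint
declare const epochAccumulatorBytes: Uint8Array // serialized epoch accumulator containing the block
declare const network: any // a HistoryNetwork instance

// Witnesses for the block's leaf in its epoch accumulator
const witnesses = await generatePreMergeHeaderProof(blockNumber, epochAccumulatorBytes)

// addRLPSerializedBlock now takes witnesses explicitly (the parameter is no longer optional)
await addRLPSerializedBlock(blockRlpHex, blockHash, network, witnesses)

// verifyPreMergeHeaderProof returns false on failure rather than throwing
const ok = verifyPreMergeHeaderProof(witnesses, blockHash, blockNumber)
```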
25 changes: 22 additions & 3 deletions packages/portalnetwork/test/integration/integration.spec.ts
@@ -15,6 +15,7 @@ import {
PortalNetwork,
TransportLayer,
addRLPSerializedBlock,
generatePreMergeHeaderProof,
getContentKey,
toHexString,
} from '../../src/index.js'
@@ -105,7 +106,7 @@ it('gossip test', async () => {
'epoch 25 added',
)
for await (const [_idx, testBlock] of testBlocks.entries()) {
const proof = await network1.generateInclusionProof(testBlock.header.number)
const proof = await generatePreMergeHeaderProof(testBlock.header.number, hexToBytes(epoch25))
assert.equal(proof.length, 15, 'proof generated for ' + toHexString(testBlock.hash()))
const headerWith = BlockHeaderWithProof.serialize({
header: testBlock.header.serialize(),
@@ -190,7 +191,16 @@ it('FindContent', async () => {
epoch25,
'epoch 25 added',
)
await addRLPSerializedBlock(testBlockData[29].rlp, testBlockData[29].blockHash, network1)
const witnesses = await generatePreMergeHeaderProof(
BigInt(testBlockData[29].number),
hexToBytes(epoch25),
)
await addRLPSerializedBlock(
testBlockData[29].rlp,
testBlockData[29].blockHash,
network1,
witnesses,
)
await network1.sendPing(network2?.enr!.toENR())

const res = await network2.sendFindContent(
@@ -256,7 +266,16 @@ it('eth_getBlockByHash', async () => {
epoch25,
'epoch 25 added',
)
await addRLPSerializedBlock(testBlockData[29].rlp, testBlockData[29].blockHash, network1)
const witnesses = await generatePreMergeHeaderProof(
BigInt(testBlockData[29].number),
hexToBytes(epoch25),
)
await addRLPSerializedBlock(
testBlockData[29].rlp,
testBlockData[29].blockHash,
network1,
witnesses,
)
await network1.sendPing(network2?.enr!.toENR())

const retrieved = await network2.portal.ETH.getBlockByHash(testBlockData[29].blockHash, false)
27 changes: 26 additions & 1 deletion packages/portalnetwork/test/networks/history/headerProof.spec.ts
@@ -4,7 +4,9 @@ import { BlockHeader } from '@ethereumjs/block'
import { hexToBytes } from '@ethereumjs/util'
import { ssz } from '@lodestar/types'
import { readFileSync } from 'fs'
import yaml from 'js-yaml'
import { createRequire } from 'module'
import { resolve } from 'path'
import { assert, describe, it } from 'vitest'

import {
@@ -16,6 +18,7 @@ import {
blockNumberToLeafIndex,
slotToHistoricalBatch,
slotToHistoricalBatchIndex,
verifyPreCapellaHeaderProof,
} from '../../../src/index.js'
import { historicalRoots } from '../../../src/networks/history/data/historicalRoots.js'

@@ -166,7 +169,6 @@ describe('Bellatrix - Capella header proof tests', () => {
assert.deepEqual(
reconstructedBatch.hashTreeRoot(),
hexToBytes(historicalRoots[Number(slotToHistoricalBatch(postMergeProof.slot))]),
// this works because the actual historical epoch is 574 but bigInt division always gives you a floor and our historical_roots array is zero indexed
)

const elBlockHashPath = ssz.bellatrix.BeaconBlock.getPathInfo([
@@ -186,4 +188,27 @@

assert.deepEqual(reconstructedBlock.hashTreeRoot(), postMergeProof.beaconBlockHeaderRoot)
})

it('should verify a fluffy proof', () => {
const testString = readFileSync(resolve(__dirname, './testData/fluffyPostMergeProof.yaml'), {
encoding: 'utf-8',
})
const testVector: {
execution_block_header: string
beacon_block_proof: string
beacon_block_root: string
historical_roots_proof: string
slot: string
} = yaml.load(testString) as any
const fluffyProof = HistoricalRootsBlockProof.fromJson({
beaconBlockHeaderProof: testVector.beacon_block_proof,
historicalRootsProof: testVector.historical_roots_proof,
slot: testVector.slot,
beaconBlockHeaderRoot: testVector.beacon_block_root,
executionBlockHeader: testVector.execution_block_header,
})
assert.ok(
verifyPreCapellaHeaderProof(fluffyProof, hexToBytes(testVector.execution_block_header)),
)
})
})