Skip to content

Commit

Permalink
Portal Spec Test Runner + SSZ fixes for BlockHeader Proof types (#736)
Browse files Browse the repository at this point in the history
* Remove union type

* fix tests and utils

* fix more tests

* remove unused method

* Add spec test runner

* wip

* Add results tabulation and error logs

* Revert duplicate code

* updates

* Rework ssz types for post merge header proofs

* fix post capella test

* turn off spec test runner for now
  • Loading branch information
acolytec3 authored Feb 24, 2025
1 parent a543709 commit 11d0efe
Show file tree
Hide file tree
Showing 8 changed files with 447 additions and 229 deletions.
444 changes: 244 additions & 200 deletions packages/cli/scripts/postCapellaBlockBridge.ts

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion packages/portal-spec-tests
15 changes: 10 additions & 5 deletions packages/portalnetwork/src/networks/history/history.ts
Original file line number Diff line number Diff line change
Expand Up @@ -176,8 +176,9 @@ export class HistoryNetwork extends BaseNetwork {
try {
deserializedProof = AccumulatorProofType.deserialize(proof)
} catch (err: any) {
this.logger(`invalid proof for block ${bytesToHex(header.hash())}`)
throw new Error(`invalid proof for block ${bytesToHex(header.hash())}`)
const msg = `invalid proof for block ${header.number} - ${bytesToHex(header.hash())}`
this.logger(msg)
throw new Error(msg)
}
let validated = false
if ('blockHash' in validation) {
Expand All @@ -201,14 +202,17 @@ export class HistoryNetwork extends BaseNetwork {
try {
deserializedProof = HistoricalRootsBlockProof.deserialize(proof)
} catch (err: any) {
this.logger(`invalid proof for block ${bytesToHex(header.hash())}`)
throw new Error(`invalid proof for block ${bytesToHex(header.hash())}`)
const msg = `invalid proof for block ${header.number} - ${bytesToHex(header.hash())}`
this.logger(msg)
throw new Error(msg)
}
let validated = false
try {
validated = verifyPreCapellaHeaderProof(deserializedProof, header.hash())
} catch (err: any) {
this.logger(`Unable to validate proof for post-merge header: ${err.message}`)
const msg = `Unable to validate proof for post-merge header: ${err.message}`
this.logger(msg)
throw new Error(msg)
}
if (!validated) {
throw new Error('Unable to validate proof for post-merge header')
Expand All @@ -219,6 +223,7 @@ export class HistoryNetwork extends BaseNetwork {
let deserializedProof: ReturnType<typeof HistoricalSummariesBlockProof.deserialize>
try {
deserializedProof = HistoricalSummariesBlockProof.deserialize(proof)
console.log(HistoricalSummariesBlockProof.toJson(deserializedProof))
} catch (err: any) {
this.logger(`invalid proof for block ${bytesToHex(header.hash())}`)
throw new Error(`invalid proof for block ${bytesToHex(header.hash())}`)
Expand Down
16 changes: 8 additions & 8 deletions packages/portalnetwork/src/networks/history/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -185,23 +185,23 @@ export const BlockNumberKey = new ContainerType({

/** Post-merge pre-Capella block header proof types */
export const SlotType = new UintBigintType(8)
export const BeaconBlockProof = new ListCompositeType(Bytes32Type, 12)
export const HistoricalRootsProof = new VectorCompositeType(Bytes32Type, 14)
export const BeaconBlockProofHistoricalRoots = new VectorCompositeType(Bytes32Type, 14)
export const PostMergeExecutionBlockProof = new VectorCompositeType(Bytes32Type, 11)

export const HistoricalRootsBlockProof = new ContainerType({
beaconBlockProof: BeaconBlockProof,
historicalRootsProof: BeaconBlockProofHistoricalRoots,
beaconBlockRoot: Bytes32Type,
historicalRootsProof: HistoricalRootsProof,
beaconBlockProof: PostMergeExecutionBlockProof,
slot: SlotType,
})

/** Post-Capella block header proof types */
export const HistoricalSummariesProof = new VectorCompositeType(Bytes32Type, 13)

export const PostCapellaExecutionBlockProof = new ListCompositeType(Bytes32Type, 12)
export const BeaconBlockProofHistoricalSummaries = new VectorCompositeType(Bytes32Type, 13)
export const HistoricalSummariesBlockProof = new ContainerType({
beaconBlockProof: BeaconBlockProof,
historicalSummariesProof: BeaconBlockProofHistoricalSummaries,
beaconBlockRoot: Bytes32Type,
historicalSummariesProof: HistoricalSummariesProof,
beaconBlockProof: PostCapellaExecutionBlockProof,
slot: SlotType,
})

Expand Down
Original file line number Diff line number Diff line change
@@ -1,26 +1,25 @@
import { readFileSync } from 'fs'
import { SignableENR } from '@chainsafe/enr'
import { concatBytes, hexToBytes } from '@ethereumjs/util'
import { keys } from '@libp2p/crypto'
import { createBeaconConfig } from '@lodestar/config'
import { mainnetChainConfig } from '@lodestar/config/configs'
import { genesisData } from '@lodestar/config/networks'
import { computeEpochAtSlot, getChainForkConfigFromNetwork } from '@lodestar/light-client/utils'
import { ssz } from '@lodestar/types'
import { bytesToHex, concatBytes, hexToBytes } from '@ethereumjs/util'
import { multiaddr } from '@multiformats/multiaddr'
import { readFileSync } from 'fs'
import { assert, describe, it, vi } from 'vitest'
import {
BeaconLightClientNetworkContentType,
BlockHeaderWithProof,
HistoricalSummariesKey,
HistoricalSummariesWithProof,
HistoryNetworkContentType,
LightClientBootstrapKey,
NetworkId,
PortalNetwork,
getBeaconContentKey,
getContentKey,
getContentKey
} from '../../src/index.js'
import { createBeaconConfig } from '@lodestar/config'
import { mainnetChainConfig } from '@lodestar/config/configs'
import { genesisData } from '@lodestar/config/networks'
import { computeEpochAtSlot, getChainForkConfigFromNetwork } from '@lodestar/light-client/utils'
import { assert, describe, it, vi } from 'vitest'
import { multiaddr } from '@multiformats/multiaddr'
import { SignableENR } from '@chainsafe/enr'
import { keys } from '@libp2p/crypto'

describe('Block Bridge Data Test', () => {
it('should store and retrieve block header data', async () => {
Expand Down Expand Up @@ -109,7 +108,6 @@ describe('Block Bridge Data Test', () => {
const blockHash = fullBlock.data.message.body.execution_payload.block_hash

const headerKey = getContentKey(HistoryNetworkContentType.BlockHeader, hexToBytes(blockHash))
console.log(bytesToHex(BlockHeaderWithProof.deserialize(hexToBytes(headerWithProof)).header))
await history?.store(headerKey, hexToBytes(headerWithProof))

// Verify block header can be retrieved
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
"0x080000006f020000f90264a01624e3d62872568e41233178997c38dd75fa3baccc89351026beff7026179bfba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347944838b106fce9647bdf1e7877bf73ce8b0bad5f97a02e065520386645261b7a6153924904a31c07bf6d37bca90fe35b21d864499c05a0eaa4d1ba5182915e8732ddd43e557a6ec713732e72964593275124fddf28a2aba0a9f528fe24151b34969ceb581a59f107d7bf38d8f364072dd50908f326226e71b90100f5bbd9423d137076afdb9b31d2f1db9239cff06e951d0dee6e2d8a302cfb14632d9c2725ed7a08043a5f6e97df7dc311867ba9749f08ed735fbb6a03802ce196f5eed77c97d3bd3dc81ce76fdd61b3ec55cc71d511567e685d0c2cec8c24ffee35856338dae6da75f20f9c8efe338fcffb7d515fde9d7fe7f5305bb4da1a3585a3be7e0e378581692dd9bbfa95b1c7565419eed5eff471ff29296fdd7d733efddfec7f62e4e3fa9eb3f159c609c7dd5f23bdd1b4baff8e283f65ea3d24680bd4cb36f61f00dd5b56ff49bdb77a3d98ee56a7b6438c48dfba4aa5034e6ad3e2e63ff7fccb69b5b492c5ed93cffd71a92e5dddf6ff5161ace6da24c0bb9d00ec5f8084014977bd8401c9c38083f94439846780b05798546974616e2028746974616e6275696c6465722e78797a29a07737f9743d49ab14e32b78a4989729b13bfd28f09c7729669d14c35a3e265d5288000000000000000084bc232c85a01fd1ee2a1b4008ac9f8749a3a5980645510788ccbf0922b77efd67224f8a2234830200008403cc0000a0e1c45ed325063d2380fb27927e7dd5024dada52070c9af2889ee51a105040bcfcc0100003a317faa00ffbca7c1fc9464025ada6aa0db71fd9c050ed433bab104834f0f26c2c38585972af1a49f5358e96748e08f62d9e06883d6d3bbc285f5bbdc3fdde7b87b8f3009f2311f723310e9600cd62f3a0ba54257645d03dbb94700997c2ff1bea5a1dbb8434febb2338912ebd94cfb183cc76b79a1449374aca82445a60e1641d6c37404381727480db4e3e124c9ce9345eb3761882ba31a2444e19f7f4be6446d9460a465986305984f731af89318ee7a965c33d5f6e293bd7c77050a7275d50d2f96d67ae9ec187ef09d1c19c8688ad0a9c00675a55bf61e5e412d5b0b309b9b4a4f9baaa4e6d6cd518eb62f41a133594cab49cf290ffa3570b65376b9dab2632457193d929218ee57d4d2dc8df537f9e2929746d934595d00ff258485173737414bac51a48ca8e03d442cca16a9d9d485ff0447a52427aada4c790ac75fdb8c75ab9503bd1eb67bfb5e2f8f403654b57a1e206aac23546088f1d7284b27d0fc56523cfafc7fb5d233f5faee241365fa4cb
0c1d90792d8b558a3d0d5a63121993447a9a5d26cda2d9ad060c8d13f66d84623cc800e3dbb00097ce81a4debe6e5bf0ec7b02915aefabb6e7b1fb8468f228ab04e8e789a26ad9d727ac8430400e0a40000000000f5a23c1219f35dd7d4dc73e816727b6062b26168d5de32a8c44a5e91eeca9f1e04a8748db8fd9595375edfe16660a66ca721079109bd18775eeb54ce07b1400f37d1c9f9c839443ae1aff0d273f1801ffe724770a23d81a63f22f7babe39715d5b5687af2260ef0ed892a68d169c580b03ccb5bc59b0596a4221568de8bf1c7091e37807ec7706f1534c3c24b6664110fa6cf57c3512bb58d4ae666eb8aaed347bdd0fcc693ae5642a10264eb24e1274823f434d66307d9c1cd3c15be194a462b19dea48b339562bd54eac0517d81d7c1d6897ccbf3df258cb75677a235768dfdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d7134fd95b5494fd350c63f8bff9d7be30f0bfb3a78daae9f2a0f1ef38f358fd6010000000000000000000000000000000000000000000000000000000000000000f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b6e1e55e9e4793d44338f52522fd838eb70bd3ac0bb6492923efa3cabf6f5cae3"
"0x080000006f020000f90264a01624e3d62872568e41233178997c38dd75fa3baccc89351026beff7026179bfba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347944838b106fce9647bdf1e7877bf73ce8b0bad5f97a02e065520386645261b7a6153924904a31c07bf6d37bca90fe35b21d864499c05a0eaa4d1ba5182915e8732ddd43e557a6ec713732e72964593275124fddf28a2aba0a9f528fe24151b34969ceb581a59f107d7bf38d8f364072dd50908f326226e71b90100f5bbd9423d137076afdb9b31d2f1db9239cff06e951d0dee6e2d8a302cfb14632d9c2725ed7a08043a5f6e97df7dc311867ba9749f08ed735fbb6a03802ce196f5eed77c97d3bd3dc81ce76fdd61b3ec55cc71d511567e685d0c2cec8c24ffee35856338dae6da75f20f9c8efe338fcffb7d515fde9d7fe7f5305bb4da1a3585a3be7e0e378581692dd9bbfa95b1c7565419eed5eff471ff29296fdd7d733efddfec7f62e4e3fa9eb3f159c609c7dd5f23bdd1b4baff8e283f65ea3d24680bd4cb36f61f00dd5b56ff49bdb77a3d98ee56a7b6438c48dfba4aa5034e6ad3e2e63ff7fccb69b5b492c5ed93cffd71a92e5dddf6ff5161ace6da24c0bb9d00ec5f8084014977bd8401c9c38083f94439846780b05798546974616e2028746974616e6275696c6465722e78797a29a07737f9743d49ab14e32b78a4989729b13bfd28f09c7729669d14c35a3e265d5288000000000000000084bc232c85a01fd1ee2a1b4008ac9f8749a3a5980645510788ccbf0922b77efd67224f8a2234830200008403cc0000a0e1c45ed325063d2380fb27927e7dd5024dada52070c9af2889ee51a105040bcfc2c38585972af1a49f5358e96748e08f62d9e06883d6d3bbc285f5bbdc3fdde7b87b8f3009f2311f723310e9600cd62f3a0ba54257645d03dbb94700997c2ff1bea5a1dbb8434febb2338912ebd94cfb183cc76b79a1449374aca82445a60e1641d6c37404381727480db4e3e124c9ce9345eb3761882ba31a2444e19f7f4be6446d9460a465986305984f731af89318ee7a965c33d5f6e293bd7c77050a7275d50d2f96d67ae9ec187ef09d1c19c8688ad0a9c00675a55bf61e5e412d5b0b309b9b4a4f9baaa4e6d6cd518eb62f41a133594cab49cf290ffa3570b65376b9dab2632457193d929218ee57d4d2dc8df537f9e2929746d934595d00ff258485173737414bac51a48ca8e03d442cca16a9d9d485ff0447a52427aada4c790ac75fdb8c75ab9503bd1eb67bfb5e2f8f403654b57a1e206aac23546088f1d7284b27d0fc56523cfafc7fb5d233f5faee241365fa4cb0c1d90792d8b558a3d0d5a63121993447a9a5d26cda2d9ad060c8d13f66d84623cc800e3
dbb00097ce81a4debe6e5bf0ec7b02915aefabb6e7b1fb8468f228ab04e8e789a26ad9d727ac843043a317faa00ffbca7c1fc9464025ada6aa0db71fd9c050ed433bab104834f0f26cc01000000e0a40000000000f5a23c1219f35dd7d4dc73e816727b6062b26168d5de32a8c44a5e91eeca9f1e04a8748db8fd9595375edfe16660a66ca721079109bd18775eeb54ce07b1400f37d1c9f9c839443ae1aff0d273f1801ffe724770a23d81a63f22f7babe39715d5b5687af2260ef0ed892a68d169c580b03ccb5bc59b0596a4221568de8bf1c7091e37807ec7706f1534c3c24b6664110fa6cf57c3512bb58d4ae666eb8aaed347bdd0fcc693ae5642a10264eb24e1274823f434d66307d9c1cd3c15be194a462b19dea48b339562bd54eac0517d81d7c1d6897ccbf3df258cb75677a235768dfdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d7134fd95b5494fd350c63f8bff9d7be30f0bfb3a78daae9f2a0f1ef38f358fd6010000000000000000000000000000000000000000000000000000000000000000f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b6e1e55e9e4793d44338f52522fd838eb70bd3ac0bb6492923efa3cabf6f5cae3"
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ describe('Header Tests', async () => {
await network.store(headerKey, fakeProof)
assert.fail('should have thrown')
} catch (err: any) {
assert.ok(err.message.includes('invalid proof'))
assert.ok(err.message.includes('Unable to validate proof'))
}
})
})
171 changes: 171 additions & 0 deletions packages/portalnetwork/test/networks/history/spec-test-runner.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
/* eslint-disable no-console */
import { bytesToHex, hexToBytes } from '@ethereumjs/util'
import { readFileSync, readdirSync, statSync } from 'fs'
import yaml from 'js-yaml'
import { join, resolve } from 'path'
import { afterAll, beforeAll, describe, it } from 'vitest'
import type { HistoryNetwork } from '../../../src/index.js'
import {
HistoryNetworkContentType,
PortalNetwork,
decodeHistoryNetworkContentKey,
getContentKey,
} from '../../../src/index.js'

describe.skip('should run all spec tests', () => {
// This retrieves all the yaml files from the spec tests directory
// Recursively collect the paths of every .yaml/.yml file beneath `dir`.
const getAllYamlFiles = (dir: string): string[] => {
  const collected: string[] = []

  for (const entry of readdirSync(dir)) {
    const entryPath = join(dir, entry)
    if (statSync(entryPath).isDirectory()) {
      // Descend into subdirectories and merge their results
      collected.push(...getAllYamlFiles(entryPath))
    } else if (entry.endsWith('.yaml') || entry.endsWith('.yml')) {
      collected.push(entryPath)
    }
  }

  return collected
}

/**
 * Stores one spec-test content key/value pair in the history network and
 * verifies it can be read back.
 *
 * @param history - the history network instance under test
 * @param contentKey - serialized content key (first byte is the content type)
 * @param contentValue - serialized content value expected to round-trip
 * @returns `true` when the stored value is retrieved intact, `false` when the
 *          retrieved value differs (or an opaque non-Error was thrown), or the
 *          thrown `Error` itself so the caller can triage it.
 */
const runHistoryTest = async (
  history: HistoryNetwork,
  contentKey: Uint8Array,
  contentValue: Uint8Array,
) => {
  try {
    // Store the content. `store` parses the content key, deserializes per the content type,
    // and then validates the content
    await history?.store(contentKey, contentValue)
    if (contentKey[0] !== HistoryNetworkContentType.BlockHeaderByNumber) {
      const retrieved = await history?.get(contentKey)
      return retrieved === bytesToHex(contentValue)
    } else {
      // BlockHeaderByNumber requires a conversion to blockhash since we store headers by blockhash in the db
      const blockNumber = decodeHistoryNetworkContentKey(contentKey)
      const hash = history?.blockNumberToHash(blockNumber.keyOpt as bigint)
      const hashKey = getContentKey(HistoryNetworkContentType.BlockHeader, hash!)
      const retrieved = await history?.get(hashKey)
      return retrieved === bytesToHex(contentValue)
    }
  } catch (e: unknown) {
    // `'message' in e` on an unknown value throws its own TypeError when a
    // non-object is thrown (and fails strict type checking); narrow with
    // instanceof instead, returning the Error for triage.
    if (e instanceof Error) {
      return e
    } else {
      return false
    }
  }
}

// Parsed yaml test vectors, keyed by absolute file path and split per network.
// The explicit Record type gives each bucket an index signature so vectors can
// be assigned by file path (`networkFiles.history[file] = content`) under
// strict type checking; the inferred `{}` type would reject that assignment.
const networkFiles: Record<'history' | 'state' | 'beacon_chain', Record<string, unknown>> = {
  history: {},
  state: {},
  beacon_chain: {},
}

// Per-network tally of spec-test outcomes, printed by `afterAll`.
// Each bucket counts passes/failures and keeps a log line per failure.
const emptyTally = () => ({
  passed: 0,
  failed: 0,
  errors: [] as string[],
})
const results = {
  history: emptyTally(),
  state: emptyTally(),
  beacon_chain: emptyTally(),
}

// Absolute paths of every yaml fixture found under the spec-tests directory.
let yamlFiles: string[] = []
beforeAll(() => {
  // Load every yaml fixture and bucket it by the network named in its path
  const testDir = resolve(__dirname, '../../../../portal-spec-tests/tests')
  yamlFiles = getAllYamlFiles(testDir)

  for (const file of yamlFiles) {
    let parsed: unknown
    try {
      parsed = yaml.load(readFileSync(file, 'utf-8'))
    } catch (error) {
      console.error(`Error reading ${file}:`, error)
      continue
    }
    // Split test suites up by network
    if (file.includes('/history/')) {
      networkFiles.history[file] = parsed
    } else if (file.includes('/state/')) {
      networkFiles.state[file] = parsed
    } else if (file.includes('/beacon_chain/')) {
      networkFiles.beacon_chain[file] = parsed
    }
  }
})
it('should run all serialized history spec tests', async () => {
  // This test inspects all the `history` test inputs and runs all the ones
  // with serialized content keys and values
  // The basic idea of the test is can we deserialize the content, store it,
  // and then retrieve it using the original content key
  const client = await PortalNetwork.create({})
  // '0x500b' is used as the history network id key here — NOTE(review):
  // confirm it matches NetworkId for the history network
  const history = client.network()['0x500b']!
  for (const testData of Object.entries(networkFiles.history)) {
    // Some test vectors are parsed into a tuple of [file name, [test vector]]
    if (Array.isArray(testData) && Array.isArray(testData[1])) {
      for (const vector of testData[1]) {
        // Only vectors that carry serialized key/value pairs are runnable here
        if ('content_key' in vector && 'content_value' in vector) {
          const key = hexToBytes(vector.content_key)
          const value = hexToBytes(vector.content_value)
          const result = await runHistoryTest(history, key, value)
          if (result === true) {
            results.history.passed++
          } else {
            // `result` is false or an Error returned by runHistoryTest for triage
            results.history.failed++
            // NOTE(review): result is true/false/Error and never nullish, so the
            // `?? 'no error reported'` fallback can never fire — confirm intent
            results.history.errors.push(
              `Key: ${bytesToHex(key)} in file ${testData[0]} -- Error: ${result ?? 'no error reported'}`,
            )
          }
        }
      }
    } else if (
      Array.isArray(testData) &&
      'content_key' in testData[1] &&
      'content_value' in testData[1]
    ) {
      // Some tests are stored as a tuple of [file name, test vector]
      const key = hexToBytes(testData[1].content_key as string) // Content key is stored as a hex string
      const value = hexToBytes(testData[1].content_value as string) // Content value is stored as a hex string
      const result = await runHistoryTest(history, key, value)
      if (result === true) {
        results.history.passed++
      } else {
        results.history.failed++
        // Unlike the branch above, only Error results are logged here; a plain
        // `false` (value mismatch) is counted but produces no error line
        if (typeof result !== 'boolean') {
          results.history.errors.push(
            `Key: ${bytesToHex(key)} in file ${testData[0]} -- ${result}`,
          )
        }
      }
    }
  }
})
afterAll(() => {
  // Print the tabulated history results once every test has finished
  const divider = '--------------------------------'
  const report: unknown[] = [divider, 'History Results', divider, results.history, divider]
  for (const line of report) {
    console.log(line)
  }
})
})

0 comments on commit 11d0efe

Please sign in to comment.