diff --git a/packages/cli/scripts/postCapellaBlockBridge.ts b/packages/cli/scripts/postCapellaBlockBridge.ts index 797c33b3f..1b2da3ae2 100644 --- a/packages/cli/scripts/postCapellaBlockBridge.ts +++ b/packages/cli/scripts/postCapellaBlockBridge.ts @@ -3,7 +3,20 @@ import { bytesToHex, concatBytes, hexToBytes, initKZG } from '@ethereumjs/util' import { Common } from '@ethereumjs/common' import { ssz, sszTypesFor } from '@lodestar/types' import jayson from 'jayson/promise/index.js' -import { BeaconLightClientNetworkContentType, BlockHeaderWithProof, getBeaconContentKey, getContentKey, HistoricalSummariesBlockProof, HistoricalSummariesKey, HistoricalSummariesWithProof, HistoryNetworkContentType, LightClientBootstrapKey, LightClientFinalityUpdateKey, LightClientOptimisticUpdateKey, slotToHistoricalBatchIndex } from 'portalnetwork' +import { + BeaconLightClientNetworkContentType, + BlockHeaderWithProof, + getBeaconContentKey, + getContentKey, + HistoricalSummariesBlockProof, + HistoricalSummariesKey, + HistoricalSummariesWithProof, + HistoryNetworkContentType, + LightClientBootstrapKey, + LightClientFinalityUpdateKey, + LightClientOptimisticUpdateKey, + slotToHistoricalBatchIndex, +} from 'portalnetwork' import type { SingleProof } from '@chainsafe/persistent-merkle-tree' import { computeEpochAtSlot, getChainForkConfigFromNetwork } from '@lodestar/light-client/utils' import { mainnetChainConfig } from '@lodestar/config/configs' @@ -20,215 +33,246 @@ import { loadKZG } from 'kzg-wasm' const { Client } = jayson const main = async () => { - const kzg = await loadKZG() - const forkConfig = getChainForkConfigFromNetwork('mainnet') - const beaconConfig = mainnetChainConfig - - - const beaconNode = 'https://lodestar-mainnet.chainsafe.io' - const ultralight = Client.http({ host: '127.0.0.1', port: 8545 }) - - // In order to be able to verify post-capella blocks, the light client embedded in - // the Beacon network needs to be initialized. We fetch the latest finality update - // from the Beacon node and use it's slot as a reference to the latest bootstrap - //and Historical Summaries - const finalityUpdate = ssz.deneb.LightClientFinalityUpdate.fromJson( - (await (await fetch(beaconNode + '/eth/v1/beacon/light_client/finality_update')).json()).data, - ) - - const optimisticUpdate = ssz.deneb.LightClientOptimisticUpdate.fromJson( - (await (await fetch(beaconNode + '/eth/v1/beacon/light_client/optimistic_update')).json()).data, - ) - console.log( - `Retrieved latest optimistic update for slot ${BigInt(optimisticUpdate.signatureSlot)}`, - ) - const optimisticUpdateKey = getBeaconContentKey( - BeaconLightClientNetworkContentType.LightClientOptimisticUpdate, - LightClientOptimisticUpdateKey.serialize({ - signatureSlot: BigInt(optimisticUpdate.signatureSlot), + const kzg = await loadKZG() + const forkConfig = getChainForkConfigFromNetwork('mainnet') + const beaconConfig = mainnetChainConfig + + const beaconNode = 'https://lodestar-mainnet.chainsafe.io' + const ultralight = Client.http({ host: '127.0.0.1', port: 8545 }) + + // In order to be able to verify post-capella blocks, the light client embedded in + // the Beacon network needs to be initialized. 
We fetch the latest finality update
+  // from the Beacon node and use its slot as a reference to the latest bootstrap
+  // and Historical Summaries
+  const finalityUpdate = ssz.deneb.LightClientFinalityUpdate.fromJson(
+    (await (await fetch(beaconNode + '/eth/v1/beacon/light_client/finality_update')).json()).data,
+  )
+
+  const optimisticUpdate = ssz.deneb.LightClientOptimisticUpdate.fromJson(
+    (await (await fetch(beaconNode + '/eth/v1/beacon/light_client/optimistic_update')).json()).data,
+  )
+  console.log(
+    `Retrieved latest optimistic update for slot ${BigInt(optimisticUpdate.signatureSlot)}`,
+  )
+  const optimisticUpdateKey = getBeaconContentKey(
+    BeaconLightClientNetworkContentType.LightClientOptimisticUpdate,
+    LightClientOptimisticUpdateKey.serialize({
+      signatureSlot: BigInt(optimisticUpdate.signatureSlot),
+    }),
+  )
+
+  const bootstrapSlot = finalityUpdate.finalizedHeader.beacon.slot
+  const bootstrapRes = await (
+    await fetch(beaconNode + `/eth/v1/beacon/blocks/${bootstrapSlot}/root`)
+  ).json()
+
+  const bootstrapRoot = bootstrapRes.data.root
+  const bootstrap = ssz.deneb.LightClientBootstrap.fromJson(
+    (
+      await (
+        await fetch(beaconNode + `/eth/v1/beacon/light_client/bootstrap/${bootstrapRoot}`)
+      ).json()
+    ).data,
+  )
+
+  const forkName = forkConfig.getForkName(bootstrapSlot)
+  const forkDigest = createBeaconConfig(
+    beaconConfig,
+    hexToBytes(genesisData.mainnet.genesisValidatorsRoot),
+  ).forkName2ForkDigest(forkName)
+  console.log(`Retrieved bootstrap for finalized checkpoint ${bootstrapRoot}`)
+
+  // Push the bootstrap into the Portal Network
+  let res = await ultralight.request('portal_beaconStore', [
+    bytesToHex(
+      getBeaconContentKey(
+        BeaconLightClientNetworkContentType.LightClientBootstrap,
+        LightClientBootstrapKey.serialize({ blockHash: hexToBytes(bootstrapRoot) }),
+      ),
+    ),
+    bytesToHex(concatBytes(forkDigest, ssz[forkName].LightClientBootstrap.serialize(bootstrap))),
+  ])
+  console.log('Pushed bootstrap into Portal Network', res)
+
+  // Start the light client using the bootstrap slot's block root
+  res = await ultralight.request('portal_beaconStartLightClient', [bootstrapRoot])
+  console.log('Started Beacon Light Client Sync', res)
+
+  // Push the latest optimistic update so the light client is synced (maybe not necessary)
+  res = await ultralight.request('portal_beaconStore', [
+    bytesToHex(optimisticUpdateKey),
+    bytesToHex(
+      concatBytes(forkDigest, ssz.deneb.LightClientOptimisticUpdate.serialize(optimisticUpdate)),
+    ),
+  ])
+
+  // Retrieve the historical summaries at the bootstrap/finality update slot
+  console.log('Retrieving latest historical summaries...')
+  const res2 = await fetch(
+    beaconNode +
+      `/eth/v1/lodestar/historical_summaries/${finalityUpdate.finalizedHeader.beacon.slot}`,
+  )
+  const res2Json = await res2.json()
+
+  const historicalSummaries = ssz.deneb.BeaconState.fields.historicalSummaries.fromJson(
+    res2Json.data.historical_summaries,
+  )
+  const finalityEpoch = computeEpochAtSlot(finalityUpdate.finalizedHeader.beacon.slot)
+  const proof = res2Json.data.proof.map((el) => hexToBytes(el))
+
+  // Push the historical summaries into the Portal Network
+  // Note - Ultralight should be able to verify the historical summaries using the proof from the Beacon node
+
+  res = await ultralight.request('portal_beaconStore', [
+    bytesToHex(
+      getBeaconContentKey(
+        BeaconLightClientNetworkContentType.HistoricalSummaries,
+        HistoricalSummariesKey.serialize({ epoch: BigInt(finalityEpoch) }),
+      ),
+    ),
+    bytesToHex(
+      concatBytes(
+        forkDigest,
+        HistoricalSummariesWithProof.serialize({
+          epoch: BigInt(finalityEpoch),
+          historicalSummaries,
+          proof,
        }),
+      ),
+    ),
+  ])
+
+  // Now we have a synced light client, so we should be able to verify post-Capella blocks (as long as they are not from the current sync period)
+
+  // In order to construct post-Capella block proofs, we need to get the Historical Summary for the sync period we are serving
+  // blocks from. We can get these Historical Summaries from an era file for that sync period by reading the beacon state snapshot
+  // and pulling the `BlockRoots` from the `BeaconState` object. The root of this object will match the `block_summary_root` index of
+  // the Historical Summaries object we retrieved from the Beacon node
+
+  // NOTE: You can use any post-Capella era file to construct header proofs (provided the beacon blocks are available from a beacon node)
+  console.log(`Reading era file for period ${1320}`)
+  const eraFile = new Uint8Array(readFileSync(`./scripts/eras/mainnet-01320-59f1c8c0.era`))
+  const indices = getEraIndexes(eraFile)
+  const stateEntry = readEntry(
+    eraFile.slice(indices.stateSlotIndex.recordStart + indices.stateSlotIndex.slotOffsets[0]),
+  )
+  const state = await decompressBeaconState(stateEntry.data, indices.stateSlotIndex.startSlot)
+  const stateFork = forkConfig.getForkName(indices.stateSlotIndex.startSlot)
+
+  // Now we can construct block proofs for any block in the sync period
+  const x = 0
+  try {
+    // Read a Beacon Block from the era file
+    const blockEntry = readEntry(
+      eraFile.slice(indices.blockSlotIndex!.recordStart + indices.blockSlotIndex!.slotOffsets[x]),
    )
-
-    const bootstrapSlot = finalityUpdate.finalizedHeader.beacon.slot
-    const bootstrapRes = (
-        await (await fetch(beaconNode + `/eth/v1/beacon/blocks/${bootstrapSlot}/root`)).json()
-    )
-
-    const bootstrapRoot = bootstrapRes.data.root
-    const bootstrap = ssz.deneb.LightClientBootstrap.fromJson(
-        (
-            await (
-                await fetch(beaconNode + `/eth/v1/beacon/light_client/bootstrap/${bootstrapRoot}`)
-            ).json()
-        ).data,
-    )
-
-    const forkName = forkConfig.getForkName(bootstrapSlot)
-    const forkDigest = createBeaconConfig(beaconConfig, hexToBytes(genesisData.mainnet.genesisValidatorsRoot)).forkName2ForkDigest(forkName)
-    console.log(
-        `Retrieved bootstrap for finalized checkpoint ${bootstrapRoot}`,
-    )
-
-    // Push the bootstrap into the Portal Network
-    let res = await ultralight.request('portal_beaconStore', [
-        bytesToHex(getBeaconContentKey(
-            BeaconLightClientNetworkContentType.LightClientBootstrap,
-            LightClientBootstrapKey.serialize({ blockHash: hexToBytes(bootstrapRoot) }),
-        )),
-        bytesToHex(
-            concatBytes(forkDigest, ssz[forkName].LightClientBootstrap.serialize(bootstrap)),
-        ),
-    ])
-    console.log('Pushed bootstrap into Portal Network', res)
-
-    // Star the light client using the bootstrap slot's block root
-    res = await ultralight.request('portal_beaconStartLightClient', [
-        bootstrapRoot
+    const block = await decompressBeaconBlock(blockEntry.data, indices.blockSlotIndex!.startSlot)
+    const blockFork = ForkName.deneb
+    // Retrieve the full Beacon Block object from the Beacon node since the era files don't contain
+    // the Execution Payload
+    const fullBlockJson = await (
+      await fetch(beaconNode + `/eth/v2/beacon/blocks/${block.message.slot}`)
+    ).json()
+
+    const fullBlock = sszTypesFor(blockFork).BeaconBlock.fromJson(fullBlockJson.data.message)
+
+    // Build the Beacon Block Proof that anchors the EL block hash in the Beacon Block
+    const elBlockHashPath = 
ssz[blockFork].BeaconBlock.getPathInfo([ + 'body', + 'executionPayload', + 'blockHash', ]) - console.log('Started Beacon Light Client Sync', res) - - // Push the latest optimistic update so the light client is synced (maybe not necessary) - res = await ultralight.request('portal_beaconStore', [ - bytesToHex(optimisticUpdateKey), - bytesToHex( - concatBytes( - forkDigest, - ssz.deneb.LightClientOptimisticUpdate.serialize(optimisticUpdate), - ), - ), - ]) - - // Retrieve the historical summaries at the bootstrap/finality update slot - console.log('Retrieving latest historical summaries...') - const res2 = await fetch(beaconNode + `/eth/v1/lodestar/historical_summaries/${finalityUpdate.finalizedHeader.beacon.slot}`) - const res2Json = await res2.json() - - const historicalSummaries = ssz.deneb.BeaconState.fields.historicalSummaries.fromJson(res2Json.data.historical_summaries) - const finalityEpoch = computeEpochAtSlot(finalityUpdate.finalizedHeader.beacon.slot) - const proof = res2Json.data.proof.map((el) => hexToBytes(el)) - // Push the historical summaries into the Portal Network - // Note - Ultralight should be able to verify the historical summaries using the proof from the Beacon node + const beaconBlockProof = createProof(ssz[blockFork].BeaconBlock.toView(fullBlock).node, { + gindex: elBlockHashPath.gindex, + type: ProofType.single, + }) as SingleProof - res = await ultralight.request('portal_beaconStore', - [bytesToHex(getBeaconContentKey(BeaconLightClientNetworkContentType.HistoricalSummaries, HistoricalSummariesKey.serialize({ epoch: BigInt(finalityEpoch) }))), - bytesToHex(concatBytes(forkDigest, HistoricalSummariesWithProof.serialize({ epoch: BigInt(finalityEpoch), historicalSummaries, proof })))]) - - // Now we have a synced light client so should be able to verify post capella blocks (as long as they are not from the current sync period - - // In order to construct post Capella block proofs, we need to get the Historical Summary for the sync period we are serving - // blocks from. We can get these Historical Summaries from an era file for that sync period by reading the beacon state snapshot - // pulling the `BlockRoots` from the `BeaconState` object. 
The root of this object will match the `block_summary_root` index of - // the Historical Summaries object we retrieved from the Beacon node + // Build a proof that anchors the Beacon Block root in the Historical Summary for the sync period + const batchIndex = Number(slotToHistoricalBatchIndex(BigInt(block.message.slot))) + const historicalSummariesPath = ssz[stateFork].BeaconState.fields.blockRoots.getPathInfo([ + batchIndex, + ]) - // NOTE: You can any era file from post capella to construct header proofs for (provided the beacon blocks are available from a beacon node) - console.log(`Reading era file for period ${1320}`) - const eraFile = new Uint8Array(readFileSync(`./scripts/eras/mainnet-01320-59f1c8c0.era`)) - const indices = getEraIndexes(eraFile) - const stateEntry = readEntry( - eraFile.slice(indices.stateSlotIndex.recordStart + indices.stateSlotIndex.slotOffsets[0]), + const blockRootsProof = createProof( + ssz[stateFork].BeaconState.fields.blockRoots.toView(state.blockRoots).node, + { + gindex: historicalSummariesPath.gindex, + type: ProofType.single, + }, + ) as SingleProof + + // Construct the aggregate proof + const blockProof = HistoricalSummariesBlockProof.fromJson({ + slot: block.message.slot, + historicalSummariesProof: blockRootsProof.witnesses.map((witness) => bytesToHex(witness)), + beaconBlockProof: beaconBlockProof.witnesses.map((witness) => bytesToHex(witness)), + beaconBlockRoot: bytesToHex(ssz[blockFork].BeaconBlock.value_toTree(fullBlock).root), + }) + + // Hackery to allow us to construct an EL block header from the Beacon Block data + // TODO: Get rid of this once we update ethjs to latest releases + const common = new Common({ + chain: 'mainnet', + hardfork: 'cancun', + customCrypto: { + kzg: { + loadTrustedSetup: async () => { + return await loadKZG() + }, + blobToKzgCommitment: (blob) => { + return hexToBytes(kzg.blobToKZGCommitment(bytesToHex(blob))) + }, + computeBlobKzgProof: (blob, commitment) => { + return hexToBytes(kzg.computeBlobKZGProof(bytesToHex(blob), bytesToHex(commitment))) + }, + verifyBlobKzgProofBatch: (blobs, commitments, proof) => { + return kzg.verifyBlobKZGProofBatch( + blobs.map((blob) => bytesToHex(blob)), + commitments.map((commitment) => bytesToHex(commitment)), + proof.map((proof) => bytesToHex(proof)), + ) + }, + verifyKzgProof: (blob, z, y, proof) => { + return kzg.verifyKZGProof( + bytesToHex(blob), + bytesToHex(z), + bytesToHex(y), + bytesToHex(proof), + ) + }, + }, + }, + }) + const execPayload = executionPayloadFromBeaconPayload( + fullBlockJson.data.message.body.execution_payload, ) - const state = await decompressBeaconState(stateEntry.data, indices.stateSlotIndex.startSlot) - const stateFork = forkConfig.getForkName(indices.stateSlotIndex.startSlot) - - // Now we can construct block proofs for any block in the sync period - const x = 0 - try { - - // Read a Beacon Block from the era file - const blockEntry = readEntry(eraFile.slice(indices.blockSlotIndex!.recordStart + indices.blockSlotIndex!.slotOffsets[x])) - const block = await decompressBeaconBlock(blockEntry.data, indices.blockSlotIndex!.startSlot) - const blockFork = ForkName.deneb - // Retrieve the full Beacon Block object from the Beacon node since the era files don't contain - // the Execution Payload - const fullBlockJson = await (await fetch(beaconNode + `/eth/v2/beacon/blocks/${block.message.slot}`)).json() - - const fullBlock = sszTypesFor(blockFork).BeaconBlock.fromJson(fullBlockJson.data.message) - - // Build the Beacon Block Proof that anchors the EL block 
hash in the Beacon Block - const elBlockHashPath = ssz[blockFork].BeaconBlock.getPathInfo([ - 'body', - 'executionPayload', - 'blockHash', - ]) - - const beaconBlockProof = createProof(ssz[blockFork].BeaconBlock.toView(fullBlock).node, { - gindex: elBlockHashPath.gindex, - type: ProofType.single, - }) as SingleProof - - // Build a proof that anchors the Beacon Block root in the Historical Summary for the sync period - const batchIndex = Number(slotToHistoricalBatchIndex(BigInt(block.message.slot))) - const historicalSummariesPath = ssz[stateFork].BeaconState.fields.blockRoots.getPathInfo([batchIndex]) - - const blockRootsProof = createProof(ssz[stateFork].BeaconState.fields.blockRoots.toView(state.blockRoots).node, { - gindex: historicalSummariesPath.gindex, - type: ProofType.single, - }) as SingleProof - - - // Construct the aggregate proof - const blockProof = HistoricalSummariesBlockProof.fromJson({ - slot: block.message.slot, - historicalSummariesProof: blockRootsProof.witnesses.map((witness) => bytesToHex(witness)), - beaconBlockProof: beaconBlockProof.witnesses.map((witness) => bytesToHex(witness)), - beaconBlockRoot: bytesToHex(ssz[blockFork].BeaconBlock.value_toTree(fullBlock).root), - }) - - // Hackery to allow us to construct an EL block header from the Beacon Block data - // TODO: Get rid of this once we update ethjs to latest releases - const common = new Common({ - chain: 'mainnet', hardfork: 'cancun', customCrypto: { - kzg: { - loadTrustedSetup: async () => { - return await loadKZG() - }, - blobToKzgCommitment: (blob) => { - return hexToBytes(kzg.blobToKZGCommitment(bytesToHex(blob))) - }, - computeBlobKzgProof: (blob, commitment) => { - return hexToBytes(kzg.computeBlobKZGProof(bytesToHex(blob), bytesToHex(commitment))) - }, - verifyBlobKzgProofBatch: (blobs, commitments, proof) => { - return kzg.verifyBlobKZGProofBatch(blobs.map((blob) => bytesToHex(blob)), commitments.map((commitment) => bytesToHex(commitment)), proof.map((proof) => bytesToHex(proof))) - }, - verifyKzgProof: (blob, z, y, proof) => { - return kzg.verifyKZGProof(bytesToHex(blob), bytesToHex(z), bytesToHex(y), bytesToHex(proof)) - } - } - } - }) - const execPayload = executionPayloadFromBeaconPayload(fullBlockJson.data.message.body.execution_payload) - execPayload.parentBeaconBlockRoot = bytesToHex(fullBlock.parentRoot) - const elBlock = await Block.fromExecutionPayload(execPayload, { common, setHardfork: true }) - const header = elBlock.header - const headerWithProof = BlockHeaderWithProof.serialize({ - header: header.serialize(), - proof: { - value: blockProof, - selector: 3 - } - }) - - // Store the EL block header in the Portal Network - res = await ultralight.request('portal_historyStore', [ - bytesToHex(getContentKey(HistoryNetworkContentType.BlockHeader, header.hash())), - bytesToHex(headerWithProof) - ]) - console.log(res) - - res = await ultralight.request('eth_getBlockByHash', [execPayload.blockHash, false]) - console.log('Retrieved block', execPayload.blockHash, res) - - process.exit(0) - } catch (err) { - console.log(err) - } + execPayload.parentBeaconBlockRoot = bytesToHex(fullBlock.parentRoot) + const elBlock = await Block.fromExecutionPayload(execPayload, { common, setHardfork: true }) + const header = elBlock.header + const headerWithProof = BlockHeaderWithProof.serialize({ + header: header.serialize(), + proof: HistoricalSummariesBlockProof.serialize(blockProof), + }) + + console.log(bytesToHex(headerWithProof)) + // Store the EL block header in the Portal Network + res = await 
ultralight.request('portal_historyStore', [
+      bytesToHex(getContentKey(HistoryNetworkContentType.BlockHeader, header.hash())),
+      bytesToHex(headerWithProof),
+    ])
+    console.log(res)
+    res = await ultralight.request('eth_getBlockByHash', [execPayload.blockHash, false])
+    console.log('Retrieved block', execPayload.blockHash, res)
+    process.exit(0)
+  } catch (err) {
+    console.log(err)
+  }
 }
 
 main().catch((err) => {
-    console.log('caught error', err)
-    process.exit(0)
+  console.log('caught error', err)
+  process.exit(0)
 })
diff --git a/packages/portal-spec-tests b/packages/portal-spec-tests
index eb0882356..628b5deec 160000
--- a/packages/portal-spec-tests
+++ b/packages/portal-spec-tests
@@ -1 +1 @@
-Subproject commit eb08823561e46dcb26d025bbc7b5112668655e0b
+Subproject commit 628b5deec6db682ec3220f5d7057710a316dc03a
diff --git a/packages/portalnetwork/src/networks/history/history.ts b/packages/portalnetwork/src/networks/history/history.ts
index 02f66bd04..343c641ab 100644
--- a/packages/portalnetwork/src/networks/history/history.ts
+++ b/packages/portalnetwork/src/networks/history/history.ts
@@ -176,8 +176,9 @@ export class HistoryNetwork extends BaseNetwork {
       try {
         deserializedProof = AccumulatorProofType.deserialize(proof)
       } catch (err: any) {
-        this.logger(`invalid proof for block ${bytesToHex(header.hash())}`)
-        throw new Error(`invalid proof for block ${bytesToHex(header.hash())}`)
+        const msg = `invalid proof for block ${header.number} - ${bytesToHex(header.hash())}`
+        this.logger(msg)
+        throw new Error(msg)
       }
       let validated = false
       if ('blockHash' in validation) {
@@ -201,14 +202,17 @@
       try {
         deserializedProof = HistoricalRootsBlockProof.deserialize(proof)
       } catch (err: any) {
-        this.logger(`invalid proof for block ${bytesToHex(header.hash())}`)
-        throw new Error(`invalid proof for block ${bytesToHex(header.hash())}`)
+        const msg = `invalid proof for block ${header.number} - ${bytesToHex(header.hash())}`
+        this.logger(msg)
+        throw new Error(msg)
       }
       let validated = false
       try {
         validated = verifyPreCapellaHeaderProof(deserializedProof, header.hash())
       } catch (err: any) {
-        this.logger(`Unable to validate proof for post-merge header: ${err.message}`)
+        const msg = `Unable to validate proof for post-merge header: ${err.message}`
+        this.logger(msg)
+        throw new Error(msg)
       }
       if (!validated) {
         throw new Error('Unable to validate proof for post-merge header')
@@ -219,6 +223,7 @@
       let deserializedProof: ReturnType<typeof HistoricalSummariesBlockProof.deserialize>
       try {
         deserializedProof = HistoricalSummariesBlockProof.deserialize(proof)
+        console.log(HistoricalSummariesBlockProof.toJson(deserializedProof))
       } catch (err: any) {
         this.logger(`invalid proof for block ${bytesToHex(header.hash())}`)
         throw new Error(`invalid proof for block ${bytesToHex(header.hash())}`)
diff --git a/packages/portalnetwork/src/networks/history/types.ts b/packages/portalnetwork/src/networks/history/types.ts
index 60ed8f32e..77b99144e 100644
--- a/packages/portalnetwork/src/networks/history/types.ts
+++ b/packages/portalnetwork/src/networks/history/types.ts
@@ -185,23 +185,23 @@ export const BlockNumberKey = new ContainerType({
 
 /** Post-merge pre-Capella block header proof types */
 export const SlotType = new UintBigintType(8)
-export const BeaconBlockProof = new ListCompositeType(Bytes32Type, 12)
-export const HistoricalRootsProof = new VectorCompositeType(Bytes32Type, 14)
+export const BeaconBlockProofHistoricalRoots = new VectorCompositeType(Bytes32Type, 14)
+export const 
PostMergeExecutionBlockProof = new VectorCompositeType(Bytes32Type, 11) export const HistoricalRootsBlockProof = new ContainerType({ - beaconBlockProof: BeaconBlockProof, + historicalRootsProof: BeaconBlockProofHistoricalRoots, beaconBlockRoot: Bytes32Type, - historicalRootsProof: HistoricalRootsProof, + beaconBlockProof: PostMergeExecutionBlockProof, slot: SlotType, }) /** Post-Capella block header proof types */ -export const HistoricalSummariesProof = new VectorCompositeType(Bytes32Type, 13) - +export const PostCapellaExecutionBlockProof = new ListCompositeType(Bytes32Type, 12) +export const BeaconBlockProofHistoricalSummaries = new VectorCompositeType(Bytes32Type, 13) export const HistoricalSummariesBlockProof = new ContainerType({ - beaconBlockProof: BeaconBlockProof, + historicalSummariesProof: BeaconBlockProofHistoricalSummaries, beaconBlockRoot: Bytes32Type, - historicalSummariesProof: HistoricalSummariesProof, + beaconBlockProof: PostCapellaExecutionBlockProof, slot: SlotType, }) diff --git a/packages/portalnetwork/test/integration/postCapellaHeaderProof.spec.ts b/packages/portalnetwork/test/integration/postCapellaHeaderProof.spec.ts index 1bc01180e..5eb82fc88 100644 --- a/packages/portalnetwork/test/integration/postCapellaHeaderProof.spec.ts +++ b/packages/portalnetwork/test/integration/postCapellaHeaderProof.spec.ts @@ -1,9 +1,16 @@ -import { readFileSync } from 'fs' +import { SignableENR } from '@chainsafe/enr' +import { concatBytes, hexToBytes } from '@ethereumjs/util' +import { keys } from '@libp2p/crypto' +import { createBeaconConfig } from '@lodestar/config' +import { mainnetChainConfig } from '@lodestar/config/configs' +import { genesisData } from '@lodestar/config/networks' +import { computeEpochAtSlot, getChainForkConfigFromNetwork } from '@lodestar/light-client/utils' import { ssz } from '@lodestar/types' -import { bytesToHex, concatBytes, hexToBytes } from '@ethereumjs/util' +import { multiaddr } from '@multiformats/multiaddr' +import { readFileSync } from 'fs' +import { assert, describe, it, vi } from 'vitest' import { BeaconLightClientNetworkContentType, - BlockHeaderWithProof, HistoricalSummariesKey, HistoricalSummariesWithProof, HistoryNetworkContentType, @@ -11,16 +18,8 @@ import { NetworkId, PortalNetwork, getBeaconContentKey, - getContentKey, + getContentKey } from '../../src/index.js' -import { createBeaconConfig } from '@lodestar/config' -import { mainnetChainConfig } from '@lodestar/config/configs' -import { genesisData } from '@lodestar/config/networks' -import { computeEpochAtSlot, getChainForkConfigFromNetwork } from '@lodestar/light-client/utils' -import { assert, describe, it, vi } from 'vitest' -import { multiaddr } from '@multiformats/multiaddr' -import { SignableENR } from '@chainsafe/enr' -import { keys } from '@libp2p/crypto' describe('Block Bridge Data Test', () => { it('should store and retrieve block header data', async () => { @@ -109,7 +108,6 @@ describe('Block Bridge Data Test', () => { const blockHash = fullBlock.data.message.body.execution_payload.block_hash const headerKey = getContentKey(HistoryNetworkContentType.BlockHeader, hexToBytes(blockHash)) - console.log(bytesToHex(BlockHeaderWithProof.deserialize(hexToBytes(headerWithProof)).header)) await history?.store(headerKey, hexToBytes(headerWithProof)) // Verify block header can be retrieved diff --git a/packages/portalnetwork/test/integration/testdata/postCapellaData/header_with_proof.json b/packages/portalnetwork/test/integration/testdata/postCapellaData/header_with_proof.json index 
dd7c7a334..03edc6c76 100644 --- a/packages/portalnetwork/test/integration/testdata/postCapellaData/header_with_proof.json +++ b/packages/portalnetwork/test/integration/testdata/postCapellaData/header_with_proof.json @@ -1 +1 @@ -"0x080000006f020000f90264a01624e3d62872568e41233178997c38dd75fa3baccc89351026beff7026179bfba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347944838b106fce9647bdf1e7877bf73ce8b0bad5f97a02e065520386645261b7a6153924904a31c07bf6d37bca90fe35b21d864499c05a0eaa4d1ba5182915e8732ddd43e557a6ec713732e72964593275124fddf28a2aba0a9f528fe24151b34969ceb581a59f107d7bf38d8f364072dd50908f326226e71b90100f5bbd9423d137076afdb9b31d2f1db9239cff06e951d0dee6e2d8a302cfb14632d9c2725ed7a08043a5f6e97df7dc311867ba9749f08ed735fbb6a03802ce196f5eed77c97d3bd3dc81ce76fdd61b3ec55cc71d511567e685d0c2cec8c24ffee35856338dae6da75f20f9c8efe338fcffb7d515fde9d7fe7f5305bb4da1a3585a3be7e0e378581692dd9bbfa95b1c7565419eed5eff471ff29296fdd7d733efddfec7f62e4e3fa9eb3f159c609c7dd5f23bdd1b4baff8e283f65ea3d24680bd4cb36f61f00dd5b56ff49bdb77a3d98ee56a7b6438c48dfba4aa5034e6ad3e2e63ff7fccb69b5b492c5ed93cffd71a92e5dddf6ff5161ace6da24c0bb9d00ec5f8084014977bd8401c9c38083f94439846780b05798546974616e2028746974616e6275696c6465722e78797a29a07737f9743d49ab14e32b78a4989729b13bfd28f09c7729669d14c35a3e265d5288000000000000000084bc232c85a01fd1ee2a1b4008ac9f8749a3a5980645510788ccbf0922b77efd67224f8a2234830200008403cc0000a0e1c45ed325063d2380fb27927e7dd5024dada52070c9af2889ee51a105040bcfcc0100003a317faa00ffbca7c1fc9464025ada6aa0db71fd9c050ed433bab104834f0f26c2c38585972af1a49f5358e96748e08f62d9e06883d6d3bbc285f5bbdc3fdde7b87b8f3009f2311f723310e9600cd62f3a0ba54257645d03dbb94700997c2ff1bea5a1dbb8434febb2338912ebd94cfb183cc76b79a1449374aca82445a60e1641d6c37404381727480db4e3e124c9ce9345eb3761882ba31a2444e19f7f4be6446d9460a465986305984f731af89318ee7a965c33d5f6e293bd7c77050a7275d50d2f96d67ae9ec187ef09d1c19c8688ad0a9c00675a55bf61e5e412d5b0b309b9b4a4f9baaa4e6d6cd518eb62f41a133594cab49cf290ffa3570b65376b9dab2632457193d929218ee57d4d2dc8df537f9e2929746d934595d00ff258485173737414bac51a48ca8e03d442cca16a9d9d485ff0447a52427aada4c790ac75fdb8c75ab9503bd1eb67bfb5e2f8f403654b57a1e206aac23546088f1d7284b27d0fc56523cfafc7fb5d233f5faee241365fa4cb0c1d90792d8b558a3d0d5a63121993447a9a5d26cda2d9ad060c8d13f66d84623cc800e3dbb00097ce81a4debe6e5bf0ec7b02915aefabb6e7b1fb8468f228ab04e8e789a26ad9d727ac8430400e0a40000000000f5a23c1219f35dd7d4dc73e816727b6062b26168d5de32a8c44a5e91eeca9f1e04a8748db8fd9595375edfe16660a66ca721079109bd18775eeb54ce07b1400f37d1c9f9c839443ae1aff0d273f1801ffe724770a23d81a63f22f7babe39715d5b5687af2260ef0ed892a68d169c580b03ccb5bc59b0596a4221568de8bf1c7091e37807ec7706f1534c3c24b6664110fa6cf57c3512bb58d4ae666eb8aaed347bdd0fcc693ae5642a10264eb24e1274823f434d66307d9c1cd3c15be194a462b19dea48b339562bd54eac0517d81d7c1d6897ccbf3df258cb75677a235768dfdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d7134fd95b5494fd350c63f8bff9d7be30f0bfb3a78daae9f2a0f1ef38f358fd6010000000000000000000000000000000000000000000000000000000000000000f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b6e1e55e9e4793d44338f52522fd838eb70bd3ac0bb6492923efa3cabf6f5cae3" \ No newline at end of file 
+"0x080000006f020000f90264a01624e3d62872568e41233178997c38dd75fa3baccc89351026beff7026179bfba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347944838b106fce9647bdf1e7877bf73ce8b0bad5f97a02e065520386645261b7a6153924904a31c07bf6d37bca90fe35b21d864499c05a0eaa4d1ba5182915e8732ddd43e557a6ec713732e72964593275124fddf28a2aba0a9f528fe24151b34969ceb581a59f107d7bf38d8f364072dd50908f326226e71b90100f5bbd9423d137076afdb9b31d2f1db9239cff06e951d0dee6e2d8a302cfb14632d9c2725ed7a08043a5f6e97df7dc311867ba9749f08ed735fbb6a03802ce196f5eed77c97d3bd3dc81ce76fdd61b3ec55cc71d511567e685d0c2cec8c24ffee35856338dae6da75f20f9c8efe338fcffb7d515fde9d7fe7f5305bb4da1a3585a3be7e0e378581692dd9bbfa95b1c7565419eed5eff471ff29296fdd7d733efddfec7f62e4e3fa9eb3f159c609c7dd5f23bdd1b4baff8e283f65ea3d24680bd4cb36f61f00dd5b56ff49bdb77a3d98ee56a7b6438c48dfba4aa5034e6ad3e2e63ff7fccb69b5b492c5ed93cffd71a92e5dddf6ff5161ace6da24c0bb9d00ec5f8084014977bd8401c9c38083f94439846780b05798546974616e2028746974616e6275696c6465722e78797a29a07737f9743d49ab14e32b78a4989729b13bfd28f09c7729669d14c35a3e265d5288000000000000000084bc232c85a01fd1ee2a1b4008ac9f8749a3a5980645510788ccbf0922b77efd67224f8a2234830200008403cc0000a0e1c45ed325063d2380fb27927e7dd5024dada52070c9af2889ee51a105040bcfc2c38585972af1a49f5358e96748e08f62d9e06883d6d3bbc285f5bbdc3fdde7b87b8f3009f2311f723310e9600cd62f3a0ba54257645d03dbb94700997c2ff1bea5a1dbb8434febb2338912ebd94cfb183cc76b79a1449374aca82445a60e1641d6c37404381727480db4e3e124c9ce9345eb3761882ba31a2444e19f7f4be6446d9460a465986305984f731af89318ee7a965c33d5f6e293bd7c77050a7275d50d2f96d67ae9ec187ef09d1c19c8688ad0a9c00675a55bf61e5e412d5b0b309b9b4a4f9baaa4e6d6cd518eb62f41a133594cab49cf290ffa3570b65376b9dab2632457193d929218ee57d4d2dc8df537f9e2929746d934595d00ff258485173737414bac51a48ca8e03d442cca16a9d9d485ff0447a52427aada4c790ac75fdb8c75ab9503bd1eb67bfb5e2f8f403654b57a1e206aac23546088f1d7284b27d0fc56523cfafc7fb5d233f5faee241365fa4cb0c1d90792d8b558a3d0d5a63121993447a9a5d26cda2d9ad060c8d13f66d84623cc800e3dbb00097ce81a4debe6e5bf0ec7b02915aefabb6e7b1fb8468f228ab04e8e789a26ad9d727ac843043a317faa00ffbca7c1fc9464025ada6aa0db71fd9c050ed433bab104834f0f26cc01000000e0a40000000000f5a23c1219f35dd7d4dc73e816727b6062b26168d5de32a8c44a5e91eeca9f1e04a8748db8fd9595375edfe16660a66ca721079109bd18775eeb54ce07b1400f37d1c9f9c839443ae1aff0d273f1801ffe724770a23d81a63f22f7babe39715d5b5687af2260ef0ed892a68d169c580b03ccb5bc59b0596a4221568de8bf1c7091e37807ec7706f1534c3c24b6664110fa6cf57c3512bb58d4ae666eb8aaed347bdd0fcc693ae5642a10264eb24e1274823f434d66307d9c1cd3c15be194a462b19dea48b339562bd54eac0517d81d7c1d6897ccbf3df258cb75677a235768dfdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d7134fd95b5494fd350c63f8bff9d7be30f0bfb3a78daae9f2a0f1ef38f358fd6010000000000000000000000000000000000000000000000000000000000000000f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b6e1e55e9e4793d44338f52522fd838eb70bd3ac0bb6492923efa3cabf6f5cae3" \ No newline at end of file diff --git a/packages/portalnetwork/test/networks/history/historyNetwork.spec.ts b/packages/portalnetwork/test/networks/history/historyNetwork.spec.ts index 052f02256..613d24fba 100644 --- a/packages/portalnetwork/test/networks/history/historyNetwork.spec.ts +++ b/packages/portalnetwork/test/networks/history/historyNetwork.spec.ts @@ -206,7 +206,7 @@ describe('Header Tests', async () => { await network.store(headerKey, fakeProof) assert.fail('should have thrown') } catch (err: any) { - assert.ok(err.message.includes('invalid proof')) + assert.ok(err.message.includes('Unable to 
validate proof'))
     }
   })
 })
diff --git a/packages/portalnetwork/test/networks/history/spec-test-runner.spec.ts b/packages/portalnetwork/test/networks/history/spec-test-runner.spec.ts
new file mode 100644
index 000000000..2a37da64f
--- /dev/null
+++ b/packages/portalnetwork/test/networks/history/spec-test-runner.spec.ts
@@ -0,0 +1,171 @@
+/* eslint-disable no-console */
+import { bytesToHex, hexToBytes } from '@ethereumjs/util'
+import { readFileSync, readdirSync, statSync } from 'fs'
+import yaml from 'js-yaml'
+import { join, resolve } from 'path'
+import { afterAll, beforeAll, describe, it } from 'vitest'
+import type { HistoryNetwork } from '../../../src/index.js'
+import {
+  HistoryNetworkContentType,
+  PortalNetwork,
+  decodeHistoryNetworkContentKey,
+  getContentKey,
+} from '../../../src/index.js'
+
+describe.skip('should run all spec tests', () => {
+  // This retrieves all the yaml files from the spec tests directory
+  const getAllYamlFiles = (dir: string): string[] => {
+    const files: string[] = []
+    const items = readdirSync(dir)
+
+    for (const item of items) {
+      const fullPath = join(dir, item)
+      if (statSync(fullPath).isDirectory()) {
+        files.push(...getAllYamlFiles(fullPath))
+      } else if (item.endsWith('.yaml') || item.endsWith('.yml')) {
+        files.push(fullPath)
+      }
+    }
+
+    return files
+  }
+
+  const runHistoryTest = async (
+    history: HistoryNetwork,
+    contentKey: Uint8Array,
+    contentValue: Uint8Array,
+  ) => {
+    try {
+      // Store the content. `store` parses the content key, deserializes per the content type,
+      // and then validates the content
+      await history?.store(contentKey, contentValue)
+      if (contentKey[0] !== HistoryNetworkContentType.BlockHeaderByNumber) {
+        const retrieved = await history?.get(contentKey)
+        if (retrieved === bytesToHex(contentValue)) {
+          return true
+        } else {
+          return false
+        }
+      } else {
+        // BlockHeaderByNumber requires a conversion to blockhash since we store headers by blockhash in the db
+        const blockNumber = decodeHistoryNetworkContentKey(contentKey)
+        const hash = history?.blockNumberToHash(blockNumber.keyOpt as bigint)
+        const hashKey = getContentKey(HistoryNetworkContentType.BlockHeader, hash!)
+        const retrieved = await history?.get(hashKey)
+        if (retrieved === bytesToHex(contentValue)) {
+          return true
+        } else {
+          return false
+        }
+      }
+    } catch (e) {
+      if (e instanceof Error) {
+        // If we get an error, return it for triage
+        return e
+      } else {
+        return false
+      }
+    }
+  }
+
+  const networkFiles = {
+    history: {},
+    state: {},
+    beacon_chain: {},
+  }
+
+  const results = {
+    history: {
+      passed: 0,
+      failed: 0,
+      errors: [] as string[],
+    },
+    state: {
+      passed: 0,
+      failed: 0,
+      errors: [] as string[],
+    },
+    beacon_chain: {
+      passed: 0,
+      failed: 0,
+      errors: [] as string[],
+    },
+  }
+
+  let yamlFiles: string[] = []
+  beforeAll(() => {
+    // Parses all yaml files into JSON objects
+    const testDir = resolve(__dirname, '../../../../portal-spec-tests/tests')
+    yamlFiles = getAllYamlFiles(testDir)
+
+    for (const file of yamlFiles) {
+      try {
+        const content = yaml.load(readFileSync(file, 'utf-8'))
+        // Split test suites up by network
+        if (file.includes('/history/')) {
+          networkFiles.history[file] = content
+        } else if (file.includes('/state/')) {
+          networkFiles.state[file] = content
+        } else if (file.includes('/beacon_chain/')) {
+          networkFiles.beacon_chain[file] = content
+        }
+      } catch (error) {
+        console.error(`Error reading ${file}:`, error)
+      }
+    }
+  })
+  it('should run all serialized history spec tests', async () => {
+    // This test inspects all the `history` test inputs and runs all the ones
+    // with serialized content keys and values
+    // The basic idea of the test is: can we deserialize the content, store it,
+    // and then retrieve it using the original content key?
+    const client = await PortalNetwork.create({})
+    const history = client.network()['0x500b']!
+    for (const testData of Object.entries(networkFiles.history)) {
+      // Some test vectors are parsed into a tuple of [file name, [test vectors]]
+      if (Array.isArray(testData) && Array.isArray(testData[1])) {
+        for (const vector of testData[1]) {
+          if ('content_key' in vector && 'content_value' in vector) {
+            const key = hexToBytes(vector.content_key)
+            const value = hexToBytes(vector.content_value)
+            const result = await runHistoryTest(history, key, value)
+            if (result === true) {
+              results.history.passed++
+            } else {
+              results.history.failed++
+              results.history.errors.push(
+                `Key: ${bytesToHex(key)} in file ${testData[0]} -- Error: ${result || 'no error reported'}`,
+              )
+            }
+          }
+        }
+      } else if (
+        Array.isArray(testData) &&
+        'content_key' in testData[1] &&
+        'content_value' in testData[1]
+      ) {
+        // Some tests are stored as a tuple of [file name, test vector]
+        const key = hexToBytes(testData[1].content_key as string) // Content key is stored as a hex string
+        const value = hexToBytes(testData[1].content_value as string) // Content value is stored as a hex string
+        const result = await runHistoryTest(history, key, value)
+        if (result === true) {
+          results.history.passed++
+        } else {
+          results.history.failed++
+          if (typeof result !== 'boolean') {
+            results.history.errors.push(
+              `Key: ${bytesToHex(key)} in file ${testData[0]} -- ${result}`,
+            )
+          }
+        }
+      }
+    }
+  })
+  afterAll(() => {
+    console.log('--------------------------------')
+    console.log('History Results')
+    console.log('--------------------------------')
+    console.log(results.history)
+    console.log('--------------------------------')
+  })
+})
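
Reviewer note (not part of the diff): the field reshuffle of `HistoricalSummariesBlockProof` in types.ts, together with the bridge script's switch from a selector-prefixed union to `HistoricalSummariesBlockProof.serialize(blockProof)`, changes how a stored header-with-proof decodes. The following is a minimal sketch of round-tripping the updated `header_with_proof.json` fixture through the reworked containers. It assumes `BlockHeaderWithProof`'s `proof` field is now an opaque byte list (that container change itself is not shown in this diff, only implied by the bridge script), and that the script runs from the repo root.

// Sketch: decode the updated post-Capella test vector with the reworked containers.
// Field names and vector/list limits follow types.ts above; treat this as illustrative.
import { readFileSync } from 'fs'
import { bytesToHex, hexToBytes } from '@ethereumjs/util'
import { BlockHeaderWithProof, HistoricalSummariesBlockProof } from 'portalnetwork'

// The fixture is a JSON-encoded hex string of a serialized BlockHeaderWithProof
const fixtureHex: string = JSON.parse(
  readFileSync(
    './packages/portalnetwork/test/integration/testdata/postCapellaData/header_with_proof.json',
    'utf-8',
  ),
)

const { header, proof } = BlockHeaderWithProof.deserialize(hexToBytes(fixtureHex))
console.log('RLP-encoded EL header:', bytesToHex(header))

// With this diff, `proof` carries the raw serialized HistoricalSummariesBlockProof
// (historicalSummariesProof: 13 witnesses, beaconBlockRoot, beaconBlockProof: up to
// 12 witnesses, slot) rather than a selector-prefixed union, so it deserializes directly:
const blockProof = HistoricalSummariesBlockProof.deserialize(proof)
console.log('slot:', blockProof.slot)
console.log('beaconBlockRoot:', bytesToHex(blockProof.beaconBlockRoot))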