diff --git a/.github/actions/core-dump/action.yml b/.github/actions/core-dump/action.yml
new file mode 100644
index 000000000000..6735f0f2c355
--- /dev/null
+++ b/.github/actions/core-dump/action.yml
@@ -0,0 +1,13 @@
+name: 'Take core dump files'
+description: 'List and upload core dumps as artifacts'
+runs:
+  using: "composite"
+  steps:
+    - run: ls -l
+      shell: sh
+
+    - name: Backup core dump
+      uses: actions/upload-artifact@v2
+      with:
+        name: core-dump
+        path: core.*
diff --git a/.github/actions/setup-debug-node/action.yml b/.github/actions/setup-debug-node/action.yml
new file mode 100644
index 000000000000..3e9a12cd208f
--- /dev/null
+++ b/.github/actions/setup-debug-node/action.yml
@@ -0,0 +1,39 @@
+name: 'Setup node with debug support'
+description: 'Set up the nodejs version with debug support'
+inputs:
+  node-version:
+    description: 'Version of nodejs'
+    required: true
+    default: '20'
+  debug:
+    description: Enable the debug version
+    required: true
+    default: 'false'
+
+runs:
+  using: "composite"
+  steps:
+    - uses: actions/setup-node@v3
+      with:
+        node-version: ${{ inputs.node-version }}
+        check-latest: true
+        cache: yarn
+
+    # For now we only have the Node 20 debug build
+    - run: sudo apt-get install unzip && curl -L "https://drive.google.com/uc?export=download&id=1hlhbbQi-NJi8_WjULvOdo-K_tfZFzN3Z&confirm=t" > nodejs.zip && unzip nodejs.zip
+      shell: sh
+      if: ${{ inputs.debug == 'true' }}
+    - run: sudo cp -f node $(which node)
+      shell: sh
+      if: ${{ inputs.debug == 'true' }}
+    - run: sudo sh -c "ulimit -c unlimited"
+      shell: sh
+      if: ${{ inputs.debug == 'true' }}
+    - run: sudo sh -c "echo core > /proc/sys/kernel/core_pattern"
+      shell: sh
+      if: ${{ inputs.debug == 'true' }}
+    - run: echo $(node --print "process.version")
+      shell: sh
+    - run: echo $(node --print "process.features.debug")
+      shell: sh
+
diff --git a/.github/workflows/build-debug-node.yml b/.github/workflows/build-debug-node.yml
index 24eae6a04c18..d464397c01a0 100644
--- a/.github/workflows/build-debug-node.yml
+++ b/.github/workflows/build-debug-node.yml
@@ -29,7 +29,7 @@ jobs:
         working-directory: 'nodejs'
 
       - name: Compile the nodejs
-        run: make -j4
+        run: make -j$(nproc --all)
         working-directory: 'nodejs'
 
      - name: Verify the build
@@ -39,16 +39,12 @@
      - name: Create destination folder
        run: mkdir -p ${{ github.workspace }}/nodejs-debug-build-${{ github.event.inputs.version }}
 
-      - name: Copy nodejs build
-        run: make install
+      - name: Copy nodejs debug build
+        run: cp out/Debug/node ${{ github.workspace }}/nodejs-debug-build-${{ github.event.inputs.version }}
        working-directory: 'nodejs'
-        env:
-          DESTDIR: ${{ github.workspace }}/nodejs-debug-build-${{ github.event.inputs.version }}
 
      - name: Upload build to artifacts
        uses: actions/upload-artifact@v3
        with:
          name: nodejs-debug-build-${{ github.event.inputs.version }}
          path: nodejs-debug-build-${{ github.event.inputs.version }}
-
-
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9b04631c8e80..2f4f252da9ff 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -153,12 +153,12 @@ jobs: matrix: node: [20] steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 - with: + - uses: actions/checkout@v3 + - uses: './.github/actions/setup-debug-node' + with: node-version: ${{matrix.node}} - check-latest: true - cache: yarn + debug: 'true' + - name: Restore build cache id: cache-primes-restore uses: actions/cache/restore@v3 @@ -181,7 +181,12 @@ - name: Rebuild native modules with debug run: npm rebuild --debug - name: 
Unit tests + id: unit_tests run: yarn test:unit + + - uses: './.github/actions/core-dump' + if: ${{ failure() && steps.unit_tests.conclusion == 'failure' }} + - name: Upload coverage data run: yarn coverage diff --git a/dashboards/lodestar_networking.json b/dashboards/lodestar_networking.json index 8633faeb7668..77e4be04048f 100644 --- a/dashboards/lodestar_networking.json +++ b/dashboards/lodestar_networking.json @@ -1,12 +1,12 @@ { "__inputs": [ { - "name": "DS_PROMETHEUS", - "type": "datasource", - "label": "Prometheus", "description": "", + "label": "Prometheus", + "name": "DS_PROMETHEUS", "pluginId": "prometheus", - "pluginName": "Prometheus" + "pluginName": "Prometheus", + "type": "datasource" } ], "annotations": { diff --git a/dashboards/lodestar_validator_monitor.json b/dashboards/lodestar_validator_monitor.json index 5bc844a639fc..7579a595b550 100644 --- a/dashboards/lodestar_validator_monitor.json +++ b/dashboards/lodestar_validator_monitor.json @@ -112,6 +112,7 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, + "description": "https://hackmd.io/@dapplion/lodestar_attestation_summary", "fieldConfig": { "defaults": { "color": { @@ -166,6 +167,7 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, + "description": "https://hackmd.io/@dapplion/lodestar_attestation_summary", "fieldConfig": { "defaults": { "color": { diff --git a/lerna.json b/lerna.json index 4307b0dc78b7..4130f46a317b 100644 --- a/lerna.json +++ b/lerna.json @@ -1,8 +1,10 @@ { - "packages": ["packages/*"], + "packages": [ + "packages/*" + ], "npmClient": "yarn", "useNx": true, - "version": "1.11.3", + "version": "1.12.0", "stream": true, "command": { "version": { diff --git a/packages/api/package.json b/packages/api/package.json index 4cddda4b4033..2f1c5953a673 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -70,11 +70,11 @@ }, "dependencies": { "@chainsafe/persistent-merkle-tree": "^0.6.1", - "@chainsafe/ssz": "^0.13.0", - "@lodestar/config": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@chainsafe/ssz": "^0.14.0", + "@lodestar/config": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", "eventsource": "^2.0.2", "qs": "^6.11.1" }, diff --git a/packages/api/src/beacon/routes/beacon/block.ts b/packages/api/src/beacon/routes/beacon/block.ts index 278637d53a2c..2d887790dd39 100644 --- a/packages/api/src/beacon/routes/beacon/block.ts +++ b/packages/api/src/beacon/routes/beacon/block.ts @@ -193,7 +193,7 @@ export type Api = { publishBlockV2( blockOrContents: allForks.SignedBeaconBlockOrContents, - opts: {broadcastValidation?: BroadcastValidation} + opts?: {broadcastValidation?: BroadcastValidation} ): Promise< ApiClientResponse< { @@ -341,7 +341,7 @@ export function getReqSerializers(config: ChainForkConfig): ReqSerializers ({ + writeReq: (item, {broadcastValidation} = {}) => ({ body: AllForksSignedBlockOrContents.toJson(item), query: {broadcast_validation: broadcastValidation}, }), diff --git a/packages/api/src/beacon/routes/beacon/state.ts b/packages/api/src/beacon/routes/beacon/state.ts index 75d3549eb5a7..7047eaa42e77 100644 --- a/packages/api/src/beacon/routes/beacon/state.ts +++ b/packages/api/src/beacon/routes/beacon/state.ts @@ -109,6 +109,23 @@ export type Api = { > >; + /** + * Fetch the RANDAO 
mix for the requested epoch from the state identified by 'stateId'. + * + * @param stateId State identifier. + * Can be one of: "head" (canonical head in node's view), "genesis", "finalized", "justified", \, \. + * @param epoch Fetch randao mix for the given epoch. If an epoch is not specified then the RANDAO mix for the state's current epoch will be returned. + */ + getStateRandao( + stateId: StateId, + epoch?: Epoch + ): Promise< + ApiClientResponse< + {[HttpStatusCode.OK]: {data: {randao: Root}; executionOptimistic: ExecutionOptimistic}}, + HttpStatusCode.BAD_REQUEST | HttpStatusCode.NOT_FOUND + > + >; + /** * Get state finality checkpoints * Returns finality checkpoints for state with given 'stateId'. @@ -216,6 +233,7 @@ export const routesData: RoutesData = { getStateFinalityCheckpoints: {url: "/eth/v1/beacon/states/{state_id}/finality_checkpoints", method: "GET"}, getStateFork: {url: "/eth/v1/beacon/states/{state_id}/fork", method: "GET"}, getStateRoot: {url: "/eth/v1/beacon/states/{state_id}/root", method: "GET"}, + getStateRandao: {url: "/eth/v1/beacon/states/{state_id}/randao", method: "GET"}, getStateValidator: {url: "/eth/v1/beacon/states/{state_id}/validators/{validator_id}", method: "GET"}, getStateValidators: {url: "/eth/v1/beacon/states/{state_id}/validators", method: "GET"}, getStateValidatorBalances: {url: "/eth/v1/beacon/states/{state_id}/validator_balances", method: "GET"}, @@ -231,6 +249,7 @@ export type ReqTypes = { getStateFinalityCheckpoints: StateIdOnlyReq; getStateFork: StateIdOnlyReq; getStateRoot: StateIdOnlyReq; + getStateRandao: {params: {state_id: StateId}; query: {epoch?: number}}; getStateValidator: {params: {state_id: StateId; validator_id: ValidatorId}}; getStateValidators: {params: {state_id: StateId}; query: {id?: ValidatorId[]; status?: ValidatorStatus[]}}; getStateValidatorBalances: {params: {state_id: StateId}; query: {id?: ValidatorId[]}}; @@ -266,6 +285,15 @@ export function getReqSerializers(): ReqSerializers { getStateFork: stateIdOnlyReq, getStateRoot: stateIdOnlyReq, + getStateRandao: { + writeReq: (state_id, epoch) => ({params: {state_id}, query: {epoch}}), + parseReq: ({params, query}) => [params.state_id, query.epoch], + schema: { + params: {state_id: Schema.StringRequired}, + query: {epoch: Schema.Uint}, + }, + }, + getStateValidator: { writeReq: (state_id, validator_id) => ({params: {state_id, validator_id}}), parseReq: ({params}) => [params.state_id, params.validator_id], @@ -299,6 +327,10 @@ export function getReturnTypes(): ReturnTypes { root: ssz.Root, }); + const RandaoContainer = new ContainerType({ + randao: ssz.Root, + }); + const FinalityCheckpoints = new ContainerType( { previousJustified: ssz.phase0.Checkpoint, @@ -346,6 +378,7 @@ export function getReturnTypes(): ReturnTypes { return { getStateRoot: ContainerDataExecutionOptimistic(RootContainer), getStateFork: ContainerDataExecutionOptimistic(ssz.phase0.Fork), + getStateRandao: ContainerDataExecutionOptimistic(RandaoContainer), getStateFinalityCheckpoints: ContainerDataExecutionOptimistic(FinalityCheckpoints), getStateValidators: ContainerDataExecutionOptimistic(ArrayOf(ValidatorResponse)), getStateValidator: ContainerDataExecutionOptimistic(ValidatorResponse), diff --git a/packages/api/src/builder/routes.ts b/packages/api/src/builder/routes.ts index dcff20705c17..0136f1deeac4 100644 --- a/packages/api/src/builder/routes.ts +++ b/packages/api/src/builder/routes.ts @@ -1,6 +1,6 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {ssz, allForks, bellatrix, Slot, 
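Editor's note: the getStateRandao route added above is exposed at GET /eth/v1/beacon/states/{state_id}/randao. As an illustration only (not part of this PR), here is a minimal sketch of querying it over REST, assuming a beacon node on the default Lodestar REST port 9596 and Node 18+ for the global fetch; the typed @lodestar/api client exposes the same route as beacon.getStateRandao(stateId, epoch).

```ts
// Sketch: query the new randao endpoint directly over REST.
// The base URL is an assumption; adjust it to your beacon node.
async function fetchStateRandao(stateId: string, epoch?: number): Promise<string> {
  const url = new URL(`http://localhost:9596/eth/v1/beacon/states/${stateId}/randao`);
  if (epoch !== undefined) url.searchParams.set("epoch", String(epoch));

  const res = await fetch(url);
  if (!res.ok) throw new Error(`randao request failed: ${res.status}`);

  // Response shape per the route definition above: {execution_optimistic, data: {randao}}
  const body = (await res.json()) as {execution_optimistic: boolean; data: {randao: string}};
  return body.data.randao;
}

fetchStateRandao("head").then((randao) => console.log("randao mix:", randao));
```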
Root, BLSPubkey} from "@lodestar/types"; -import {ForkName, isForkExecution} from "@lodestar/params"; +import {ForkName, isForkExecution, isForkBlobs} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; import { @@ -34,11 +34,14 @@ export type Api = { HttpStatusCode.NOT_FOUND | HttpStatusCode.BAD_REQUEST > >; - submitBlindedBlock( - signedBlock: allForks.SignedBlindedBeaconBlockOrContents - ): Promise< + submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlockOrContents): Promise< ApiClientResponse< - {[HttpStatusCode.OK]: {data: allForks.ExecutionPayload; version: ForkName}}, + { + [HttpStatusCode.OK]: { + data: allForks.ExecutionPayload | allForks.ExecutionPayloadAndBlobsBundle; + version: ForkName; + }; + }, HttpStatusCode.SERVICE_UNAVAILABLE > >; @@ -84,8 +87,13 @@ export function getReturnTypes(): ReturnTypes { getHeader: WithVersion((fork: ForkName) => isForkExecution(fork) ? ssz.allForksExecution[fork].SignedBuilderBid : ssz.bellatrix.SignedBuilderBid ), - submitBlindedBlock: WithVersion((fork: ForkName) => - isForkExecution(fork) ? ssz.allForksExecution[fork].ExecutionPayload : ssz.bellatrix.ExecutionPayload + submitBlindedBlock: WithVersion( + (fork: ForkName) => + isForkBlobs(fork) + ? ssz.allForksBlobs[fork].ExecutionPayloadAndBlobsBundle + : isForkExecution(fork) + ? ssz.allForksExecution[fork].ExecutionPayload + : ssz.bellatrix.ExecutionPayload ), }; } diff --git a/packages/api/src/keymanager/routes.ts b/packages/api/src/keymanager/routes.ts index 4c9b3ff1003a..09f5e7610604 100644 --- a/packages/api/src/keymanager/routes.ts +++ b/packages/api/src/keymanager/routes.ts @@ -64,6 +64,10 @@ export type FeeRecipientData = { pubkey: string; ethaddress: string; }; +export type GraffitiData = { + pubkey: string; + graffiti: string; +}; export type GasLimitData = { pubkey: string; gasLimit: number; @@ -205,6 +209,25 @@ export type Api = { > >; + listGraffiti(pubkey: string): Promise>; + setGraffiti( + pubkey: string, + graffiti: string + ): Promise< + ApiClientResponse< + {[HttpStatusCode.OK]: void; [HttpStatusCode.NO_CONTENT]: void}, + HttpStatusCode.UNAUTHORIZED | HttpStatusCode.FORBIDDEN | HttpStatusCode.NOT_FOUND + > + >; + deleteGraffiti( + pubkey: string + ): Promise< + ApiClientResponse< + {[HttpStatusCode.OK]: void; [HttpStatusCode.NO_CONTENT]: void}, + HttpStatusCode.UNAUTHORIZED | HttpStatusCode.FORBIDDEN | HttpStatusCode.NOT_FOUND + > + >; + getGasLimit(pubkey: string): Promise>; setGasLimit( pubkey: string, @@ -259,6 +282,10 @@ export const routesData: RoutesData = { setFeeRecipient: {url: "/eth/v1/validator/{pubkey}/feerecipient", method: "POST", statusOk: 202}, deleteFeeRecipient: {url: "/eth/v1/validator/{pubkey}/feerecipient", method: "DELETE", statusOk: 204}, + listGraffiti: {url: "/eth/v1/validator/{pubkey}/graffiti", method: "GET"}, + setGraffiti: {url: "/eth/v1/validator/{pubkey}/graffiti", method: "POST", statusOk: 202}, + deleteGraffiti: {url: "/eth/v1/validator/{pubkey}/graffiti", method: "DELETE", statusOk: 204}, + getGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "GET"}, setGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "POST", statusOk: 202}, deleteGasLimit: {url: "/eth/v1/validator/{pubkey}/gas_limit", method: "DELETE", statusOk: 204}, @@ -291,6 +318,10 @@ export type ReqTypes = { setFeeRecipient: {params: {pubkey: string}; body: {ethaddress: string}}; deleteFeeRecipient: {params: {pubkey: string}}; + listGraffiti: {params: {pubkey: string}}; + setGraffiti: {params: {pubkey: string}; body: 
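Editor's note: because the builder's submitBlindedBlock response above is now fork-dependent (a bare ExecutionPayload pre-deneb, a payload plus blobs bundle from deneb onwards), callers have to narrow the returned data before using it. A rough sketch of that narrowing, using stand-in types rather than the real unions from @lodestar/types:

```ts
// Stand-in shapes for illustration only; the real types live in @lodestar/types.
type ExecutionPayload = {blockHash: Uint8Array; transactions: Uint8Array[]};
type BlobsBundle = {commitments: Uint8Array[]; proofs: Uint8Array[]; blobs: Uint8Array[]};
type ExecutionPayloadAndBlobsBundle = {executionPayload: ExecutionPayload; blobsBundle: BlobsBundle};

type SubmitBlindedBlockData = ExecutionPayload | ExecutionPayloadAndBlobsBundle;

// Deneb responses carry the payload plus a blobs bundle; earlier forks return the bare payload.
function isPayloadAndBlobsBundle(data: SubmitBlindedBlockData): data is ExecutionPayloadAndBlobsBundle {
  return (data as ExecutionPayloadAndBlobsBundle).blobsBundle !== undefined;
}

function extractPayload(data: SubmitBlindedBlockData): {payload: ExecutionPayload; blobs: Uint8Array[] | null} {
  if (isPayloadAndBlobsBundle(data)) {
    return {payload: data.executionPayload, blobs: data.blobsBundle.blobs};
  }
  return {payload: data, blobs: null};
}
```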
{graffiti: string}}; + deleteGraffiti: {params: {pubkey: string}}; + getGasLimit: {params: {pubkey: string}}; setGasLimit: {params: {pubkey: string}; body: {gas_limit: string}}; deleteGasLimit: {params: {pubkey: string}}; @@ -347,6 +378,29 @@ export function getReqSerializers(): ReqSerializers { }, }, + listGraffiti: { + writeReq: (pubkey) => ({params: {pubkey}}), + parseReq: ({params: {pubkey}}) => [pubkey], + schema: { + params: {pubkey: Schema.StringRequired}, + }, + }, + setGraffiti: { + writeReq: (pubkey, graffiti) => ({params: {pubkey}, body: {graffiti}}), + parseReq: ({params: {pubkey}, body: {graffiti}}) => [pubkey, graffiti], + schema: { + params: {pubkey: Schema.StringRequired}, + body: Schema.Object, + }, + }, + deleteGraffiti: { + writeReq: (pubkey) => ({params: {pubkey}}), + parseReq: ({params: {pubkey}}) => [pubkey], + schema: { + params: {pubkey: Schema.StringRequired}, + }, + }, + getGasLimit: { writeReq: (pubkey) => ({params: {pubkey}}), parseReq: ({params: {pubkey}}) => [pubkey], @@ -391,6 +445,7 @@ export function getReturnTypes(): ReturnTypes { deleteRemoteKeys: jsonType("snake"), listFeeRecipient: jsonType("snake"), + listGraffiti: jsonType("snake"), getGasLimit: ContainerData( new ContainerType( { diff --git a/packages/api/test/unit/beacon/testData/beacon.ts b/packages/api/test/unit/beacon/testData/beacon.ts index bb9697cf9587..5cb35540cf7a 100644 --- a/packages/api/test/unit/beacon/testData/beacon.ts +++ b/packages/api/test/unit/beacon/testData/beacon.ts @@ -10,6 +10,7 @@ import { import {GenericServerTestCases} from "../../../utils/genericServerTest.js"; const root = Buffer.alloc(32, 1); +const randao = Buffer.alloc(32, 1); const balance = 32e9; const pubkeyHex = toHexString(Buffer.alloc(48, 1)); @@ -131,6 +132,10 @@ export const testData: GenericServerTestCases = { args: ["head"], res: {executionOptimistic: true, data: ssz.phase0.Fork.defaultValue()}, }, + getStateRandao: { + args: ["head", 1], + res: {executionOptimistic: true, data: {randao}}, + }, getStateFinalityCheckpoints: { args: ["head"], res: { diff --git a/packages/api/test/unit/keymanager/testData.ts b/packages/api/test/unit/keymanager/testData.ts index 3be3896b7147..a4fc72fc8e2d 100644 --- a/packages/api/test/unit/keymanager/testData.ts +++ b/packages/api/test/unit/keymanager/testData.ts @@ -11,6 +11,7 @@ import {GenericServerTestCases} from "../../utils/genericServerTest.js"; // randomly pregenerated pubkey const pubkeyRand = "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff2ff92376b778798365e488dab07a652eb04576"; const ethaddressRand = "0xabcf8e0d4e9587369b2301d0790347320302cc09"; +const graffitiRandUtf8 = "636861696e736166652f6c6f64657374"; const gasLimitRand = 30_000_000; export const testData: GenericServerTestCases = { @@ -69,6 +70,19 @@ export const testData: GenericServerTestCases = { res: undefined, }, + listGraffiti: { + args: [pubkeyRand], + res: {data: {pubkey: pubkeyRand, graffiti: graffitiRandUtf8}}, + }, + setGraffiti: { + args: [pubkeyRand, graffitiRandUtf8], + res: undefined, + }, + deleteGraffiti: { + args: [pubkeyRand], + res: undefined, + }, + getGasLimit: { args: [pubkeyRand], res: {data: {pubkey: pubkeyRand, gasLimit: gasLimitRand}}, diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index 6b9b37c3ba68..b7f0c3affa44 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": 
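Editor's note: the graffiti keymanager routes defined above are plain REST endpoints under /eth/v1/validator/{pubkey}/graffiti. A hedged sketch of driving them with fetch; the keymanager base URL (Lodestar's default port 5062) and the bearer token are deployment-specific assumptions, not part of the PR:

```ts
// Sketch of the new graffiti keymanager endpoints; base URL and token are assumptions.
const KEYMANAGER_URL = "http://localhost:5062";
const TOKEN = "api-token-from-the-validator-client";

async function setGraffiti(pubkey: string, graffiti: string): Promise<void> {
  const res = await fetch(`${KEYMANAGER_URL}/eth/v1/validator/${pubkey}/graffiti`, {
    method: "POST",
    headers: {"content-type": "application/json", authorization: `Bearer ${TOKEN}`},
    body: JSON.stringify({graffiti}),
  });
  // The route above is registered with statusOk 202.
  if (!res.ok) throw new Error(`setGraffiti failed: ${res.status}`);
}

async function listGraffiti(pubkey: string): Promise<string> {
  const res = await fetch(`${KEYMANAGER_URL}/eth/v1/validator/${pubkey}/graffiti`, {
    headers: {authorization: `Bearer ${TOKEN}`},
  });
  if (!res.ok) throw new Error(`listGraffiti failed: ${res.status}`);
  const body = (await res.json()) as {data: {pubkey: string; graffiti: string}};
  return body.data.graffiti;
}
```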
"module", "exports": { ".": { @@ -104,7 +104,7 @@ "@chainsafe/libp2p-noise": "^13.0.1", "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/prometheus-gc-stats": "^1.0.0", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/ssz": "^0.14.0", "@chainsafe/threads": "^1.11.1", "@ethersproject/abi": "^5.7.0", "@fastify/bearer-auth": "^9.0.0", @@ -119,18 +119,18 @@ "@libp2p/peer-id-factory": "^3.0.4", "@libp2p/prometheus-metrics": "^2.0.7", "@libp2p/tcp": "8.0.8", - "@lodestar/api": "^1.11.3", - "@lodestar/config": "^1.11.3", - "@lodestar/db": "^1.11.3", - "@lodestar/fork-choice": "^1.11.3", - "@lodestar/light-client": "^1.11.3", - "@lodestar/logger": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/reqresp": "^1.11.3", - "@lodestar/state-transition": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", - "@lodestar/validator": "^1.11.3", + "@lodestar/api": "^1.12.0", + "@lodestar/config": "^1.12.0", + "@lodestar/db": "^1.12.0", + "@lodestar/fork-choice": "^1.12.0", + "@lodestar/light-client": "^1.12.0", + "@lodestar/logger": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/reqresp": "^1.12.0", + "@lodestar/state-transition": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", + "@lodestar/validator": "^1.12.0", "@multiformats/multiaddr": "^12.1.3", "@types/datastore-level": "^3.0.0", "buffer-xor": "^2.0.2", @@ -162,8 +162,8 @@ "@types/supertest": "^2.0.12", "@types/tmp": "^0.2.3", "eventsource": "^2.0.2", - "it-pair": "^2.0.6", "it-drain": "^3.0.3", + "it-pair": "^2.0.6", "leveldown": "^6.1.1", "rewiremock": "^3.14.5", "rimraf": "^4.4.1", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index 81eac4ed6e47..3b36674dd613 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,9 +1,13 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {routes, ServerApi, ResponseFormat} from "@lodestar/api"; -import {computeTimeAtSlot, signedBlindedBlockToFull, signedBlindedBlobSidecarsToFull} from "@lodestar/state-transition"; +import { + computeTimeAtSlot, + parseSignedBlindedBlockOrContents, + reconstructFullBlockOrContents, +} from "@lodestar/state-transition"; import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; -import {sleep, toHex, LogDataBasic} from "@lodestar/utils"; -import {allForks, deneb, isSignedBlockContents, isSignedBlindedBlockContents} from "@lodestar/types"; +import {sleep, toHex} from "@lodestar/utils"; +import {allForks, deneb, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; import {BlockSource, getBlockInput, ImportBlockOpts, BlockInput} from "../../../../chain/blocks/types.js"; import {promiseAllMaybeAsync} from "../../../../util/promises.js"; import {isOptimisticBlock} from "../../../../util/forkChoice.js"; @@ -15,11 +19,6 @@ import {resolveBlockId, toBeaconHeaderResponse} from "./utils.js"; type PublishBlockOpts = ImportBlockOpts & {broadcastValidation?: routes.beacon.BroadcastValidation}; -type ParsedSignedBlindedBlockOrContents = { - signedBlindedBlock: allForks.SignedBlindedBeaconBlock; - signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars | null; -}; - /** * Validator clock may be advanced from beacon's clock. 
If the validator requests a resource in a * future slot, wait some time instead of rejecting the request because it's in the future @@ -152,27 +151,28 @@ export function getBeaconBlockApi({ .getBlindedForkTypes(signedBlindedBlock.message.slot) .BeaconBlock.hashTreeRoot(signedBlindedBlock.message) ); - const logCtx = {blockRoot, slot}; // Either the payload/blobs are cached from i) engine locally or ii) they are from the builder // - // executionPayload can be null or a real payload in locally produced, its only undefined when - // the block came from the builder - const executionPayload = chain.producedBlockRoot.get(blockRoot); + // executionPayload can be null or a real payload in locally produced so check for presence of root + const source = chain.producedBlockRoot.has(blockRoot) ? ProducedBlockSource.engine : ProducedBlockSource.builder; + + const executionPayload = chain.producedBlockRoot.get(blockRoot) ?? null; + const blobSidecars = executionPayload + ? chain.producedBlobSidecarsCache.get(toHex(executionPayload.blockHash)) + : undefined; + const blobs = blobSidecars ? blobSidecars.map((blobSidecar) => blobSidecar.blob) : null; + const signedBlockOrContents = - executionPayload !== undefined - ? reconstructLocalBlockOrContents( - chain, - {signedBlindedBlock, signedBlindedBlobSidecars}, - executionPayload, - logCtx - ) - : await reconstructBuilderBlockOrContents(chain, signedBlindedBlockOrContents, logCtx); + source === ProducedBlockSource.engine + ? reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs}) + : await reconstructBuilderBlockOrContents(chain, signedBlindedBlockOrContents); // the full block is published by relay and it's possible that the block is already known to us // by gossip // // see: https://github.com/ChainSafe/lodestar/issues/5404 + chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); return publishBlock(signedBlockOrContents, {...opts, ignoreIfKnown: true}); }; @@ -365,73 +365,15 @@ export function getBeaconBlockApi({ }; } -function parseSignedBlindedBlockOrContents( - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents -): ParsedSignedBlindedBlockOrContents { - if (isSignedBlindedBlockContents(signedBlindedBlockOrContents)) { - const signedBlindedBlock = signedBlindedBlockOrContents.signedBlindedBlock; - const signedBlindedBlobSidecars = signedBlindedBlockOrContents.signedBlindedBlobSidecars; - return {signedBlindedBlock, signedBlindedBlobSidecars}; - } else { - return {signedBlindedBlock: signedBlindedBlockOrContents, signedBlindedBlobSidecars: null}; - } -} - -function reconstructLocalBlockOrContents( - chain: ApiModules["chain"], - {signedBlindedBlock, signedBlindedBlobSidecars}: ParsedSignedBlindedBlockOrContents, - executionPayload: allForks.ExecutionPayload | null, - logCtx: Record -): allForks.SignedBeaconBlockOrContents { - const signedBlock = signedBlindedBlockToFull(signedBlindedBlock, executionPayload); - if (executionPayload !== null) { - Object.assign(logCtx, {transactions: executionPayload.transactions.length}); - } - - if (signedBlindedBlobSidecars !== null) { - if (executionPayload === null) { - throw Error("Missing locally produced executionPayload for deneb+ publishBlindedBlock"); - } - - const blockHash = toHex(executionPayload.blockHash); - const blobSidecars = chain.producedBlobSidecarsCache.get(blockHash); - if (blobSidecars === undefined) { - throw Error("Missing blobSidecars from the local execution cache"); - } - if (blobSidecars.length !== 
signedBlindedBlobSidecars.length) { - throw Error( - `Length mismatch signedBlindedBlobSidecars=${signedBlindedBlobSidecars.length} blobSidecars=${blobSidecars.length}` - ); - } - const signedBlobSidecars = signedBlindedBlobSidecarsToFull( - signedBlindedBlobSidecars, - blobSidecars.map((blobSidecar) => blobSidecar.blob) - ); - - Object.assign(logCtx, {blobs: signedBlindedBlobSidecars.length}); - chain.logger.verbose("Block & blobs assembled from locally cached payload", logCtx); - return {signedBlock, signedBlobSidecars} as allForks.SignedBeaconBlockOrContents; - } else { - chain.logger.verbose("Block assembled from locally cached payload", logCtx); - return signedBlock as allForks.SignedBeaconBlockOrContents; - } -} - async function reconstructBuilderBlockOrContents( chain: ApiModules["chain"], - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents, - logCtx: Record + signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents ): Promise { - // Mechanism for blobs & blocks on builder is implemenented separately in a followup deneb-builder PR - if (isSignedBlindedBlockContents(signedBlindedBlockOrContents)) { - throw Error("exeutionBuilder not yet implemented for deneb+ forks"); - } const executionBuilder = chain.executionBuilder; if (!executionBuilder) { throw Error("exeutionBuilder required to publish SignedBlindedBeaconBlock"); } const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlockOrContents); - chain.logger.verbose("Publishing block assembled from the builder", logCtx); return signedBlockOrContents; } diff --git a/packages/beacon-node/src/api/impl/beacon/state/index.ts b/packages/beacon-node/src/api/impl/beacon/state/index.ts index 54d663234afe..c9f74b45a9f2 100644 --- a/packages/beacon-node/src/api/impl/beacon/state/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/state/index.ts @@ -5,7 +5,9 @@ import { computeEpochAtSlot, computeStartSlotAtEpoch, getCurrentEpoch, + getRandaoMix, } from "@lodestar/state-transition"; +import {EPOCHS_PER_HISTORICAL_VECTOR} from "@lodestar/params"; import {ApiError} from "../../errors.js"; import {ApiModules} from "../../types.js"; import { @@ -43,6 +45,25 @@ export function getBeaconStateApi({ }; }, + async getStateRandao(stateId, epoch) { + const {state, executionOptimistic} = await getState(stateId); + const stateEpoch = computeEpochAtSlot(state.slot); + const usedEpoch = epoch ?? 
stateEpoch; + + if (!(stateEpoch < usedEpoch + EPOCHS_PER_HISTORICAL_VECTOR && usedEpoch <= stateEpoch)) { + throw new ApiError(400, "Requested epoch is out of range"); + } + + const randao = getRandaoMix(state, usedEpoch); + + return { + executionOptimistic, + data: { + randao, + }, + }; + }, + async getStateFinalityCheckpoints(stateId) { const {state, executionOptimistic} = await getState(stateId); return { diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts index ec3a02dfa26f..c0230a21bcc6 100644 --- a/packages/beacon-node/src/api/impl/validator/index.ts +++ b/packages/beacon-node/src/api/impl/validator/index.ts @@ -32,6 +32,7 @@ import { BLSSignature, isBlindedBeaconBlock, isBlindedBlockContents, + phase0, } from "@lodestar/types"; import {ExecutionStatus} from "@lodestar/fork-choice"; import {toHex, racePromisesWithCutoff, RaceEvent} from "@lodestar/utils"; @@ -172,12 +173,17 @@ export function getValidatorApi({ /** * This function is called 1s before next epoch, usually at that time PrepareNextSlotScheduler finishes - * so we should have checkpoint state, otherwise wait for up to `timeoutMs`. + * so we should have checkpoint state, otherwise wait for up to the slot 1 of epoch. + * slot epoch 0 1 + * |------------|------------| + * ^ ^ + * | | + * | | + * | waitForCheckpointState (1s before slot 0 of epoch, wait until slot 1 of epoch) + * | + * prepareNextSlot (4s before next slot) */ - async function waitForCheckpointState( - cpHex: CheckpointHex, - timeoutMs: number - ): Promise { + async function waitForCheckpointState(cpHex: CheckpointHex): Promise { const cpState = chain.regen.getCheckpointStateSync(cpHex); if (cpState) { return cpState; @@ -186,16 +192,30 @@ export function getValidatorApi({ epoch: cpHex.epoch, root: fromHexString(cpHex.rootHex), }; - // if not, wait for ChainEvent.checkpoint event until timeoutMs - return new Promise((resolve) => { - const timer = setTimeout(() => resolve(null), timeoutMs); - chain.emitter.on(ChainEvent.checkpoint, (eventCp, cpState) => { - if (ssz.phase0.Checkpoint.equals(eventCp, cp)) { - clearTimeout(timer); - resolve(cpState); - } - }); - }); + const slot0 = computeStartSlotAtEpoch(cp.epoch); + // if not, wait for ChainEvent.checkpoint event until slot 1 of epoch + let listener: ((eventCp: phase0.Checkpoint) => void) | null = null; + const foundCPState = await Promise.race([ + new Promise((resolve) => { + listener = (eventCp) => { + resolve(ssz.phase0.Checkpoint.equals(eventCp, cp)); + }; + chain.emitter.once(ChainEvent.checkpoint, listener); + }), + // in rare case, checkpoint state cache may happen up to 6s of slot 0 of epoch + // so we wait for it until the slot 1 of epoch + chain.clock.waitForSlot(slot0 + 1), + ]); + + if (listener != null) { + chain.emitter.off(ChainEvent.checkpoint, listener); + } + + if (foundCPState === true) { + return chain.regen.getCheckpointStateSync(cpHex); + } + + return null; } /** @@ -310,14 +330,17 @@ export function getValidatorApi({ const version = config.getForkName(block.slot); if (isForkBlobs(version)) { - if (!isBlindedBlockContents(block)) { - throw Error(`Expected BlockContents response at fork=${version}`); + const blockHash = toHex((block as bellatrix.BlindedBeaconBlock).body.executionPayloadHeader.blockHash); + const blindedBlobSidecars = chain.producedBlindedBlobSidecarsCache.get(blockHash); + if (blindedBlobSidecars === undefined) { + throw Error("blobSidecars missing in cache"); } - return {data: block, version, 
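Editor's note: the reworked waitForCheckpointState above replaces a fixed timeout with a race between a checkpoint event and a clock deadline, always detaching the listener afterwards. A generic sketch of that pattern under stated assumptions (a plain Node EventEmitter stand-in, not the actual ChainEventEmitter):

```ts
import {EventEmitter} from "node:events";

// Wait for an event matching `match`, or give up when `deadline` resolves, whichever comes first.
// The listener is always removed afterwards so it cannot leak across slots.
async function waitForEventOrDeadline<T>(
  emitter: EventEmitter,
  eventName: string,
  match: (value: T) => boolean,
  deadline: Promise<void>
): Promise<T | null> {
  let listener: ((value: T) => void) | null = null;

  const result = await Promise.race([
    new Promise<T>((resolve) => {
      listener = (value: T) => {
        if (match(value)) resolve(value);
      };
      emitter.on(eventName, listener);
    }),
    deadline.then(() => null),
  ]);

  if (listener !== null) emitter.off(eventName, listener);
  return result;
}
```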
executionPayloadValue}; + return { + data: {blindedBlock: block, blindedBlobSidecars} as allForks.BlindedBlockContents, + version, + executionPayloadValue, + }; } else { - if (isBlindedBlockContents(block)) { - throw Error(`Invalid BlockContents response at fork=${version}`); - } return {data: block, version, executionPayloadValue}; } } finally { @@ -718,7 +741,7 @@ export function getValidatorApi({ // this is to avoid missed block proposal due to 0 epoch look ahead if (epoch === nextEpoch && toNextEpochMs < prepareNextSlotLookAheadMs) { // wait for maximum 1 slot for cp state which is the timeout of validator api - const cpState = await waitForCheckpointState({rootHex: head.blockRoot, epoch}, slotMs); + const cpState = await waitForCheckpointState({rootHex: head.blockRoot, epoch}); if (cpState) { state = cpState; metrics?.duties.requestNextEpochProposalDutiesHit.inc(); diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index 77c6b4832bbc..4f8bcfa86d01 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -71,6 +71,7 @@ export async function verifyBlocksInEpoch( if (!isStateValidatorsNodesPopulated(preState0)) { this.logger.verbose("verifyBlocksInEpoch preState0 SSZ cache stats", { + slot: preState0.slot, cache: isStateValidatorsNodesPopulated(preState0), clonedCount: preState0.clonedCount, clonedCountWithTransferCache: preState0.clonedCountWithTransferCache, diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 31068d20de27..18c37a3ba437 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -131,6 +131,7 @@ export class BeaconChain implements IBeaconChain { readonly checkpointBalancesCache: CheckpointBalancesCache; /** Map keyed by executionPayload.blockHash of the block for those blobs */ readonly producedBlobSidecarsCache = new Map(); + readonly producedBlindedBlobSidecarsCache = new Map(); // Cache payload from the local execution so that produceBlindedBlock or produceBlockV3 and // send and get signed/published blinded versions which beacon can assemble into full before @@ -522,7 +523,7 @@ export class BeaconChain implements IBeaconChain { // publishing the blinded block's full version if (blobs.type === BlobsResultType.produced) { // body is of full type here - const blockHash = toHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash); + const blockHash = blobs.blockHash; const blobSidecars = blobs.blobSidecars.map((blobSidecar) => ({ ...blobSidecar, blockRoot, @@ -533,6 +534,21 @@ export class BeaconChain implements IBeaconChain { this.producedBlobSidecarsCache.set(blockHash, blobSidecars); this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size); + } else if (blobs.type === BlobsResultType.blinded) { + // body is of blinded type here + const blockHash = blobs.blockHash; + const blindedBlobSidecars = blobs.blobSidecars.map((blindedBlobSidecar) => ({ + ...blindedBlobSidecar, + blockRoot, + slot, + blockParentRoot: parentBlockRoot, + proposerIndex, + })); + + this.producedBlindedBlobSidecarsCache.set(blockHash, blindedBlobSidecars); + this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set( + this.producedBlindedBlobSidecarsCache.size + ); } return {block, executionPayloadValue}; @@ -792,6 +808,14 @@ export class BeaconChain implements IBeaconChain { 
this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS ); this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size); + + pruneSetToMax( + this.producedBlindedBlobSidecarsCache, + this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS + ); + this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set( + this.producedBlindedBlobSidecarsCache.size + ); } const metrics = this.metrics; diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index b7f33555545a..8d6f7f419d7b 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -95,6 +95,7 @@ export interface IBeaconChain { readonly checkpointBalancesCache: CheckpointBalancesCache; readonly producedBlobSidecarsCache: Map; readonly producedBlockRoot: Map; + readonly producedBlindedBlobSidecarsCache: Map; readonly producedBlindedBlockRoot: Set; readonly opts: IChainOptions; diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index 8babd82756f8..43fac1d1b120 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -98,7 +98,9 @@ export class PrepareNextSlotScheduler { const prepareState = await this.chain.regen.getBlockSlotState( headRoot, prepareSlot, - {dontTransferCache: true}, + // the slot 0 of next epoch will likely use this Previous Root Checkpoint state for state transition so we transfer cache here + // for other slots dontTransferCached=true because we don't run state transition on this state + {dontTransferCache: !isEpochTransition}, RegenCaller.precomputeEpoch ); @@ -116,6 +118,7 @@ export class PrepareNextSlotScheduler { nextEpoch, headSlot, prepareSlot, + previousHits, }); } diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index 5224aae65035..acefbbf765a1 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -35,7 +35,10 @@ import {PayloadId, IExecutionEngine, IExecutionBuilder, PayloadAttributes} from import {ZERO_HASH, ZERO_HASH_HEX} from "../../constants/index.js"; import {IEth1ForBlockProduction} from "../../eth1/index.js"; import {numToQuantity} from "../../eth1/provider/utils.js"; -import {validateBlobsAndKzgCommitments} from "./validateBlobsAndKzgCommitments.js"; +import { + validateBlobsAndKzgCommitments, + validateBlindedBlobsAndKzgCommitments, +} from "./validateBlobsAndKzgCommitments.js"; // Time to provide the EL to generate a payload from new payload id const PAYLOAD_GENERATION_TIME_MS = 500; @@ -70,8 +73,9 @@ export enum BlobsResultType { } export type BlobsResult = - | {type: BlobsResultType.preDeneb | BlobsResultType.blinded} - | {type: BlobsResultType.produced; blobSidecars: deneb.BlobSidecars; blockHash: RootHex}; + | {type: BlobsResultType.preDeneb} + | {type: BlobsResultType.produced; blobSidecars: deneb.BlobSidecars; blockHash: RootHex} + | {type: BlobsResultType.blinded; blobSidecars: deneb.BlindedBlobSidecars; blockHash: RootHex}; export async function produceBlockBody( this: BeaconChain, @@ -195,16 +199,47 @@ export async function produceBlockBody( ); (blockBody as allForks.BlindedBeaconBlockBody).executionPayloadHeader = builderRes.header; executionPayloadValue = builderRes.executionPayloadValue; - 
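Editor's note: the new producedBlindedBlobSidecarsCache above is bounded with the same pruneSetToMax helper as the existing blob sidecar cache. As a rough standalone equivalent (the real helper lives in Lodestar's utilities; this is only to illustrate the idea of dropping oldest entries from an insertion-ordered Map):

```ts
// Drop the oldest entries of an insertion-ordered Map until it holds at most `maxItems`.
// Returns the number of deleted entries, which is handy for metrics.
function pruneMapToMax<K, V>(map: Map<K, V>, maxItems: number): number {
  let deleted = 0;
  for (const key of map.keys()) {
    if (map.size <= maxItems) break;
    map.delete(key);
    deleted++;
  }
  return deleted;
}

// Usage sketch: keep at most 1024 cached blob sidecar bundles keyed by block hash.
const producedCache = new Map<string, unknown[]>();
pruneMapToMax(producedCache, 1024);
```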
this.logger.verbose("Fetched execution payload header from builder", {slot: blockSlot, executionPayloadValue}); + + const fetchedTime = Date.now() / 1000 - computeTimeAtSlot(this.config, blockSlot, this.genesisTime); + const prepType = "blinded"; + this.metrics?.blockPayload.payloadFetchedTime.observe({prepType}, fetchedTime); + this.logger.verbose("Fetched execution payload header from builder", { + slot: blockSlot, + executionPayloadValue, + prepType, + fetchedTime, + }); + if (ForkSeq[fork] >= ForkSeq.deneb) { - const {blobKzgCommitments} = builderRes; - if (blobKzgCommitments === undefined) { - throw Error(`Invalid builder getHeader response for fork=${fork}, missing blobKzgCommitments`); + const {blindedBlobsBundle} = builderRes; + if (blindedBlobsBundle === undefined) { + throw Error(`Invalid builder getHeader response for fork=${fork}, missing blindedBlobsBundle`); } - (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blobKzgCommitments; - blobsResult = {type: BlobsResultType.blinded}; - Object.assign(logMeta, {blobs: blobKzgCommitments.length}); + // validate blindedBlobsBundle + if (this.opts.sanityCheckExecutionEngineBlobs) { + validateBlindedBlobsAndKzgCommitments(builderRes.header, blindedBlobsBundle); + } + + (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blindedBlobsBundle.commitments; + const blockHash = toHex(builderRes.header.blockHash); + + const blobSidecars = Array.from({length: blindedBlobsBundle.blobRoots.length}, (_v, index) => { + const blobRoot = blindedBlobsBundle.blobRoots[index]; + const commitment = blindedBlobsBundle.commitments[index]; + const proof = blindedBlobsBundle.proofs[index]; + const blindedBlobSidecar = { + index, + blobRoot, + kzgProof: proof, + kzgCommitment: commitment, + }; + // Other fields will be injected after postState is calculated + return blindedBlobSidecar; + }) as deneb.BlindedBlobSidecars; + blobsResult = {type: BlobsResultType.blinded, blobSidecars, blockHash}; + + Object.assign(logMeta, {blobs: blindedBlobsBundle.commitments.length}); } else { blobsResult = {type: BlobsResultType.preDeneb}; } @@ -270,7 +305,7 @@ export async function produceBlockBody( throw Error(`Missing blobsBundle response from getPayload at fork=${fork}`); } - // Optionally sanity-check that the KZG commitments match the versioned hashes in the transactions + // validate blindedBlobsBundle if (this.opts.sanityCheckExecutionEngineBlobs) { validateBlobsAndKzgCommitments(executionPayload, blobsBundle); } @@ -288,6 +323,7 @@ export async function produceBlockBody( kzgProof: proof, kzgCommitment: commitment, }; + // Other fields will be injected after postState is calculated return blobSidecar; }) as deneb.BlobSidecars; blobsResult = {type: BlobsResultType.produced, blobSidecars, blockHash}; @@ -443,21 +479,19 @@ async function prepareExecutionPayloadHeader( ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blobKzgCommitments?: deneb.BlobKzgCommitments; + blindedBlobsBundle?: deneb.BlindedBlobsBundle; }> { if (!chain.executionBuilder) { throw Error("executionBuilder required"); } const parentHashRes = await getExecutionPayloadParentHash(chain, state); - if (parentHashRes.isPremerge) { - // TODO: Is this okay? 
throw Error("Execution builder disabled pre-merge"); } const {parentHash} = parentHashRes; - return chain.executionBuilder.getHeader(state.slot, parentHash, proposerPubKey); + return chain.executionBuilder.getHeader(fork, state.slot, parentHash, proposerPubKey); } export async function getExecutionPayloadParentHash( diff --git a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts index 54e90672d189..0d00d0c8bd72 100644 --- a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts +++ b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts @@ -1,4 +1,4 @@ -import {allForks} from "@lodestar/types"; +import {allForks, deneb} from "@lodestar/types"; import {BlobsBundle} from "../../execution/index.js"; /** @@ -13,3 +13,15 @@ export function validateBlobsAndKzgCommitments(payload: allForks.ExecutionPayloa ); } } + +export function validateBlindedBlobsAndKzgCommitments( + payload: allForks.ExecutionPayloadHeader, + blindedBlobsBundle: deneb.BlindedBlobsBundle +): void { + // sanity-check that the KZG commitments match the blobs (as produced by the execution engine) + if (blindedBlobsBundle.blobRoots.length !== blindedBlobsBundle.commitments.length) { + throw Error( + `BlindedBlobs bundle blobs len ${blindedBlobsBundle.blobRoots.length} != commitments len ${blindedBlobsBundle.commitments.length}` + ); + } +} diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index dd111f14b4d1..5305502c8c05 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -149,14 +149,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { if (parentEpoch < blockEpoch) { const checkpointState = this.checkpointStateCache.getLatest(parentRoot, blockEpoch); if (checkpointState && computeEpochAtSlot(checkpointState.slot) === blockEpoch) { - // TODO: Miss-use of checkpointStateCache here return checkpointState; - // console.error({ - // "checkpointState.slot": checkpointState.slot, - // "block.slot": block.slot, - // blockEpoch, - // blockEpochStartSlot: computeStartSlotAtEpoch(blockEpoch), - // }); } } diff --git a/packages/beacon-node/src/eth1/provider/eth1Provider.ts b/packages/beacon-node/src/eth1/provider/eth1Provider.ts index 151d729e66e3..2d1feeb8d1e7 100644 --- a/packages/beacon-node/src/eth1/provider/eth1Provider.ts +++ b/packages/beacon-node/src/eth1/provider/eth1Provider.ts @@ -73,7 +73,10 @@ export class Eth1Provider implements IEth1Provider { this.logger = opts.logger; this.deployBlock = opts.depositContractDeployBlock ?? 0; this.depositContractAddress = toHexString(config.DEPOSIT_CONTRACT_ADDRESS); - this.rpc = new JsonRpcHttpClient(opts.providerUrls ?? DEFAULT_PROVIDER_URLS, { + + const providerUrls = opts.providerUrls ?? DEFAULT_PROVIDER_URLS; + this.logger?.info("Eth1 provider", {urls: providerUrls.toString()}); + this.rpc = new JsonRpcHttpClient(providerUrls, { signal, // Don't fallback with is truncated error. 
Throw early and let the retry on this class handle it shouldNotFallback: isJsonRpcTruncatedError, diff --git a/packages/beacon-node/src/execution/builder/http.ts b/packages/beacon-node/src/execution/builder/http.ts index 9a423e0f832d..43710bca83e1 100644 --- a/packages/beacon-node/src/execution/builder/http.ts +++ b/packages/beacon-node/src/execution/builder/http.ts @@ -1,8 +1,14 @@ import {byteArrayEquals, toHexString} from "@chainsafe/ssz"; import {allForks, bellatrix, Slot, Root, BLSPubkey, ssz, deneb, Wei} from "@lodestar/types"; +import { + parseSignedBlindedBlockOrContents, + parseExecutionPayloadAndBlobsBundle, + reconstructFullBlockOrContents, +} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; +import {Logger} from "@lodestar/logger"; import {getClient, Api as BuilderApi} from "@lodestar/api/builder"; -import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {SLOTS_PER_EPOCH, ForkExecution} from "@lodestar/params"; import {ApiError} from "@lodestar/api"; import {Metrics} from "../../metrics/metrics.js"; @@ -36,9 +42,15 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { faultInspectionWindow: number; allowedFaults: number; - constructor(opts: ExecutionBuilderHttpOpts, config: ChainForkConfig, metrics: Metrics | null = null) { + constructor( + opts: ExecutionBuilderHttpOpts, + config: ChainForkConfig, + metrics: Metrics | null = null, + logger?: Logger + ) { const baseUrl = opts.urls[0]; if (!baseUrl) throw Error("No Url provided for executionBuilder"); + logger?.info("External builder", {urls: opts.urls.toString()}); this.api = getClient( { baseUrl, @@ -91,27 +103,36 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { } async getHeader( + fork: ForkExecution, slot: Slot, parentHash: Root, proposerPubKey: BLSPubkey ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blobKzgCommitments?: deneb.BlobKzgCommitments; + blindedBlobsBundle?: deneb.BlindedBlobsBundle; }> { const res = await this.api.getHeader(slot, parentHash, proposerPubKey); ApiError.assert(res, "execution.builder.getheader"); const {header, value: executionPayloadValue} = res.response.data.message; - const {blobKzgCommitments} = res.response.data.message as {blobKzgCommitments?: deneb.BlobKzgCommitments}; - return {header, executionPayloadValue, blobKzgCommitments}; + const {blindedBlobsBundle} = res.response.data.message as deneb.BuilderBid; + return {header, executionPayloadValue, blindedBlobsBundle}; } - async submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise { - const res = await this.api.submitBlindedBlock(signedBlock); + async submitBlindedBlock( + signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents + ): Promise { + const res = await this.api.submitBlindedBlock(signedBlindedBlockOrContents); ApiError.assert(res, "execution.builder.submitBlindedBlock"); - const executionPayload = res.response.data; - const expectedTransactionsRoot = signedBlock.message.body.executionPayloadHeader.transactionsRoot; - const actualTransactionsRoot = ssz.bellatrix.Transactions.hashTreeRoot(res.response.data.transactions); + const {data} = res.response; + + const {executionPayload, blobsBundle} = parseExecutionPayloadAndBlobsBundle(data); + const {signedBlindedBlock, signedBlindedBlobSidecars} = + parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents); + + // some validations for execution payload + const expectedTransactionsRoot = 
signedBlindedBlock.message.body.executionPayloadHeader.transactionsRoot; + const actualTransactionsRoot = ssz.bellatrix.Transactions.hashTreeRoot(executionPayload.transactions); if (!byteArrayEquals(expectedTransactionsRoot, actualTransactionsRoot)) { throw Error( `Invalid transactionsRoot of the builder payload, expected=${toHexString( @@ -119,10 +140,8 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { )}, actual=${toHexString(actualTransactionsRoot)}` ); } - const fullySignedBlock: bellatrix.SignedBeaconBlock = { - ...signedBlock, - message: {...signedBlock.message, body: {...signedBlock.message.body, executionPayload}}, - }; - return fullySignedBlock; + + const blobs = blobsBundle ? blobsBundle.blobs : null; + return reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs}); } } diff --git a/packages/beacon-node/src/execution/builder/index.ts b/packages/beacon-node/src/execution/builder/index.ts index 530d541f8450..2f584ad7dadd 100644 --- a/packages/beacon-node/src/execution/builder/index.ts +++ b/packages/beacon-node/src/execution/builder/index.ts @@ -1,4 +1,5 @@ import {ChainForkConfig} from "@lodestar/config"; +import {Logger} from "@lodestar/logger"; import {Metrics} from "../../metrics/metrics.js"; import {IExecutionBuilder} from "./interface.js"; @@ -12,11 +13,12 @@ export const defaultExecutionBuilderOpts: ExecutionBuilderOpts = defaultExecutio export function initializeExecutionBuilder( opts: ExecutionBuilderOpts, config: ChainForkConfig, - metrics: Metrics | null = null + metrics: Metrics | null = null, + logger?: Logger ): IExecutionBuilder { switch (opts.mode) { case "http": default: - return new ExecutionBuilderHttp(opts, config, metrics); + return new ExecutionBuilderHttp(opts, config, metrics, logger); } } diff --git a/packages/beacon-node/src/execution/builder/interface.ts b/packages/beacon-node/src/execution/builder/interface.ts index 2bc7a19765a0..e9a2cabb69ef 100644 --- a/packages/beacon-node/src/execution/builder/interface.ts +++ b/packages/beacon-node/src/execution/builder/interface.ts @@ -1,4 +1,5 @@ import {allForks, bellatrix, Root, Slot, BLSPubkey, deneb, Wei} from "@lodestar/types"; +import {ForkExecution} from "@lodestar/params"; export interface IExecutionBuilder { /** @@ -17,13 +18,16 @@ export interface IExecutionBuilder { checkStatus(): Promise; registerValidator(registrations: bellatrix.SignedValidatorRegistrationV1[]): Promise; getHeader( + fork: ForkExecution, slot: Slot, parentHash: Root, proposerPubKey: BLSPubkey ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blobKzgCommitments?: deneb.BlobKzgCommitments; + blindedBlobsBundle?: deneb.BlindedBlobsBundle; }>; - submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise; + submitBlindedBlock( + signedBlock: allForks.SignedBlindedBeaconBlockOrContents + ): Promise; } diff --git a/packages/beacon-node/src/execution/engine/index.ts b/packages/beacon-node/src/execution/engine/index.ts index 743abf203de9..1692ea61cf92 100644 --- a/packages/beacon-node/src/execution/engine/index.ts +++ b/packages/beacon-node/src/execution/engine/index.ts @@ -31,6 +31,7 @@ export function getExecutionEngineHttp( opts: ExecutionEngineHttpOpts, modules: ExecutionEngineModules ): IExecutionEngine { + modules.logger.info("Execution client", {urls: opts.urls.toString()}); const rpc = new JsonRpcHttpClient(opts.urls, { ...opts, signal: modules.signal, diff --git a/packages/beacon-node/src/metrics/metrics/beacon.ts 
b/packages/beacon-node/src/metrics/metrics/beacon.ts index 9ea233b18fa2..2b763599f6e1 100644 --- a/packages/beacon-node/src/metrics/metrics/beacon.ts +++ b/packages/beacon-node/src/metrics/metrics/beacon.ts @@ -151,6 +151,10 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { name: "beacon_blobsidecars_produced_cache_total", help: "Count of cached produced blob sidecars", }), + producedBlindedBlobSidecarsCache: register.gauge({ + name: "beacon_blinded_blobsidecars_produced_cache_total", + help: "Count of cached produced blinded blob sidecars", + }), }, blockPayload: { diff --git a/packages/beacon-node/src/network/core/metrics.ts b/packages/beacon-node/src/network/core/metrics.ts index e5ce0bede447..4f416ad4fba2 100644 --- a/packages/beacon-node/src/network/core/metrics.ts +++ b/packages/beacon-node/src/network/core/metrics.ts @@ -291,45 +291,6 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { labelNames: ["subnet"], }), }, - - // Gossip block - gossipBlock: { - elapsedTimeTillReceived: register.histogram({ - name: "lodestar_gossip_block_elapsed_time_till_received", - help: "Time elapsed between block slot time and the time block received via gossip", - buckets: [0.5, 1, 2, 4, 6, 12], - }), - elapsedTimeTillProcessed: register.histogram({ - name: "lodestar_gossip_block_elapsed_time_till_processed", - help: "Time elapsed between block slot time and the time block processed", - buckets: [0.5, 1, 2, 4, 6, 12], - }), - receivedToGossipValidate: register.histogram({ - name: "lodestar_gossip_block_received_to_gossip_validate", - help: "Time elapsed between block received and block validated", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], - }), - receivedToStateTransition: register.histogram({ - name: "lodestar_gossip_block_received_to_state_transition", - help: "Time elapsed between block received and block state transition", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], - }), - receivedToSignaturesVerification: register.histogram({ - name: "lodestar_gossip_block_received_to_signatures_verification", - help: "Time elapsed between block received and block signatures verification", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], - }), - receivedToExecutionPayloadVerification: register.histogram({ - name: "lodestar_gossip_block_received_to_execution_payload_verification", - help: "Time elapsed between block received and execution payload verification", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], - }), - receivedToBlockImport: register.histogram({ - name: "lodestar_gossip_block_received_to_block_import", - help: "Time elapsed between block received and block import", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], - }), - }, }; } diff --git a/packages/beacon-node/src/network/network.ts b/packages/beacon-node/src/network/network.ts index fba3c7a23e22..834ebacaa7a8 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -8,7 +8,7 @@ import {computeStartSlotAtEpoch, computeTimeAtSlot} from "@lodestar/state-transi import {phase0, allForks, deneb, altair, Root, capella, SlotRootHex} from "@lodestar/types"; import {routes} from "@lodestar/api"; import {ResponseIncoming} from "@lodestar/reqresp"; -import {ForkName, ForkSeq, MAX_BLOBS_PER_BLOCK} from "@lodestar/params"; +import {ForkSeq, MAX_BLOBS_PER_BLOCK} from "@lodestar/params"; import {Metrics, RegistryMetricCreator} from "../metrics/index.js"; import {IBeaconChain} from "../chain/index.js"; import {IBeaconDb} from "../db/interface.js"; @@ 
-33,6 +33,7 @@ import {GetReqRespHandlerFn, Version, requestSszTypeByMethod, responseSszTypeByM import {collectSequentialBlocksInRange} from "./reqresp/utils/collectSequentialBlocksInRange.js"; import {getGossipSSZType, gossipTopicIgnoreDuplicatePublishError, stringifyGossipTopic} from "./gossip/topic.js"; import {AggregatorTracker} from "./processor/aggregatorTracker.js"; +import {getActiveForks} from "./forks.js"; type NetworkModules = { opts: NetworkOptions; @@ -323,11 +324,22 @@ export class Network implements INetwork { } async publishBlsToExecutionChange(blsToExecutionChange: capella.SignedBLSToExecutionChange): Promise { - return this.publishGossip( - {type: GossipType.bls_to_execution_change, fork: ForkName.capella}, - blsToExecutionChange, - {ignoreDuplicatePublishError: true} - ); + const publishChanges = []; + for (const fork of getActiveForks(this.config, this.clock.currentEpoch)) { + if (ForkSeq[fork] >= ForkSeq.capella) { + const publishPromise = this.publishGossip( + {type: GossipType.bls_to_execution_change, fork}, + blsToExecutionChange, + {ignoreDuplicatePublishError: true} + ); + publishChanges.push(publishPromise); + } + } + + if (publishChanges.length === 0) { + throw Error("No capella+ fork active yet to publish blsToExecutionChange"); + } + return Promise.any(publishChanges); } async publishProposerSlashing(proposerSlashing: phase0.ProposerSlashing): Promise { diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index 3c9f2ec0b54b..da4f802a521c 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -214,7 +214,7 @@ export class BeaconNode { logger: logger.child({module: LoggerModule.execution}), }), executionBuilder: opts.executionBuilder.enabled - ? initializeExecutionBuilder(opts.executionBuilder, config, metrics) + ? 
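Editor's note: publishBlsToExecutionChange above now publishes on every active capella+ fork topic and succeeds if any single publish succeeds. A generic sketch of that fan-out-and-race pattern; the fork names and publish callback here are illustrative stand-ins, not the real gossip topics:

```ts
// Publish a message on every eligible fork topic and resolve as soon as one publish succeeds.
// Promise.any rejects with an AggregateError only if every publish fails.
async function publishToActiveForks(
  activeForks: string[],
  minFork: string,
  forkOrder: string[],
  publish: (fork: string) => Promise<void>
): Promise<void> {
  const eligible = activeForks.filter((fork) => forkOrder.indexOf(fork) >= forkOrder.indexOf(minFork));
  if (eligible.length === 0) {
    throw new Error(`No ${minFork}+ fork active yet`);
  }
  await Promise.any(eligible.map((fork) => publish(fork)));
}

// Usage sketch with illustrative fork names.
const forkOrder = ["phase0", "altair", "bellatrix", "capella", "deneb"];
void publishToActiveForks(["capella", "deneb"], "capella", forkOrder, async (fork) => {
  console.log(`publishing bls_to_execution_change on fork ${fork}`);
});
```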
initializeExecutionBuilder(opts.executionBuilder, config, metrics, logger) : undefined, }); diff --git a/packages/cli/package.json b/packages/cli/package.json index a595d401caed..4089b1c2d5ed 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@chainsafe/lodestar", - "version": "1.11.3", + "version": "1.12.0", "description": "Command line interface for lodestar", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -59,23 +59,23 @@ "@chainsafe/bls-keystore": "^2.0.0", "@chainsafe/blst": "^0.2.9", "@chainsafe/discv5": "^5.1.0", - "@chainsafe/ssz": "^0.13.0", + "@chainsafe/persistent-merkle-tree": "^0.6.1", + "@chainsafe/ssz": "^0.14.0", "@chainsafe/threads": "^1.11.1", "@libp2p/crypto": "^2.0.4", "@libp2p/peer-id": "^3.0.2", "@libp2p/peer-id-factory": "^3.0.4", - "@chainsafe/persistent-merkle-tree": "^0.6.1", - "@lodestar/api": "^1.11.3", - "@lodestar/beacon-node": "^1.11.3", - "@lodestar/config": "^1.11.3", - "@lodestar/db": "^1.11.3", - "@lodestar/light-client": "^1.11.3", - "@lodestar/logger": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/state-transition": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", - "@lodestar/validator": "^1.11.3", + "@lodestar/api": "^1.12.0", + "@lodestar/beacon-node": "^1.12.0", + "@lodestar/config": "^1.12.0", + "@lodestar/db": "^1.12.0", + "@lodestar/light-client": "^1.12.0", + "@lodestar/logger": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/state-transition": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", + "@lodestar/validator": "^1.12.0", "@multiformats/multiaddr": "^12.1.3", "@types/lockfile": "^1.0.2", "bip39": "^3.1.0", @@ -96,7 +96,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.11.3", + "@lodestar/test-utils": "^1.12.0", "@types/debug": "^4.1.7", "@types/expand-tilde": "^2.0.0", "@types/got": "^9.6.12", diff --git a/packages/cli/src/cmds/validator/handler.ts b/packages/cli/src/cmds/validator/handler.ts index 703398d4f026..8537500684ca 100644 --- a/packages/cli/src/cmds/validator/handler.ts +++ b/packages/cli/src/cmds/validator/handler.ts @@ -1,7 +1,13 @@ import path from "node:path"; import {setMaxListeners} from "node:events"; import {LevelDbController} from "@lodestar/db"; -import {ProcessShutdownCallback, SlashingProtection, Validator, ValidatorProposerConfig} from "@lodestar/validator"; +import { + ProcessShutdownCallback, + SlashingProtection, + Validator, + ValidatorProposerConfig, + defaultOptions, +} from "@lodestar/validator"; import {routes} from "@lodestar/api"; import {getMetrics, MetricsRegister} from "@lodestar/validator"; import { @@ -216,7 +222,9 @@ function getProposerConfigFromArgs( feeRecipient: args.suggestedFeeRecipient ? parseFeeRecipient(args.suggestedFeeRecipient) : undefined, builder: { gasLimit: args.defaultGasLimit, - selection: parseBuilderSelection(args["builder.selection"]), + selection: parseBuilderSelection( + args["builder.selection"] ?? (args["builder"] ? 
defaultOptions.builderAliasSelection : undefined) + ), }, }; diff --git a/packages/cli/src/cmds/validator/keymanager/impl.ts b/packages/cli/src/cmds/validator/keymanager/impl.ts index f4b28edfb3d1..c6b0ab200c01 100644 --- a/packages/cli/src/cmds/validator/keymanager/impl.ts +++ b/packages/cli/src/cmds/validator/keymanager/impl.ts @@ -60,6 +60,28 @@ export class KeymanagerApi implements Api { ); } + async listGraffiti(pubkeyHex: string): ReturnType { + return {data: {pubkey: pubkeyHex, graffiti: this.validator.validatorStore.getGraffiti(pubkeyHex)}}; + } + + async setGraffiti(pubkeyHex: string, graffiti: string): Promise { + this.checkIfProposerWriteEnabled(); + this.validator.validatorStore.setGraffiti(pubkeyHex, graffiti); + this.persistedKeysBackend.writeProposerConfig( + pubkeyHex, + this.validator.validatorStore.getProposerConfig(pubkeyHex) + ); + } + + async deleteGraffiti(pubkeyHex: string): Promise { + this.checkIfProposerWriteEnabled(); + this.validator.validatorStore.deleteGraffiti(pubkeyHex); + this.persistedKeysBackend.writeProposerConfig( + pubkeyHex, + this.validator.validatorStore.getProposerConfig(pubkeyHex) + ); + } + async getGasLimit(pubkeyHex: string): ReturnType { const gasLimit = this.validator.validatorStore.getGasLimit(pubkeyHex); return {data: {pubkey: pubkeyHex, gasLimit}}; diff --git a/packages/cli/src/cmds/validator/options.ts b/packages/cli/src/cmds/validator/options.ts index 8daa1feda4bf..d3af927deca6 100644 --- a/packages/cli/src/cmds/validator/options.ts +++ b/packages/cli/src/cmds/validator/options.ts @@ -233,14 +233,13 @@ export const validatorOptions: CliCommandOptions = { builder: { type: "boolean", - description: "Enable execution payload production via a builder for better rewards", + description: `An alias for \`--builder.selection ${defaultOptions.builderAliasSelection}\` for the builder flow, ignored if \`--builder.selection\` is explicitly provided`, group: "builder", - deprecated: "enabling or disabling builder flow is now solely managed by `builder.selection` flag", }, "builder.selection": { type: "string", - description: "Default builder block selection strategy: `maxprofit`, `builderalways`, or `builderonly`", + description: "Builder block selection strategy `maxprofit`, `builderalways`, `builderonly` or `executiononly`", defaultDescription: `\`${defaultOptions.builderSelection}\``, group: "builder", }, diff --git a/packages/cli/src/networks/ephemery.ts b/packages/cli/src/networks/ephemery.ts new file mode 100644 index 000000000000..f3d2dcacd7f1 --- /dev/null +++ b/packages/cli/src/networks/ephemery.ts @@ -0,0 +1,8 @@ +export {ephemeryChainConfig as chainConfig} from "@lodestar/config/networks"; + +export const depositContractDeployBlock = 0; +export const genesisFileUrl = "https://ephemery.dev/latest/genesis.ssz"; +export const bootnodesFileUrl = "https://ephemery.dev/latest/bootstrap_nodes.txt"; + +// Pick from above file +export const bootEnrs = []; diff --git a/packages/cli/src/networks/index.ts b/packages/cli/src/networks/index.ts index a44575b94351..85846164b473 100644 --- a/packages/cli/src/networks/index.ts +++ b/packages/cli/src/networks/index.ts @@ -18,8 +18,18 @@ import * as ropsten from "./ropsten.js"; import * as sepolia from "./sepolia.js"; import * as holesky from "./holesky.js"; import * as chiado from "./chiado.js"; - -export type NetworkName = "mainnet" | "dev" | "gnosis" | "goerli" | "ropsten" | "sepolia" | "holesky" | "chiado"; +import * as ephemery from "./ephemery.js"; + +export type NetworkName = + | "mainnet" + | "dev" + 
| "gnosis" + | "goerli" + | "ropsten" + | "sepolia" + | "holesky" + | "chiado" + | "ephemery"; export const networkNames: NetworkName[] = [ "mainnet", "gnosis", @@ -28,6 +38,7 @@ export const networkNames: NetworkName[] = [ "sepolia", "holesky", "chiado", + "ephemery", // Leave always as last network. The order matters for the --help printout "dev", @@ -69,6 +80,8 @@ export function getNetworkData(network: NetworkName): { return holesky; case "chiado": return chiado; + case "ephemery": + return ephemery; default: throw Error(`Network not supported: ${network}`); } diff --git a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts index a57bf87ae016..01a2ba81c984 100644 --- a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts +++ b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts @@ -17,11 +17,13 @@ describe("import keystores from api, test DefaultProposerConfig", function () { const defaultOptions = { suggestedFeeRecipient: "0x0000000000000000000000000000000000000000", gasLimit: 30_000_000, + graffiti: "aaaa", }; const updatedOptions = { suggestedFeeRecipient: "0xcccccccccccccccccccccccccccccccccccccccc", gasLimit: 35_000_000, + graffiti: "bbbb", }; before("Clean dataDir", () => { @@ -47,7 +49,10 @@ describe("import keystores from api, test DefaultProposerConfig", function () { const slashingProtectionStr = JSON.stringify(slashingProtection); it("1 . run 'validator' import keys from API, getdefaultfeeRecipient", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { + dataDir, + testContext, + }); // Produce and encrypt keystores // Import test keys const keystoresStr = await getKeystoresStr(passphrase, secretKeys); @@ -73,6 +78,26 @@ describe("import keystores from api, test DefaultProposerConfig", function () { "FeeRecipient Check updated" ); + //////////////// Graffiti + + let graffiti0 = await keymanagerClient.listGraffiti(pubkeys[0]); + ApiError.assert(graffiti0); + expectDeepEquals( + graffiti0.response.data, + {pubkey: pubkeys[0], graffiti: defaultOptions.graffiti}, + "Graffiti Check default" + ); + + // Set Graffiti to updatedOptions + ApiError.assert(await keymanagerClient.setGraffiti(pubkeys[0], updatedOptions.graffiti)); + graffiti0 = await keymanagerClient.listGraffiti(pubkeys[0]); + ApiError.assert(graffiti0); + expectDeepEquals( + graffiti0.response.data, + {pubkey: pubkeys[0], graffiti: updatedOptions.graffiti}, + "FeeRecipient Check updated" + ); + /////////// GasLimit let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); @@ -95,7 +120,10 @@ describe("import keystores from api, test DefaultProposerConfig", function () { }); it("2 . 
run 'validator' Check last feeRecipient and gasLimit persists", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { + dataDir, + testContext, + }); // next time check edited feeRecipient persists let feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); @@ -116,6 +144,25 @@ describe("import keystores from api, test DefaultProposerConfig", function () { "FeeRecipient Check default after delete" ); + // graffiti persists + let graffiti0 = await keymanagerClient.listGraffiti(pubkeys[0]); + ApiError.assert(graffiti0); + expectDeepEquals( + graffiti0.response.data, + {pubkey: pubkeys[0], graffiti: updatedOptions.graffiti}, + "FeeRecipient Check default persists" + ); + + // after deletion graffiti restored to default + ApiError.assert(await keymanagerClient.deleteGraffiti(pubkeys[0])); + graffiti0 = await keymanagerClient.listGraffiti(pubkeys[0]); + ApiError.assert(graffiti0); + expectDeepEquals( + graffiti0.response.data, + {pubkey: pubkeys[0], graffiti: defaultOptions.graffiti}, + "FeeRecipient Check default after delete" + ); + // gasLimit persists let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); ApiError.assert(gasLimit0); @@ -136,7 +183,10 @@ describe("import keystores from api, test DefaultProposerConfig", function () { }); it("3 . run 'validator' FeeRecipient and GasLimit should be default after delete", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { + dataDir, + testContext, + }); const feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); ApiError.assert(feeRecipient0); @@ -146,10 +196,17 @@ describe("import keystores from api, test DefaultProposerConfig", function () { "FeeRecipient Check default persists" ); - let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + ApiError.assert(await keymanagerClient.deleteGraffiti(pubkeys[0])); + const graffiti0 = await keymanagerClient.listGraffiti(pubkeys[0]); + ApiError.assert(graffiti0); + expectDeepEquals( + graffiti0.response.data, + {pubkey: pubkeys[0], graffiti: defaultOptions.graffiti}, + "FeeRecipient Check default persists" + ); ApiError.assert(await keymanagerClient.deleteGasLimit(pubkeys[0])); - gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + const gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); ApiError.assert(gasLimit0); expectDeepEquals( gasLimit0.response.data, diff --git a/packages/cli/test/sim/multi_fork.test.ts b/packages/cli/test/sim/multi_fork.test.ts index 2cc07445ce95..0ac8d18ed055 100644 --- a/packages/cli/test/sim/multi_fork.test.ts +++ b/packages/cli/test/sim/multi_fork.test.ts @@ -218,7 +218,7 @@ await connectNewNode(unknownBlockSync, env.nodes); await sleep(5000); try { - ApiError.assert(await unknownBlockSync.beacon.api.beacon.publishBlock(headForUnknownBlockSync.response.data)); + ApiError.assert(await unknownBlockSync.beacon.api.beacon.publishBlockV2(headForUnknownBlockSync.response.data)); env.tracker.record({ message: "Publishing unknown block should fail", diff --git a/packages/config/package.json b/packages/config/package.json index 79df337ea1c5..7814c9a18778 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/config", - "version": "1.11.3", + 
"version": "1.12.0", "description": "Chain configuration required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -64,8 +64,8 @@ "blockchain" ], "dependencies": { - "@chainsafe/ssz": "^0.13.0", - "@lodestar/params": "^1.11.3", - "@lodestar/types": "^1.11.3" + "@chainsafe/ssz": "^0.14.0", + "@lodestar/params": "^1.12.0", + "@lodestar/types": "^1.12.0" } } diff --git a/packages/config/src/chainConfig/networks/ephemery.ts b/packages/config/src/chainConfig/networks/ephemery.ts new file mode 100644 index 000000000000..c338c26f2cf3 --- /dev/null +++ b/packages/config/src/chainConfig/networks/ephemery.ts @@ -0,0 +1,61 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import {fromHexString as b} from "@chainsafe/ssz"; +import {ChainConfig} from "../types.js"; +import {chainConfig as mainnet} from "../presets/mainnet.js"; + +// Ephemery dynamic beacon chain config: +// https://github.com/ephemery-testnet/ephemery-genesis/blob/master/cl-config.yaml + +// Ephemery specification: +// https://github.com/taxmeifyoucan/EIPs/blob/d298cdd8eaf47a21e7770e5c6efef870587c924d/EIPS/eip-6916.md + +// iteration 0, "base"-genesis +const baseChainConfig: ChainConfig = { + ...mainnet, + + CONFIG_NAME: "ephemery", + + // Genesis + // --------------------------------------------------------------- + MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 64, + // Thu Dec 02 2021 19:00:00 GMT+0000 + MIN_GENESIS_TIME: 1638471600, + GENESIS_FORK_VERSION: b("0x1000101b"), + GENESIS_DELAY: 300, + + // Forking + // --------------------------------------------------------------- + // Altair + ALTAIR_FORK_VERSION: b("0x2000101b"), + ALTAIR_FORK_EPOCH: 0, + // Merge + BELLATRIX_FORK_VERSION: b("0x3000101b"), + BELLATRIX_FORK_EPOCH: 0, + TERMINAL_TOTAL_DIFFICULTY: BigInt("0"), + // Capella + CAPELLA_FORK_VERSION: b("0x4000101b"), + CAPELLA_FORK_EPOCH: 0, + // Deneb + DENEB_FORK_VERSION: b("0x5000101b"), + + // Deposit contract + // --------------------------------------------------------------- + DEPOSIT_CHAIN_ID: 39438000, + DEPOSIT_NETWORK_ID: 39438000, + DEPOSIT_CONTRACT_ADDRESS: b("0x4242424242424242424242424242424242424242"), + + ETH1_FOLLOW_DISTANCE: 12, +}; + +// Reset interval (7 days) in milliseconds, based on ephemery-genesis values.env: +// https://github.com/ephemery-testnet/ephemery-genesis/blob/9a28fbef950c8547d78785f8a0ea49a95ce19a48/values.env#L5 +const RESET_INTERVAL_MS = 604800000; +const iteration = Math.floor(Date.now() - baseChainConfig.MIN_GENESIS_TIME) / RESET_INTERVAL_MS; + +export const ephemeryChainConfig: ChainConfig = { + ...baseChainConfig, + + MIN_GENESIS_TIME: RESET_INTERVAL_MS * iteration + baseChainConfig.MIN_GENESIS_TIME, + DEPOSIT_CHAIN_ID: baseChainConfig.DEPOSIT_CHAIN_ID + iteration, + DEPOSIT_NETWORK_ID: baseChainConfig.DEPOSIT_NETWORK_ID + iteration, +}; diff --git a/packages/config/src/networks.ts b/packages/config/src/networks.ts index e9d549fa1e75..8ff3cdd15256 100644 --- a/packages/config/src/networks.ts +++ b/packages/config/src/networks.ts @@ -6,6 +6,7 @@ import {ropstenChainConfig} from "./chainConfig/networks/ropsten.js"; import {sepoliaChainConfig} from "./chainConfig/networks/sepolia.js"; import {holeskyChainConfig} from "./chainConfig/networks/holesky.js"; import {chiadoChainConfig} from "./chainConfig/networks/chiado.js"; +import {ephemeryChainConfig} from "./chainConfig/networks/ephemery.js"; export { mainnetChainConfig, @@ -15,9 +16,10 @@ export { sepoliaChainConfig, holeskyChainConfig, chiadoChainConfig, + ephemeryChainConfig, }; -export type NetworkName = 
"mainnet" | "gnosis" | "goerli" | "ropsten" | "sepolia" | "holesky" | "chiado"; +export type NetworkName = "mainnet" | "gnosis" | "goerli" | "ropsten" | "sepolia" | "holesky" | "chiado" | "ephemery"; export const networksChainConfig: Record = { mainnet: mainnetChainConfig, gnosis: gnosisChainConfig, @@ -26,6 +28,7 @@ export const networksChainConfig: Record = { sepolia: sepoliaChainConfig, holesky: holeskyChainConfig, chiado: chiadoChainConfig, + ephemery: ephemeryChainConfig, }; export type GenesisData = { @@ -62,4 +65,8 @@ export const genesisData: Record = { genesisTime: 1665396300, genesisValidatorsRoot: "0x9d642dac73058fbf39c0ae41ab1e34e4d889043cb199851ded7095bc99eb4c1e", }, + ephemery: { + genesisTime: ephemeryChainConfig.MIN_GENESIS_TIME + ephemeryChainConfig.GENESIS_DELAY, + genesisValidatorsRoot: "0x0000000000000000000000000000000000000000000000000000000000000000", + }, }; diff --git a/packages/db/package.json b/packages/db/package.json index a8e48a5eb020..961dadf19ecf 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/db", - "version": "1.11.3", + "version": "1.12.0", "description": "DB modules of Lodestar", "author": "ChainSafe Systems", "homepage": "https://github.com/ChainSafe/lodestar#readme", @@ -37,14 +37,14 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.13.0", - "@lodestar/config": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@chainsafe/ssz": "^0.14.0", + "@lodestar/config": "^1.12.0", + "@lodestar/utils": "^1.12.0", "@types/levelup": "^4.3.3", "it-all": "^3.0.2", "level": "^8.0.0" }, "devDependencies": { - "@lodestar/logger": "^1.11.3" + "@lodestar/logger": "^1.12.0" } } diff --git a/packages/flare/package.json b/packages/flare/package.json index e311c17fc040..dc4157415bd8 100644 --- a/packages/flare/package.json +++ b/packages/flare/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/flare", - "version": "1.11.3", + "version": "1.12.0", "description": "Beacon chain debugging tool", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -60,12 +60,12 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/bls-keygen": "^0.3.0", - "@lodestar/api": "^1.11.3", - "@lodestar/config": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/state-transition": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@lodestar/api": "^1.12.0", + "@lodestar/config": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/state-transition": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", "source-map-support": "^0.5.21", "yargs": "^17.7.1" }, diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json index ec12497592ac..5789c0109dd9 100644 --- a/packages/fork-choice/package.json +++ b/packages/fork-choice/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": "./lib/index.js", "types": "./lib/index.d.ts", @@ -38,12 +38,12 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.13.0", - "@lodestar/config": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/state-transition": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3" + "@chainsafe/ssz": "^0.14.0", + "@lodestar/config": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/state-transition": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0" }, 
"keywords": [ "ethereum", diff --git a/packages/light-client/package.json b/packages/light-client/package.json index 8a33f2fa862c..c6068fa1b4d9 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -66,13 +66,13 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/persistent-merkle-tree": "^0.6.1", - "@chainsafe/ssz": "^0.13.0", - "@lodestar/api": "^1.11.3", - "@lodestar/config": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/state-transition": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@chainsafe/ssz": "^0.14.0", + "@lodestar/api": "^1.12.0", + "@lodestar/config": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/state-transition": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", "mitt": "^3.0.0", "strict-event-emitter-types": "^2.0.0" }, diff --git a/packages/logger/package.json b/packages/logger/package.json index a7960889644d..cc26cb7123c7 100644 --- a/packages/logger/package.json +++ b/packages/logger/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -63,13 +63,13 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@lodestar/utils": "^1.11.3", + "@lodestar/utils": "^1.12.0", "winston": "^3.8.2", "winston-daily-rotate-file": "^4.7.1", "winston-transport": "^4.5.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.11.3", + "@lodestar/test-utils": "^1.12.0", "@types/triple-beam": "^1.3.2", "rimraf": "^4.4.1", "triple-beam": "^1.3.0" diff --git a/packages/logger/src/utils/format.ts b/packages/logger/src/utils/format.ts index 21e2521c5796..2340876a371e 100644 --- a/packages/logger/src/utils/format.ts +++ b/packages/logger/src/utils/format.ts @@ -1,5 +1,5 @@ import winston from "winston"; -import {isEmptyObject} from "@lodestar/utils"; +import {LodestarError, isEmptyObject} from "@lodestar/utils"; import {LoggerOptions, TimestampFormatCode} from "../interface.js"; import {logCtxToJson, logCtxToString, LogData} from "./json.js"; import {formatEpochSlotTime} from "./timeFormat.js"; @@ -88,7 +88,14 @@ function humanReadableTemplateFn(_info: {[key: string]: any; level: string; mess str += `[${infoString}] ${info.level.padStart(infoPad)}: ${info.message}`; if (info.context !== undefined && !isEmptyObject(info.context)) str += " " + logCtxToString(info.context); - if (info.error !== undefined) str += " - " + logCtxToString(info.error); + if (info.error !== undefined) { + str += + // LodestarError is formatted in the same way as context, it is either appended to + // the log message (" ") or extends existing context properties (", "). For any other + // error, the message is printed out and clearly separated from the log message (" - "). + (info.error instanceof LodestarError ? (isEmptyObject(info.context) ? 
" " : ", ") : " - ") + + logCtxToString(info.error); + } return str; } diff --git a/packages/logger/test/fixtures/loggerFormats.ts b/packages/logger/test/fixtures/loggerFormats.ts index fffaaf9ea2f0..563f3094882d 100644 --- a/packages/logger/test/fixtures/loggerFormats.ts +++ b/packages/logger/test/fixtures/loggerFormats.ts @@ -71,10 +71,27 @@ export const formatsTestCases: (TestCase | (() => TestCase))[] = [ id: "error with metadata", opts: {module: "test"}, message: "foo bar", + context: {}, + error: error, + output: { + human: `[test] \u001b[33mwarn\u001b[39m: foo bar code=SAMPLE_ERROR, data=foo=bar\n${error.stack}`, + json: '{"context":{},"error":{"code":"SAMPLE_ERROR","data":{"foo":"bar"},"stack":"$STACK"},"level":"warn","message":"foo bar","module":"test"}', + }, + }; + }, + + () => { + const error = new LodestarError({code: "SAMPLE_ERROR", data: {foo: "bar"}}); + error.stack = "$STACK"; + return { + id: "error and log with metadata", + opts: {module: "test"}, + message: "foo bar", + context: {meta: "data"}, error: error, output: { - human: `[test] \u001b[33mwarn\u001b[39m: foo bar - code=SAMPLE_ERROR, data=foo=bar\n${error.stack}`, - json: '{"error":{"code":"SAMPLE_ERROR","data":{"foo":"bar"},"stack":"$STACK"},"level":"warn","message":"foo bar","module":"test"}', + human: `[test] \u001b[33mwarn\u001b[39m: foo bar meta=data, code=SAMPLE_ERROR, data=foo=bar\n${error.stack}`, + json: '{"context":{"meta":"data"},"error":{"code":"SAMPLE_ERROR","data":{"foo":"bar"},"stack":"$STACK"},"level":"warn","message":"foo bar","module":"test"}', }, }; }, diff --git a/packages/params/package.json b/packages/params/package.json index 53cffc878c4c..e3cacfc45b60 100644 --- a/packages/params/package.json +++ b/packages/params/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/params", - "version": "1.11.3", + "version": "1.12.0", "description": "Chain parameters required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", diff --git a/packages/prover/package.json b/packages/prover/package.json index e2d8e810ec1e..3016c3aeb95f 100644 --- a/packages/prover/package.json +++ b/packages/prover/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -72,13 +72,13 @@ "@ethereumjs/tx": "^4.1.2", "@ethereumjs/util": "^8.0.6", "@ethereumjs/vm": "^6.4.2", - "@lodestar/api": "^1.11.3", - "@lodestar/config": "^1.11.3", - "@lodestar/light-client": "^1.11.3", - "@lodestar/logger": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@lodestar/api": "^1.12.0", + "@lodestar/config": "^1.12.0", + "@lodestar/light-client": "^1.12.0", + "@lodestar/logger": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", "ethereum-cryptography": "^1.2.0", "find-up": "^6.3.0", "http-proxy": "^1.18.1", @@ -87,7 +87,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.11.3", + "@lodestar/test-utils": "^1.12.0", "@types/http-proxy": "^1.17.10", "@types/yargs": "^17.0.24", "axios": "^1.3.4", diff --git a/packages/prover/src/utils/evm.ts b/packages/prover/src/utils/evm.ts index ecebda78b8ad..63cfd3f14026 100644 --- a/packages/prover/src/utils/evm.ts +++ b/packages/prover/src/utils/evm.ts @@ -56,7 +56,7 @@ export async function getVMWithState({ const accessListTx = cleanObject({ to, from, - data: tx.data, + data: tx.input ? 
tx.input : tx.data, value: tx.value, gas: tx.gas ? tx.gas : numberToHex(gasLimit), gasPrice: "0x0", diff --git a/packages/prover/src/utils/execution.ts b/packages/prover/src/utils/execution.ts index dcab3d7d7fb4..083f15e1e5dd 100644 --- a/packages/prover/src/utils/execution.ts +++ b/packages/prover/src/utils/execution.ts @@ -46,6 +46,7 @@ export function getChainCommon(network: string): Common { case "ropsten": case "sepolia": case "holesky": + case "ephemery": // TODO: Not sure how to detect the fork during runtime return new Common({chain: network, hardfork: Hardfork.Shanghai}); case "minimal": diff --git a/packages/reqresp/package.json b/packages/reqresp/package.json index af826c0467f2..4e8923e6cf60 100644 --- a/packages/reqresp/package.json +++ b/packages/reqresp/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -56,9 +56,9 @@ "dependencies": { "@chainsafe/fast-crc32c": "^4.1.1", "@libp2p/interface": "^0.1.2", - "@lodestar/config": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@lodestar/config": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/utils": "^1.12.0", "it-all": "^3.0.2", "it-pipe": "^3.0.1", "snappy": "^7.2.2", @@ -67,8 +67,8 @@ "uint8arraylist": "^2.4.3" }, "devDependencies": { - "@lodestar/logger": "^1.11.3", - "@lodestar/types": "^1.11.3", + "@lodestar/logger": "^1.12.0", + "@lodestar/types": "^1.12.0", "libp2p": "0.46.12" }, "peerDependencies": { diff --git a/packages/spec-test-util/package.json b/packages/spec-test-util/package.json index 5a79e277ae8c..79324d813caa 100644 --- a/packages/spec-test-util/package.json +++ b/packages/spec-test-util/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/spec-test-util", - "version": "1.11.3", + "version": "1.12.0", "description": "Spec test suite generator from yaml test files", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -45,7 +45,7 @@ "blockchain" ], "dependencies": { - "@lodestar/utils": "^1.11.3", + "@lodestar/utils": "^1.12.0", "async-retry": "^1.3.3", "axios": "^1.3.4", "chai": "^4.3.7", diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index ec2b7dfe0b31..f743861f54ec 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -59,13 +59,14 @@ "dependencies": { "@chainsafe/as-sha256": "^0.3.1", "@chainsafe/bls": "7.1.1", + "@chainsafe/blst": "^0.2.9", "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/persistent-ts": "^0.19.1", - "@chainsafe/ssz": "^0.13.0", - "@lodestar/config": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@chainsafe/ssz": "^0.14.0", + "@lodestar/config": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", "bigint-buffer": "^1.1.5", "buffer-xor": "^2.0.2" }, diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index db698b40f053..0ca09526e7ec 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -26,12 +26,12 @@ import { computeProposers, getActivationChurnLimit, } from "../util/index.js"; -import 
{computeEpochShuffling, EpochShuffling} from "../util/epochShuffling.js"; +import {computeEpochShuffling, EpochShuffling, getShufflingDecisionBlock} from "../util/epochShuffling.js"; import {computeBaseRewardPerIncrement, computeSyncParticipantReward} from "../util/syncCommittee.js"; import {sumTargetUnslashedBalanceIncrements} from "../util/targetUnslashedBalance.js"; import {EffectiveBalanceIncrements, getEffectiveBalanceIncrementsWithLen} from "./effectiveBalanceIncrements.js"; import {Index2PubkeyCache, PubkeyIndexMap, syncPubkeys} from "./pubkeyCache.js"; -import {BeaconStateAllForks, BeaconStateAltair} from "./types.js"; +import {BeaconStateAllForks, BeaconStateAltair, ShufflingGetter} from "./types.js"; import { computeSyncCommitteeCache, getSyncCommitteeCache, @@ -51,6 +51,7 @@ export type EpochCacheImmutableData = { export type EpochCacheOpts = { skipSyncCommitteeCache?: boolean; skipSyncPubkeys?: boolean; + shufflingGetter?: ShufflingGetter; }; /** Defers computing proposers by persisting only the seed, and dropping it once indexes are computed */ @@ -280,21 +281,32 @@ export class EpochCache { const currentActiveIndices: ValidatorIndex[] = []; const nextActiveIndices: ValidatorIndex[] = []; + // BeaconChain could provide a shuffling cache to avoid re-computing shuffling every epoch + // in that case, we don't need to compute shufflings again + const previousShufflingDecisionBlock = getShufflingDecisionBlock(state, previousEpoch); + const cachedPreviousShuffling = opts?.shufflingGetter?.(previousEpoch, previousShufflingDecisionBlock); + const currentShufflingDecisionBlock = getShufflingDecisionBlock(state, currentEpoch); + const cachedCurrentShuffling = opts?.shufflingGetter?.(currentEpoch, currentShufflingDecisionBlock); + const nextShufflingDecisionBlock = getShufflingDecisionBlock(state, nextEpoch); + const cachedNextShuffling = opts?.shufflingGetter?.(nextEpoch, nextShufflingDecisionBlock); + for (let i = 0; i < validatorCount; i++) { const validator = validators[i]; // Note: Not usable for fork-choice balances since in-active validators are not zero'ed effectiveBalanceIncrements[i] = Math.floor(validator.effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); - if (isActiveValidator(validator, previousEpoch)) { + // we only need to track active indices for previous, current and next epoch if we have to compute shufflings + // skip doing that if we already have cached shufflings + if (cachedPreviousShuffling == null && isActiveValidator(validator, previousEpoch)) { previousActiveIndices.push(i); } - if (isActiveValidator(validator, currentEpoch)) { + if (cachedCurrentShuffling == null && isActiveValidator(validator, currentEpoch)) { currentActiveIndices.push(i); // We track totalActiveBalanceIncrements as ETH to fit total network balance in a JS number (53 bits) totalActiveBalanceIncrements += effectiveBalanceIncrements[i]; } - if (isActiveValidator(validator, nextEpoch)) { + if (cachedNextShuffling == null && isActiveValidator(validator, nextEpoch)) { nextActiveIndices.push(i); } @@ -317,11 +329,11 @@ export class EpochCache { throw Error("totalActiveBalanceIncrements >= Number.MAX_SAFE_INTEGER. MAX_EFFECTIVE_BALANCE is too low."); } - const currentShuffling = computeEpochShuffling(state, currentActiveIndices, currentEpoch); - const previousShuffling = isGenesis - ? 
currentShuffling - : computeEpochShuffling(state, previousActiveIndices, previousEpoch); - const nextShuffling = computeEpochShuffling(state, nextActiveIndices, nextEpoch); + const currentShuffling = cachedCurrentShuffling ?? computeEpochShuffling(state, currentActiveIndices, currentEpoch); + const previousShuffling = + cachedPreviousShuffling ?? + (isGenesis ? currentShuffling : computeEpochShuffling(state, previousActiveIndices, previousEpoch)); + const nextShuffling = cachedNextShuffling ?? computeEpochShuffling(state, nextActiveIndices, nextEpoch); const currentProposerSeed = getSeed(state, currentEpoch, DOMAIN_BEACON_PROPOSER); diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index f8ce97d5ffbd..14a29b5f09c0 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -1,4 +1,7 @@ +import bls from "@chainsafe/bls"; +import {CoordType} from "@chainsafe/blst"; import {BeaconConfig} from "@lodestar/config"; +import {loadState} from "../util/loadState/loadState.js"; import {EpochCache, EpochCacheImmutableData, EpochCacheOpts} from "./epochCache.js"; import { BeaconStateAllForks, @@ -137,13 +140,49 @@ export function createCachedBeaconState( immutableData: EpochCacheImmutableData, opts?: EpochCacheOpts ): T & BeaconStateCache { - return getCachedBeaconState(state, { + const epochCache = EpochCache.createFromState(state, immutableData, opts); + const cachedState = getCachedBeaconState(state, { config: immutableData.config, - epochCtx: EpochCache.createFromState(state, immutableData, opts), + epochCtx: epochCache, clonedCount: 0, clonedCountWithTransferCache: 0, createdWithTransferCache: false, }); + + return cachedState; +} + +/** + * Create a CachedBeaconState given a cached seed state and state bytes + * This guarantees that the returned state shares the same tree with the seed state + * Check loadState() api for more details + * TODO: after EIP-6110 need to provide a pivotValidatorIndex to decide which comes to finalized validators cache, which comes to unfinalized cache + */ +export function loadUnfinalizedCachedBeaconState( + cachedSeedState: T, + stateBytes: Uint8Array, + opts?: EpochCacheOpts +): T { + const {state: migratedState, modifiedValidators} = loadState(cachedSeedState.config, cachedSeedState, stateBytes); + const {pubkey2index, index2pubkey} = cachedSeedState.epochCtx; + // Get the validators sub tree once for all the loop + const validators = migratedState.validators; + for (const validatorIndex of modifiedValidators) { + const validator = validators.getReadonly(validatorIndex); + const pubkey = validator.pubkey; + pubkey2index.set(pubkey, validatorIndex); + index2pubkey[validatorIndex] = bls.PublicKey.fromBytes(pubkey, CoordType.jacobian); + } + + return createCachedBeaconState( + migratedState, + { + config: cachedSeedState.config, + pubkey2index, + index2pubkey, + }, + {...(opts ?? 
{}), ...{skipSyncPubkeys: true}} + ) as T; } /** diff --git a/packages/state-transition/src/cache/types.ts b/packages/state-transition/src/cache/types.ts index 9d0115cee780..39b1dbb4b45b 100644 --- a/packages/state-transition/src/cache/types.ts +++ b/packages/state-transition/src/cache/types.ts @@ -1,5 +1,6 @@ import {CompositeViewDU} from "@chainsafe/ssz"; -import {ssz} from "@lodestar/types"; +import {Epoch, RootHex, ssz} from "@lodestar/types"; +import {EpochShuffling} from "../util/epochShuffling.js"; export type BeaconStatePhase0 = CompositeViewDU; export type BeaconStateAltair = CompositeViewDU; @@ -20,3 +21,5 @@ export type BeaconStateAllForks = | BeaconStateDeneb; export type BeaconStateExecutions = BeaconStateBellatrix | BeaconStateCapella | BeaconStateDeneb; + +export type ShufflingGetter = (shufflingEpoch: Epoch, dependentRoot: RootHex) => EpochShuffling | null; diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index 433aa45cf7e6..e72b6fa0581c 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -25,6 +25,7 @@ export type { // Main state caches export { createCachedBeaconState, + loadUnfinalizedCachedBeaconState, type BeaconStateCache, isCachedBeaconState, isStateBalancesNodesPopulated, diff --git a/packages/state-transition/src/util/blindedBlock.ts b/packages/state-transition/src/util/blindedBlock.ts index 02f0397a33e7..8c271e7fec81 100644 --- a/packages/state-transition/src/util/blindedBlock.ts +++ b/packages/state-transition/src/util/blindedBlock.ts @@ -1,9 +1,24 @@ import {ChainForkConfig} from "@lodestar/config"; import {ForkSeq} from "@lodestar/params"; -import {allForks, phase0, Root, isBlindedBeaconBlock, isBlindedBlobSidecar, deneb, ssz} from "@lodestar/types"; +import { + allForks, + phase0, + Root, + deneb, + ssz, + isBlindedBeaconBlock, + isBlindedBlobSidecar, + isSignedBlindedBlockContents, + isExecutionPayloadAndBlobsBundle, +} from "@lodestar/types"; import {executionPayloadToPayloadHeader} from "./execution.js"; +type ParsedSignedBlindedBlockOrContents = { + signedBlindedBlock: allForks.SignedBlindedBeaconBlock; + signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars | null; +}; + export function blindedOrFullBlockHashTreeRoot( config: ChainForkConfig, blindedOrFull: allForks.FullOrBlindedBeaconBlock @@ -99,3 +114,55 @@ export function signedBlindedBlobSidecarsToFull( }); return signedBlobSidecars; } + +export function parseSignedBlindedBlockOrContents( + signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents +): ParsedSignedBlindedBlockOrContents { + if (isSignedBlindedBlockContents(signedBlindedBlockOrContents)) { + const signedBlindedBlock = signedBlindedBlockOrContents.signedBlindedBlock; + const signedBlindedBlobSidecars = signedBlindedBlockOrContents.signedBlindedBlobSidecars; + return {signedBlindedBlock, signedBlindedBlobSidecars}; + } else { + return {signedBlindedBlock: signedBlindedBlockOrContents, signedBlindedBlobSidecars: null}; + } +} + +export function parseExecutionPayloadAndBlobsBundle( + data: allForks.ExecutionPayload | allForks.ExecutionPayloadAndBlobsBundle +): {executionPayload: allForks.ExecutionPayload; blobsBundle: deneb.BlobsBundle | null} { + if (isExecutionPayloadAndBlobsBundle(data)) { + return data; + } else { + return { + executionPayload: data, + blobsBundle: null, + }; + } +} + +export function reconstructFullBlockOrContents( + {signedBlindedBlock, signedBlindedBlobSidecars}: ParsedSignedBlindedBlockOrContents, + 
{executionPayload, blobs}: {executionPayload: allForks.ExecutionPayload | null; blobs: deneb.Blobs | null} +): allForks.SignedBeaconBlockOrContents { + const signedBlock = signedBlindedBlockToFull(signedBlindedBlock, executionPayload); + + if (signedBlindedBlobSidecars !== null) { + if (executionPayload === null) { + throw Error("Missing locally produced executionPayload for deneb+ publishBlindedBlock"); + } + + if (blobs === null) { + throw Error("Missing blobs from the local execution cache"); + } + if (blobs.length !== signedBlindedBlobSidecars.length) { + throw Error( + `Length mismatch signedBlindedBlobSidecars=${signedBlindedBlobSidecars.length} blobs=${blobs.length}` + ); + } + const signedBlobSidecars = signedBlindedBlobSidecarsToFull(signedBlindedBlobSidecars, blobs); + + return {signedBlock, signedBlobSidecars} as allForks.SignedBeaconBlockOrContents; + } else { + return signedBlock as allForks.SignedBeaconBlockOrContents; + } +} diff --git a/packages/state-transition/src/util/epochShuffling.ts b/packages/state-transition/src/util/epochShuffling.ts index 37ac6ba0c8d9..f9172126250f 100644 --- a/packages/state-transition/src/util/epochShuffling.ts +++ b/packages/state-transition/src/util/epochShuffling.ts @@ -1,4 +1,5 @@ -import {Epoch, ValidatorIndex} from "@lodestar/types"; +import {toHexString} from "@chainsafe/ssz"; +import {Epoch, RootHex, ValidatorIndex} from "@lodestar/types"; import {intDiv} from "@lodestar/utils"; import { DOMAIN_BEACON_ATTESTER, @@ -9,6 +10,8 @@ import { import {BeaconStateAllForks} from "../types.js"; import {getSeed} from "./seed.js"; import {unshuffleList} from "./shuffle.js"; +import {computeStartSlotAtEpoch} from "./epoch.js"; +import {getBlockRootAtSlot} from "./blockRoot.js"; /** * Readonly interface for EpochShuffling. 
@@ -95,3 +98,8 @@ export function computeEpochShuffling( committeesPerSlot, }; } + +export function getShufflingDecisionBlock(state: BeaconStateAllForks, epoch: Epoch): RootHex { + const pivotSlot = computeStartSlotAtEpoch(epoch - 1) - 1; + return toHexString(getBlockRootAtSlot(state, pivotSlot)); +} diff --git a/packages/state-transition/src/util/loadState/findModifiedInactivityScores.ts b/packages/state-transition/src/util/loadState/findModifiedInactivityScores.ts new file mode 100644 index 000000000000..f76e4dc650dc --- /dev/null +++ b/packages/state-transition/src/util/loadState/findModifiedInactivityScores.ts @@ -0,0 +1,47 @@ +// UintNum64 = 8 bytes +export const INACTIVITY_SCORE_SIZE = 8; + +/** + * As monitored on mainnet, inactivityScores are not changed much and they are mostly 0 + * Using Buffer.compare is the fastest way as noted in `./findModifiedValidators.ts` + * @returns output parameter modifiedValidators: validator indices that are modified + */ +export function findModifiedInactivityScores( + inactivityScoresBytes: Uint8Array, + inactivityScoresBytes2: Uint8Array, + modifiedValidators: number[], + validatorOffset = 0 +): void { + if (inactivityScoresBytes.length !== inactivityScoresBytes2.length) { + throw new Error( + "inactivityScoresBytes.length !== inactivityScoresBytes2.length " + + inactivityScoresBytes.length + + " vs " + + inactivityScoresBytes2.length + ); + } + + if (Buffer.compare(inactivityScoresBytes, inactivityScoresBytes2) === 0) { + return; + } + + if (inactivityScoresBytes.length === INACTIVITY_SCORE_SIZE) { + modifiedValidators.push(validatorOffset); + return; + } + + const numValidator = Math.floor(inactivityScoresBytes.length / INACTIVITY_SCORE_SIZE); + const halfValidator = Math.floor(numValidator / 2); + findModifiedInactivityScores( + inactivityScoresBytes.subarray(0, halfValidator * INACTIVITY_SCORE_SIZE), + inactivityScoresBytes2.subarray(0, halfValidator * INACTIVITY_SCORE_SIZE), + modifiedValidators, + validatorOffset + ); + findModifiedInactivityScores( + inactivityScoresBytes.subarray(halfValidator * INACTIVITY_SCORE_SIZE), + inactivityScoresBytes2.subarray(halfValidator * INACTIVITY_SCORE_SIZE), + modifiedValidators, + validatorOffset + halfValidator + ); +} diff --git a/packages/state-transition/src/util/loadState/findModifiedValidators.ts b/packages/state-transition/src/util/loadState/findModifiedValidators.ts new file mode 100644 index 000000000000..b47789f42b47 --- /dev/null +++ b/packages/state-transition/src/util/loadState/findModifiedValidators.ts @@ -0,0 +1,46 @@ +import {VALIDATOR_BYTES_SIZE} from "../sszBytes.js"; + +/** + * Find modified validators by comparing two validators bytes using Buffer.compare() recursively + * - As noted in packages/state-transition/test/perf/util/loadState/findModifiedValidators.test.ts, serializing validators and compare Uint8Array is the fastest way + * - The performance is quite stable and can afford a lot of difference in validators (the benchmark tested up to 10k but it's not likely we have that difference in mainnet) + * - Also packages/state-transition/test/perf/misc/byteArrayEquals.test.ts shows that Buffer.compare() is very efficient for large Uint8Array + * + * @returns output parameter modifiedValidators: validator indices that are modified + */ +export function findModifiedValidators( + validatorsBytes: Uint8Array, + validatorsBytes2: Uint8Array, + modifiedValidators: number[], + validatorOffset = 0 +): void { + if (validatorsBytes.length !== validatorsBytes2.length) { + throw new Error( + 
"validatorsBytes.length !== validatorsBytes2.length " + validatorsBytes.length + " vs " + validatorsBytes2.length + ); + } + + if (Buffer.compare(validatorsBytes, validatorsBytes2) === 0) { + return; + } + + if (validatorsBytes.length === VALIDATOR_BYTES_SIZE) { + modifiedValidators.push(validatorOffset); + return; + } + + const numValidator = Math.floor(validatorsBytes.length / VALIDATOR_BYTES_SIZE); + const halfValidator = Math.floor(numValidator / 2); + findModifiedValidators( + validatorsBytes.subarray(0, halfValidator * VALIDATOR_BYTES_SIZE), + validatorsBytes2.subarray(0, halfValidator * VALIDATOR_BYTES_SIZE), + modifiedValidators, + validatorOffset + ); + findModifiedValidators( + validatorsBytes.subarray(halfValidator * VALIDATOR_BYTES_SIZE), + validatorsBytes2.subarray(halfValidator * VALIDATOR_BYTES_SIZE), + modifiedValidators, + validatorOffset + halfValidator + ); +} diff --git a/packages/state-transition/src/util/loadState/loadState.ts b/packages/state-transition/src/util/loadState/loadState.ts new file mode 100644 index 000000000000..83377101609d --- /dev/null +++ b/packages/state-transition/src/util/loadState/loadState.ts @@ -0,0 +1,201 @@ +import {deserializeContainerIgnoreFields, ssz} from "@lodestar/types"; +import {ForkSeq} from "@lodestar/params"; +import {ChainForkConfig} from "@lodestar/config"; +import {BeaconStateAllForks, BeaconStateAltair} from "../../types.js"; +import {VALIDATOR_BYTES_SIZE, getForkFromStateBytes, getStateTypeFromBytes} from "../sszBytes.js"; +import {findModifiedValidators} from "./findModifiedValidators.js"; +import {findModifiedInactivityScores} from "./findModifiedInactivityScores.js"; +import {loadValidator} from "./loadValidator.js"; + +type MigrateStateOutput = {state: BeaconStateAllForks; modifiedValidators: number[]}; + +/** + * Load state from bytes given a seed state so that we share the same base tree. This gives some benefits: + * - Have single base tree across the application + * - Faster to load state + * - Less memory usage + * - Utilize the cached HashObjects in seed state due to a lot of validators are not changed, also the inactivity scores. 
+ * @returns the new state and modified validators + */ +export function loadState( + config: ChainForkConfig, + seedState: BeaconStateAllForks, + stateBytes: Uint8Array +): MigrateStateOutput { + // casting only to make typescript happy + const stateType = getStateTypeFromBytes(config, stateBytes) as typeof ssz.capella.BeaconState; + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + const fieldRanges = stateType.getFieldRanges(dataView, 0, stateBytes.length); + const allFields = Object.keys(stateType.fields); + const validatorsFieldIndex = allFields.indexOf("validators"); + // start with default view has the same performance to start with seed state + // and it is not fork dependent + const migratedState = deserializeContainerIgnoreFields( + stateType, + stateBytes, + ["validators", "inactivityScores"], + fieldRanges + ) as BeaconStateAllForks; + + // validators are rarely changed + const validatorsRange = fieldRanges[validatorsFieldIndex]; + const modifiedValidators = loadValidators( + migratedState, + seedState, + stateBytes.subarray(validatorsRange.start, validatorsRange.end) + ); + + // inactivityScores are rarely changed + // this saves ~500ms of hashTreeRoot() time of state + const fork = getForkFromStateBytes(config, stateBytes); + const seedFork = config.getForkSeq(seedState.slot); + + if (fork >= ForkSeq.altair && seedFork >= ForkSeq.altair) { + const inactivityScoresIndex = allFields.indexOf("inactivityScores"); + const inactivityScoresRange = fieldRanges[inactivityScoresIndex]; + loadInactivityScores( + migratedState as BeaconStateAltair, + seedState as BeaconStateAltair, + stateBytes.subarray(inactivityScoresRange.start, inactivityScoresRange.end) + ); + } + migratedState.commit(); + + return {state: migratedState, modifiedValidators}; +} + +/** + * This value is rarely changed as monitored 3 month state diffs on mainnet as of Sep 2023. + * Reusing this data helps save hashTreeRoot time of state ~500ms + * + * Given the below tree: + * + * seedState.inactivityScores ====> ROOT + * / \ + * Hash01 Hash23 + * / \ / \ + * Sco0 Sco1 Sco2 Sco3 + * + * if score 3 is modified, the new tree looks like this: + * + * migratedState.inactivityScores ====> ROOTa + * / \ + * Hash01 Hash23a + * / \ / \ + * Sco0 Sco1 Sco2 Sco3a + */ +function loadInactivityScores( + migratedState: BeaconStateAltair, + seedState: BeaconStateAltair, + inactivityScoresBytes: Uint8Array +): void { + // migratedState starts with the same inactivityScores to seed state + migratedState.inactivityScores = seedState.inactivityScores.clone(); + const oldValidator = migratedState.inactivityScores.length; + // UintNum64 = 8 bytes + const newValidator = inactivityScoresBytes.length / 8; + const minValidator = Math.min(oldValidator, newValidator); + const oldInactivityScores = migratedState.inactivityScores.serialize(); + const isMoreValidator = newValidator >= oldValidator; + const modifiedValidators: number[] = []; + findModifiedInactivityScores( + isMoreValidator ? oldInactivityScores : oldInactivityScores.subarray(0, minValidator * 8), + isMoreValidator ? 
inactivityScoresBytes.subarray(0, minValidator * 8) : inactivityScoresBytes, + modifiedValidators + ); + + for (const validatorIndex of modifiedValidators) { + migratedState.inactivityScores.set( + validatorIndex, + ssz.UintNum64.deserialize(inactivityScoresBytes.subarray(validatorIndex * 8, (validatorIndex + 1) * 8)) + ); + } + + if (isMoreValidator) { + // add new inactivityScores + for (let validatorIndex = oldValidator; validatorIndex < newValidator; validatorIndex++) { + migratedState.inactivityScores.push( + ssz.UintNum64.deserialize(inactivityScoresBytes.subarray(validatorIndex * 8, (validatorIndex + 1) * 8)) + ); + } + } else { + if (newValidator - 1 < 0) { + migratedState.inactivityScores = ssz.altair.InactivityScores.defaultViewDU(); + } else { + migratedState.inactivityScores = migratedState.inactivityScores.sliceTo(newValidator - 1); + } + } +} + +/** + * As of Sep 2021, common validators of 2 mainnet states are rarely changed. However, the benchmark shows that + * 10k modified validators is not an issue. (see packages/state-transition/test/perf/util/loadState/findModifiedValidators.test.ts) + * + * This method loads validators from bytes given a seed state so that they share the same base tree. This gives some benefits: + * - Have single base tree across the application + * - Faster to load state + * - Less memory usage + * - Utilize the cached HashObjects in seed state due to a lot of validators are not changed + * + * Given the below tree: + * + * seedState.validators ====> ROOT + * / \ + * Hash01 Hash23 + * / \ / \ + * Val0 Val1 Val2 Val3 + * + * if validator 3 is modified, the new tree looks like this: + * + * migratedState.validators ====> ROOTa + * / \ + * Hash01 Hash23a + * / \ / \ + * Val0 Val1 Val2 Val3a + * + * @param migratedState state to be migrated, the validators are loaded to this state + * @returns modified validator indices + */ +function loadValidators( + migratedState: BeaconStateAllForks, + seedState: BeaconStateAllForks, + newValidatorsBytes: Uint8Array +): number[] { + const seedValidatorCount = seedState.validators.length; + const newValidatorCount = Math.floor(newValidatorsBytes.length / VALIDATOR_BYTES_SIZE); + const isMoreValidator = newValidatorCount >= seedValidatorCount; + const minValidatorCount = Math.min(seedValidatorCount, newValidatorCount); + // migrated state starts with the same validators to seed state + migratedState.validators = seedState.validators.clone(); + const seedValidatorsBytes = seedState.validators.serialize(); + const modifiedValidators: number[] = []; + findModifiedValidators( + isMoreValidator ? seedValidatorsBytes : seedValidatorsBytes.subarray(0, minValidatorCount * VALIDATOR_BYTES_SIZE), + isMoreValidator ? 
newValidatorsBytes.subarray(0, minValidatorCount * VALIDATOR_BYTES_SIZE) : newValidatorsBytes, + modifiedValidators + ); + + for (const i of modifiedValidators) { + const seedValidator = seedState.validators.get(i); + const newValidatorBytes = newValidatorsBytes.subarray(i * VALIDATOR_BYTES_SIZE, (i + 1) * VALIDATOR_BYTES_SIZE); + migratedState.validators.set(i, loadValidator(seedValidator, newValidatorBytes)); + } + + if (newValidatorCount >= seedValidatorCount) { + // add new validators + for (let validatorIndex = seedValidatorCount; validatorIndex < newValidatorCount; validatorIndex++) { + migratedState.validators.push( + ssz.phase0.Validator.deserializeToViewDU( + newValidatorsBytes.subarray( + validatorIndex * VALIDATOR_BYTES_SIZE, + (validatorIndex + 1) * VALIDATOR_BYTES_SIZE + ) + ) + ); + modifiedValidators.push(validatorIndex); + } + } else { + migratedState.validators = migratedState.validators.sliceTo(newValidatorCount - 1); + } + return modifiedValidators; +} diff --git a/packages/state-transition/src/util/loadState/loadValidator.ts b/packages/state-transition/src/util/loadState/loadValidator.ts new file mode 100644 index 000000000000..dcf5051c9c6d --- /dev/null +++ b/packages/state-transition/src/util/loadState/loadValidator.ts @@ -0,0 +1,44 @@ +import {CompositeViewDU} from "@chainsafe/ssz"; +import {deserializeContainerIgnoreFields, ssz} from "@lodestar/types"; + +/** + * Load validator from bytes given a seed validator. + * - Reuse pubkey and withdrawal credentials if possible to save memory + * - If it's a new validator, deserialize it + */ +export function loadValidator( + seedValidator: CompositeViewDU, + newValidatorBytes: Uint8Array +): CompositeViewDU { + const ignoredFields = getSameFields(seedValidator, newValidatorBytes); + if (ignoredFields.length > 0) { + const newValidatorValue = deserializeContainerIgnoreFields(ssz.phase0.Validator, newValidatorBytes, ignoredFields); + for (const field of ignoredFields) { + newValidatorValue[field] = seedValidator[field]; + } + return ssz.phase0.Validator.toViewDU(newValidatorValue); + } else { + return ssz.phase0.Validator.deserializeToViewDU(newValidatorBytes); + } +} + +/** + * Return pubkey or withdrawalCredentials or both if they are the same. 
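 + * Offsets follow the fixed phase0 Validator serialization (see the VALIDATOR_BYTES_SIZE notes in sszBytes.ts):
 + *   bytes [0, 48)  = pubkey
 + *   bytes [48, 80) = withdrawal_credentials
 + * Reusing these fields from the seed validator when they are byte-equal avoids re-allocating them,
 + * which is the memory saving loadValidator() is after.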
+ */ +function getSameFields( + validator: CompositeViewDU, + validatorBytes: Uint8Array +): ("pubkey" | "withdrawalCredentials")[] { + const ignoredFields: ("pubkey" | "withdrawalCredentials")[] = []; + const pubkey = validatorBytes.subarray(0, 48); + if (Buffer.compare(pubkey, validator.pubkey) === 0) { + ignoredFields.push("pubkey"); + } + + const withdrawalCredentials = validatorBytes.subarray(48, 80); + if (Buffer.compare(withdrawalCredentials, validator.withdrawalCredentials) === 0) { + ignoredFields.push("withdrawalCredentials"); + } + + return ignoredFields; +} diff --git a/packages/state-transition/src/util/sszBytes.ts b/packages/state-transition/src/util/sszBytes.ts new file mode 100644 index 000000000000..25b65626a0dd --- /dev/null +++ b/packages/state-transition/src/util/sszBytes.ts @@ -0,0 +1,55 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {ForkSeq} from "@lodestar/params"; +import {Slot, allForks} from "@lodestar/types"; +import {bytesToInt} from "@lodestar/utils"; + +/** + * Slot uint64 + */ +const SLOT_BYTE_COUNT = 8; + +/** + * 48 + 32 + 8 + 1 + 8 + 8 + 8 + 8 = 121 + * ``` + * class Validator(Container): + pubkey: BLSPubkey [fixed - 48 bytes] + withdrawal_credentials: Bytes32 [fixed - 32 bytes] + effective_balance: Gwei [fixed - 8 bytes] + slashed: boolean [fixed - 1 byte] + # Status epochs + activation_eligibility_epoch: Epoch [fixed - 8 bytes] + activation_epoch: Epoch [fixed - 8 bytes] + exit_epoch: Epoch [fixed - 8 bytes] + withdrawable_epoch: Epoch [fixed - 8 bytes] + ``` + */ +export const VALIDATOR_BYTES_SIZE = 121; + +/** + * 8 + 32 = 40 + * ``` + * class BeaconState(Container): + * genesis_time: uint64 [fixed - 8 bytes] + * genesis_validators_root: Root [fixed - 32 bytes] + * slot: Slot [fixed - 8 bytes] + * ... 
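 + *   # genesis_time (8 bytes) + genesis_validators_root (32 bytes) = 40, so `slot` is read at byte offset 40
 + *   # these leading fields are fixed-size in every supported fork, which keeps this offset fork-agnostic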
+ * ``` + */ +const SLOT_BYTES_POSITION_IN_STATE = 40; + +export function getForkFromStateBytes(config: ChainForkConfig, bytes: Buffer | Uint8Array): ForkSeq { + const slot = bytesToInt(bytes.subarray(SLOT_BYTES_POSITION_IN_STATE, SLOT_BYTES_POSITION_IN_STATE + SLOT_BYTE_COUNT)); + return config.getForkSeq(slot); +} + +export function getStateTypeFromBytes( + config: ChainForkConfig, + bytes: Buffer | Uint8Array +): allForks.AllForksSSZTypes["BeaconState"] { + const slot = getStateSlotFromBytes(bytes); + return config.getForkTypes(slot).BeaconState; +} + +export function getStateSlotFromBytes(bytes: Uint8Array): Slot { + return bytesToInt(bytes.subarray(SLOT_BYTES_POSITION_IN_STATE, SLOT_BYTES_POSITION_IN_STATE + SLOT_BYTE_COUNT)); +} diff --git a/packages/state-transition/test/perf/misc/byteArrayEquals.test.ts b/packages/state-transition/test/perf/misc/byteArrayEquals.test.ts new file mode 100644 index 000000000000..64057a26d103 --- /dev/null +++ b/packages/state-transition/test/perf/misc/byteArrayEquals.test.ts @@ -0,0 +1,114 @@ +import crypto from "node:crypto"; +import {itBench} from "@dapplion/benchmark"; +import {byteArrayEquals} from "@chainsafe/ssz"; +import {generateState} from "../../utils/state.js"; +import {generateValidators} from "../../utils/validator.js"; + +/** + * compare Uint8Array, the longer the array, the better performance Buffer.compare() is + * - with 32 bytes, Buffer.compare() is 1.5x faster (rootEquals.test.ts showed > 2x faster) + * ✔ byteArrayEquals 32 1.004480e+7 ops/s 99.55400 ns/op - 19199 runs 2.08 s + * ✔ Buffer.compare 32 1.553495e+7 ops/s 64.37100 ns/op - 3634 runs 0.303 s + * + * - with 1024 bytes, Buffer.compare() is 21.8x faster + * ✔ byteArrayEquals 1024 379239.7 ops/s 2.636855 us/op - 117 runs 0.811 s + * ✔ Buffer.compare 1024 8269999 ops/s 120.9190 ns/op - 3330 runs 0.525 s + * + * - with 16384 bytes, Buffer.compare() is 41x faster + * ✔ byteArrayEquals 16384 23808.76 ops/s 42.00135 us/op - 13 runs 1.05 s + * ✔ Buffer.compare 16384 975058.0 ops/s 1.025580 us/op - 297 runs 0.806 s + * + * - with 123687377 bytes, Buffer.compare() is 38x faster + * ✔ byteArrayEquals 123687377 3.077884 ops/s 324.8985 ms/op - 1 runs 64.5 s + * ✔ Buffer.compare 123687377 114.7834 ops/s 8.712061 ms/op - 13 runs 12.1 s + */ +describe("compare Uint8Array using byteArrayEquals() vs Buffer.compare()", () => { + const numValidator = 1_000_000; + const validators = generateValidators(numValidator); + const state = generateState({validators: validators}); + const stateBytes = state.serialize(); + + const lengths = [32, 1024, 16384, stateBytes.length]; + describe("same bytes", () => { + for (const length of lengths) { + const runsFactor = length > 16384 ? 100 : 1000; + const bytes = stateBytes.subarray(0, length); + const bytes2 = bytes.slice(); + itBench({ + id: `byteArrayEquals ${length}`, + fn: () => { + for (let i = 0; i < runsFactor; i++) { + byteArrayEquals(bytes, bytes2); + } + }, + runsFactor, + }); + + itBench({ + id: `Buffer.compare ${length}`, + fn: () => { + for (let i = 0; i < runsFactor; i++) { + Buffer.compare(bytes, bytes2); + } + }, + runsFactor, + }); + } + }); + + describe("different at the last byte", () => { + for (const length of lengths) { + const runsFactor = length > 16384 ? 
100 : 1000; + const bytes = stateBytes.subarray(0, length); + const bytes2 = bytes.slice(); + bytes2[bytes2.length - 1] = bytes2[bytes2.length - 1] + 1; + itBench({ + id: `byteArrayEquals ${length} - diff last byte`, + fn: () => { + for (let i = 0; i < runsFactor; i++) { + byteArrayEquals(bytes, bytes2); + } + }, + runsFactor, + }); + + itBench({ + id: `Buffer.compare ${length} - diff last byte`, + fn: () => { + for (let i = 0; i < runsFactor; i++) { + Buffer.compare(bytes, bytes2); + } + }, + runsFactor, + }); + } + }); + + describe("totally different", () => { + for (const length of lengths) { + const runsFactor = length > 16384 ? 100 : 1000; + const bytes = crypto.randomBytes(length); + const bytes2 = crypto.randomBytes(length); + + itBench({ + id: `byteArrayEquals ${length} - random bytes`, + fn: () => { + for (let i = 0; i < runsFactor; i++) { + byteArrayEquals(bytes, bytes2); + } + }, + runsFactor, + }); + + itBench({ + id: `Buffer.compare ${length} - random bytes`, + fn: () => { + for (let i = 0; i < runsFactor; i++) { + Buffer.compare(bytes, bytes2); + } + }, + runsFactor, + }); + } + }); +}); diff --git a/packages/state-transition/test/perf/misc/rootEquals.test.ts b/packages/state-transition/test/perf/misc/rootEquals.test.ts index 9e39ebe13f89..f941e764c26b 100644 --- a/packages/state-transition/test/perf/misc/rootEquals.test.ts +++ b/packages/state-transition/test/perf/misc/rootEquals.test.ts @@ -2,12 +2,11 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {byteArrayEquals, fromHexString} from "@chainsafe/ssz"; import {ssz} from "@lodestar/types"; -// As of Jun 17 2021 -// Compare state root -// ================================================================ -// ssz.Root.equals 891265.6 ops/s 1.122000 us/op 10017946 runs 15.66 s -// ssz.Root.equals with valueOf() 692041.5 ops/s 1.445000 us/op 8179741 runs 15.28 s -// byteArrayEquals with valueOf() 853971.0 ops/s 1.171000 us/op 9963051 runs 16.07 s +// As of Sep 2023 +// root equals +// ✔ ssz.Root.equals 2.703872e+7 ops/s 36.98400 ns/op - 74234 runs 2.83 s +// ✔ byteArrayEquals 2.773617e+7 ops/s 36.05400 ns/op - 15649 runs 0.606 s +// ✔ Buffer.compare 7.099247e+7 ops/s 14.08600 ns/op - 26965 runs 0.404 s describe("root equals", () => { setBenchOpts({noThreshold: true}); @@ -16,11 +15,34 @@ describe("root equals", () => { const rootTree = ssz.Root.toViewDU(stateRoot); // This benchmark is very unstable in CI. 
We already know that "ssz.Root.equals" is the fastest - itBench("ssz.Root.equals", () => { - ssz.Root.equals(rootTree, stateRoot); + const runsFactor = 1000; + itBench({ + id: "ssz.Root.equals", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + ssz.Root.equals(rootTree, stateRoot); + } + }, + runsFactor, }); - itBench("byteArrayEquals", () => { - byteArrayEquals(rootTree, stateRoot); + itBench({ + id: "byteArrayEquals", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + byteArrayEquals(rootTree, stateRoot); + } + }, + runsFactor, + }); + + itBench({ + id: "Buffer.compare", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + Buffer.compare(rootTree, stateRoot); + } + }, + runsFactor, }); }); diff --git a/packages/state-transition/test/perf/util.ts b/packages/state-transition/test/perf/util.ts index 169b205ce5c6..46faf11c50f1 100644 --- a/packages/state-transition/test/perf/util.ts +++ b/packages/state-transition/test/perf/util.ts @@ -211,8 +211,11 @@ export function cachedStateAltairPopulateCaches(state: CachedBeaconStateAltair): state.inactivityScores.getAll(); } -export function generatePerfTestCachedStateAltair(opts?: {goBackOneSlot: boolean}): CachedBeaconStateAltair { - const {pubkeys, pubkeysMod, pubkeysModObj} = getPubkeys(); +export function generatePerfTestCachedStateAltair(opts?: { + goBackOneSlot: boolean; + vc?: number; +}): CachedBeaconStateAltair { + const {pubkeys, pubkeysMod, pubkeysModObj} = getPubkeys(opts?.vc); const {pubkey2index, index2pubkey} = getPubkeyCaches({pubkeys, pubkeysMod, pubkeysModObj}); // eslint-disable-next-line @typescript-eslint/naming-convention @@ -247,7 +250,7 @@ export function generatePerfTestCachedStateAltair(opts?: {goBackOneSlot: boolean export function generatePerformanceStateAltair(pubkeysArg?: Uint8Array[]): BeaconStateAltair { if (!altairState) { const pubkeys = pubkeysArg || getPubkeys().pubkeys; - const statePhase0 = buildPerformanceStatePhase0(); + const statePhase0 = buildPerformanceStatePhase0(pubkeys); const state = statePhase0 as allForks.BeaconState as altair.BeaconState; state.previousEpochParticipation = newFilledArray(pubkeys.length, 0b111); diff --git a/packages/state-transition/test/perf/util/loadState/findModifiedValidators.test.ts b/packages/state-transition/test/perf/util/loadState/findModifiedValidators.test.ts new file mode 100644 index 000000000000..4028104f0bdc --- /dev/null +++ b/packages/state-transition/test/perf/util/loadState/findModifiedValidators.test.ts @@ -0,0 +1,185 @@ +import {expect} from "chai"; +import {itBench} from "@dapplion/benchmark"; +import {CompositeViewDU} from "@chainsafe/ssz"; +import {ssz} from "@lodestar/types"; +import {bytesToInt} from "@lodestar/utils"; +import {findModifiedValidators} from "../../../../src/util/loadState/findModifiedValidators.js"; +import {VALIDATOR_BYTES_SIZE} from "../../../../src/util/sszBytes.js"; +import {generateValidators} from "../../../utils/validator.js"; +import {generateState} from "../../../utils/state.js"; + +/** + * find modified validators by different ways. This proves that findModifiedValidators() leveraging Buffer.compare() is the fastest way. 
+ * - Method 0 - serialize validators then findModifiedValidators, this is the selected implementation + * ✔ findModifiedValidators - 10000 modified validators 2.261799 ops/s 442.1260 ms/op - 14 runs 7.80 s + * ✔ findModifiedValidators - 1000 modified validators 2.310899 ops/s 432.7321 ms/op - 12 runs 6.35 s + * ✔ findModifiedValidators - 100 modified validators 2.259907 ops/s 442.4960 ms/op - 16 runs 7.93 s + * ✔ findModifiedValidators - 10 modified validators 2.297018 ops/s 435.3470 ms/op - 12 runs 6.23 s + * ✔ findModifiedValidators - 1 modified validators 2.344447 ops/s 426.5398 ms/op - 12 runs 5.81 s + * ✔ findModifiedValidators - no difference 2.327252 ops/s 429.6914 ms/op - 12 runs 5.70 s + * + * - Method 1 - deserialize validators then compare validator ViewDUs: 8.8x slower + * ✔ compare ViewDUs 0.2643101 ops/s 3.783434 s/op - 12 runs 50.3 s + * + * - Method 2 - serialize each validator then compare Uint8Array: 3.1x slower + * ✔ compare each validator Uint8Array 0.7424619 ops/s 1.346870 s/op - 12 runs 17.8 s + * + * - Method 3 - compare validator ViewDU to Uint8Array: 3x slower + * ✔ compare ViewDU to Uint8Array 0.7791557 ops/s 1.283441 s/op - 12 runs 16.8 s + */ +describe("find modified validators by different ways", function () { + this.timeout(0); + // To get state bytes from any persisted state, do this: + // const stateBytes = new Uint8Array(fs.readFileSync(path.join(folder, "mainnet_state_7335296.ssz"))); + // const stateType = ssz.capella.BeaconState; + const numValidator = 1_000_000; + const validators = generateValidators(numValidator); + const state = generateState({validators: validators}); + const stateType = ssz.phase0.BeaconState; + const stateBytes = state.serialize(); + + // const state = stateType.deserializeToViewDU(stateBytes); + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + const fieldRanges = stateType.getFieldRanges(dataView, 0, stateBytes.length); + const validatorsFieldIndex = Object.keys(stateType.fields).indexOf("validators"); + const validatorsRange = fieldRanges[validatorsFieldIndex]; + + describe("serialize validators then findModifiedValidators", () => { + const expectedModifiedValidatorsArr: number[][] = [ + // mainnet state has 700k validators as of Sep 2023 + Array.from({length: 10_000}, (_, i) => 70 * i), + Array.from({length: 1_000}, (_, i) => 700 * i), + Array.from({length: 100}, (_, i) => 700 * i), + Array.from({length: 10}, (_, i) => 700 * i), + Array.from({length: 1}, (_, i) => 10 * i), + [], + ]; + for (const expectedModifiedValidators of expectedModifiedValidatorsArr) { + const prefix = "findModifiedValidators"; + const testCaseName = + expectedModifiedValidators.length === 0 + ? 
"no difference" + : expectedModifiedValidators.length + " modified validators"; + itBench({ + id: `${prefix} - ${testCaseName}`, + beforeEach: () => { + const clonedState = state.clone(); + for (const validatorIndex of expectedModifiedValidators) { + clonedState.validators.get(validatorIndex).pubkey = Buffer.alloc(48, 0); + } + clonedState.commit(); + return clonedState; + }, + fn: (clonedState) => { + const validatorsBytes = Uint8Array.from(stateBytes.subarray(validatorsRange.start, validatorsRange.end)); + const validatorsBytes2 = clonedState.validators.serialize(); + const modifiedValidators: number[] = []; + findModifiedValidators(validatorsBytes, validatorsBytes2, modifiedValidators); + expect(modifiedValidators.sort((a, b) => a - b)).to.be.deep.equal(expectedModifiedValidators); + }, + }); + } + }); + + describe("deserialize validators then compare validator ViewDUs", () => { + const validatorsBytes = stateBytes.subarray(validatorsRange.start, validatorsRange.end); + itBench("compare ViewDUs", () => { + const numValidator = state.validators.length; + const validators = stateType.fields.validators.deserializeToViewDU(validatorsBytes); + for (let i = 0; i < numValidator; i++) { + if (!ssz.phase0.Validator.equals(state.validators.get(i), validators.get(i))) { + throw Error(`validator ${i} is not equal`); + } + } + }); + }); + + describe("serialize each validator then compare Uin8Array", () => { + const validators = state.validators.getAllReadonly(); + itBench("compare each validator Uint8Array", () => { + for (let i = 0; i < state.validators.length; i++) { + const validatorBytes = ssz.phase0.Validator.serialize(validators[i]); + if ( + Buffer.compare( + validatorBytes, + stateBytes.subarray( + validatorsRange.start + i * VALIDATOR_BYTES_SIZE, + validatorsRange.start + (i + 1) * VALIDATOR_BYTES_SIZE + ) + ) !== 0 + ) { + throw Error(`validator ${i} is not equal`); + } + } + }); + }); + + describe("compare validator ViewDU to Uint8Array", () => { + itBench("compare ViewDU to Uint8Array", () => { + const numValidator = state.validators.length; + for (let i = 0; i < numValidator; i++) { + const diff = validatorDiff( + state.validators.get(i), + stateBytes.subarray( + validatorsRange.start + i * VALIDATOR_BYTES_SIZE, + validatorsRange.start + (i + 1) * VALIDATOR_BYTES_SIZE + ) + ); + + if (diff !== null) { + throw Error(`validator ${i} is not equal at ${diff}`); + } + } + }); + }); +}); + +function validatorDiff(validator: CompositeViewDU, bytes: Uint8Array): string | null { + const pubkey = bytes.subarray(0, 48); + if (Buffer.compare(validator.pubkey, pubkey) !== 0) { + return "pubkey"; + } + + const withdrawalCredentials = bytes.subarray(48, 80); + if (Buffer.compare(validator.withdrawalCredentials, withdrawalCredentials) !== 0) { + return "withdrawalCredentials"; + } + + if (validator.effectiveBalance !== bytesToInt(bytes.subarray(80, 88))) { + return "effectiveBalance"; + } + + if (validator.slashed !== Boolean(bytes[88])) { + return "slashed"; + } + + if (validator.activationEligibilityEpoch !== toNumberOrInfinity(bytes.subarray(89, 97))) { + return "activationEligibilityEpoch"; + } + + if (validator.activationEpoch !== toNumberOrInfinity(bytes.subarray(97, 105))) { + return "activationEpoch"; + } + + if (validator.exitEpoch !== toNumberOrInfinity(bytes.subarray(105, 113))) { + return "exitEpoch"; + } + + if (validator.withdrawableEpoch !== toNumberOrInfinity(bytes.subarray(113, 121))) { + return "withdrawableEpoch"; + } + + return null; +} + +function toNumberOrInfinity(bytes: 
Uint8Array): number { + let isInfinity = true; + for (const byte of bytes) { + if (byte !== 255) { + isInfinity = false; + break; + } + } + + return isInfinity ? Infinity : bytesToInt(bytes); +} diff --git a/packages/state-transition/test/perf/util/loadState/loadState.test.ts b/packages/state-transition/test/perf/util/loadState/loadState.test.ts new file mode 100644 index 000000000000..c0df6cf1af47 --- /dev/null +++ b/packages/state-transition/test/perf/util/loadState/loadState.test.ts @@ -0,0 +1,98 @@ +import bls from "@chainsafe/bls"; +import {CoordType} from "@chainsafe/blst"; +import {itBench, setBenchOpts} from "@dapplion/benchmark"; +import {loadState} from "../../../../src/util/loadState/loadState.js"; +import {createCachedBeaconState} from "../../../../src/cache/stateCache.js"; +import {Index2PubkeyCache, PubkeyIndexMap} from "../../../../src/cache/pubkeyCache.js"; +import {generatePerfTestCachedStateAltair} from "../../util.js"; + +/** + * This benchmark shows a stable performance from 2s to 3s on a Mac M1. And it does not really depend on the seed validators, + * only the modified and new validators + * + * - On mainnet, as of Oct 2023, there are ~1M validators + * + * ✔ migrate state 1000000 validators, 24 modified, 0 new 0.4475463 ops/s 2.234406 s/op - 3 runs 62.1 s + * ✔ migrate state 1000000 validators, 1700 modified, 1000 new 0.3663298 ops/s 2.729781 s/op - 21 runs 62.1 s + * ✔ migrate state 1000000 validators, 3400 modified, 2000 new 0.3413125 ops/s 2.929866 s/op - 19 runs 60.9 s + + * - On holesky, there are ~1.5M validators + * ✔ migrate state 1500000 validators, 24 modified, 0 new 0.4278145 ops/s 2.337461 s/op - 24 runs 61.1 s + * ✔ migrate state 1500000 validators, 1700 modified, 1000 new 0.3642085 ops/s 2.745680 s/op - 20 runs 60.1 s + * ✔ migrate state 1500000 validators, 3400 modified, 2000 new 0.3344296 ops/s 2.990166 s/op - 19 runs 62.4 s + */ +describe("loadState", function () { + this.timeout(0); + + setBenchOpts({ + minMs: 60_000, + }); + + const testCases: {seedValidators: number; numModifiedValidators: number; numNewValidators: number}[] = [ + // this 1_000_000 is similar to mainnet state as of Oct 2023 + // similar to migrating from state 7335296 to state 7335360 on mainnet, this is 2 epochs difference + {seedValidators: 1_000_000, numModifiedValidators: 24, numNewValidators: 0}, + {seedValidators: 1_000_000, numModifiedValidators: 1700, numNewValidators: 1000}, + // similar to migrating from state 7327776 to state 7335360 on mainnet, this is 237 epochs difference ~ 1 day + {seedValidators: 1_000_000, numModifiedValidators: 3400, numNewValidators: 2000}, + // same tests on holesky with 1_500_000 validators + {seedValidators: 1_500_000, numModifiedValidators: 24, numNewValidators: 0}, + {seedValidators: 1_500_000, numModifiedValidators: 1700, numNewValidators: 1000}, + {seedValidators: 1_500_000, numModifiedValidators: 3400, numNewValidators: 2000}, + ]; + for (const {seedValidators, numModifiedValidators, numNewValidators} of testCases) { + itBench({ + id: `migrate state ${seedValidators} validators, ${numModifiedValidators} modified, ${numNewValidators} new`, + before: () => { + const seedState = generatePerfTestCachedStateAltair({vc: seedValidators, goBackOneSlot: false}); + // cache all HashObjects + seedState.hashTreeRoot(); + const newState = seedState.clone(); + for (let i = 0; i < numModifiedValidators; i++) { + const validatorIndex = i * Math.floor((seedState.validators.length - 1) / numModifiedValidators); + const modifiedValidator = 
newState.validators.get(validatorIndex); + modifiedValidator.withdrawalCredentials = Buffer.alloc(32, 0x01); + newState.inactivityScores.set(validatorIndex, 100); + } + + for (let i = 0; i < numNewValidators; i++) { + newState.validators.push(seedState.validators.get(0).clone()); + newState.inactivityScores.push(seedState.inactivityScores.get(0)); + newState.balances.push(seedState.balances.get(0)); + } + + const newStateBytes = newState.serialize(); + return {seedState, newStateBytes}; + }, + beforeEach: ({seedState, newStateBytes}) => { + return {seedState: seedState.clone(), newStateBytes}; + }, + fn: ({seedState, newStateBytes}) => { + const {state: migratedState, modifiedValidators} = loadState(seedState.config, seedState, newStateBytes); + migratedState.hashTreeRoot(); + // Get the validators subtree once for the whole loop + const validators = migratedState.validators; + const pubkey2index = new PubkeyIndexMap(); + const index2pubkey: Index2PubkeyCache = []; + for (const validatorIndex of modifiedValidators) { + const validator = validators.getReadonly(validatorIndex); + const pubkey = validator.pubkey; + pubkey2index.set(pubkey, validatorIndex); + index2pubkey[validatorIndex] = bls.PublicKey.fromBytes(pubkey, CoordType.jacobian); + } + // skip computing shuffling in the performance test because in reality we have a ShufflingCache + // eslint-disable-next-line @typescript-eslint/explicit-function-return-type + const shufflingGetter = () => seedState.epochCtx.currentShuffling; + createCachedBeaconState( + migratedState, + { + config: seedState.config, + pubkey2index, + index2pubkey, + }, + {skipSyncPubkeys: true, skipSyncCommitteeCache: true, shufflingGetter} + ); + }, + }); + } +}); diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index 0367fd636e78..072261c1000e 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -1,7 +1,13 @@ import {expect} from "chai"; import {ssz} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; +import {config} from "@lodestar/config/default"; +import {createBeaconConfig} from "@lodestar/config"; import {createCachedBeaconStateTest} from "../utils/state.js"; +import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; +import {createCachedBeaconState, loadUnfinalizedCachedBeaconState} from "../../src/cache/stateCache.js"; +import {interopPubkeysCached} from "../utils/interop.js"; +import {modifyStateSameValidator, newStateWithValidators} from "../utils/capella.js"; describe("CachedBeaconState", () => { it("Clone and mutate", () => { @@ -54,4 +60,96 @@ describe("CachedBeaconState", () => { ".serialize() does not automatically commit" ); }); + + describe("loadCachedBeaconState", () => { + const numValidator = 16; + const pubkeys = interopPubkeysCached(2 * numValidator); + + const stateView = newStateWithValidators(numValidator); + const seedState = createCachedBeaconState( + stateView, + { + config: createBeaconConfig(config, stateView.genesisValidatorsRoot), + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }, + {skipSyncCommitteeCache: true} + ); + + const capellaStateType = ssz.capella.BeaconState; + + for (let validatorCountDelta = -numValidator; validatorCountDelta <= numValidator; validatorCountDelta++) { + const testName = `loadCachedBeaconState - ${validatorCountDelta > 0 ? 
"more" : "less"} ${Math.abs( + validatorCountDelta + )} validators`; + it(testName, () => { + const state = modifyStateSameValidator(stateView); + for (let i = 0; i < state.validators.length; i++) { + // only modify some validators + if (i % 5 === 0) { + state.inactivityScores.set(i, state.inactivityScores.get(i) + 1); + state.validators.get(i).effectiveBalance += 1; + } + } + + if (validatorCountDelta < 0) { + state.validators = state.validators.sliceTo(state.validators.length - 1 + validatorCountDelta); + + // inactivityScores + if (state.inactivityScores.length - 1 + validatorCountDelta >= 0) { + state.inactivityScores = state.inactivityScores.sliceTo( + state.inactivityScores.length - 1 + validatorCountDelta + ); + } else { + state.inactivityScores = capellaStateType.fields.inactivityScores.defaultViewDU(); + } + + // previousEpochParticipation + if (state.previousEpochParticipation.length - 1 + validatorCountDelta >= 0) { + state.previousEpochParticipation = state.previousEpochParticipation.sliceTo( + state.previousEpochParticipation.length - 1 + validatorCountDelta + ); + } else { + state.previousEpochParticipation = capellaStateType.fields.previousEpochParticipation.defaultViewDU(); + } + + // currentEpochParticipation + if (state.currentEpochParticipation.length - 1 + validatorCountDelta >= 0) { + state.currentEpochParticipation = state.currentEpochParticipation.sliceTo( + state.currentEpochParticipation.length - 1 + validatorCountDelta + ); + } else { + state.currentEpochParticipation = capellaStateType.fields.currentEpochParticipation.defaultViewDU(); + } + } else { + // more validators + for (let i = 0; i < validatorCountDelta; i++) { + const validator = ssz.phase0.Validator.defaultViewDU(); + validator.pubkey = pubkeys[numValidator + i]; + state.validators.push(validator); + state.inactivityScores.push(1); + state.previousEpochParticipation.push(0b11111111); + state.currentEpochParticipation.push(0b11111111); + } + } + state.commit(); + + // confirm loadState() result + const stateBytes = state.serialize(); + const newCachedState = loadUnfinalizedCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); + const newStateBytes = newCachedState.serialize(); + expect(newStateBytes).to.be.deep.equal(stateBytes, "loadState: state bytes are not equal"); + expect(newCachedState.hashTreeRoot()).to.be.deep.equal( + state.hashTreeRoot(), + "loadState: state root is not equal" + ); + + // confirm loadUnfinalizedCachedBeaconState() result + for (let i = 0; i < newCachedState.validators.length; i++) { + expect(newCachedState.epochCtx.pubkey2index.get(newCachedState.validators.get(i).pubkey)).to.be.equal(i); + expect(newCachedState.epochCtx.index2pubkey[i].toBytes()).to.be.deep.equal(pubkeys[i]); + } + }); + } + }); }); diff --git a/packages/state-transition/test/unit/upgradeState.test.ts b/packages/state-transition/test/unit/upgradeState.test.ts index 13ec613d69bf..ba9ff187a26c 100644 --- a/packages/state-transition/test/unit/upgradeState.test.ts +++ b/packages/state-transition/test/unit/upgradeState.test.ts @@ -1,11 +1,12 @@ import {expect} from "chai"; import {ssz} from "@lodestar/types"; import {ForkName} from "@lodestar/params"; -import {createCachedBeaconState, PubkeyIndexMap} from "@lodestar/state-transition"; import {createBeaconConfig, ChainForkConfig, createChainForkConfig} from "@lodestar/config"; import {config as chainConfig} from "@lodestar/config/default"; import {upgradeStateToDeneb} from "../../src/slot/upgradeStateToDeneb.js"; +import 
{createCachedBeaconState} from "../../src/cache/stateCache.js"; +import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; describe("upgradeState", () => { it("upgradeStateToDeneb", () => { diff --git a/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts b/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts new file mode 100644 index 000000000000..e1ad0cf972da --- /dev/null +++ b/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts @@ -0,0 +1,33 @@ +import {expect} from "chai"; +import { + INACTIVITY_SCORE_SIZE, + findModifiedInactivityScores, +} from "../../../../src/util/loadState/findModifiedInactivityScores.js"; + +describe("findModifiedInactivityScores", () => { + const numValidator = 100; + const expectedModifiedValidatorsArr: number[][] = [ + [], + [0, 2], + [0, 2, 4, 5, 6, 7, 8, 9], + [10, 20, 30, 40, 50, 60, 70, 80, 90, 91, 92, 93, 94], + ]; + + const inactivityScoresBytes = new Uint8Array(numValidator * INACTIVITY_SCORE_SIZE); + + for (const expectedModifiedValidators of expectedModifiedValidatorsArr) { + const testCaseName = + expectedModifiedValidators.length === 0 + ? "no difference" + : expectedModifiedValidators.length + " modified validators"; + it(testCaseName, () => { + const inactivityScoresBytes2 = inactivityScoresBytes.slice(); + for (const validatorIndex of expectedModifiedValidators) { + inactivityScoresBytes2[validatorIndex * INACTIVITY_SCORE_SIZE] = 1; + } + const modifiedValidators: number[] = []; + findModifiedInactivityScores(inactivityScoresBytes, inactivityScoresBytes2, modifiedValidators); + expect(modifiedValidators.sort((a, b) => a - b)).to.be.deep.equal(expectedModifiedValidators); + }); + } +}); diff --git a/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts b/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts new file mode 100644 index 000000000000..aa2378276d22 --- /dev/null +++ b/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts @@ -0,0 +1,41 @@ +import {expect} from "chai"; +import {fromHexString} from "@chainsafe/ssz"; +import {findModifiedValidators} from "../../../../src/util/loadState/findModifiedValidators.js"; +import {generateState} from "../../../utils/state.js"; +import {generateValidators} from "../../../utils/validator.js"; + +describe("findModifiedValidators", () => { + const numValidator = 800_000; + const expectedModifiedValidatorsArr: number[][] = [ + Array.from({length: 10_000}, (_, i) => 70 * i), + Array.from({length: 1_000}, (_, i) => 700 * i), + Array.from({length: 100}, (_, i) => 700 * i), + Array.from({length: 10}, (_, i) => 700 * i), + Array.from({length: 1}, (_, i) => 10 * i), + [], + ]; + + const validators = generateValidators(numValidator); + const state = generateState({validators: validators}); + const validatorsBytes = state.validators.serialize(); + + for (const expectedModifiedValidators of expectedModifiedValidatorsArr) { + const testCaseName = + expectedModifiedValidators.length === 0 + ? 
"no difference" + : expectedModifiedValidators.length + " modified validators"; + const modifiedPubkey = fromHexString( + "0x98d732925b0388ceb8b2b7efbe1163e4bc39082bb791940b2cda3837b0982c8de8fad8ee7912abca4ab0ae7ad50d1b95" + ); + it(testCaseName, () => { + const clonedState = state.clone(); + for (const validatorIndex of expectedModifiedValidators) { + clonedState.validators.get(validatorIndex).pubkey = modifiedPubkey; + } + const validatorsBytes2 = clonedState.validators.serialize(); + const modifiedValidators: number[] = []; + findModifiedValidators(validatorsBytes, validatorsBytes2, modifiedValidators); + expect(modifiedValidators.sort((a, b) => a - b)).to.be.deep.equal(expectedModifiedValidators); + }); + } +}); diff --git a/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts b/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts new file mode 100644 index 000000000000..7c3112537490 --- /dev/null +++ b/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts @@ -0,0 +1,123 @@ +import {expect} from "chai"; +import {CompositeViewDU} from "@chainsafe/ssz"; +import {phase0, ssz} from "@lodestar/types"; +import {loadValidator} from "../../../../src/util/loadState/loadValidator.js"; + +describe("loadValidator", () => { + const validatorValue: phase0.Validator = { + pubkey: Buffer.from( + "0xb18e1737e1a1a76b8dff905ba7a4cb1ff5c526a4b7b0788188aade0488274c91e9c797e75f0f8452384ff53d44fad3df", + "hex" + ), + withdrawalCredentials: Buffer.from("0x98d732925b0388ceb8b2b7efbe1163e4bc39082bb791940b2cda3837b0982c8d", "hex"), + effectiveBalance: 32, + slashed: false, + activationEligibilityEpoch: 10, + activationEpoch: 20, + exitEpoch: 30, + withdrawableEpoch: 40, + }; + const validator = ssz.phase0.Validator.toViewDU(validatorValue); + + const testCases: {name: string; getValidator: () => CompositeViewDU}[] = [ + { + name: "diff pubkey", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.pubkey = Buffer.alloc(1, 48); + return newValidator; + }, + }, + { + name: "diff withdrawal credentials", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.withdrawalCredentials = Buffer.alloc(1, 32); + return newValidator; + }, + }, + { + name: "diff effective balance", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.effectiveBalance = 100; + return newValidator; + }, + }, + { + name: "diff slashed", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.slashed = true; + return newValidator; + }, + }, + { + name: "diff activation eligibility epoch", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.activationEligibilityEpoch = 100; + return newValidator; + }, + }, + { + name: "diff activation epoch", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.activationEpoch = 100; + return newValidator; + }, + }, + { + name: "diff exit epoch", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.exitEpoch = 100; + return newValidator; + }, + }, + { + name: "diff withdrawable epoch", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.withdrawableEpoch = 100; + return newValidator; + }, + }, + { + name: "diff all", + getValidator: () => { + const newValidator = validator.clone(); + newValidator.pubkey = Buffer.alloc(1, 48); + newValidator.withdrawalCredentials = Buffer.alloc(1, 32); + newValidator.effectiveBalance = 100; + 
newValidator.slashed = true; + newValidator.activationEligibilityEpoch = 100; + newValidator.activationEpoch = 100; + newValidator.exitEpoch = 100; + newValidator.withdrawableEpoch = 100; + return newValidator; + }, + }, + { + name: "same validator", + getValidator: () => validator.clone(), + }, + ]; + + for (const {name, getValidator} of testCases) { + it(name, () => { + const newValidator = getValidator(); + const newValidatorBytes = newValidator.serialize(); + const loadedValidator = loadValidator(validator, newValidatorBytes); + expect(Buffer.compare(loadedValidator.hashTreeRoot(), newValidator.hashTreeRoot())).to.be.equal( + 0, + "root is not correct" + ); + expect(Buffer.compare(loadedValidator.serialize(), newValidator.serialize())).to.be.equal( + 0, + "serialized value is not correct" + ); + }); + } +}); diff --git a/packages/state-transition/test/utils/capella.ts b/packages/state-transition/test/utils/capella.ts index f0f44ae94710..5789c260f67c 100644 --- a/packages/state-transition/test/utils/capella.ts +++ b/packages/state-transition/test/utils/capella.ts @@ -1,9 +1,11 @@ +import crypto from "node:crypto"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; -import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX} from "@lodestar/params"; -import {CachedBeaconStateCapella} from "../../src/index.js"; +import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {BeaconStateCapella, CachedBeaconStateCapella} from "../../src/index.js"; import {createCachedBeaconStateTest} from "./state.js"; import {mulberry32} from "./rand.js"; +import {interopPubkeysCached} from "./interop.js"; export interface WithdrawalOpts { excessBalance: number; @@ -58,3 +60,59 @@ export function getExpectedWithdrawalsTestData(vc: number, opts: WithdrawalOpts) return createCachedBeaconStateTest(state, config, {skipSyncPubkeys: true}); } + +export function newStateWithValidators(numValidator: number): BeaconStateCapella { + // use real pubkeys to test loadCachedBeaconState api + const pubkeys = interopPubkeysCached(numValidator); + const capellaStateType = ssz.capella.BeaconState; + const stateView = capellaStateType.defaultViewDU(); + stateView.slot = config.CAPELLA_FORK_EPOCH * SLOTS_PER_EPOCH + 100; + + for (let i = 0; i < numValidator; i++) { + const validator = ssz.phase0.Validator.defaultViewDU(); + validator.pubkey = pubkeys[i]; + stateView.validators.push(validator); + stateView.balances.push(32); + stateView.inactivityScores.push(0); + stateView.previousEpochParticipation.push(0b11111111); + stateView.currentEpochParticipation.push(0b11111111); + } + stateView.commit(); + return stateView; +} + +/** + * Modify a state without changing number of validators + */ +export function modifyStateSameValidator(seedState: BeaconStateCapella): BeaconStateCapella { + const state = seedState.clone(); + state.slot = seedState.slot + 10; + state.latestBlockHeader = ssz.phase0.BeaconBlockHeader.toViewDU({ + slot: state.slot, + proposerIndex: 0, + parentRoot: state.hashTreeRoot(), + stateRoot: state.hashTreeRoot(), + bodyRoot: ssz.phase0.BeaconBlockBody.hashTreeRoot(ssz.phase0.BeaconBlockBody.defaultValue()), + }); + state.blockRoots.set(0, crypto.randomBytes(32)); + state.stateRoots.set(0, crypto.randomBytes(32)); + state.historicalRoots.push(crypto.randomBytes(32)); + state.eth1Data.depositCount = 1000; + state.eth1DataVotes.push(ssz.phase0.Eth1Data.toViewDU(ssz.phase0.Eth1Data.defaultValue())); + state.eth1DepositIndex = 
1000; + state.balances.set(0, 30); + state.randaoMixes.set(0, crypto.randomBytes(32)); + state.slashings.set(0, 1n); + state.previousEpochParticipation.set(0, 0b11111110); + state.currentEpochParticipation.set(0, 0b11111110); + state.justificationBits.set(0, true); + state.previousJustifiedCheckpoint.epoch = 1; + state.currentJustifiedCheckpoint.epoch = 1; + state.finalizedCheckpoint.epoch++; + state.latestExecutionPayloadHeader.blockNumber = 1; + state.nextWithdrawalIndex = 1000; + state.nextWithdrawalValidatorIndex = 1000; + state.historicalSummaries.push(ssz.capella.HistoricalSummary.toViewDU(ssz.capella.HistoricalSummary.defaultValue())); + state.commit(); + return state; +} diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json index f40b826999aa..b03a5f7c68d7 100644 --- a/packages/test-utils/package.json +++ b/packages/test-utils/package.json @@ -1,7 +1,7 @@ { "name": "@lodestar/test-utils", "private": true, - "version": "1.11.3", + "version": "1.12.0", "description": "Test utilities reused across other packages", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -61,7 +61,7 @@ "blockchain" ], "dependencies": { - "@lodestar/utils": "^1.11.3", + "@lodestar/utils": "^1.12.0", "axios": "^1.3.4", "chai": "^4.3.7", "mocha": "^10.2.0", diff --git a/packages/types/package.json b/packages/types/package.json index da9c8c179933..e5e6d4fd5e25 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": { ".": { @@ -67,8 +67,8 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/ssz": "^0.13.0", - "@lodestar/params": "^1.11.3" + "@chainsafe/ssz": "^0.14.0", + "@lodestar/params": "^1.12.0" }, "keywords": [ "ethereum", diff --git a/packages/types/src/allForks/sszTypes.ts b/packages/types/src/allForks/sszTypes.ts index 023d7bc86369..463e5c57bd0d 100644 --- a/packages/types/src/allForks/sszTypes.ts +++ b/packages/types/src/allForks/sszTypes.ts @@ -156,5 +156,6 @@ export const allForksBlobs = { deneb: { BlobSidecar: deneb.BlobSidecar, BlindedBlobSidecar: deneb.BlindedBlobSidecar, + ExecutionPayloadAndBlobsBundle: deneb.ExecutionPayloadAndBlobsBundle, }, }; diff --git a/packages/types/src/allForks/types.ts b/packages/types/src/allForks/types.ts index a525820aac02..01c597b8a245 100644 --- a/packages/types/src/allForks/types.ts +++ b/packages/types/src/allForks/types.ts @@ -96,6 +96,7 @@ export type SignedBlindedBeaconBlockOrContents = SignedBlindedBeaconBlock | Sign export type BuilderBid = bellatrix.BuilderBid | capella.BuilderBid | deneb.BuilderBid; export type SignedBuilderBid = bellatrix.SignedBuilderBid | capella.SignedBuilderBid | deneb.SignedBuilderBid; +export type ExecutionPayloadAndBlobsBundle = deneb.ExecutionPayloadAndBlobsBundle; export type LightClientHeader = altair.LightClientHeader | capella.LightClientHeader | deneb.LightClientHeader; export type LightClientBootstrap = @@ -308,4 +309,5 @@ export type AllForksLightClientSSZTypes = { export type AllForksBlobsSSZTypes = { BlobSidecar: AllForksTypeOf; BlindedBlobSidecar: AllForksTypeOf; + ExecutionPayloadAndBlobsBundle: AllForksTypeOf; }; diff --git a/packages/types/src/deneb/sszTypes.ts b/packages/types/src/deneb/sszTypes.ts index a527cf3b4f48..96509d1d898b 100644 --- a/packages/types/src/deneb/sszTypes.ts +++ b/packages/types/src/deneb/sszTypes.ts @@ -149,6 +149,15 @@ export const SignedBlobSidecar = new ContainerType( 
); export const SignedBlobSidecars = new ListCompositeType(SignedBlobSidecar, MAX_BLOB_COMMITMENTS_PER_BLOCK); +export const BlobsBundle = new ContainerType( + { + commitments: BlobKzgCommitments, + proofs: KZGProofs, + blobs: Blobs, + }, + {typeName: "BlobsBundle", jsonCase: "eth2"} +); + export const BlindedBlobSidecar = new ContainerType( { blockRoot: Root, @@ -204,12 +213,21 @@ export const SignedBlindedBeaconBlock = new ContainerType( {typeName: "SignedBlindedBeaconBlock", jsonCase: "eth2"} ); +export const BlindedBlobsBundle = new ContainerType( + { + commitments: BlobKzgCommitments, + proofs: KZGProofs, + blobRoots: BlindedBlobs, + }, + {typeName: "BlindedBlobsBundle", jsonCase: "eth2"} +); + export const BuilderBid = new ContainerType( { header: ExecutionPayloadHeader, + blindedBlobsBundle: BlindedBlobsBundle, value: UintBn256, pubkey: BLSPubkey, - blobKzgCommitments: BlobKzgCommitments, }, {typeName: "BuilderBid", jsonCase: "eth2"} ); @@ -222,6 +240,14 @@ export const SignedBuilderBid = new ContainerType( {typeName: "SignedBuilderBid", jsonCase: "eth2"} ); +export const ExecutionPayloadAndBlobsBundle = new ContainerType( + { + executionPayload: ExecutionPayload, + blobsBundle: BlobsBundle, + }, + {typeName: "ExecutionPayloadAndBlobsBundle", jsonCase: "eth2"} +); + // We don't spread capella.BeaconState fields since we need to replace // latestExecutionPayloadHeader and we cannot keep order doing that export const BeaconState = new ContainerType( diff --git a/packages/types/src/deneb/types.ts b/packages/types/src/deneb/types.ts index 93ea514aea75..1d6eb5fca5aa 100644 --- a/packages/types/src/deneb/types.ts +++ b/packages/types/src/deneb/types.ts @@ -16,6 +16,8 @@ export type SignedBlobSidecar = ValueOf; export type SignedBlobSidecars = ValueOf; export type SignedBlindedBlobSidecar = ValueOf; export type SignedBlindedBlobSidecars = ValueOf; +export type ExecutionPayloadAndBlobsBundle = ValueOf; +export type BlobsBundle = ValueOf; export type BlobKzgCommitments = ValueOf; export type KZGProofs = ValueOf; @@ -40,6 +42,7 @@ export type SignedBlindedBeaconBlock = ValueOf; export type BuilderBid = ValueOf; export type SignedBuilderBid = ValueOf; export type SSEPayloadAttributes = ValueOf; diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 825b962c5f1f..d90b55909884 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -4,3 +4,5 @@ export * as ssz from "./sszTypes.js"; export * from "./utils/typeguards.js"; // String type export {StringType, stringType} from "./utils/StringType.js"; +// Container utils +export * from "./utils/container.js"; diff --git a/packages/types/src/utils/container.ts b/packages/types/src/utils/container.ts new file mode 100644 index 000000000000..9fc21c201d80 --- /dev/null +++ b/packages/types/src/utils/container.ts @@ -0,0 +1,37 @@ +import {CompositeTypeAny, CompositeViewDU, ContainerType, Type} from "@chainsafe/ssz"; +type BytesRange = {start: number; end: number}; + +/** + * Deserialize a state from bytes ignoring some fields. 
+ */ +export function deserializeContainerIgnoreFields<Fields extends Record<string, Type<unknown>>>( + sszType: ContainerType<Fields>, + bytes: Uint8Array, + ignoreFields: (keyof Fields)[], + fieldRanges?: BytesRange[] +): CompositeViewDU<ContainerType<Fields>> { + const allFields = Object.keys(sszType.fields); + const object = sszType.defaultViewDU(); + if (!fieldRanges) { + const dataView = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); + fieldRanges = sszType.getFieldRanges(dataView, 0, bytes.length); + } + + for (const [field, type] of Object.entries(sszType.fields)) { + // loaded above + if (ignoreFields.includes(field)) { + continue; + } + const fieldIndex = allFields.indexOf(field); + const fieldRange = fieldRanges[fieldIndex]; + if (type.isBasic) { + object[field as keyof Fields] = type.deserialize(bytes.subarray(fieldRange.start, fieldRange.end)) as never; + } else { + object[field as keyof Fields] = (type as CompositeTypeAny).deserializeToViewDU( + bytes.subarray(fieldRange.start, fieldRange.end) + ) as never; + } + } + + return object; +} diff --git a/packages/types/src/utils/typeguards.ts b/packages/types/src/utils/typeguards.ts index a3e4393c51cb..0b9bee97d17a 100644 --- a/packages/types/src/utils/typeguards.ts +++ b/packages/types/src/utils/typeguards.ts @@ -16,6 +16,8 @@ import { SignedBlockContents, SignedBeaconBlock, SignedBlindedBeaconBlockOrContents, + ExecutionPayload, + ExecutionPayloadAndBlobsBundle, } from "../allForks/types.js"; import {ts as deneb} from "../deneb/index.js"; @@ -67,3 +69,9 @@ export function isSignedBlindedBlockContents( ): data is SignedBlindedBlockContents { return (data as SignedBlindedBlockContents).signedBlindedBlobSidecars !== undefined; } + +export function isExecutionPayloadAndBlobsBundle( + data: ExecutionPayload | ExecutionPayloadAndBlobsBundle +): data is ExecutionPayloadAndBlobsBundle { + return (data as ExecutionPayloadAndBlobsBundle).blobsBundle !== undefined; +} diff --git a/packages/utils/package.json b/packages/utils/package.json index 7acce05a0e31..457218fa4f4c 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.11.3", + "version": "1.12.0", "type": "module", "exports": "./lib/index.js", "files": [ diff --git a/packages/utils/test/unit/bytes.test.ts b/packages/utils/test/unit/bytes.test.ts index b09625d7f135..f47e4c7ac3ed 100644 --- a/packages/utils/test/unit/bytes.test.ts +++ b/packages/utils/test/unit/bytes.test.ts @@ -1,6 +1,6 @@ import "../setup.js"; import {assert, expect} from "chai"; -import {intToBytes, bytesToInt} from "../../src/index.js"; +import {intToBytes, bytesToInt, toHex, fromHex, toHexString} from "../../src/index.js"; describe("intToBytes", () => { const zeroedArray = (length: number): number[] => Array.from({length}, () => 0); @@ -47,3 +47,54 @@ describe("bytesToInt", () => { }); } }); + +describe("toHex", () => { + const testCases: {input: Buffer | Uint8Array | string; output: string}[] = [ + {input: Buffer.from("Hello, World!", "utf-8"), output: "0x48656c6c6f2c20576f726c6421"}, + {input: new Uint8Array([72, 101, 108, 108, 111]), output: "0x48656c6c6f"}, + {input: Buffer.from([72, 101, 108, 108, 111]), output: "0x48656c6c6f"}, + {input: Buffer.from([]), output: "0x"}, + ]; + for (const {input, output} of testCases) { + it(`should convert Uint8Array to hex string ${output}`, () => { + expect(toHex(input)).to.be.equal(output); + }); + } +}); + +describe("fromHex", () => { + const testCases: {input: string; output: Buffer | Uint8Array}[] = [ + { + 
input: "0x48656c6c6f2c20576f726c6421", + output: new Uint8Array([72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33]), + }, + { + input: "48656c6c6f2c20576f726c6421", + output: new Uint8Array([72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108, 100, 33]), + }, + {input: "0x", output: new Uint8Array([])}, + ]; + + for (const {input, output} of testCases) { + it(`should convert hex string ${input} to Uint8Array`, () => { + expect(fromHex(input)).to.deep.equal(output); + }); + } +}); + +describe("toHexString", () => { + const testCases: {input: Uint8Array; output: string}[] = [ + {input: new Uint8Array([1, 2, 3]), output: "0x010203"}, + {input: new Uint8Array([72, 101, 108, 108, 111]), output: "0x48656c6c6f"}, + {input: new Uint8Array([]), output: "0x"}, + {input: new Uint8Array([0, 0, 0, 0]), output: "0x00000000"}, + {input: new Uint8Array([15, 255, 16, 0, 127]), output: "0x0fff10007f"}, + {input: new Uint8Array(5).fill(255), output: "0x" + "ff".repeat(5)}, + ]; + + for (const {input, output} of testCases) { + it(`should convert Uint8Array to hex string ${output}`, () => { + expect(toHexString(input)).to.be.equal(output); + }); + } +}); diff --git a/packages/validator/package.json b/packages/validator/package.json index 5294318b5536..8e659c94bd9e 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/validator", - "version": "1.11.3", + "version": "1.12.0", "description": "A Typescript implementation of the validator client", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -49,14 +49,14 @@ ], "dependencies": { "@chainsafe/bls": "7.1.1", - "@chainsafe/ssz": "^0.13.0", - "@lodestar/api": "^1.11.3", - "@lodestar/config": "^1.11.3", - "@lodestar/db": "^1.11.3", - "@lodestar/params": "^1.11.3", - "@lodestar/state-transition": "^1.11.3", - "@lodestar/types": "^1.11.3", - "@lodestar/utils": "^1.11.3", + "@chainsafe/ssz": "^0.14.0", + "@lodestar/api": "^1.12.0", + "@lodestar/config": "^1.12.0", + "@lodestar/db": "^1.12.0", + "@lodestar/params": "^1.12.0", + "@lodestar/state-transition": "^1.12.0", + "@lodestar/types": "^1.12.0", + "@lodestar/utils": "^1.12.0", "bigint-buffer": "^1.1.5", "strict-event-emitter-types": "^2.0.0" }, diff --git a/packages/validator/src/services/block.ts b/packages/validator/src/services/block.ts index bbe96ac772a8..c9eeadb06630 100644 --- a/packages/validator/src/services/block.ts +++ b/packages/validator/src/services/block.ts @@ -178,7 +178,7 @@ export class BlockProposingService { ApiError.assert( isBlindedBeaconBlock(signedBlock.message) ? 
await this.api.beacon.publishBlindedBlock(signedBlock as allForks.SignedBlindedBeaconBlock) - : await this.api.beacon.publishBlock(signedBlock as allForks.SignedBeaconBlock) + : await this.api.beacon.publishBlockV2(signedBlock as allForks.SignedBeaconBlock) ); } else { ApiError.assert( @@ -187,7 +187,7 @@ export class BlockProposingService { signedBlindedBlock: signedBlock, signedBlindedBlobSidecars: signedBlobSidecars, } as allForks.SignedBlindedBlockContents) - : await this.api.beacon.publishBlock({signedBlock, signedBlobSidecars} as allForks.SignedBlockContents) + : await this.api.beacon.publishBlockV2({signedBlock, signedBlobSidecars} as allForks.SignedBlockContents) ); } }; diff --git a/packages/validator/src/services/blockDuties.ts b/packages/validator/src/services/blockDuties.ts index d22cc087714b..67b6e5834417 100644 --- a/packages/validator/src/services/blockDuties.ts +++ b/packages/validator/src/services/blockDuties.ts @@ -133,7 +133,9 @@ export class BlockDutiesService { const isLastSlotEpoch = computeStartSlotAtEpoch(nextEpoch) === currentSlot + 1; if (isLastSlotEpoch) { // no need to await for other steps, just poll proposers for next epoch - void this.pollBeaconProposersNextEpoch(currentSlot, nextEpoch, signal); + this.pollBeaconProposersNextEpoch(currentSlot, nextEpoch, signal).catch((e) => { + this.logger.error("Error on pollBeaconProposersNextEpoch", {}, e); + }); } // Notify the block proposal service for any proposals that we have in our cache. @@ -163,7 +165,7 @@ export class BlockDutiesService { } /** - * This is to avoid some delay on the first slot of the opoch when validators has proposal duties. + * This is to avoid some delay on the first slot of the epoch when validators have proposal duties. * See https://github.com/ChainSafe/lodestar/issues/5792 */ private async pollBeaconProposersNextEpoch(currentSlot: Slot, nextEpoch: Epoch, signal: AbortSignal): Promise { diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts index 2afbeddbc091..42979f7c71e8 100644 --- a/packages/validator/src/services/validatorStore.ts +++ b/packages/validator/src/services/validatorStore.ts @@ -122,7 +122,8 @@ type ValidatorData = ProposerConfig & { export const defaultOptions = { suggestedFeeRecipient: "0x0000000000000000000000000000000000000000", defaultGasLimit: 30_000_000, - builderSelection: routes.validator.BuilderSelection.MaxProfit, + builderSelection: routes.validator.BuilderSelection.ExecutionOnly, + builderAliasSelection: routes.validator.BuilderSelection.MaxProfit, // turn it off by default, turn it back on once other clients support v3 api useProduceBlockV3: false, }; @@ -232,6 +233,22 @@ export class ValidatorStore { return this.validators.get(pubkeyHex)?.graffiti ?? this.defaultProposerConfig.graffiti; } + setGraffiti(pubkeyHex: PubkeyHex, graffiti: string): void { + const validatorData = this.validators.get(pubkeyHex); + if (validatorData === undefined) { + throw Error(`Validator pubkey ${pubkeyHex} not known`); + } + validatorData.graffiti = graffiti; + } + + deleteGraffiti(pubkeyHex: PubkeyHex): void { + const validatorData = this.validators.get(pubkeyHex); + if (validatorData === undefined) { + throw Error(`Validator pubkey ${pubkeyHex} not known`); + } + delete validatorData["graffiti"]; + } + getBuilderSelection(pubkeyHex: PubkeyHex): routes.validator.BuilderSelection { return (this.validators.get(pubkeyHex)?.builder || {}).selection ?? 
this.defaultProposerConfig.builder.selection; } diff --git a/packages/validator/src/validator.ts b/packages/validator/src/validator.ts index b9bdc0be742a..68ae2135b564 100644 --- a/packages/validator/src/validator.ts +++ b/packages/validator/src/validator.ts @@ -266,6 +266,7 @@ export class Validator { let api: Api; if (typeof opts.api === "string" || Array.isArray(opts.api)) { const urls = typeof opts.api === "string" ? [opts.api] : opts.api; + logger.info("Beacon node", {urls: urls.toString()}); // This new api instance can make do with default timeout as a faster timeout is // not necessary since this instance won't be used for validator duties api = getClient({urls, getAbortSignal: () => opts.abortController.signal}, {config, logger}); @@ -284,6 +285,18 @@ export class Validator { await assertEqualGenesis(opts, genesis); logger.info("Verified connected beacon node and validator have the same genesisValidatorRoot"); + const {useProduceBlockV3 = defaultOptions.useProduceBlockV3, valProposerConfig} = opts; + const defaultBuilderSelection = + valProposerConfig?.defaultConfig.builder?.selection ?? defaultOptions.builderSelection; + const strictFeeRecipientCheck = valProposerConfig?.defaultConfig.strictFeeRecipientCheck ?? false; + const suggestedFeeRecipient = valProposerConfig?.defaultConfig.feeRecipient ?? defaultOptions.suggestedFeeRecipient; + logger.info("Initializing validator", { + useProduceBlockV3, + defaultBuilderSelection, + suggestedFeeRecipient, + strictFeeRecipientCheck, + }); + return Validator.init(opts, genesis, metrics); } diff --git a/packages/validator/test/unit/services/block.test.ts b/packages/validator/test/unit/services/block.test.ts index ce7fb3465220..0a533b140a9c 100644 --- a/packages/validator/test/unit/services/block.test.ts +++ b/packages/validator/test/unit/services/block.test.ts @@ -67,7 +67,7 @@ describe("BlockDutiesService", function () { ok: true, status: HttpStatusCode.OK, }); - api.beacon.publishBlock.resolves(); + api.beacon.publishBlockV2.resolves(); // Trigger block production for slot 1 const notifyBlockProductionFn = blockService["dutiesService"]["notifyBlockProductionFn"]; @@ -77,7 +77,7 @@ describe("BlockDutiesService", function () { await sleep(20, controller.signal); // Must have submitted the block received on signBlock() - expect(api.beacon.publishBlock.callCount).to.equal(1, "publishBlock() must be called once"); - expect(api.beacon.publishBlock.getCall(0).args).to.deep.equal([signedBlock], "wrong publishBlock() args"); + expect(api.beacon.publishBlockV2.callCount).to.equal(1, "publishBlock() must be called once"); + expect(api.beacon.publishBlockV2.getCall(0).args).to.deep.equal([signedBlock], "wrong publishBlock() args"); }); }); diff --git a/yarn.lock b/yarn.lock index 16e6df6045af..a43ea9d03d5b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -664,10 +664,10 @@ "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1" -"@chainsafe/ssz@^0.13.0": - version "0.13.0" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.13.0.tgz#0bd11af6abe023d4cc24067a46889dcabbe573e5" - integrity sha512-73PF5bFXE9juLD1+dkmYV/CMO/5ip0TmyzgYw87vAn8Cn+CbwCOp/HyNNdYCmdl104a2bqcORFJzirCvvc+nNw== +"@chainsafe/ssz@^0.14.0": + version "0.14.0" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.14.0.tgz#fe9e4fd3cf673013bd57f77c3ab0fdc5ebc5d916" + integrity sha512-KTc33pWu7ItXlzMAz5/1osOHsvhx25kpM3j7Ez+PNZLyyhIoNzAhhozvxy+ul0fCDfHbvaCRp3lJQnzsb5Iv0A== dependencies: "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" 
"^0.6.1"