diff --git a/packages/api/package.json b/packages/api/package.json
index 58566a0b23f..3d12970bfe4 100644
--- a/packages/api/package.json
+++ b/packages/api/package.json
@@ -70,8 +70,8 @@
     "check-readme": "typescript-docs-verifier"
   },
   "dependencies": {
-    "@chainsafe/persistent-merkle-tree": "^0.8.0",
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@lodestar/config": "^1.22.0",
     "@lodestar/params": "^1.22.0",
     "@lodestar/types": "^1.22.0",
diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json
index e3d095b6f1e..d344c9f309d 100644
--- a/packages/beacon-node/package.json
+++ b/packages/beacon-node/package.json
@@ -94,15 +94,15 @@
     "check-readme": "typescript-docs-verifier"
   },
   "dependencies": {
-    "@chainsafe/as-sha256": "^0.5.0",
+    "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256",
     "@chainsafe/blst": "^2.0.3",
     "@chainsafe/discv5": "^10.0.1",
     "@chainsafe/enr": "^4.0.1",
     "@chainsafe/libp2p-gossipsub": "^14.1.0",
     "@chainsafe/libp2p-noise": "^16.0.0",
-    "@chainsafe/persistent-merkle-tree": "^0.8.0",
+    "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree",
     "@chainsafe/prometheus-gc-stats": "^1.0.0",
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@chainsafe/threads": "^1.11.1",
     "@chainsafe/pubkey-index-map": "2.0.0",
     "@ethersproject/abi": "^5.7.0",
diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts
index 49cd4622000..53f4e5411cf 100644
--- a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts
+++ b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts
@@ -12,6 +12,12 @@ import {BlockProcessOpts} from "../options.js";
 import {byteArrayEquals} from "../../util/bytes.js";
 import {nextEventLoop} from "../../util/eventLoop.js";
 import {BlockInput, ImportBlockOpts} from "./types.js";
+import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
+
+/**
+ * Data in a BeaconBlock is bounded so we can use a single HashComputationGroup for all blocks
+ */
+const blockHCGroup = new HashComputationGroup();
 
 /**
  * Verifies 1 or more blocks are fully valid running the full state transition; from a linear sequence of blocks.
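A single module-level group works because a block touches a bounded number of state nodes, so the group's internal per-level arrays can simply be reused call after call. Below is a minimal sketch of that reuse pattern; HashComputationGroup and the batchHashTreeRoot() ViewDU method are assumed to come from the locally linked persistent-merkle-tree/ssz branches referenced by the file: dependencies above, not from the published 0.8.0/0.17.1 releases.

import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";

// Reused across calls: batchHashTreeRoot() fills the group's per-level arrays with the
// hash computations of all dirty nodes, hashes them level by level, and leaves the
// arrays in place for the next call instead of reallocating them.
const sharedHcGroup = new HashComputationGroup();

// Structural typing keeps the sketch independent of the exact ssz typings.
type BatchHashable = {batchHashTreeRoot(hcGroup?: HashComputationGroup): Uint8Array};

export function computeStateRoot(postState: BatchHashable): Uint8Array {
  return postState.batchHashTreeRoot(sharedHcGroup);
}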
@@ -63,7 +69,8 @@ export async function verifyBlocksStateTransitionOnly(
     const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({
       source: StateHashTreeRootSource.blockTransition,
     });
-    const stateRoot = postState.hashTreeRoot();
+    // state root is computed inside stateTransition(), so it should take no time here
+    const stateRoot = postState.batchHashTreeRoot(blockHCGroup);
     hashTreeRootTimer?.();
 
     // Check state root matches
diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts
index bda61875884..96d86457a68 100644
--- a/packages/beacon-node/src/chain/prepareNextSlot.ts
+++ b/packages/beacon-node/src/chain/prepareNextSlot.ts
@@ -19,6 +19,7 @@ import {prepareExecutionPayload, getPayloadAttributesForSSE} from "./produceBloc
 import {IBeaconChain} from "./interface.js";
 import {RegenCaller} from "./regen/index.js";
 import {ForkchoiceCaller} from "./forkChoice/index.js";
+import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
 
 /* With 12s slot times, this scheduler will run 4s before the start of each slot (`12 / 3 = 4`). */
 export const SCHEDULER_LOOKAHEAD_FACTOR = 3;
@@ -26,6 +27,11 @@ export const SCHEDULER_LOOKAHEAD_FACTOR = 3;
 /* We don't want to do more epoch transition than this */
 const PREPARE_EPOCH_LIMIT = 1;
 
+/**
+ * The same HashComputationGroup to be used for all epoch transitions.
+ */
+const epochHCGroup = new HashComputationGroup();
+
 /**
  * At Bellatrix, if we are responsible for proposing in next slot, we want to prepare payload
  * 4s (1/3 slot) before the start of next slot
@@ -232,7 +238,12 @@
         const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({
           source: isEpochTransition ? StateHashTreeRootSource.prepareNextEpoch : StateHashTreeRootSource.prepareNextSlot,
         });
-        state.hashTreeRoot();
+        if (isEpochTransition) {
+          state.batchHashTreeRoot(epochHCGroup);
+        } else {
+          // normal slot, not worth batching the hash
+          state.node.rootHashObject;
+        }
         hashTreeRootTimer?.();
       }
     }
diff --git a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts
index bfa30e570e0..55362957f9b 100644
--- a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts
+++ b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts
@@ -8,6 +8,12 @@ import {
 import {BeaconBlock, BlindedBeaconBlock, Gwei, Root} from "@lodestar/types";
 import {ZERO_HASH} from "../../constants/index.js";
 import {Metrics} from "../../metrics/index.js";
+import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
+
+/**
+ * Data in a BeaconBlock is bounded so we can use a single HashComputationGroup for all blocks
+ */
+const blockHCGroup = new HashComputationGroup();
 
 /**
  * Instead of running fastStateTransition(), only need to process block since
@@ -48,7 +54,8 @@ export function computeNewStateRoot(
   const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({
     source: StateHashTreeRootSource.computeNewStateRoot,
   });
-  const newStateRoot = postState.hashTreeRoot();
+  // state root is computed inside stateTransition(), so it should take no time here
+  const newStateRoot = postState.batchHashTreeRoot(blockHCGroup);
   hashTreeRootTimer?.();
 
   return {newStateRoot, proposerReward};
diff --git a/packages/beacon-node/test/spec/utils/runValidSszTest.ts b/packages/beacon-node/test/spec/utils/runValidSszTest.ts
index 748a7770b19..32c3b91d5f6 100644
--- a/packages/beacon-node/test/spec/utils/runValidSszTest.ts
+++ b/packages/beacon-node/test/spec/utils/runValidSszTest.ts
@@ -81,14 +81,11 @@ export function runValidSszTest(type: Type, testData: ValidTestCaseData
   // 0x0000000000000000000000000000000000000000000000000000000000000000
   if (process.env.RENDER_ROOTS) {
     if (type.isBasic) {
-      console.log("ROOTS Basic", toHexString(type.serialize(testDataValue)));
+      console.log("Chunk Basic", toHexString(type.serialize(testDataValue)));
     } else {
       // biome-ignore lint/complexity/useLiteralKeys: The `getRoots` is a protected attribute
-      const roots = (type as CompositeType)["getRoots"](testDataValue);
-      console.log(
-        "ROOTS Composite",
-        roots.map((root) => toHexString(root))
-      );
+      const chunkBytes = (type as CompositeType)["getChunkBytes"](testDataValue);
+      console.log("Chunk Bytes Composite", toHexString(chunkBytes));
     }
   }
 
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 67d0dfd332c..4c21331862e 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -56,8 +56,8 @@
     "@chainsafe/blst": "^2.0.3",
     "@chainsafe/discv5": "^10.0.1",
     "@chainsafe/enr": "^4.0.1",
-    "@chainsafe/persistent-merkle-tree": "^0.8.0",
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@chainsafe/threads": "^1.11.1",
     "@libp2p/crypto": "^5.0.4",
     "@libp2p/interface": "^2.1.2",
diff --git a/packages/cli/src/applyPreset.ts b/packages/cli/src/applyPreset.ts
index 612c5d648c6..09bda76fedc 100644
--- a/packages/cli/src/applyPreset.ts
+++ b/packages/cli/src/applyPreset.ts
@@ -1,6 +1,6 @@
 // MUST import this file first before anything and not import any Lodestar code.
-import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js";
+import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js";
 import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js";
 
 // without setting this first, persistent-merkle-tree will use noble instead
diff --git a/packages/config/package.json b/packages/config/package.json
index 434000db2a0..ed7425fd27a 100644
--- a/packages/config/package.json
+++ b/packages/config/package.json
@@ -64,7 +64,7 @@
     "blockchain"
   ],
   "dependencies": {
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@lodestar/params": "^1.22.0",
     "@lodestar/utils": "^1.22.0",
     "@lodestar/types": "^1.22.0"
diff --git a/packages/db/package.json b/packages/db/package.json
index 2a10d36766b..d484caff6b6 100644
--- a/packages/db/package.json
+++ b/packages/db/package.json
@@ -35,7 +35,7 @@
     "check-readme": "typescript-docs-verifier"
   },
   "dependencies": {
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@lodestar/config": "^1.22.0",
     "@lodestar/utils": "^1.22.0",
     "classic-level": "^1.4.1",
diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json
index 2197ad90a9f..b14d007b4ce 100644
--- a/packages/fork-choice/package.json
+++ b/packages/fork-choice/package.json
@@ -36,7 +36,7 @@
     "check-readme": "typescript-docs-verifier"
   },
   "dependencies": {
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@lodestar/config": "^1.22.0",
     "@lodestar/params": "^1.22.0",
     "@lodestar/state-transition": "^1.22.0",
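The applyPreset change above swaps the CLI's default hasher from as-sha256 to hashtree (the prover entry point gets the same change further down), which is what the chunk-oriented batch hashing later in this diff is sized for: hashtree digests up to 1024 bytes of input per native call. A minimal sketch of the required bootstrap order, assuming the hasher module layout of the locally linked persistent-merkle-tree branch:

// Must run before any module that builds SSZ types; otherwise merkleization starts out
// on the default (noble) hasher, as the original comment in applyPreset.ts warns.
import {hasher as hashtreeHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js";
import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js";

setHasher(hashtreeHasher);

// Only afterwards load Lodestar code, e.g. via a dynamic import:
// const {ssz} = await import("@lodestar/types");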
diff --git a/packages/light-client/package.json b/packages/light-client/package.json
index a503d6bc510..17f4f9d9a54 100644
--- a/packages/light-client/package.json
+++ b/packages/light-client/package.json
@@ -75,8 +75,8 @@
   "dependencies": {
     "@chainsafe/bls": "7.1.3",
     "@chainsafe/blst": "^0.2.0",
-    "@chainsafe/persistent-merkle-tree": "^0.8.0",
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@lodestar/api": "^1.22.0",
     "@lodestar/config": "^1.22.0",
     "@lodestar/params": "^1.22.0",
@@ -85,7 +85,7 @@
     "mitt": "^3.0.0"
   },
   "devDependencies": {
-    "@chainsafe/as-sha256": "^0.5.0",
+    "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256",
     "@types/qs": "^6.9.7",
     "fastify": "^5.0.0",
     "qs": "^6.11.1",
diff --git a/packages/prover/src/cli/applyPreset.ts b/packages/prover/src/cli/applyPreset.ts
index f0c3d83c775..8facf5171d2 100644
--- a/packages/prover/src/cli/applyPreset.ts
+++ b/packages/prover/src/cli/applyPreset.ts
@@ -1,6 +1,6 @@
 // MUST import this file first before anything and not import any Lodestar code.
-import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js";
+import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js";
 import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js";
 
 // without setting this first, persistent-merkle-tree will use noble instead
diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json
index a01d835bae9..8e323aaf6b7 100644
--- a/packages/state-transition/package.json
+++ b/packages/state-transition/package.json
@@ -58,11 +58,11 @@
   },
   "types": "lib/index.d.ts",
   "dependencies": {
-    "@chainsafe/as-sha256": "^0.5.0",
+    "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256",
     "@chainsafe/blst": "^2.0.3",
-    "@chainsafe/persistent-merkle-tree": "^0.8.0",
+    "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree",
     "@chainsafe/persistent-ts": "^0.19.1",
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@chainsafe/swap-or-not-shuffle": "^0.0.2",
     "@lodestar/config": "^1.22.0",
     "@lodestar/params": "^1.22.0",
diff --git a/packages/state-transition/src/block/processEth1Data.ts b/packages/state-transition/src/block/processEth1Data.ts
index 92ab147aa77..3846eaaa5f5 100644
--- a/packages/state-transition/src/block/processEth1Data.ts
+++ b/packages/state-transition/src/block/processEth1Data.ts
@@ -48,12 +48,12 @@ export function becomesNewEth1Data(
   // Then isEqualEth1DataView compares cached roots (HashObject as of Jan 2022) which is much cheaper
   // than doing structural equality, which requires tree -> value conversions
   let sameVotesCount = 0;
-  const eth1DataVotes = state.eth1DataVotes.getAllReadonly();
-  for (let i = 0; i < eth1DataVotes.length; i++) {
-    if (isEqualEth1DataView(eth1DataVotes[i], newEth1Data)) {
+  // biome-ignore lint/complexity/noForEach: ssz api
+  state.eth1DataVotes.forEach((eth1DataVote) => {
+    if (isEqualEth1DataView(eth1DataVote, newEth1Data)) {
       sameVotesCount++;
     }
-  }
+  });
 
   // The +1 is to account for the `eth1Data` supplied to the function.
   if ((sameVotesCount + 1) * 2 > SLOTS_PER_ETH1_VOTING_PERIOD) {
diff --git a/packages/state-transition/src/cache/epochTransitionCache.ts b/packages/state-transition/src/cache/epochTransitionCache.ts
index 27b781e8a6a..a97274d7171 100644
--- a/packages/state-transition/src/cache/epochTransitionCache.ts
+++ b/packages/state-transition/src/cache/epochTransitionCache.ts
@@ -1,4 +1,4 @@
-import {phase0, Epoch, RootHex, ValidatorIndex} from "@lodestar/types";
+import {Epoch, RootHex, ValidatorIndex} from "@lodestar/types";
 import {intDiv, toRootHex} from "@lodestar/utils";
 import {
   EPOCHS_PER_SLASHINGS_VECTOR,
@@ -19,7 +19,12 @@ import {
   FLAG_CURR_TARGET_ATTESTER,
   FLAG_CURR_HEAD_ATTESTER,
 } from "../util/attesterStatus.js";
-import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStatePhase0} from "../index.js";
+import {
+  CachedBeaconStateAllForks,
+  CachedBeaconStateAltair,
+  CachedBeaconStatePhase0,
+  hasCompoundingWithdrawalCredential,
+} from "../index.js";
 import {computeBaseRewardPerIncrement} from "../util/altair.js";
 import {processPendingAttestations} from "../epoch/processPendingAttestations.js";
 
@@ -133,11 +138,7 @@ export interface EpochTransitionCache {
 
   flags: number[];
 
-  /**
-   * Validators in the current epoch, should use it for read-only value instead of accessing state.validators directly.
-   * Note that during epoch processing, validators could be updated so need to use it with care.
-   */
-  validators: phase0.Validator[];
+  isCompoundingValidatorArr: boolean[];
 
   /**
   * This is for electra only
@@ -216,6 +217,11 @@ const inclusionDelays = new Array<number>();
 const flags = new Array<number>();
 /** WARNING: reused, never gc'd */
 const nextEpochShufflingActiveValidatorIndices = new Array<ValidatorIndex>();
+/** WARNING: reused, never gc'd */
+const isCompoundingValidatorArr = new Array<boolean>();
+
+const previousEpochParticipation = new Array<number>();
+const currentEpochParticipation = new Array<number>();
 
 export function beforeProcessEpoch(
   state: CachedBeaconStateAllForks,
@@ -233,17 +239,14 @@ export function beforeProcessEpoch(
   const indicesToSlash: ValidatorIndex[] = [];
   const indicesEligibleForActivationQueue: ValidatorIndex[] = [];
-  const indicesEligibleForActivation: ValidatorIndex[] = [];
+  const indicesEligibleForActivation: {validatorIndex: ValidatorIndex; activationEligibilityEpoch: Epoch}[] = [];
   const indicesToEject: ValidatorIndex[] = [];
 
   let totalActiveStakeByIncrement = 0;
-
-  // To optimize memory each validator node in `state.validators` is represented with a special node type
-  // `BranchNodeStruct` that represents the data as struct internally. This utility grabs the struct data directly
-  // from the nodes without any extra transformation. The returned `validators` array contains native JS objects.
-  const validators = state.validators.getAllReadonlyValues();
-  const validatorCount = validators.length;
-
+  const validatorCount = state.validators.length;
+  if (forkSeq >= ForkSeq.electra) {
+    isCompoundingValidatorArr.length = validatorCount;
+  }
   nextEpochShufflingActiveValidatorIndices.length = validatorCount;
   let nextEpochShufflingActiveIndicesLength = 0;
   // pre-fill with true (most validators are active)
@@ -273,10 +276,13 @@
 
   const effectiveBalancesByIncrements = epochCtx.effectiveBalanceIncrements;
 
-  for (let i = 0; i < validatorCount; i++) {
-    const validator = validators[i];
+  state.validators.forEachValue((validator, i) => {
     let flag = 0;
 
+    if (forkSeq >= ForkSeq.electra) {
+      isCompoundingValidatorArr[i] = hasCompoundingWithdrawalCredential(validator.withdrawalCredentials);
+    }
+
     if (validator.slashed) {
       if (slashingsEpoch === validator.withdrawableEpoch) {
         indicesToSlash.push(i);
@@ -339,7 +345,10 @@
     //
     // Use `else` since indicesEligibleForActivationQueue + indicesEligibleForActivation are mutually exclusive
     else if (validator.activationEpoch === FAR_FUTURE_EPOCH && validator.activationEligibilityEpoch <= currentEpoch) {
-      indicesEligibleForActivation.push(i);
+      indicesEligibleForActivation.push({
+        validatorIndex: i,
+        activationEligibilityEpoch: validator.activationEligibilityEpoch,
+      });
     }
 
     // To optimize process_registry_updates():
@@ -364,7 +373,7 @@
     if (isActiveNext2) {
       nextEpochShufflingActiveValidatorIndices[nextEpochShufflingActiveIndicesLength++] = i;
     }
-  }
+  });
 
   // Trigger async build of shuffling for epoch after next (nextShuffling post epoch transition)
   const epochAfterNext = state.epochCtx.nextEpoch + 1;
@@ -396,7 +405,7 @@ export function beforeProcessEpoch(
 
   // To optimize process_registry_updates():
   // order by sequence of activationEligibilityEpoch setting and then index
   indicesEligibleForActivation.sort(
-    (a, b) => validators[a].activationEligibilityEpoch - validators[b].activationEligibilityEpoch || a - b
+    (a, b) => a.activationEligibilityEpoch - b.activationEligibilityEpoch || a.validatorIndex - b.validatorIndex
   );
 
   if (forkSeq === ForkSeq.phase0) {
@@ -427,8 +436,10 @@ export function beforeProcessEpoch(
       FLAG_CURR_HEAD_ATTESTER
     );
   } else {
-    const previousEpochParticipation = (state as CachedBeaconStateAltair).previousEpochParticipation.getAll();
-    const currentEpochParticipation = (state as CachedBeaconStateAltair).currentEpochParticipation.getAll();
+    previousEpochParticipation.length = (state as CachedBeaconStateAltair).previousEpochParticipation.length;
+    (state as CachedBeaconStateAltair).previousEpochParticipation.getAll(previousEpochParticipation);
+    currentEpochParticipation.length = (state as CachedBeaconStateAltair).currentEpochParticipation.length;
+    (state as CachedBeaconStateAltair).currentEpochParticipation.getAll(currentEpochParticipation);
     for (let i = 0; i < validatorCount; i++) {
       flags[i] |=
         // checking active status first is required to pass random spec tests in altair
@@ -505,7 +516,7 @@ export function beforeProcessEpoch(
     currEpochUnslashedTargetStakeByIncrement: currTargetUnslStake,
     indicesToSlash,
     indicesEligibleForActivationQueue,
-    indicesEligibleForActivation,
+    indicesEligibleForActivation: indicesEligibleForActivation.map(({validatorIndex}) => validatorIndex),
     indicesToEject,
     nextShufflingDecisionRoot,
     nextShufflingActiveIndices,
@@ -517,7 +528,7 @@ export function beforeProcessEpoch(
     proposerIndices,
     inclusionDelays,
     flags,
-    validators,
+    isCompoundingValidatorArr,
     // will be assigned in processPendingConsolidations()
    newCompoundingValidators: undefined,
     // Will be assigned in processRewardsAndPenalties()
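Two allocation-avoiding patterns replace the old getAllReadonlyValues() snapshot here: module-level scratch arrays that are resized rather than reallocated every epoch, and per-validator iteration over the BranchNodeStruct-backed views. A rough sketch of the combination follows; forEachValue() is assumed to be provided by the locally linked ssz branch, and the 0x02 compounding-credential prefix mirrors hasCompoundingWithdrawalCredential() from the state-transition package.

// Module-level scratch array: never gc'd, only resized. Entries are overwritten before use.
const isCompoundingScratch: boolean[] = [];

type ValidatorsView = {
  length: number;
  // assumed API of the linked ssz branch: iterate struct values without materializing an array
  forEachValue(fn: (validator: {withdrawalCredentials: Uint8Array}, index: number) => void): void;
};

function fillCompoundingFlags(validators: ValidatorsView): boolean[] {
  isCompoundingScratch.length = validators.length;
  validators.forEachValue((validator, i) => {
    // 0x02 prefix = compounding withdrawal credential (electra), per hasCompoundingWithdrawalCredential
    isCompoundingScratch[i] = validator.withdrawalCredentials[0] === 0x02;
  });
  return isCompoundingScratch;
}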
diff --git a/packages/state-transition/src/epoch/getRewardsAndPenalties.ts b/packages/state-transition/src/epoch/getRewardsAndPenalties.ts
index bf766fe4666..cf0a29fd8fe 100644
--- a/packages/state-transition/src/epoch/getRewardsAndPenalties.ts
+++ b/packages/state-transition/src/epoch/getRewardsAndPenalties.ts
@@ -17,7 +17,7 @@ import {
   FLAG_PREV_TARGET_ATTESTER_UNSLASHED,
   hasMarkers,
 } from "../util/attesterStatus.js";
-import {isInInactivityLeak, newZeroedArray} from "../util/index.js";
+import {isInInactivityLeak} from "../util/index.js";
 
 type RewardPenaltyItem = {
   baseReward: number;
@@ -28,6 +28,11 @@ type RewardPenaltyItem = {
   timelyHeadReward: number;
 };
 
+/**
+ * This data is reused and never gc'd.
+ */
+const rewards = new Array<number>();
+const penalties = new Array<number>();
 /**
 * An aggregate of getFlagIndexDeltas and getInactivityPenaltyDeltas that loop through process.flags 1 time instead of 4.
 *
@@ -48,8 +53,10 @@ export function getRewardsAndPenaltiesAltair(
   // TODO: Is there a cheaper way to measure length that going to `state.validators`?
   const validatorCount = state.validators.length;
   const activeIncrements = cache.totalActiveStakeByIncrement;
-  const rewards = newZeroedArray(validatorCount);
-  const penalties = newZeroedArray(validatorCount);
+  rewards.length = validatorCount;
+  rewards.fill(0);
+  penalties.length = validatorCount;
+  penalties.fill(0);
 
   const isInInactivityLeakBn = isInInactivityLeak(state);
 
   // effectiveBalance is multiple of EFFECTIVE_BALANCE_INCREMENT and less than MAX_EFFECTIVE_BALANCE
diff --git a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts
index 0ea4b49dddf..9203c0419a9 100644
--- a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts
+++ b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts
@@ -10,7 +10,6 @@ import {
   TIMELY_TARGET_FLAG_INDEX,
 } from "@lodestar/params";
 import {EpochTransitionCache, CachedBeaconStateAllForks, BeaconStateAltair} from "../types.js";
-import {hasCompoundingWithdrawalCredential} from "../util/electra.js";
 
 /** Same to https://github.com/ethereum/eth2.0-specs/blob/v1.1.0-alpha.5/specs/altair/beacon-chain.md#has_flag */
 const TIMELY_TARGET = 1 << TIMELY_TARGET_FLAG_INDEX;
@@ -45,7 +44,7 @@ export function processEffectiveBalanceUpdates(
   // and updated in processPendingBalanceDeposits() and processPendingConsolidations()
   // so it's recycled here for performance.
   const balances = cache.balances ?? state.balances.getAll();
-  const currentEpochValidators = cache.validators;
+  const {isCompoundingValidatorArr} = cache;
   const newCompoundingValidators = cache.newCompoundingValidators ?? new Set();
 
   let numUpdate = 0;
@@ -61,9 +60,7 @@ export function processEffectiveBalanceUpdates(
       effectiveBalanceLimit = MAX_EFFECTIVE_BALANCE;
     } else {
       // from electra, effectiveBalanceLimit is per validator
-      const isCompoundingValidator =
-        hasCompoundingWithdrawalCredential(currentEpochValidators[i].withdrawalCredentials) ||
-        newCompoundingValidators.has(i);
+      const isCompoundingValidator = isCompoundingValidatorArr[i] || newCompoundingValidators.has(i);
       effectiveBalanceLimit = isCompoundingValidator ? MAX_EFFECTIVE_BALANCE_ELECTRA : MIN_ACTIVATION_BALANCE;
     }
diff --git a/packages/state-transition/src/epoch/processInactivityUpdates.ts b/packages/state-transition/src/epoch/processInactivityUpdates.ts
index 4a9b129ee79..4d1e28d92bf 100644
--- a/packages/state-transition/src/epoch/processInactivityUpdates.ts
+++ b/packages/state-transition/src/epoch/processInactivityUpdates.ts
@@ -3,6 +3,11 @@ import {CachedBeaconStateAltair, EpochTransitionCache} from "../types.js";
 import * as attesterStatusUtil from "../util/attesterStatus.js";
 import {isInInactivityLeak} from "../util/index.js";
 
+/**
+ * This data is reused and never gc'd.
+ */
+const inactivityScoresArr = new Array<number>();
+
 /**
  * Mutates `inactivityScores` from pre-calculated validator flags.
  *
@@ -30,7 +35,8 @@ export function processInactivityUpdates(state: CachedBeaconStateAltair, cache:
   // this avoids importing FLAG_ELIGIBLE_ATTESTER inside the for loop, check the compiled code
   const {FLAG_PREV_TARGET_ATTESTER_UNSLASHED, FLAG_ELIGIBLE_ATTESTER, hasMarkers} = attesterStatusUtil;
 
-  const inactivityScoresArr = inactivityScores.getAll();
+  inactivityScoresArr.length = state.validators.length;
+  inactivityScores.getAll(inactivityScoresArr);
 
   for (let i = 0; i < flags.length; i++) {
     const flag = flags[i];
diff --git a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts
index 6c5d5aa3cb5..ef074dfd682 100644
--- a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts
+++ b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts
@@ -9,6 +9,10 @@ import {
 } from "../types.js";
 import {getAttestationDeltas} from "./getAttestationDeltas.js";
 import {getRewardsAndPenaltiesAltair} from "./getRewardsAndPenalties.js";
 
+/**
+ * This data is reused and never gc'd.
+ */
+const balances = new Array<number>();
 /**
  * Iterate over all validator and compute rewards and penalties to apply to balances.
  *
@@ -25,7 +29,8 @@ export function processRewardsAndPenalties(
   }
 
   const [rewards, penalties] = getRewardsAndPenalties(state, cache);
-  const balances = state.balances.getAll();
+  balances.length = state.balances.length;
+  state.balances.getAll(balances);
 
   for (let i = 0, len = rewards.length; i < len; i++) {
     const result = balances[i] + rewards[i] - penalties[i] - (slashingPenalties[i] ?? 0);
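processInactivityUpdates and processRewardsAndPenalties apply the same idea to inactivity scores and balances: keep a module-level array alive forever, set its length to the current validator count, and let getAll(target) fill it in place. The two-argument getAll() is assumed to be an addition of the locally linked ssz branch (the published 0.17.1 getAll() returns a freshly allocated array); a minimal sketch:

// Reused forever; resizing a JS array in place is cheap compared to allocating
// a new validator-count-sized array every epoch.
const balancesScratch: number[] = [];

type BalancesView = {length: number; getAll(target?: number[]): number[]};

function readBalances(balances: BalancesView): number[] {
  balancesScratch.length = balances.length;
  balances.getAll(balancesScratch); // assumed: fills the provided array instead of allocating
  return balancesScratch;
}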
diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts
index f025c685b1a..7b14727e997 100644
--- a/packages/state-transition/src/stateTransition.ts
+++ b/packages/state-transition/src/stateTransition.ts
@@ -1,3 +1,4 @@
+import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
 import {SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, ssz} from "@lodestar/types";
 import {SLOTS_PER_EPOCH} from "@lodestar/params";
 import {toRootHex} from "@lodestar/utils";
@@ -58,6 +59,11 @@ export enum StateHashTreeRootSource {
   computeNewStateRoot = "compute_new_state_root",
 }
 
+/**
+ * Data in a BeaconBlock is bounded so we can use a single HashComputationGroup for all blocks
+ */
+const hcGroup = new HashComputationGroup();
+
 /**
 * Implementation Note: follows the optimizations in protolambda's eth2fastspec (https://github.com/protolambda/eth2fastspec)
 */
@@ -105,13 +111,13 @@ export function stateTransition(
   processBlock(fork, postState, block, options, options);
 
+  // Note: time only on success. This does not include hashTreeRoot() time
+  processBlockTimer?.();
+
   const processBlockCommitTimer = metrics?.processBlockCommitTime.startTimer();
   postState.commit();
   processBlockCommitTimer?.();
 
-  // Note: time only on success. Include processBlock and commit
-  processBlockTimer?.();
-
   if (metrics) {
     onPostStateMetrics(postState, metrics);
   }
@@ -121,9 +127,11 @@ export function stateTransition(
   const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({
     source: StateHashTreeRootSource.stateTransition,
   });
-  const stateRoot = postState.hashTreeRoot();
+  // commit() is done inside batchHashTreeRoot()
+  // with batchHashTreeRoot(), we're not able to measure commit() time separately
+  // note that at the commit() phase we batch-hash validators via ListValidatorTreeViewDU, so this metric is a little bit confusing
+  const stateRoot = postState.batchHashTreeRoot(hcGroup);
   hashTreeRootTimer?.();
-
   if (!ssz.Root.equals(block.stateRoot, stateRoot)) {
     throw new Error(
       `Invalid state root at slot ${block.slot}, expected=${toRootHex(block.stateRoot)}, actual=${toRootHex(
diff --git a/packages/state-transition/src/util/balance.ts b/packages/state-transition/src/util/balance.ts
index c6b196846ec..ce5c2a46b4d 100644
--- a/packages/state-transition/src/util/balance.ts
+++ b/packages/state-transition/src/util/balance.ts
@@ -63,13 +63,13 @@ export function getEffectiveBalanceIncrementsZeroInactive(
     validatorCount
   );
 
-  const validators = justifiedState.validators.getAllReadonly();
   let j = 0;
-  for (let i = 0; i < validatorCount; i++) {
+  justifiedState.validators.forEachValue((validator, i) => {
+    const {slashed} = validator;
     if (i === activeIndices[j]) {
       // active validator
       j++;
-      if (validators[i].slashed) {
+      if (slashed) {
         // slashed validator
         effectiveBalanceIncrementsZeroInactive[i] = 0;
       }
@@ -77,7 +77,7 @@ export function getEffectiveBalanceIncrementsZeroInactive(
       // inactive validator
       effectiveBalanceIncrementsZeroInactive[i] = 0;
     }
-  }
+  });
 
   return effectiveBalanceIncrementsZeroInactive;
 }
diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts
index 668f22e13a1..b8051220633 100644
--- a/packages/state-transition/test/unit/cachedBeaconState.test.ts
+++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts
@@ -143,6 +143,8 @@ describe("CachedBeaconState", () => {
       }
     }
 
+    state.commit();
+
     if (validatorCountDelta < 0) {
       state.validators = state.validators.sliceTo(state.validators.length - 1 + validatorCountDelta);
diff --git a/packages/types/package.json b/packages/types/package.json
index 1c020b40907..0ce13360bc9 100644
--- a/packages/types/package.json
+++ b/packages/types/package.json
@@ -73,7 +73,7 @@
   },
   "types": "lib/index.d.ts",
   "dependencies": {
-    "@chainsafe/ssz": "^0.17.1",
+    "@chainsafe/ssz": "file:../../../ssz/packages/ssz",
     "@lodestar/params": "^1.22.0",
     "ethereum-cryptography": "^2.0.0"
   },
diff --git a/packages/types/src/phase0/listValidator.ts b/packages/types/src/phase0/listValidator.ts
new file mode 100644
index 00000000000..3a19931d1f5
--- /dev/null
+++ b/packages/types/src/phase0/listValidator.ts
@@ -0,0 +1,15 @@
+import {ListCompositeType, ListCompositeTreeViewDU} from "@chainsafe/ssz";
+import {Node} from "@chainsafe/persistent-merkle-tree";
+import {ValidatorNodeStructType} from "./validator.js";
+import {ListValidatorTreeViewDU} from "./viewDU/listValidator.js";
+
+export class ListValidatorType extends ListCompositeType<ValidatorNodeStructType> {
+  constructor(limit: number) {
+    super(new ValidatorNodeStructType(), limit);
+  }
+
+  getViewDU(node: Node, cache?: unknown): ListCompositeTreeViewDU<ValidatorNodeStructType> {
+    // biome-ignore lint/suspicious/noExplicitAny: ssz api
+    return new ListValidatorTreeViewDU(this, node, cache as any);
+  }
+}
diff --git a/packages/types/src/phase0/sszTypes.ts b/packages/types/src/phase0/sszTypes.ts
index 4a04701b789..84a812d83a8 100644
--- a/packages/types/src/phase0/sszTypes.ts
+++ b/packages/types/src/phase0/sszTypes.ts
@@ -29,6 +29,7 @@
 } from "@lodestar/params";
 import * as primitiveSsz from "../primitive/sszTypes.js";
 import {ValidatorNodeStruct} from "./validator.js";
+import {ListValidatorType} from "./listValidator.js";
 
 const {
   Bytes32,
@@ -228,7 +229,7 @@ export const HistoricalBatchRoots = new ContainerType(
 export const Validator = ValidatorNodeStruct;
 // Export as stand-alone for direct tree optimizations
-export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT);
+export const Validators = new ListValidatorType(VALIDATOR_REGISTRY_LIMIT);
 // this ListUintNum64Type is used to cache Leaf Nodes of BeaconState.balances after epoch transition
 export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT);
 export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICAL_VECTOR);
diff --git a/packages/types/src/phase0/validator.ts b/packages/types/src/phase0/validator.ts
index a6ec0fb1810..9a64d9569f9 100644
--- a/packages/types/src/phase0/validator.ts
+++ b/packages/types/src/phase0/validator.ts
@@ -14,6 +14,7 @@ const UINT32_SIZE = 4;
 const PUBKEY_SIZE = 48;
 const WITHDRAWAL_CREDENTIALS_SIZE = 32;
 const SLASHED_SIZE = 1;
+const CHUNK_SIZE = 32;
 
 export const ValidatorType = {
   pubkey: BLSPubkey,
@@ -60,6 +61,58 @@ export class ValidatorNodeStructType extends ContainerNodeStructType
+export function validatorToChunkBytes(
+  level3: ByteViews,
+  level4: Uint8Array,
+  value: ValueOf<typeof ValidatorNodeStruct>
+): void {
+  const {
+    pubkey,
+    withdrawalCredentials,
+    effectiveBalance,
+    slashed,
+    activationEligibilityEpoch,
+    activationEpoch,
+    exitEpoch,
+    withdrawableEpoch,
+  } = value;
+  const {uint8Array: outputLevel3, dataView} = level3;
+
+  // pubkey = 48 bytes which is 2 * CHUNK_SIZE
+  level4.set(pubkey, 0);
+  let offset = CHUNK_SIZE;
+  outputLevel3.set(withdrawalCredentials, offset);
+  offset += CHUNK_SIZE;
+  // effectiveBalance is UintNum64
+  dataView.setUint32(offset, effectiveBalance & 0xffffffff, true);
+  dataView.setUint32(offset + 4, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true);
+
+  offset += CHUNK_SIZE;
+  dataView.setUint32(offset, slashed ? 1 : 0, true);
+  offset += CHUNK_SIZE;
+  writeEpochInf(dataView, offset, activationEligibilityEpoch);
+  offset += CHUNK_SIZE;
+  writeEpochInf(dataView, offset, activationEpoch);
+  offset += CHUNK_SIZE;
+  writeEpochInf(dataView, offset, exitEpoch);
+  offset += CHUNK_SIZE;
+  writeEpochInf(dataView, offset, withdrawableEpoch);
+}
+
 function writeEpochInf(dataView: DataView, offset: number, value: number): number {
   if (value === Infinity) {
     dataView.setUint32(offset, 0xffffffff, true);
@@ -74,4 +127,3 @@ function writeEpochInf(dataView: DataView, offset: number, value: number): numbe
   }
   return offset;
 }
-export const ValidatorNodeStruct = new ValidatorNodeStructType();
diff --git a/packages/types/src/phase0/viewDU/listValidator.ts b/packages/types/src/phase0/viewDU/listValidator.ts
new file mode 100644
index 00000000000..adec88a94b1
--- /dev/null
+++ b/packages/types/src/phase0/viewDU/listValidator.ts
@@ -0,0 +1,180 @@
+import {
+  ListCompositeType,
+  ArrayCompositeTreeViewDUCache,
+  ListCompositeTreeViewDU,
+  ByteViews,
+  ContainerNodeStructTreeViewDU,
+} from "@chainsafe/ssz";
+import {HashComputationLevel, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree";
+import {byteArrayIntoHashObject} from "@chainsafe/as-sha256";
+import {ValidatorNodeStructType, ValidatorType, validatorToChunkBytes} from "../validator.js";
+import {ValidatorIndex} from "../../types.js";
+
+/**
+ * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks
+ * Given that level 3 of a validator has 8 chunks, we can hash 4 validators at a time
+ */
+const PARALLEL_FACTOR = 4;
+/**
+ * Allocate memory once for batch hashing validators.
+ */
+// each level 3 of a validator has 8 chunks, each chunk has 32 bytes
+const batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32);
+const level3ByteViewsArr: ByteViews[] = [];
+for (let i = 0; i < PARALLEL_FACTOR; i++) {
+  const uint8Array = batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32);
+  const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength);
+  level3ByteViewsArr.push({uint8Array, dataView});
+}
+// each level 4 of a validator has 2 chunks for pubkey, each chunk has 32 bytes
+const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32);
+const level4BytesArr: Uint8Array[] = [];
+for (let i = 0; i < PARALLEL_FACTOR; i++) {
+  level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32));
+}
+const pubkeyRoots: Uint8Array[] = [];
+for (let i = 0; i < PARALLEL_FACTOR; i++) {
+  pubkeyRoots.push(batchLevel4Bytes.subarray(i * 32, (i + 1) * 32));
+}
+
+const validatorRoots: Uint8Array[] = [];
+for (let i = 0; i < PARALLEL_FACTOR; i++) {
+  validatorRoots.push(batchLevel3Bytes.subarray(i * 32, (i + 1) * 32));
+}
+const validatorRoot = new Uint8Array(32);
+
+/**
+ * Similar to ListCompositeTreeViewDU with some differences:
+ * - if commit() is called without params, it's from a hashTreeRoot() api call, no need to compute root
+ * - otherwise it's from a batchHashTreeRoot() call, compute validator roots in batch
+ */
+export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU<ValidatorNodeStructType> {
+  constructor(
+    readonly type: ListCompositeType<ValidatorNodeStructType>,
+    protected _rootNode: Node,
+    cache?: ArrayCompositeTreeViewDUCache
+  ) {
+    super(type, _rootNode, cache);
+  }
+
+  commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void {
+    if (hcByLevel === null) {
+      // this is not from batchHashTreeRoot() call, go with regular flow
+      super.commit();
+      return;
+    }
+
+    const isOldRootHashed = this._rootNode.h0 !== null;
+    if (this.viewsChanged.size === 0) {
+      if (!isOldRootHashed && hcByLevel !== null) {
+        // not possible to get HashComputations due to BranchNodeStruct
+        this._rootNode.root;
+      }
+      return;
+    }
+
+    // TODO - batch: remove this type cast
+    const viewsChanged = this.viewsChanged as unknown as Map<
+      number,
+      ContainerNodeStructTreeViewDU<typeof ValidatorType>
+    >;
+
+    const indicesChanged: number[] = [];
+    for (const [index, viewChanged] of viewsChanged) {
+      // should not pass any params here in order not to compute the root
+      viewChanged.commit();
+      // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
+      this.nodes[index] = viewChanged.node;
+      // `validators.get(i)` may have been called without modifying any property, in which case we do not need to compute the root
+      if (viewChanged.node.h0 === null) {
+        indicesChanged.push(index);
+      }
+    }
+
+    // these validators don't have roots, we compute roots in batch
+    const sortedIndicesChanged = indicesChanged.sort((a, b) => a - b);
+    const nodesChanged: {index: ValidatorIndex; node: Node}[] = new Array(sortedIndicesChanged.length);
+    for (const [i, validatorIndex] of sortedIndicesChanged.entries()) {
+      nodesChanged[i] = {index: validatorIndex, node: this.nodes[validatorIndex]};
+    }
+    doBatchHashTreeRootValidators(sortedIndicesChanged, viewsChanged);
+
+    // do the remaining commit steps the same as the parent (ArrayCompositeTreeViewDU)
+    const indexes = nodesChanged.map((entry) => entry.index);
+    const nodes = nodesChanged.map((entry) => entry.node);
+    const chunksNode = this.type.tree_getChunksNode(this._rootNode);
+    const offsetThis = hcOffset + this.type.tree_chunksNodeOffset();
+    const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null;
+    const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis);
+
+    this._rootNode = this.type.tree_setChunksNode(
+      this._rootNode,
+      newChunksNode,
+      this.dirtyLength ? this._length : null,
+      hcOffset,
+      hcByLevel
+    );
+
+    if (!isOldRootHashed && hcByLevel !== null) {
+      // should never happen, handle just in case
+      // not possible to get HashComputations due to BranchNodeStruct
+      this._rootNode.root;
+    }
+
+    this.viewsChanged.clear();
+    this.dirtyLength = false;
+  }
+}
+
+function doBatchHashTreeRootValidators(
+  indices: ValidatorIndex[],
+  validators: Map<ValidatorIndex, ContainerNodeStructTreeViewDU<typeof ValidatorType>>
+): void {
+  const endBatch = indices.length - (indices.length % PARALLEL_FACTOR);
+
+  // commit every PARALLEL_FACTOR validators in batch
+  for (let i = 0; i < endBatch; i++) {
+    if (i % PARALLEL_FACTOR === 0) {
+      batchLevel3Bytes.fill(0);
+      batchLevel4Bytes.fill(0);
+    }
+    const indexInBatch = i % PARALLEL_FACTOR;
+    const viewIndex = indices[i];
+    const validator = validators.get(viewIndex);
+    if (validator) {
+      validatorToChunkBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch], validator.value);
+    }
+
+    if (indexInBatch === PARALLEL_FACTOR - 1) {
+      // hash level 4, this is populated to pubkeyRoots
+      digestNLevel(batchLevel4Bytes, 1);
+      for (let j = 0; j < PARALLEL_FACTOR; j++) {
+        level3ByteViewsArr[j].uint8Array.set(pubkeyRoots[j], 0);
+      }
+      // hash level 3, this is populated to validatorRoots
+      digestNLevel(batchLevel3Bytes, 3);
+      // commit all validators in this batch
+      for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) {
+        const viewIndex = indices[i - j];
+        const indexInBatch = (i - j) % PARALLEL_FACTOR;
+        const viewChanged = validators.get(viewIndex);
+        if (viewChanged) {
+          const branchNodeStruct = viewChanged.node;
+          byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct);
+        }
+      }
+    }
+  }
+
+  // commit the remaining validators; we could do this in batch too but don't want to create new Uint8Array views,
+  // and it's not much different from committing one by one
+  for (let i = endBatch; i < indices.length; i++) {
+    const viewIndex = indices[i];
+    const viewChanged = validators.get(viewIndex);
+    if (viewChanged) {
+      // compute root for each validator
+      viewChanged.type.hashTreeRootInto(viewChanged.value, validatorRoot, 0);
+      byteArrayIntoHashObject(validatorRoot, 0, viewChanged.node);
+    }
+  }
+}
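The sizing in this file follows directly from the constants above: each validator occupies 8 level-3 chunks (8 × 32 = 256 bytes) plus 2 level-4 pubkey chunks (64 bytes), so hashtree's 1024-byte MAX_SIZE fits 4 validators per digest call. The sketch below only restates that arithmetic; digestNLevel() semantics (hash a contiguous chunk buffer N levels down, leaving one 32-byte root at the start of each 2^N-chunk group) are assumed from the locally linked persistent-merkle-tree branch.

const CHUNK_SIZE = 32; // bytes per merkle chunk
const HASHTREE_MAX_INPUT = 1024; // bytes hashtree can digest per native call (= 32 chunks)
const LEVEL3_CHUNKS_PER_VALIDATOR = 8; // 8 container fields -> 8 chunks at level 3
const LEVEL4_CHUNKS_PER_VALIDATOR = 2; // 48-byte pubkey padded into 2 chunks at level 4

// 1024 / (8 * 32) = 4 validators hashed per level-3 digest call
const PARALLEL_FACTOR = HASHTREE_MAX_INPUT / (LEVEL3_CHUNKS_PER_VALIDATOR * CHUNK_SIZE);

// digestNLevel(batchLevel4Bytes, 1): 2 chunks -> 1 pubkey root per validator (one level of hashing)
// digestNLevel(batchLevel3Bytes, 3): 8 chunks -> 1 validator root per validator (three levels)
console.log(PARALLEL_FACTOR, LEVEL4_CHUNKS_PER_VALIDATOR * CHUNK_SIZE); // 4, 64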
"https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.5.0.tgz#2523fbef2b80b5000f9aa71f4a76e5c2c5c076bb" - integrity sha512-dTIY6oUZNdC5yDTVP5Qc9hAlKAsn0QTQ2DnQvvsbTnKSTbYs3p5RPN0aIUqN0liXei/9h24c7V0dkV44cnWIQA== "@chainsafe/as-sha256@^0.4.1": version "0.4.1" @@ -578,7 +576,7 @@ dependencies: "@chainsafe/is-ip" "^2.0.1" -"@chainsafe/persistent-merkle-tree@0.8.0", "@chainsafe/persistent-merkle-tree@^0.8.0": +"@chainsafe/persistent-merkle-tree@0.8.0": version "0.8.0" resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.8.0.tgz#18e2f0a5de3a0b59c6e5be8797a78e0d209dd7dc" integrity sha512-hh6C1JO6SKlr0QGNTNtTLqgGVMA/Bc20wD6CeMHp+wqbFKCULRJuBUxhF4WDx/7mX8QlqF3nFriF/Eo8oYJ4/A== @@ -595,6 +593,13 @@ "@chainsafe/as-sha256" "^0.4.1" "@noble/hashes" "^1.3.0" +"@chainsafe/persistent-merkle-tree@file:../ssz/packages/persistent-merkle-tree": + version "0.8.0" + dependencies: + "@chainsafe/as-sha256" "0.5.0" + "@chainsafe/hashtree" "1.0.1" + "@noble/hashes" "^1.3.0" + "@chainsafe/persistent-ts@^0.19.1": version "0.19.1" resolved "https://registry.npmjs.org/@chainsafe/persistent-ts/-/persistent-ts-0.19.1.tgz" @@ -649,10 +654,8 @@ "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1" -"@chainsafe/ssz@^0.17.1": +"@chainsafe/ssz@file:../ssz/packages/ssz": version "0.17.1" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.17.1.tgz#7986afbcad5e6971006d596fdb7dfa34bc195131" - integrity sha512-1ay46QqYcVTBvUnDXTPTi5WTiENu7tIxpZGMDpUWps1/nYBmh/We/UoCF/jO+o/fkcDD3p8xQPlHbcCfy+jyjA== dependencies: "@chainsafe/as-sha256" "0.5.0" "@chainsafe/persistent-merkle-tree" "0.8.0"