From 6f2405236f086df9d507fff77186353b40a71f8a Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 15 Oct 2024 16:52:07 +0700 Subject: [PATCH 01/15] feat: implement batch hash tree root for lodestar --- packages/api/package.json | 4 +- packages/beacon-node/package.json | 6 +- .../beacon-node/src/chain/archiver/index.ts | 2 +- .../src/chain/balancesTreeCache.ts | 38 ++++ .../src/chain/blocks/importBlock.ts | 11 +- packages/beacon-node/src/chain/chain.ts | 14 ++ packages/beacon-node/src/chain/interface.ts | 2 + .../beacon-node/src/chain/prepareNextSlot.ts | 13 +- .../chain/produceBlock/computeNewStateRoot.ts | 1 + .../beacon-node/src/chain/regen/queued.ts | 23 ++- .../stateCache/inMemoryCheckpointsCache.ts | 25 ++- .../stateCache/persistentCheckpointsCache.ts | 36 +++- .../beacon-node/src/chain/stateCache/types.ts | 7 +- .../src/metrics/metrics/lodestar.ts | 15 ++ packages/cli/package.json | 4 +- packages/cli/src/applyPreset.ts | 2 +- packages/config/package.json | 2 +- packages/db/package.json | 2 +- packages/fork-choice/package.json | 2 +- packages/light-client/package.json | 6 +- packages/prover/src/cli/applyPreset.ts | 2 +- packages/state-transition/package.json | 6 +- .../src/block/processEth1Data.ts | 7 +- .../src/cache/balancesTreeCache.ts | 5 + .../state-transition/src/cache/epochCache.ts | 9 +- .../src/cache/epochTransitionCache.ts | 59 +++--- .../src/epoch/getRewardsAndPenalties.ts | 13 +- .../epoch/processEffectiveBalanceUpdates.ts | 7 +- .../src/epoch/processInactivityUpdates.ts | 8 +- .../src/epoch/processRewardsAndPenalties.ts | 9 +- packages/state-transition/src/index.ts | 1 + .../state-transition/src/stateTransition.ts | 28 +-- packages/state-transition/src/util/balance.ts | 10 +- .../test/unit/cachedBeaconState.test.ts | 2 + packages/types/package.json | 2 +- packages/types/src/phase0/listValidator.ts | 14 ++ packages/types/src/phase0/sszTypes.ts | 3 +- packages/types/src/phase0/validator.ts | 55 +++++- .../types/src/phase0/viewDU/listValidator.ts | 176 ++++++++++++++++++ packages/utils/package.json | 2 +- packages/validator/package.json | 2 +- yarn.lock | 17 +- 42 files changed, 539 insertions(+), 113 deletions(-) create mode 100644 packages/beacon-node/src/chain/balancesTreeCache.ts create mode 100644 packages/state-transition/src/cache/balancesTreeCache.ts create mode 100644 packages/types/src/phase0/listValidator.ts create mode 100644 packages/types/src/phase0/viewDU/listValidator.ts diff --git a/packages/api/package.json b/packages/api/package.json index 58566a0b23f6..3d12970bfe46 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -70,8 +70,8 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/persistent-merkle-tree": "^0.8.0", - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/config": "^1.22.0", "@lodestar/params": "^1.22.0", "@lodestar/types": "^1.22.0", diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index e3d095b6f1e0..d344c9f309d9 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -94,15 +94,15 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/as-sha256": "^0.5.0", + "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256", "@chainsafe/blst": "^2.0.3", "@chainsafe/discv5": "^10.0.1", "@chainsafe/enr": "^4.0.1", "@chainsafe/libp2p-gossipsub": "^14.1.0", 
"@chainsafe/libp2p-noise": "^16.0.0", - "@chainsafe/persistent-merkle-tree": "^0.8.0", + "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree", "@chainsafe/prometheus-gc-stats": "^1.0.0", - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@chainsafe/threads": "^1.11.1", "@chainsafe/pubkey-index-map": "2.0.0", "@ethersproject/abi": "^5.7.0", diff --git a/packages/beacon-node/src/chain/archiver/index.ts b/packages/beacon-node/src/chain/archiver/index.ts index 45169b2fa802..36042c4b805e 100644 --- a/packages/beacon-node/src/chain/archiver/index.ts +++ b/packages/beacon-node/src/chain/archiver/index.ts @@ -109,7 +109,7 @@ export class Archiver { // should be after ArchiveBlocksTask to handle restart cleanly await this.statesArchiver.maybeArchiveState(finalized, this.metrics); - this.chain.regen.pruneOnFinalized(finalizedEpoch); + this.chain.pruneOnFinalized(finalizedEpoch); // tasks rely on extended fork choice const prunedBlocks = this.chain.forkChoice.prune(finalized.rootHex); diff --git a/packages/beacon-node/src/chain/balancesTreeCache.ts b/packages/beacon-node/src/chain/balancesTreeCache.ts new file mode 100644 index 000000000000..462ae860809e --- /dev/null +++ b/packages/beacon-node/src/chain/balancesTreeCache.ts @@ -0,0 +1,38 @@ +import {ListBasicTreeViewDU, UintNumberType} from "@chainsafe/ssz"; +import {IBalancesTreeCache, CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {Metrics} from "../metrics/index.js"; + +const MAX_ITEMS = 2; + +export class BalancesTreeCache implements IBalancesTreeCache { + private readonly unusedBalancesTrees: ListBasicTreeViewDU[] = []; + + constructor(private readonly metrics: Metrics | null = null) { + if (metrics) { + metrics.balancesTreeCache.size.addCollect(() => { + metrics.balancesTreeCache.size.set(this.unusedBalancesTrees.length); + }); + } + } + + processUnusedState(state: CachedBeaconStateAllForks | undefined): void { + if (state === undefined) { + return; + } + + this.unusedBalancesTrees.push(state.balances); + while (this.unusedBalancesTrees.length > MAX_ITEMS) { + this.unusedBalancesTrees.shift(); + } + } + + getUnusedBalances(): ListBasicTreeViewDU | undefined { + if (this.unusedBalancesTrees.length === 0) { + this.metrics?.balancesTreeCache.miss.inc(); + return undefined; + } + + this.metrics?.balancesTreeCache.hit.inc(); + return this.unusedBalancesTrees.shift(); + } +} diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 596f01f391a4..4fea6b38042e 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -96,7 +96,16 @@ export async function importBlock( // This adds the state necessary to process the next block // Some block event handlers require state being in state cache so need to do this before emitting EventType.block - this.regen.processState(blockRootHex, postState); + this.regen.processState(blockRootHex, postState).then((prunedStates) => { + if (prunedStates) { + for (const states of prunedStates.values()) { + // cp states on the same epoch shares the same balances seed tree so only need one of them + this.balancesTreeCache.processUnusedState(states[0]); + } + } + }).catch((e) => { + this.logger.error("Regen error to process state for block", {slot: blockSlot, root: blockRootHex}, e as Error); + }); this.metrics?.importBlock.bySource.inc({source}); this.logger.verbose("Added block to forkchoice and 
state cache", {slot: blockSlot, root: blockRootHex}); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 195b8736b2c3..573cc94a7309 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -101,6 +101,7 @@ import {DbCPStateDatastore} from "./stateCache/datastore/db.js"; import {FileCPStateDatastore} from "./stateCache/datastore/file.js"; import {SyncCommitteeRewards, computeSyncCommitteeRewards} from "./rewards/syncCommitteeRewards.js"; import {AttestationsRewards, computeAttestationsRewards} from "./rewards/attestationsRewards.js"; +import {BalancesTreeCache} from "./balancesTreeCache.js"; /** * Arbitrary constants, blobs and payloads should be consumed immediately in the same slot @@ -158,6 +159,7 @@ export class BeaconChain implements IBeaconChain { readonly beaconProposerCache: BeaconProposerCache; readonly checkpointBalancesCache: CheckpointBalancesCache; readonly shufflingCache: ShufflingCache; + readonly balancesTreeCache: BalancesTreeCache; /** Map keyed by executionPayload.blockHash of the block for those blobs */ readonly producedContentsCache = new Map(); @@ -246,6 +248,7 @@ export class BeaconChain implements IBeaconChain { this.beaconProposerCache = new BeaconProposerCache(opts); this.checkpointBalancesCache = new CheckpointBalancesCache(); + this.balancesTreeCache = new BalancesTreeCache(metrics); // Restore state caches // anchorState may already by a CachedBeaconState. If so, don't create the cache again, since deserializing all @@ -259,6 +262,7 @@ export class BeaconChain implements IBeaconChain { config, pubkey2index: new PubkeyIndexMap(), index2pubkey: [], + balancesTreeCache: this.balancesTreeCache, }); this.shufflingCache = cachedState.epochCtx.shufflingCache = new ShufflingCache(metrics, logger, this.opts, [ @@ -874,6 +878,16 @@ export class BeaconChain implements IBeaconChain { } } + pruneOnFinalized(finalizedEpoch: Epoch): void { + const prunedStates = this.regen.pruneOnFinalized(finalizedEpoch); + if (prunedStates) { + // cp states on the same epoch shares the same balances seed tree so only need one of them + for (const states of prunedStates.values()) { + this.balancesTreeCache.processUnusedState(states[0]); + } + } + } + /** * Regenerate state for attestation verification, this does not happen with default chain option of maxSkipSlots = 32 . * However, need to handle just in case. 
Lodestar doesn't support multiple regen state requests for attestation verification diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 3b44ffd594ae..ab0f9a5da8fb 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -242,6 +242,8 @@ export interface IBeaconChain { blockRef: BeaconBlock | BlindedBeaconBlock, validatorIds?: (ValidatorIndex | string)[] ): Promise; + + pruneOnFinalized(finalizedEpoch: Epoch): void; } export type SSZObjectType = diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index bda618758842..96d86457a684 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -19,6 +19,7 @@ import {prepareExecutionPayload, getPayloadAttributesForSSE} from "./produceBloc import {IBeaconChain} from "./interface.js"; import {RegenCaller} from "./regen/index.js"; import {ForkchoiceCaller} from "./forkChoice/index.js"; +import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; /* With 12s slot times, this scheduler will run 4s before the start of each slot (`12 / 3 = 4`). */ export const SCHEDULER_LOOKAHEAD_FACTOR = 3; @@ -26,6 +27,11 @@ export const SCHEDULER_LOOKAHEAD_FACTOR = 3; /* We don't want to do more epoch transition than this */ const PREPARE_EPOCH_LIMIT = 1; +/** + * The same HashComputationGroup to be used for all epoch transition. + */ +const epochHCGroup = new HashComputationGroup(); + /** * At Bellatrix, if we are responsible for proposing in next slot, we want to prepare payload * 4s (1/3 slot) before the start of next slot @@ -232,7 +238,12 @@ export class PrepareNextSlotScheduler { const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({ source: isEpochTransition ? 
StateHashTreeRootSource.prepareNextEpoch : StateHashTreeRootSource.prepareNextSlot, }); - state.hashTreeRoot(); + if (isEpochTransition) { + state.batchHashTreeRoot(epochHCGroup); + } else { + // normal slot, not worth to batch hash + state.node.rootHashObject; + } hashTreeRootTimer?.(); } } diff --git a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts index bfa30e570e06..483a521401ed 100644 --- a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts +++ b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts @@ -48,6 +48,7 @@ export function computeNewStateRoot( const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ source: StateHashTreeRootSource.computeNewStateRoot, }); + // state root is computed inside stateTransition(), so it should take no time here const newStateRoot = postState.hashTreeRoot(); hashTreeRootTimer?.(); diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 694e8635a3b7..3ce082b1b130 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -148,16 +148,25 @@ export class QueuedStateRegenerator implements IStateRegenerator { this.blockStateCache.prune(headStateRoot); } - pruneOnFinalized(finalizedEpoch: number): void { - this.checkpointStateCache.pruneFinalized(finalizedEpoch); + pruneOnFinalized(finalizedEpoch: number): Map | null { + const prunedStates = this.checkpointStateCache.pruneFinalized(finalizedEpoch); this.blockStateCache.deleteAllBeforeEpoch(finalizedEpoch); + + return prunedStates; } - processState(blockRootHex: RootHex, postState: CachedBeaconStateAllForks): void { - this.blockStateCache.add(postState); - this.checkpointStateCache.processState(blockRootHex, postState).catch((e) => { - this.logger.debug("Error processing block state", {blockRootHex, slot: postState.slot}, e); - }); + async processState( + blockRootHex: RootHex, + postState: CachedBeaconStateAllForks + ): Promise | null> { + let prunedStates: Map | null = null; + try { + prunedStates = await this.checkpointStateCache.processState(blockRootHex, postState); + } catch (e) { + this.logger.debug("Error processing block state", {blockRootHex, slot: postState.slot}, e as Error); + } + + return prunedStates; } addCheckpointState(cp: phase0.Checkpoint, item: CachedBeaconStateAllForks): void { diff --git a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts index 38aeabb97955..bb1ff18e25de 100644 --- a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts @@ -59,9 +59,9 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { return this.getLatest(rootHex, maxEpoch, opts); } - async processState(): Promise { + async processState(): Promise | null> { // do nothing, this class does not support prunning - return 0; + return null; } get(cp: CheckpointHex, opts?: StateCloneOpts): CachedBeaconStateAllForks | null { @@ -122,12 +122,17 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { return previousHits; } - pruneFinalized(finalizedEpoch: Epoch): void { + pruneFinalized(finalizedEpoch: Epoch): Map { + const result = new Map(); + for (const epoch of this.epochIndex.keys()) { if (epoch < finalizedEpoch) { - 
this.deleteAllEpochItems(epoch); + const deletedStates = this.deleteAllEpochItems(epoch); + result.set(epoch, deletedStates); } } + + return result; } prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void { @@ -153,11 +158,19 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { } } - deleteAllEpochItems(epoch: Epoch): void { + deleteAllEpochItems(epoch: Epoch): CachedBeaconStateAllForks[] { + const states = []; for (const rootHex of this.epochIndex.get(epoch) || []) { - this.cache.delete(toCheckpointKey({rootHex, epoch})); + const key = toCheckpointKey({rootHex, epoch}); + const state = this.cache.get(key); + if (state) { + states.push(state); + } + this.cache.delete(key); } this.epochIndex.delete(epoch); + + return states; } clear(): void { diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index 0719efcfd309..291f2896dd40 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -410,7 +410,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { /** * Prune all checkpoint states before the provided finalized epoch. */ - pruneFinalized(finalizedEpoch: Epoch): void { + pruneFinalized(finalizedEpoch: Epoch): Map | null { for (const epoch of this.epochIndex.keys()) { if (epoch < finalizedEpoch) { this.deleteAllEpochItems(epoch).catch((e) => @@ -418,6 +418,9 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { ); } } + + // not likely to return anything in-memory state because we may persist states even before they are finalized + return null; } /** @@ -470,12 +473,15 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { * * As of Mar 2024, it takes <=350ms to persist a holesky state on fast server */ - async processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise { + async processState( + blockRootHex: RootHex, + state: CachedBeaconStateAllForks + ): Promise | null> { let persistCount = 0; // it's important to sort the epochs in ascending order, in case of big reorg we always want to keep the most recent checkpoint states const sortedEpochs = Array.from(this.epochIndex.keys()).sort((a, b) => a - b); if (sortedEpochs.length <= this.maxEpochsInMemory) { - return 0; + return null; } const blockSlot = state.slot; @@ -491,13 +497,16 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { // normally the block persist happens at 2/3 of slot 0 of epoch, if it's already late then just skip to allow other tasks to run // there are plenty of chances in the same epoch to persist checkpoint states, also if block is late it could be reorged this.logger.verbose("Skip persist checkpoint states", {blockSlot, root: blockRootHex}); - return 0; + return null; } const persistEpochs = sortedEpochs.slice(0, sortedEpochs.length - this.maxEpochsInMemory); + + const result = new Map(); for (const lowestEpoch of persistEpochs) { // usually there is only 0 or 1 epoch to persist in this loop - persistCount += await this.processPastEpoch(blockRootHex, state, lowestEpoch); + const prunedStates = await this.processPastEpoch(blockRootHex, state, lowestEpoch); + result.set(lowestEpoch, prunedStates); } if (persistCount > 0) { @@ -508,7 +517,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { persistEpochs: 
persistEpochs.length, }); } - return persistCount; + return result; } /** @@ -642,8 +651,9 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { blockRootHex: RootHex, state: CachedBeaconStateAllForks, epoch: Epoch - ): Promise { + ): Promise { let persistCount = 0; + const prunedStates: CachedBeaconStateAllForks[] = []; const epochBoundarySlot = computeStartSlotAtEpoch(epoch); const epochBoundaryRoot = epochBoundarySlot === state.slot ? fromHex(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot); @@ -731,10 +741,20 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { this.metrics?.cpStateCache.statePruneFromMemoryCount.inc(); this.logger.verbose("Pruned checkpoint state from memory", logMeta); } + + prunedStates.push(state); } } - return persistCount; + if (persistCount > 0) { + this.logger.verbose("Persisted checkpoint states", { + stateSlot: state.slot, + blockRoot: blockRootHex, + persistCount, + }); + } + + return prunedStates; } /** diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts index 1e8d6bd1bd62..cd93c34bde89 100644 --- a/packages/beacon-node/src/chain/stateCache/types.ts +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -72,8 +72,11 @@ export interface CheckpointStateCache { ): Promise; updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void; - pruneFinalized(finalizedEpoch: Epoch): void; - processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise; + pruneFinalized(finalizedEpoch: Epoch): Map | null; + processState( + blockRootHex: RootHex, + state: CachedBeaconStateAllForks + ): Promise | null>; clear(): void; dumpSummary(): routes.lodestar.StateCacheItem[]; /** Expose beacon states stored in cache. 
Use with caution */ diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index f15e195faa20..be9ffd79ef16 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -1332,6 +1332,21 @@ export function createLodestarMetrics( }), }, + balancesTreeCache: { + size: register.gauge({ + name: "lodestar_balances_tree_cache_size", + help: "Balances tree cache size", + }), + hit: register.gauge({ + name: "lodestar_balances_tree_cache_hit_total", + help: "Total number of balances tree cache hits", + }), + miss: register.gauge({ + name: "lodestar_balances_tree_cache_miss_total", + help: "Total number of balances tree cache misses", + }), + }, + seenCache: { aggregatedAttestations: { superSetCheckTotal: register.histogram({ diff --git a/packages/cli/package.json b/packages/cli/package.json index 67d0dfd332cc..4c21331862e3 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -56,8 +56,8 @@ "@chainsafe/blst": "^2.0.3", "@chainsafe/discv5": "^10.0.1", "@chainsafe/enr": "^4.0.1", - "@chainsafe/persistent-merkle-tree": "^0.8.0", - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@chainsafe/threads": "^1.11.1", "@libp2p/crypto": "^5.0.4", "@libp2p/interface": "^2.1.2", diff --git a/packages/cli/src/applyPreset.ts b/packages/cli/src/applyPreset.ts index 612c5d648c63..09bda76fedc9 100644 --- a/packages/cli/src/applyPreset.ts +++ b/packages/cli/src/applyPreset.ts @@ -1,6 +1,6 @@ // MUST import this file first before anything and not import any Lodestar code. -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js"; +import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js"; import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js"; // without setting this first, persistent-merkle-tree will use noble instead diff --git a/packages/config/package.json b/packages/config/package.json index 434000db2a0f..ed7425fd27a4 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -64,7 +64,7 @@ "blockchain" ], "dependencies": { - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/params": "^1.22.0", "@lodestar/utils": "^1.22.0", "@lodestar/types": "^1.22.0" diff --git a/packages/db/package.json b/packages/db/package.json index 2a10d36766bf..d484caff6b60 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -35,7 +35,7 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/config": "^1.22.0", "@lodestar/utils": "^1.22.0", "classic-level": "^1.4.1", diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json index 2197ad90a9fd..b14d007b4ce7 100644 --- a/packages/fork-choice/package.json +++ b/packages/fork-choice/package.json @@ -36,7 +36,7 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/config": "^1.22.0", "@lodestar/params": "^1.22.0", "@lodestar/state-transition": "^1.22.0", diff --git a/packages/light-client/package.json b/packages/light-client/package.json index a503d6bc510e..17f4f9d9a54b 100644 --- a/packages/light-client/package.json +++ 
b/packages/light-client/package.json @@ -75,8 +75,8 @@ "dependencies": { "@chainsafe/bls": "7.1.3", "@chainsafe/blst": "^0.2.0", - "@chainsafe/persistent-merkle-tree": "^0.8.0", - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/api": "^1.22.0", "@lodestar/config": "^1.22.0", "@lodestar/params": "^1.22.0", @@ -85,7 +85,7 @@ "mitt": "^3.0.0" }, "devDependencies": { - "@chainsafe/as-sha256": "^0.5.0", + "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256", "@types/qs": "^6.9.7", "fastify": "^5.0.0", "qs": "^6.11.1", diff --git a/packages/prover/src/cli/applyPreset.ts b/packages/prover/src/cli/applyPreset.ts index f0c3d83c7751..8facf5171d2e 100644 --- a/packages/prover/src/cli/applyPreset.ts +++ b/packages/prover/src/cli/applyPreset.ts @@ -1,6 +1,6 @@ // MUST import this file first before anything and not import any Lodestar code. -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js"; +import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js"; import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js"; // without setting this first, persistent-merkle-tree will use noble instead diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index a01d835bae95..8e323aaf6b78 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -58,11 +58,11 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/as-sha256": "^0.5.0", + "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256", "@chainsafe/blst": "^2.0.3", - "@chainsafe/persistent-merkle-tree": "^0.8.0", + "@chainsafe/persistent-merkle-tree": "file:../../../ssz/packages/persistent-merkle-tree", "@chainsafe/persistent-ts": "^0.19.1", - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@chainsafe/swap-or-not-shuffle": "^0.0.2", "@lodestar/config": "^1.22.0", "@lodestar/params": "^1.22.0", diff --git a/packages/state-transition/src/block/processEth1Data.ts b/packages/state-transition/src/block/processEth1Data.ts index 92ab147aa772..8e6ded467821 100644 --- a/packages/state-transition/src/block/processEth1Data.ts +++ b/packages/state-transition/src/block/processEth1Data.ts @@ -48,12 +48,11 @@ export function becomesNewEth1Data( // Then isEqualEth1DataView compares cached roots (HashObject as of Jan 2022) which is much cheaper // than doing structural equality, which requires tree -> value conversions let sameVotesCount = 0; - const eth1DataVotes = state.eth1DataVotes.getAllReadonly(); - for (let i = 0; i < eth1DataVotes.length; i++) { - if (isEqualEth1DataView(eth1DataVotes[i], newEth1Data)) { + state.eth1DataVotes.forEach((eth1DataVote) => { + if (isEqualEth1DataView(eth1DataVote, newEth1Data)) { sameVotesCount++; } - } + }); // The +1 is to account for the `eth1Data` supplied to the function. 
if ((sameVotesCount + 1) * 2 > SLOTS_PER_ETH1_VOTING_PERIOD) { diff --git a/packages/state-transition/src/cache/balancesTreeCache.ts b/packages/state-transition/src/cache/balancesTreeCache.ts new file mode 100644 index 000000000000..0466824e490d --- /dev/null +++ b/packages/state-transition/src/cache/balancesTreeCache.ts @@ -0,0 +1,5 @@ +import {UintNumberType, ListBasicTreeViewDU} from "@chainsafe/ssz"; + +export interface IBalancesTreeCache { + getUnusedBalances(): ListBasicTreeViewDU | undefined; +} diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index 66ce12b82d18..4c77c4137db0 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -67,6 +67,7 @@ import { SyncCommitteeCacheEmpty, } from "./syncCommitteeCache.js"; import {CachedBeaconStateAllForks} from "./stateCache.js"; +import {IBalancesTreeCache} from "./balancesTreeCache.js"; /** `= PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT)` */ export const PROPOSER_WEIGHT_FACTOR = PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT); @@ -76,6 +77,7 @@ export type EpochCacheImmutableData = { pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; shufflingCache?: IShufflingCache; + balancesTreeCache?: IBalancesTreeCache; }; export type EpochCacheOpts = { @@ -135,6 +137,7 @@ export class EpochCache { * Unique pubkey registry shared in the same fork. There should only exist one for the fork. */ unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; + balancesTreeCache?: IBalancesTreeCache; /** * ShufflingCache is passed in from `beacon-node` so should be available at runtime but may not be * present during testing. @@ -273,6 +276,7 @@ export class EpochCache { pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; + balancesTreeCache?: IBalancesTreeCache; shufflingCache?: IShufflingCache; proposers: number[]; proposersPrevEpoch: number[] | null; @@ -306,6 +310,7 @@ export class EpochCache { this.pubkey2index = data.pubkey2index; this.index2pubkey = data.index2pubkey; this.unfinalizedPubkey2index = data.unfinalizedPubkey2index; + this.balancesTreeCache = data.balancesTreeCache; this.shufflingCache = data.shufflingCache; this.proposers = data.proposers; this.proposersPrevEpoch = data.proposersPrevEpoch; @@ -344,7 +349,7 @@ export class EpochCache { */ static createFromState( state: BeaconStateAllForks, - {config, pubkey2index, index2pubkey, shufflingCache}: EpochCacheImmutableData, + {config, pubkey2index, index2pubkey, balancesTreeCache, shufflingCache}: EpochCacheImmutableData, opts?: EpochCacheOpts ): EpochCache { const currentEpoch = computeEpochAtSlot(state.slot); @@ -553,6 +558,7 @@ export class EpochCache { index2pubkey, // `createFromFinalizedState()` creates cache with empty unfinalizedPubkey2index. Be cautious to only pass in finalized state unfinalizedPubkey2index: newUnfinalizedPubkeyIndexMap(), + balancesTreeCache, shufflingCache, proposers, // On first epoch, set to null to prevent unnecessary work since this is only used for metrics @@ -599,6 +605,7 @@ export class EpochCache { index2pubkey: this.index2pubkey, // No need to clone this reference. 
On each mutation the `unfinalizedPubkey2index` reference is replaced, @see `addPubkey` unfinalizedPubkey2index: this.unfinalizedPubkey2index, + balancesTreeCache: this.balancesTreeCache, shufflingCache: this.shufflingCache, // Immutable data proposers: this.proposers, diff --git a/packages/state-transition/src/cache/epochTransitionCache.ts b/packages/state-transition/src/cache/epochTransitionCache.ts index 27b781e8a6a1..a97274d71712 100644 --- a/packages/state-transition/src/cache/epochTransitionCache.ts +++ b/packages/state-transition/src/cache/epochTransitionCache.ts @@ -1,4 +1,4 @@ -import {phase0, Epoch, RootHex, ValidatorIndex} from "@lodestar/types"; +import {Epoch, RootHex, ValidatorIndex} from "@lodestar/types"; import {intDiv, toRootHex} from "@lodestar/utils"; import { EPOCHS_PER_SLASHINGS_VECTOR, @@ -19,7 +19,12 @@ import { FLAG_CURR_TARGET_ATTESTER, FLAG_CURR_HEAD_ATTESTER, } from "../util/attesterStatus.js"; -import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStatePhase0} from "../index.js"; +import { + CachedBeaconStateAllForks, + CachedBeaconStateAltair, + CachedBeaconStatePhase0, + hasCompoundingWithdrawalCredential, +} from "../index.js"; import {computeBaseRewardPerIncrement} from "../util/altair.js"; import {processPendingAttestations} from "../epoch/processPendingAttestations.js"; @@ -133,11 +138,7 @@ export interface EpochTransitionCache { flags: number[]; - /** - * Validators in the current epoch, should use it for read-only value instead of accessing state.validators directly. - * Note that during epoch processing, validators could be updated so need to use it with care. - */ - validators: phase0.Validator[]; + isCompoundingValidatorArr: boolean[]; /** * This is for electra only @@ -216,6 +217,11 @@ const inclusionDelays = new Array(); const flags = new Array(); /** WARNING: reused, never gc'd */ const nextEpochShufflingActiveValidatorIndices = new Array(); +/** WARNING: reused, never gc'd */ +const isCompoundingValidatorArr = new Array(); + +const previousEpochParticipation = new Array(); +const currentEpochParticipation = new Array(); export function beforeProcessEpoch( state: CachedBeaconStateAllForks, @@ -233,17 +239,14 @@ export function beforeProcessEpoch( const indicesToSlash: ValidatorIndex[] = []; const indicesEligibleForActivationQueue: ValidatorIndex[] = []; - const indicesEligibleForActivation: ValidatorIndex[] = []; + const indicesEligibleForActivation: {validatorIndex: ValidatorIndex; activationEligibilityEpoch: Epoch}[] = []; const indicesToEject: ValidatorIndex[] = []; let totalActiveStakeByIncrement = 0; - - // To optimize memory each validator node in `state.validators` is represented with a special node type - // `BranchNodeStruct` that represents the data as struct internally. This utility grabs the struct data directly - // from the nodes without any extra transformation. The returned `validators` array contains native JS objects. 
- const validators = state.validators.getAllReadonlyValues(); - const validatorCount = validators.length; - + const validatorCount = state.validators.length; + if (forkSeq >= ForkSeq.electra) { + isCompoundingValidatorArr.length = validatorCount; + } nextEpochShufflingActiveValidatorIndices.length = validatorCount; let nextEpochShufflingActiveIndicesLength = 0; // pre-fill with true (most validators are active) @@ -273,10 +276,13 @@ export function beforeProcessEpoch( const effectiveBalancesByIncrements = epochCtx.effectiveBalanceIncrements; - for (let i = 0; i < validatorCount; i++) { - const validator = validators[i]; + state.validators.forEachValue((validator, i) => { let flag = 0; + if (forkSeq >= ForkSeq.electra) { + isCompoundingValidatorArr[i] = hasCompoundingWithdrawalCredential(validator.withdrawalCredentials); + } + if (validator.slashed) { if (slashingsEpoch === validator.withdrawableEpoch) { indicesToSlash.push(i); @@ -339,7 +345,10 @@ export function beforeProcessEpoch( // // Use `else` since indicesEligibleForActivationQueue + indicesEligibleForActivation are mutually exclusive else if (validator.activationEpoch === FAR_FUTURE_EPOCH && validator.activationEligibilityEpoch <= currentEpoch) { - indicesEligibleForActivation.push(i); + indicesEligibleForActivation.push({ + validatorIndex: i, + activationEligibilityEpoch: validator.activationEligibilityEpoch, + }); } // To optimize process_registry_updates(): @@ -364,7 +373,7 @@ export function beforeProcessEpoch( if (isActiveNext2) { nextEpochShufflingActiveValidatorIndices[nextEpochShufflingActiveIndicesLength++] = i; } - } + }); // Trigger async build of shuffling for epoch after next (nextShuffling post epoch transition) const epochAfterNext = state.epochCtx.nextEpoch + 1; @@ -396,7 +405,7 @@ export function beforeProcessEpoch( // To optimize process_registry_updates(): // order by sequence of activationEligibilityEpoch setting and then index indicesEligibleForActivation.sort( - (a, b) => validators[a].activationEligibilityEpoch - validators[b].activationEligibilityEpoch || a - b + (a, b) => a.activationEligibilityEpoch - b.activationEligibilityEpoch || a.validatorIndex - b.validatorIndex ); if (forkSeq === ForkSeq.phase0) { @@ -427,8 +436,10 @@ export function beforeProcessEpoch( FLAG_CURR_HEAD_ATTESTER ); } else { - const previousEpochParticipation = (state as CachedBeaconStateAltair).previousEpochParticipation.getAll(); - const currentEpochParticipation = (state as CachedBeaconStateAltair).currentEpochParticipation.getAll(); + previousEpochParticipation.length = (state as CachedBeaconStateAltair).previousEpochParticipation.length; + (state as CachedBeaconStateAltair).previousEpochParticipation.getAll(previousEpochParticipation); + currentEpochParticipation.length = (state as CachedBeaconStateAltair).currentEpochParticipation.length; + (state as CachedBeaconStateAltair).currentEpochParticipation.getAll(currentEpochParticipation); for (let i = 0; i < validatorCount; i++) { flags[i] |= // checking active status first is required to pass random spec tests in altair @@ -505,7 +516,7 @@ export function beforeProcessEpoch( currEpochUnslashedTargetStakeByIncrement: currTargetUnslStake, indicesToSlash, indicesEligibleForActivationQueue, - indicesEligibleForActivation, + indicesEligibleForActivation: indicesEligibleForActivation.map(({validatorIndex}) => validatorIndex), indicesToEject, nextShufflingDecisionRoot, nextShufflingActiveIndices, @@ -517,7 +528,7 @@ export function beforeProcessEpoch( proposerIndices, inclusionDelays, 
flags, - validators, + isCompoundingValidatorArr, // will be assigned in processPendingConsolidations() newCompoundingValidators: undefined, // Will be assigned in processRewardsAndPenalties() diff --git a/packages/state-transition/src/epoch/getRewardsAndPenalties.ts b/packages/state-transition/src/epoch/getRewardsAndPenalties.ts index bf766fe4666a..cf0a29fd8fe7 100644 --- a/packages/state-transition/src/epoch/getRewardsAndPenalties.ts +++ b/packages/state-transition/src/epoch/getRewardsAndPenalties.ts @@ -17,7 +17,7 @@ import { FLAG_PREV_TARGET_ATTESTER_UNSLASHED, hasMarkers, } from "../util/attesterStatus.js"; -import {isInInactivityLeak, newZeroedArray} from "../util/index.js"; +import {isInInactivityLeak} from "../util/index.js"; type RewardPenaltyItem = { baseReward: number; @@ -28,6 +28,11 @@ type RewardPenaltyItem = { timelyHeadReward: number; }; +/** + * This data is reused and never gc. + */ +const rewards = new Array(); +const penalties = new Array(); /** * An aggregate of getFlagIndexDeltas and getInactivityPenaltyDeltas that loop through process.flags 1 time instead of 4. * @@ -48,8 +53,10 @@ export function getRewardsAndPenaltiesAltair( // TODO: Is there a cheaper way to measure length that going to `state.validators`? const validatorCount = state.validators.length; const activeIncrements = cache.totalActiveStakeByIncrement; - const rewards = newZeroedArray(validatorCount); - const penalties = newZeroedArray(validatorCount); + rewards.length = validatorCount; + rewards.fill(0); + penalties.length = validatorCount; + penalties.fill(0); const isInInactivityLeakBn = isInInactivityLeak(state); // effectiveBalance is multiple of EFFECTIVE_BALANCE_INCREMENT and less than MAX_EFFECTIVE_BALANCE diff --git a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts index 0ea4b49dddf4..9203c0419a95 100644 --- a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts +++ b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts @@ -10,7 +10,6 @@ import { TIMELY_TARGET_FLAG_INDEX, } from "@lodestar/params"; import {EpochTransitionCache, CachedBeaconStateAllForks, BeaconStateAltair} from "../types.js"; -import {hasCompoundingWithdrawalCredential} from "../util/electra.js"; /** Same to https://github.com/ethereum/eth2.0-specs/blob/v1.1.0-alpha.5/specs/altair/beacon-chain.md#has_flag */ const TIMELY_TARGET = 1 << TIMELY_TARGET_FLAG_INDEX; @@ -45,7 +44,7 @@ export function processEffectiveBalanceUpdates( // and updated in processPendingBalanceDeposits() and processPendingConsolidations() // so it's recycled here for performance. const balances = cache.balances ?? state.balances.getAll(); - const currentEpochValidators = cache.validators; + const {isCompoundingValidatorArr} = cache; const newCompoundingValidators = cache.newCompoundingValidators ?? new Set(); let numUpdate = 0; @@ -61,9 +60,7 @@ export function processEffectiveBalanceUpdates( effectiveBalanceLimit = MAX_EFFECTIVE_BALANCE; } else { // from electra, effectiveBalanceLimit is per validator - const isCompoundingValidator = - hasCompoundingWithdrawalCredential(currentEpochValidators[i].withdrawalCredentials) || - newCompoundingValidators.has(i); + const isCompoundingValidator = isCompoundingValidatorArr[i] || newCompoundingValidators.has(i); effectiveBalanceLimit = isCompoundingValidator ? 
MAX_EFFECTIVE_BALANCE_ELECTRA : MIN_ACTIVATION_BALANCE; } diff --git a/packages/state-transition/src/epoch/processInactivityUpdates.ts b/packages/state-transition/src/epoch/processInactivityUpdates.ts index 4a9b129ee793..4d1e28d92bf4 100644 --- a/packages/state-transition/src/epoch/processInactivityUpdates.ts +++ b/packages/state-transition/src/epoch/processInactivityUpdates.ts @@ -3,6 +3,11 @@ import {CachedBeaconStateAltair, EpochTransitionCache} from "../types.js"; import * as attesterStatusUtil from "../util/attesterStatus.js"; import {isInInactivityLeak} from "../util/index.js"; +/** + * This data is reused and never gc. + */ +const inactivityScoresArr = new Array(); + /** * Mutates `inactivityScores` from pre-calculated validator flags. * @@ -30,7 +35,8 @@ export function processInactivityUpdates(state: CachedBeaconStateAltair, cache: // this avoids importing FLAG_ELIGIBLE_ATTESTER inside the for loop, check the compiled code const {FLAG_PREV_TARGET_ATTESTER_UNSLASHED, FLAG_ELIGIBLE_ATTESTER, hasMarkers} = attesterStatusUtil; - const inactivityScoresArr = inactivityScores.getAll(); + inactivityScoresArr.length = state.validators.length; + inactivityScores.getAll(inactivityScoresArr); for (let i = 0; i < flags.length; i++) { const flag = flags[i]; diff --git a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts index 6c5d5aa3cb5a..5b42f4175a04 100644 --- a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts +++ b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts @@ -9,6 +9,10 @@ import { import {getAttestationDeltas} from "./getAttestationDeltas.js"; import {getRewardsAndPenaltiesAltair} from "./getRewardsAndPenalties.js"; +/** + * This data is reused and never gc. + */ +const balances = new Array(); /** * Iterate over all validator and compute rewards and penalties to apply to balances. * @@ -25,7 +29,8 @@ export function processRewardsAndPenalties( } const [rewards, penalties] = getRewardsAndPenalties(state, cache); - const balances = state.balances.getAll(); + balances.length = state.balances.length; + state.balances.getAll(balances); for (let i = 0, len = rewards.length; i < len; i++) { const result = balances[i] + rewards[i] - penalties[i] - (slashingPenalties[i] ?? 0); @@ -34,7 +39,7 @@ export function processRewardsAndPenalties( // important: do not change state one balance at a time. Set them all at once, constructing the tree in one go // cache the balances array, too - state.balances = ssz.phase0.Balances.toViewDU(balances); + state.balances = ssz.phase0.Balances.toViewDU(balances, state.epochCtx.balancesTreeCache?.getUnusedBalances()); // For processEffectiveBalanceUpdates() to prevent having to re-compute the balances array. 
// For validator metrics diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index 600bbf173462..ac6fa8b3d1fc 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -43,6 +43,7 @@ export { } from "./cache/epochCache.js"; export {toMemoryEfficientHexStr} from "./cache/pubkeyCache.js"; export {type EpochTransitionCache, beforeProcessEpoch} from "./cache/epochTransitionCache.js"; +export type {IBalancesTreeCache} from "./cache/balancesTreeCache.js"; // Aux data-structures export { diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index f025c685b1a6..7adf872e42f3 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -1,3 +1,4 @@ +import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; import {SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, ssz} from "@lodestar/types"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {toRootHex} from "@lodestar/utils"; @@ -58,6 +59,11 @@ export enum StateHashTreeRootSource { computeNewStateRoot = "compute_new_state_root", } +/** + * Data in a BeaconBlock is bounded so we can use a single HashComputationGroup for all blocks + */ +const hcGroup = new HashComputationGroup(); + /** * Implementation Note: follows the optimizations in protolambda's eth2fastspec (https://github.com/protolambda/eth2fastspec) */ @@ -105,25 +111,25 @@ export function stateTransition( processBlock(fork, postState, block, options, options); - const processBlockCommitTimer = metrics?.processBlockCommitTime.startTimer(); - postState.commit(); - processBlockCommitTimer?.(); - - // Note: time only on success. Include processBlock and commit + // Note: time only on success. This does not include hashTreeRoot() time processBlockTimer?.(); + // TODO - batch: remove processBlockCommitTime? 
+ const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.stateTransition, + }); + // commit() is done inside batchHashTreeRoot() + // with batchHashTreeRoot(), we're not able to measure commit() time separately + // note that at commit() phase, we batch hash validators via ListValidatorTreeViewDU so this metric is a little bit confusing + const stateRoot = postState.batchHashTreeRoot(hcGroup); + hashTreeRootTimer?.(); + if (metrics) { onPostStateMetrics(postState, metrics); } // Verify state root if (verifyStateRoot) { - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ - source: StateHashTreeRootSource.stateTransition, - }); - const stateRoot = postState.hashTreeRoot(); - hashTreeRootTimer?.(); - if (!ssz.Root.equals(block.stateRoot, stateRoot)) { throw new Error( `Invalid state root at slot ${block.slot}, expected=${toRootHex(block.stateRoot)}, actual=${toRootHex( diff --git a/packages/state-transition/src/util/balance.ts b/packages/state-transition/src/util/balance.ts index c6b196846ec9..a1b086cbd591 100644 --- a/packages/state-transition/src/util/balance.ts +++ b/packages/state-transition/src/util/balance.ts @@ -2,7 +2,7 @@ import {EFFECTIVE_BALANCE_INCREMENT} from "@lodestar/params"; import {Gwei, ValidatorIndex} from "@lodestar/types"; import {bigIntMax} from "@lodestar/utils"; import {EffectiveBalanceIncrements} from "../cache/effectiveBalanceIncrements.js"; -import {BeaconStateAllForks} from "../index.js"; +import {BeaconStateAllForks} from ".."; import {CachedBeaconStateAllForks} from "../types.js"; /** @@ -63,13 +63,13 @@ export function getEffectiveBalanceIncrementsZeroInactive( validatorCount ); - const validators = justifiedState.validators.getAllReadonly(); let j = 0; - for (let i = 0; i < validatorCount; i++) { + justifiedState.validators.forEachValue((validator, i) => { + const {slashed} = validator; if (i === activeIndices[j]) { // active validator j++; - if (validators[i].slashed) { + if (slashed) { // slashed validator effectiveBalanceIncrementsZeroInactive[i] = 0; } @@ -77,7 +77,7 @@ export function getEffectiveBalanceIncrementsZeroInactive( // inactive validator effectiveBalanceIncrementsZeroInactive[i] = 0; } - } + }); return effectiveBalanceIncrementsZeroInactive; } diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index 668f22e13a1e..b80512206331 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -143,6 +143,8 @@ describe("CachedBeaconState", () => { } } + state.commit(); + if (validatorCountDelta < 0) { state.validators = state.validators.sliceTo(state.validators.length - 1 + validatorCountDelta); diff --git a/packages/types/package.json b/packages/types/package.json index 1c020b409071..0ce13360bc91 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -73,7 +73,7 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/params": "^1.22.0", "ethereum-cryptography": "^2.0.0" }, diff --git a/packages/types/src/phase0/listValidator.ts b/packages/types/src/phase0/listValidator.ts new file mode 100644 index 000000000000..a2228d5ec410 --- /dev/null +++ b/packages/types/src/phase0/listValidator.ts @@ -0,0 +1,14 @@ +import {ListCompositeType, ListCompositeTreeViewDU} from "@chainsafe/ssz"; +import {Node} from 
"@chainsafe/persistent-merkle-tree"; +import {ValidatorNodeStructType} from "./validator.js"; +import {ListValidatorTreeViewDU} from "./viewDU/listValidator.js"; + +export class ListValidatorType extends ListCompositeType { + constructor(limit: number) { + super(new ValidatorNodeStructType(), limit); + } + + getViewDU(node: Node, cache?: unknown): ListCompositeTreeViewDU { + return new ListValidatorTreeViewDU(this, node, cache as any); + } +} diff --git a/packages/types/src/phase0/sszTypes.ts b/packages/types/src/phase0/sszTypes.ts index 4a04701b789d..84a812d83a83 100644 --- a/packages/types/src/phase0/sszTypes.ts +++ b/packages/types/src/phase0/sszTypes.ts @@ -29,6 +29,7 @@ import { } from "@lodestar/params"; import * as primitiveSsz from "../primitive/sszTypes.js"; import {ValidatorNodeStruct} from "./validator.js"; +import {ListValidatorType} from "./listValidator.js"; const { Bytes32, @@ -228,7 +229,7 @@ export const HistoricalBatchRoots = new ContainerType( export const Validator = ValidatorNodeStruct; // Export as stand-alone for direct tree optimizations -export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); +export const Validators = new ListValidatorType(VALIDATOR_REGISTRY_LIMIT); // this ListUintNum64Type is used to cache Leaf Nodes of BeaconState.balances after epoch transition export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT); export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICAL_VECTOR); diff --git a/packages/types/src/phase0/validator.ts b/packages/types/src/phase0/validator.ts index a6ec0fb18103..2861f52d2deb 100644 --- a/packages/types/src/phase0/validator.ts +++ b/packages/types/src/phase0/validator.ts @@ -1,7 +1,6 @@ import {ByteViews, ContainerNodeStructType, ValueOfFields} from "@chainsafe/ssz"; import * as primitiveSsz from "../primitive/sszTypes.js"; -// biome-ignore lint/suspicious/noShadowRestrictedNames: We explicitly want `Boolean` name to be imported const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; // this is to work with uint32, see https://github.com/ChainSafe/ssz/blob/ssz-v0.15.1/packages/ssz/src/type/uint.ts @@ -14,6 +13,7 @@ const UINT32_SIZE = 4; const PUBKEY_SIZE = 48; const WITHDRAWAL_CREDENTIALS_SIZE = 32; const SLASHED_SIZE = 1; +const CHUNK_SIZE = 32; export const ValidatorType = { pubkey: BLSPubkey, @@ -60,6 +60,58 @@ export class ValidatorNodeStructType extends ContainerNodeStructType +): void { + const { + pubkey, + withdrawalCredentials, + effectiveBalance, + slashed, + activationEligibilityEpoch, + activationEpoch, + exitEpoch, + withdrawableEpoch, + } = value; + const {uint8Array: outputLevel3, dataView} = level3; + + // pubkey = 48 bytes which is 2 * CHUNK_SIZE + level4.set(pubkey, 0); + let offset = CHUNK_SIZE; + outputLevel3.set(withdrawalCredentials, offset); + offset += CHUNK_SIZE; + // effectiveBalance is UintNum64 + dataView.setUint32(offset, effectiveBalance & 0xffffffff, true); + dataView.setUint32(offset + 4, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true); + + offset += CHUNK_SIZE; + dataView.setUint32(offset, slashed ? 
1 : 0, true); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, activationEligibilityEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, activationEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, exitEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, withdrawableEpoch); +} + function writeEpochInf(dataView: DataView, offset: number, value: number): number { if (value === Infinity) { dataView.setUint32(offset, 0xffffffff, true); @@ -74,4 +126,3 @@ function writeEpochInf(dataView: DataView, offset: number, value: number): numbe } return offset; } -export const ValidatorNodeStruct = new ValidatorNodeStructType(); diff --git a/packages/types/src/phase0/viewDU/listValidator.ts b/packages/types/src/phase0/viewDU/listValidator.ts new file mode 100644 index 000000000000..05aeeebd75a5 --- /dev/null +++ b/packages/types/src/phase0/viewDU/listValidator.ts @@ -0,0 +1,176 @@ +import { + ListCompositeType, + ArrayCompositeTreeViewDUCache, + ListCompositeTreeViewDU, + ByteViews, + ContainerNodeStructTreeViewDU, +} from "@chainsafe/ssz"; +import {HashComputationLevel, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; +import {ValidatorNodeStructType, ValidatorType, validatorToChunkBytes} from "../validator.js"; +import {ValidatorIndex} from "../../types.js"; + +/** + * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks + * Given a level3 of validators have 8 chunks, we can hash 4 validators at a time + */ +const PARALLEL_FACTOR = 4; +/** + * Allocate memory once for batch hash validators. + */ +// each level 3 of validator has 8 chunks, each chunk has 32 bytes +const batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32); +const level3ByteViewsArr: ByteViews[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + const uint8Array = batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32); + const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); + level3ByteViewsArr.push({uint8Array, dataView}); +} +// each level 4 of validator has 2 chunks for pubkey, each chunk has 32 bytes +const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32); +const level4BytesArr: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32)); +} +const pubkeyRoots: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + pubkeyRoots.push(batchLevel4Bytes.subarray(i * 32, (i + 1) * 32)); +} + +const validatorRoots: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + validatorRoots.push(batchLevel3Bytes.subarray(i * 32, (i + 1) * 32)); +} +const validatorRoot = new Uint8Array(32); + +/** + * Similar to ListCompositeTreeViewDU with some differences: + * - if called without params, it's from hashTreeRoot() api call, no need to compute root + * - otherwise it's from batchHashTreeRoot() call, compute validator roots in batch + */ +export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { + constructor( + readonly type: ListCompositeType, + protected _rootNode: Node, + cache?: ArrayCompositeTreeViewDUCache + ) { + super(type, _rootNode, cache); + } + + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { + if (hcByLevel === null) { + // this is not from batchHashTreeRoot() call, go with regular flow + return super.commit(); + } + + const isOldRootHashed = this._rootNode.h0 !== null; + if 
(this.viewsChanged.size === 0) { + if (!isOldRootHashed && hcByLevel !== null) { + // not possible to get HashComputations due to BranchNodeStruct + this._rootNode.root; + } + return; + } + + // TODO - batch: remove this type cast + const viewsChanged = this.viewsChanged as unknown as Map< + number, + ContainerNodeStructTreeViewDU + >; + + const indicesChanged: number[] = []; + for (const [index, viewChanged] of viewsChanged) { + // should not have any params here in order not to compute root + viewChanged.commit(); + // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal + this.nodes[index] = viewChanged.node; + // `validators.get(i)` was called but it may not modify any property, do not need to compute root + if (viewChanged.node.h0 === null) { + indicesChanged.push(index); + } + } + + // these validators don't have roots, we compute roots in batch + const sortedIndicesChanged = indicesChanged.sort((a, b) => a - b); + const nodesChanged: {index: ValidatorIndex; node: Node}[] = new Array(sortedIndicesChanged.length); + for (const [i, validatorIndex] of sortedIndicesChanged.entries()) { + nodesChanged[i] = {index: validatorIndex, node: this.nodes[validatorIndex]}; + } + doBatchHashTreeRootValidators(sortedIndicesChanged, viewsChanged); + + // do the remaining commit step the same to parent (ArrayCompositeTreeViewDU) + const indexes = nodesChanged.map((entry) => entry.index); + const nodes = nodesChanged.map((entry) => entry.node); + const chunksNode = this.type.tree_getChunksNode(this._rootNode); + const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); + + this._rootNode = this.type.tree_setChunksNode( + this._rootNode, + newChunksNode, + this.dirtyLength ? 
this._length : null, + hcOffset, + hcByLevel + ); + + if (!isOldRootHashed && hcByLevel !== null) { + // should never happen, handle just in case + // not possible to get HashComputations due to BranchNodeStruct + this._rootNode.root; + } + + this.viewsChanged.clear(); + this.dirtyLength = false; + } +} + +function doBatchHashTreeRootValidators(indices: ValidatorIndex[], validators: Map>): void { + const endBatch = indices.length - (indices.length % PARALLEL_FACTOR); + + // commit every 16 validators in batch + for (let i = 0; i < endBatch; i++) { + if (i % PARALLEL_FACTOR === 0) { + batchLevel3Bytes.fill(0); + batchLevel4Bytes.fill(0); + } + const indexInBatch = i % PARALLEL_FACTOR; + const viewIndex = indices[i]; + const validator = validators.get(viewIndex); + if (validator) { + validatorToChunkBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch], validator.value); + } + + if (indexInBatch === PARALLEL_FACTOR - 1) { + // hash level 4, this is populated to pubkeyRoots + digestNLevel(batchLevel4Bytes, 1); + for (let j = 0; j < PARALLEL_FACTOR; j++) { + level3ByteViewsArr[j].uint8Array.set(pubkeyRoots[j], 0); + } + // hash level 3, this is populated to validatorRoots + digestNLevel(batchLevel3Bytes, 3); + // commit all validators in this batch + for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) { + const viewIndex = indices[i - j]; + const indexInBatch = (i - j) % PARALLEL_FACTOR; + const viewChanged = validators.get(viewIndex); + if (viewChanged) { + const branchNodeStruct = viewChanged.node; + byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct); + } + } + } + } + + // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views + // it's not much different to commit one by one + for (let i = endBatch; i < indices.length; i++) { + const viewIndex = indices[i]; + const viewChanged = validators.get(viewIndex); + if (viewChanged) { + // compute root for each validator + viewChanged.type.hashTreeRootInto(viewChanged.value, validatorRoot, 0); + byteArrayIntoHashObject(validatorRoot, 0, viewChanged.node); + } + } +} diff --git a/packages/utils/package.json b/packages/utils/package.json index 6c8218741e30..ad2ef98c8fc1 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -39,7 +39,7 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/as-sha256": "^0.5.0", + "@chainsafe/as-sha256": "file:../../../ssz/packages/as-sha256", "any-signal": "3.0.1", "bigint-buffer": "^1.1.5", "case": "^1.6.3", diff --git a/packages/validator/package.json b/packages/validator/package.json index 932eedac1dba..35c058090298 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -46,7 +46,7 @@ ], "dependencies": { "@chainsafe/blst": "^2.0.3", - "@chainsafe/ssz": "^0.17.1", + "@chainsafe/ssz": "file:../../../ssz/packages/ssz", "@lodestar/api": "^1.22.0", "@lodestar/config": "^1.22.0", "@lodestar/db": "^1.22.0", diff --git a/yarn.lock b/yarn.lock index 32c923a514e9..a7e1f4bad27b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -357,10 +357,8 @@ resolved "https://registry.yarnpkg.com/@chainsafe/as-chacha20poly1305/-/as-chacha20poly1305-0.1.0.tgz#7da6f8796f9b42dac6e830a086d964f1f9189e09" integrity sha512-BpNcL8/lji/GM3+vZ/bgRWqJ1q5kwvTFmGPk7pxm/QQZDbaMI98waOHjEymTjq2JmdD/INdNBFOVSyJofXg7ew== -"@chainsafe/as-sha256@0.5.0", "@chainsafe/as-sha256@^0.5.0": +"@chainsafe/as-sha256@0.5.0", "@chainsafe/as-sha256@file:../ssz/packages/as-sha256": version "0.5.0" - resolved 
"https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.5.0.tgz#2523fbef2b80b5000f9aa71f4a76e5c2c5c076bb" - integrity sha512-dTIY6oUZNdC5yDTVP5Qc9hAlKAsn0QTQ2DnQvvsbTnKSTbYs3p5RPN0aIUqN0liXei/9h24c7V0dkV44cnWIQA== "@chainsafe/as-sha256@^0.4.1": version "0.4.1" @@ -578,7 +576,7 @@ dependencies: "@chainsafe/is-ip" "^2.0.1" -"@chainsafe/persistent-merkle-tree@0.8.0", "@chainsafe/persistent-merkle-tree@^0.8.0": +"@chainsafe/persistent-merkle-tree@0.8.0": version "0.8.0" resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.8.0.tgz#18e2f0a5de3a0b59c6e5be8797a78e0d209dd7dc" integrity sha512-hh6C1JO6SKlr0QGNTNtTLqgGVMA/Bc20wD6CeMHp+wqbFKCULRJuBUxhF4WDx/7mX8QlqF3nFriF/Eo8oYJ4/A== @@ -595,6 +593,13 @@ "@chainsafe/as-sha256" "^0.4.1" "@noble/hashes" "^1.3.0" +"@chainsafe/persistent-merkle-tree@file:../ssz/packages/persistent-merkle-tree": + version "0.8.0" + dependencies: + "@chainsafe/as-sha256" "0.5.0" + "@chainsafe/hashtree" "1.0.1" + "@noble/hashes" "^1.3.0" + "@chainsafe/persistent-ts@^0.19.1": version "0.19.1" resolved "https://registry.npmjs.org/@chainsafe/persistent-ts/-/persistent-ts-0.19.1.tgz" @@ -649,10 +654,8 @@ "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1" -"@chainsafe/ssz@^0.17.1": +"@chainsafe/ssz@file:../ssz/packages/ssz": version "0.17.1" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.17.1.tgz#7986afbcad5e6971006d596fdb7dfa34bc195131" - integrity sha512-1ay46QqYcVTBvUnDXTPTi5WTiENu7tIxpZGMDpUWps1/nYBmh/We/UoCF/jO+o/fkcDD3p8xQPlHbcCfy+jyjA== dependencies: "@chainsafe/as-sha256" "0.5.0" "@chainsafe/persistent-merkle-tree" "0.8.0" From 327f710e2b4ca9c1de6adaac2e46220da442efa6 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 16 Oct 2024 16:20:49 +0700 Subject: [PATCH 02/15] fix: merge issue, add postState to state cache --- packages/beacon-node/src/chain/regen/queued.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 3ce082b1b130..ec8a330eec19 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -159,6 +159,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { blockRootHex: RootHex, postState: CachedBeaconStateAllForks ): Promise | null> { + this.blockStateCache.add(postState); let prunedStates: Map | null = null; try { prunedStates = await this.checkpointStateCache.processState(blockRootHex, postState); From ea1dc3cfb9dbef2fc641280829b7060ff2dcb963 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 16 Oct 2024 17:09:20 +0700 Subject: [PATCH 03/15] fix: return pruned states from PersistentCheckpointsCache --- .../beacon-node/src/chain/archiver/index.ts | 2 +- packages/beacon-node/src/chain/chain.ts | 4 +-- packages/beacon-node/src/chain/interface.ts | 2 +- .../beacon-node/src/chain/regen/queued.ts | 4 +-- .../stateCache/inMemoryCheckpointsCache.ts | 2 +- .../stateCache/persistentCheckpointsCache.ts | 33 ++++++++++++++----- .../beacon-node/src/chain/stateCache/types.ts | 2 +- 7 files changed, 33 insertions(+), 16 deletions(-) diff --git a/packages/beacon-node/src/chain/archiver/index.ts b/packages/beacon-node/src/chain/archiver/index.ts index 36042c4b805e..e12a6157e8ed 100644 --- a/packages/beacon-node/src/chain/archiver/index.ts +++ b/packages/beacon-node/src/chain/archiver/index.ts @@ -109,7 +109,7 @@ export class Archiver { // should be after ArchiveBlocksTask to handle restart cleanly await 
this.statesArchiver.maybeArchiveState(finalized, this.metrics); - this.chain.pruneOnFinalized(finalizedEpoch); + await this.chain.pruneOnFinalized(finalizedEpoch); // tasks rely on extended fork choice const prunedBlocks = this.chain.forkChoice.prune(finalized.rootHex); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 573cc94a7309..6553c48f11e5 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -878,8 +878,8 @@ export class BeaconChain implements IBeaconChain { } } - pruneOnFinalized(finalizedEpoch: Epoch): void { - const prunedStates = this.regen.pruneOnFinalized(finalizedEpoch); + async pruneOnFinalized(finalizedEpoch: Epoch): Promise { + const prunedStates = await this.regen.pruneOnFinalized(finalizedEpoch); if (prunedStates) { // cp states on the same epoch shares the same balances seed tree so only need one of them for (const states of prunedStates.values()) { diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index ab0f9a5da8fb..16050a9becc2 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -243,7 +243,7 @@ export interface IBeaconChain { validatorIds?: (ValidatorIndex | string)[] ): Promise; - pruneOnFinalized(finalizedEpoch: Epoch): void; + pruneOnFinalized(finalizedEpoch: Epoch): Promise; } export type SSZObjectType = diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index ec8a330eec19..5ae76c2d41a3 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -148,8 +148,8 @@ export class QueuedStateRegenerator implements IStateRegenerator { this.blockStateCache.prune(headStateRoot); } - pruneOnFinalized(finalizedEpoch: number): Map | null { - const prunedStates = this.checkpointStateCache.pruneFinalized(finalizedEpoch); + async pruneOnFinalized(finalizedEpoch: number): Promise | null> { + const prunedStates = await this.checkpointStateCache.pruneFinalized(finalizedEpoch); this.blockStateCache.deleteAllBeforeEpoch(finalizedEpoch); return prunedStates; diff --git a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts index bb1ff18e25de..6c8074539d63 100644 --- a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts @@ -122,7 +122,7 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { return previousHits; } - pruneFinalized(finalizedEpoch: Epoch): Map { + async pruneFinalized(finalizedEpoch: Epoch): Promise> { const result = new Map(); for (const epoch of this.epochIndex.keys()) { diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index 291f2896dd40..c69d636b59b1 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -410,17 +410,23 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { /** * Prune all checkpoint states before the provided finalized epoch. 
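The `pruneFinalized()` signature change in this commit means each cache implementation now hands back the in-memory states it dropped, keyed by epoch, so callers can recycle their balances trees. Below is a minimal sketch of such a caller, with simplified stand-in types for `Epoch`, `CachedBeaconStateAllForks`, and the balances-tree hook; none of this is patch code.

```ts
type Epoch = number;
type PrunableState = {balances: unknown};

async function recyclePrunedStates(
  pruneFinalized: (epoch: Epoch) => Promise<Map<Epoch, PrunableState[]> | null>,
  finalizedEpoch: Epoch,
  onUnusedState: (state: PrunableState) => void
): Promise<void> {
  const prunedStates = await pruneFinalized(finalizedEpoch);
  if (prunedStates === null) return;
  for (const states of prunedStates.values()) {
    // checkpoint states of the same epoch share one balances seed tree, so one state per epoch is enough
    if (states.length > 0) onUnusedState(states[0]);
  }
}
```

This is the same pattern `BeaconChain.pruneOnFinalized()` uses above when it forwards pruned states to `balancesTreeCache.processUnusedState()`.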
*/ - pruneFinalized(finalizedEpoch: Epoch): Map | null { + async pruneFinalized(finalizedEpoch: Epoch): Promise | null> { + const result = new Map(); + for (const epoch of this.epochIndex.keys()) { if (epoch < finalizedEpoch) { - this.deleteAllEpochItems(epoch).catch((e) => - this.logger.debug("Error delete all epoch items", {epoch, finalizedEpoch}, e as Error) - ); + try { + const prunedStates = await this.deleteAllEpochItems(epoch); + result.set(epoch, prunedStates); + } catch(e) { + this.logger.debug("Error prune finalized epoch", {epoch, finalizedEpoch}, e as Error); + } } } - // not likely to return anything in-memory state because we may persist states even before they are finalized - return null; + // we may persist states even before they are finalized + // in that case this return null + return result; } /** @@ -472,6 +478,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk, there are also 2 checkpoint states in memory at epoch n, same to the above (maxEpochsInMemory=1) * * As of Mar 2024, it takes <=350ms to persist a holesky state on fast server + * This function returns a map of pruned states for each epoch */ async processState( blockRootHex: RootHex, @@ -643,6 +650,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { * - PRCS is the checkpoint state that could be justified/finalized later based on the view of the state * - unknown root checkpoint state is persisted to handle the reorg back to that branch later * + * This returns pruned states in the epoch * Performance note: * - In normal condition, we persist 1 checkpoint state per epoch. * - In reorged condition, we may persist multiple (most likely 2) checkpoint states per epoch. @@ -760,15 +768,22 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { /** * Delete all items of an epoch from disk and memory */ - private async deleteAllEpochItems(epoch: Epoch): Promise { + private async deleteAllEpochItems(epoch: Epoch): Promise { let persistCount = 0; const rootHexes = this.epochIndex.get(epoch) || []; + const prunedStates: CachedBeaconStateAllForks[] = []; for (const rootHex of rootHexes) { const key = toCacheKey({rootHex, epoch}); const cacheItem = this.cache.get(key); if (cacheItem) { - const persistedKey = isPersistedCacheItem(cacheItem) ? 
cacheItem.value : cacheItem.persistedKey; + let persistedKey: Uint8Array | undefined = undefined; + if (isPersistedCacheItem(cacheItem)) { + persistedKey = cacheItem.value; + } else { + persistedKey = cacheItem.persistedKey; + prunedStates.push(cacheItem.state); + } if (persistedKey) { await this.datastore.remove(persistedKey); persistCount++; @@ -783,6 +798,8 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { persistCount, rootHexes: Array.from(rootHexes).join(","), }); + + return prunedStates; } /** diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts index cd93c34bde89..3d2d153d4610 100644 --- a/packages/beacon-node/src/chain/stateCache/types.ts +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -72,7 +72,7 @@ export interface CheckpointStateCache { ): Promise; updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void; - pruneFinalized(finalizedEpoch: Epoch): Map | null; + pruneFinalized(finalizedEpoch: Epoch): Promise | null>; processState( blockRootHex: RootHex, state: CachedBeaconStateAllForks From 519bfa3c3ab1726e3482256643e12f49a520ac57 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 17 Oct 2024 08:08:04 +0700 Subject: [PATCH 04/15] fix: lint --- .../src/chain/blocks/importBlock.ts | 21 +++++++++++-------- .../stateCache/persistentCheckpointsCache.ts | 11 +--------- .../src/block/processEth1Data.ts | 1 + packages/state-transition/src/util/balance.ts | 2 +- packages/types/src/phase0/listValidator.ts | 1 + packages/types/src/phase0/validator.ts | 1 + .../types/src/phase0/viewDU/listValidator.ts | 8 +++++-- 7 files changed, 23 insertions(+), 22 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 4fea6b38042e..38c54f63f69a 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -96,16 +96,19 @@ export async function importBlock( // This adds the state necessary to process the next block // Some block event handlers require state being in state cache so need to do this before emitting EventType.block - this.regen.processState(blockRootHex, postState).then((prunedStates) => { - if (prunedStates) { - for (const states of prunedStates.values()) { - // cp states on the same epoch shares the same balances seed tree so only need one of them - this.balancesTreeCache.processUnusedState(states[0]); + this.regen + .processState(blockRootHex, postState) + .then((prunedStates) => { + if (prunedStates) { + for (const states of prunedStates.values()) { + // cp states on the same epoch shares the same balances seed tree so only need one of them + this.balancesTreeCache.processUnusedState(states[0]); + } } - } - }).catch((e) => { - this.logger.error("Regen error to process state for block", {slot: blockSlot, root: blockRootHex}, e as Error); - }); + }) + .catch((e) => { + this.logger.error("Regen error to process state for block", {slot: blockSlot, root: blockRootHex}, e as Error); + }); this.metrics?.importBlock.bySource.inc({source}); this.logger.verbose("Added block to forkchoice and state cache", {slot: blockSlot, root: blockRootHex}); diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index c69d636b59b1..4d3e50d78d7f 100644 --- 
a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -418,7 +418,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { try { const prunedStates = await this.deleteAllEpochItems(epoch); result.set(epoch, prunedStates); - } catch(e) { + } catch (e) { this.logger.debug("Error prune finalized epoch", {epoch, finalizedEpoch}, e as Error); } } @@ -484,7 +484,6 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { blockRootHex: RootHex, state: CachedBeaconStateAllForks ): Promise | null> { - let persistCount = 0; // it's important to sort the epochs in ascending order, in case of big reorg we always want to keep the most recent checkpoint states const sortedEpochs = Array.from(this.epochIndex.keys()).sort((a, b) => a - b); if (sortedEpochs.length <= this.maxEpochsInMemory) { @@ -516,14 +515,6 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { result.set(lowestEpoch, prunedStates); } - if (persistCount > 0) { - this.logger.verbose("Persisted checkpoint states", { - slot: blockSlot, - root: blockRootHex, - persistCount, - persistEpochs: persistEpochs.length, - }); - } return result; } diff --git a/packages/state-transition/src/block/processEth1Data.ts b/packages/state-transition/src/block/processEth1Data.ts index 8e6ded467821..3846eaaa5f55 100644 --- a/packages/state-transition/src/block/processEth1Data.ts +++ b/packages/state-transition/src/block/processEth1Data.ts @@ -48,6 +48,7 @@ export function becomesNewEth1Data( // Then isEqualEth1DataView compares cached roots (HashObject as of Jan 2022) which is much cheaper // than doing structural equality, which requires tree -> value conversions let sameVotesCount = 0; + // biome-ignore lint/complexity/noForEach: ssz api state.eth1DataVotes.forEach((eth1DataVote) => { if (isEqualEth1DataView(eth1DataVote, newEth1Data)) { sameVotesCount++; diff --git a/packages/state-transition/src/util/balance.ts b/packages/state-transition/src/util/balance.ts index a1b086cbd591..ce5c2a46b4da 100644 --- a/packages/state-transition/src/util/balance.ts +++ b/packages/state-transition/src/util/balance.ts @@ -2,7 +2,7 @@ import {EFFECTIVE_BALANCE_INCREMENT} from "@lodestar/params"; import {Gwei, ValidatorIndex} from "@lodestar/types"; import {bigIntMax} from "@lodestar/utils"; import {EffectiveBalanceIncrements} from "../cache/effectiveBalanceIncrements.js"; -import {BeaconStateAllForks} from ".."; +import {BeaconStateAllForks} from "../index.js"; import {CachedBeaconStateAllForks} from "../types.js"; /** diff --git a/packages/types/src/phase0/listValidator.ts b/packages/types/src/phase0/listValidator.ts index a2228d5ec410..3a19931d1f5b 100644 --- a/packages/types/src/phase0/listValidator.ts +++ b/packages/types/src/phase0/listValidator.ts @@ -9,6 +9,7 @@ export class ListValidatorType extends ListCompositeType { + // biome-ignore lint/suspicious/noExplicitAny: ssz api return new ListValidatorTreeViewDU(this, node, cache as any); } } diff --git a/packages/types/src/phase0/validator.ts b/packages/types/src/phase0/validator.ts index 2861f52d2deb..9a64d9569f92 100644 --- a/packages/types/src/phase0/validator.ts +++ b/packages/types/src/phase0/validator.ts @@ -1,6 +1,7 @@ import {ByteViews, ContainerNodeStructType, ValueOfFields} from "@chainsafe/ssz"; import * as primitiveSsz from "../primitive/sszTypes.js"; +// biome-ignore lint/suspicious/noShadowRestrictedNames: We explicitly want 
`Boolean` name to be imported const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; // this is to work with uint32, see https://github.com/ChainSafe/ssz/blob/ssz-v0.15.1/packages/ssz/src/type/uint.ts diff --git a/packages/types/src/phase0/viewDU/listValidator.ts b/packages/types/src/phase0/viewDU/listValidator.ts index 05aeeebd75a5..adec88a94b10 100644 --- a/packages/types/src/phase0/viewDU/listValidator.ts +++ b/packages/types/src/phase0/viewDU/listValidator.ts @@ -60,7 +60,8 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU>): void { +function doBatchHashTreeRootValidators( + indices: ValidatorIndex[], + validators: Map> +): void { const endBatch = indices.length - (indices.length % PARALLEL_FACTOR); // commit every 16 validators in batch From c1631c736b9390169dc781626107f3735234b3fb Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 17 Oct 2024 09:07:06 +0700 Subject: [PATCH 05/15] fix: persistentCheckpointsCache.test.ts unit test --- .../persistentCheckpointsCache.test.ts | 114 ++++++++++-------- 1 file changed, 63 insertions(+), 51 deletions(-) diff --git a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts index f98b180fa983..132e36e4eed4 100644 --- a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts +++ b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts @@ -1,8 +1,8 @@ import {describe, it, expect, beforeAll, beforeEach} from "vitest"; import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import {CachedBeaconStateAllForks, computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; -import {RootHex, phase0} from "@lodestar/types"; -import {mapValues, toHexString} from "@lodestar/utils"; +import {Epoch, RootHex, phase0} from "@lodestar/types"; +import {mapValues, toHex, toRootHex} from "@lodestar/utils"; import {PersistentCheckpointStateCache} from "../../../../src/chain/stateCache/persistentCheckpointsCache.js"; import {checkpointToDatastoreKey} from "../../../../src/chain/stateCache/datastore/index.js"; import {generateCachedState} from "../../../utils/state.js"; @@ -45,7 +45,7 @@ describe("PersistentCheckpointStateCache", () => { cp1 = {epoch: 21, root: root1}; cp2 = {epoch: 22, root: root2}; [cp0aHex, cp0bHex, cp1Hex, cp2Hex] = [cp0a, cp0b, cp1, cp2].map((cp) => toCheckpointHex(cp)); - persistent0bKey = toHexString(checkpointToDatastoreKey(cp0b)); + persistent0bKey = toHex(checkpointToDatastoreKey(cp0b)); const allStates = [cp0a, cp0b, cp1, cp2] .map((cp) => generateCachedState({slot: cp.epoch * SLOTS_PER_EPOCH})) .map((state, i) => { @@ -117,7 +117,7 @@ describe("PersistentCheckpointStateCache", () => { it("getOrReloadLatest", async () => { cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); // cp0b is persisted expect(fileApisBuffer.size).toEqual(1); @@ -140,7 +140,7 @@ describe("PersistentCheckpointStateCache", () => { expect(((await cache.getStateOrBytes(cp0bHex)) as CachedBeaconStateAllForks).hashTreeRoot()).toEqual( states["cp0b"].hashTreeRoot() ); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); // cp0 is 
persisted expect(fileApisBuffer.size).toEqual(1); expect(Array.from(fileApisBuffer.keys())).toEqual([persistent0bKey]); @@ -150,7 +150,7 @@ describe("PersistentCheckpointStateCache", () => { // cp2 is in memory expect(cache.get(cp2Hex)).not.toBeNull(); // finalize epoch cp2 - cache.pruneFinalized(cp2.epoch); + await cache.pruneFinalized(cp2.epoch); expect(fileApisBuffer.size).toEqual(0); expect(cache.get(cp1Hex)).toBeNull(); expect(cache.get(cp2Hex)).not.toBeNull(); @@ -184,7 +184,7 @@ describe("PersistentCheckpointStateCache", () => { // 0a it("single state at lowest memory epoch", async () => { cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); }); @@ -200,7 +200,7 @@ describe("PersistentCheckpointStateCache", () => { // cp1a={0a, 21} {0a, 22}=cp2a it("multiple states at lowest memory epoch", async () => { cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); const cp1a = {epoch: 21, root: root0a}; const cp1aState = states["cp0a"].clone(); @@ -220,7 +220,7 @@ describe("PersistentCheckpointStateCache", () => { const state3 = cp2aState.clone(); state3.slot = 22 * SLOTS_PER_EPOCH + 3; state3.commit(); - await cache.processState(toHexString(root3), state3); + expect(countPrunedStates(await cache.processState(toRootHex(root3), state3))).toEqual(0); // state of {0a, 21} is choosen because it was built from cp0a expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(cp1aState.hashTreeRoot()); @@ -228,7 +228,7 @@ describe("PersistentCheckpointStateCache", () => { expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); const randomRoot = Buffer.alloc(32, 101); // for other random root it'll pick the first state of epoch 21 which is states["cp1"] - expect(cache.findSeedStateToReload({epoch: 20, rootHex: toHexString(randomRoot)})?.hashTreeRoot()).toEqual( + expect(cache.findSeedStateToReload({epoch: 20, rootHex: toRootHex(randomRoot)})?.hashTreeRoot()).toEqual( states["cp1"].hashTreeRoot() ); }); @@ -262,7 +262,7 @@ describe("PersistentCheckpointStateCache", () => { it("no reorg", async () => { expect(fileApisBuffer.size).toEqual(0); cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -271,7 +271,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; const root3 = Buffer.alloc(32, 100); // process state of root3 - await cache.processState(toHexString(root3), blockStateRoot3); + await cache.processState(toRootHex(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 22 has 1 checkpoint state @@ -297,7 +297,7 @@ describe("PersistentCheckpointStateCache", () => { // mostly 
the same to the above test expect(fileApisBuffer.size).toEqual(0); cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -306,14 +306,14 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; const root3 = Buffer.alloc(32, 100); // process state of root3 - await cache.processState(toHexString(root3), blockStateRoot3); + await cache.processState(toRootHex(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); const blockStateRoot4 = states["cp2"].clone(); blockStateRoot4.slot = 22 * SLOTS_PER_EPOCH + 4; const root4 = Buffer.alloc(32, 101); // process state of root4 - await cache.processState(toHexString(root4), blockStateRoot4); + await cache.processState(toRootHex(root4), blockStateRoot4); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 22 has 1 checkpoint state @@ -341,7 +341,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 1 epoch", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // regen generates cp2a @@ -360,7 +360,7 @@ describe("PersistentCheckpointStateCache", () => { const root3 = Buffer.alloc(32, 101); // process state of root3 - await cache.processState(toHexString(root3), blockStateRoot3); + await cache.processState(toRootHex(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 22 has 2 checkpoint states expect(cache.get(cp2Hex)).not.toBeNull(); @@ -385,7 +385,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 2 epochs", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // reload cp0b from disk @@ -413,7 +413,7 @@ describe("PersistentCheckpointStateCache", () => { const root3 = Buffer.alloc(32, 101); // process state of root3 - await cache.processState(toHexString(root3), blockStateRoot3); + await cache.processState(toRootHex(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 21 and 22 have 2 checkpoint states expect(cache.get(cp1Hex)).not.toBeNull(); @@ -438,7 +438,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 3 epochs, persist cp 0a", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // cp0a was pruned from memory and not in disc expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -469,7 +469,7 @@ describe("PersistentCheckpointStateCache", () => { 
const root3 = Buffer.alloc(32, 100); // process state of root3 - expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root3), blockStateRoot3))).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); // epoch 21 and 22 have 2 checkpoint states expect(cache.get(cp1Hex)).not.toBeNull(); @@ -494,7 +494,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 3 epochs, prune but no persist", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // cp0a was pruned from memory and not in disc expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -524,7 +524,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; const root3 = Buffer.alloc(32, 100); // process state of root3, nothing is persisted - expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root3), blockStateRoot3))).toEqual(1); // but state of cp0b is pruned from memory expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -567,7 +567,7 @@ describe("PersistentCheckpointStateCache", () => { it("no reorg", async () => { expect(fileApisBuffer.size).toEqual(0); cache.add(cp1, states["cp1"]); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -576,7 +576,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2 - await cache.processState(toHexString(root2), blockStateRoot2); + await cache.processState(toRootHex(root2), blockStateRoot2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); @@ -602,7 +602,7 @@ describe("PersistentCheckpointStateCache", () => { // almost the same to "no reorg" test expect(fileApisBuffer.size).toEqual(0); cache.add(cp1, states["cp1"]); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -611,14 +611,14 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2 - await cache.processState(toHexString(root2), blockStateRoot2); + await cache.processState(toRootHex(root2), blockStateRoot2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); const blockStateRoot3 = states["cp1"].clone(); 
blockStateRoot3.slot = 21 * SLOTS_PER_EPOCH + 4; const root3 = Buffer.alloc(32, 101); // process state of root3 - await cache.processState(toHexString(root3), blockStateRoot3); + await cache.processState(toRootHex(root3), blockStateRoot3); // epoch 21 has 1 checkpoint state expect(cache.get(cp1Hex)).not.toBeNull(); @@ -646,13 +646,13 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(0); expect(fileApisBuffer.size).toEqual(0); await assertPersistedCheckpointState([], []); // cp1 cache.add(cp1, states["cp1"]); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -667,7 +667,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2 - expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(0); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); // keep these 2 cp states at epoch 21 @@ -686,7 +686,7 @@ describe("PersistentCheckpointStateCache", () => { expect(fileApisBuffer.size).toEqual(0); // cp1 cache.add(cp1, states["cp1"]); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -706,7 +706,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2, nothing is persisted - expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(1); // but cp0b in-memory state is pruned expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -731,7 +731,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0a"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(0); expect(fileApisBuffer.size).toEqual(0); // at epoch 20, there should be 2 cps in memory expect(cache.get(cp0aHex)).not.toBeNull(); @@ -740,7 +740,7 @@ describe("PersistentCheckpointStateCache", () => { // cp1 cache.add(cp1, states["cp1"]); - expect(await 
cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -762,7 +762,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2, persist cp0a - expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); // keep these 2 cp states at epoch 21 @@ -782,7 +782,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 2 epochs", async () => { // cp1 cache.add(cp1, states["cp1"]); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -804,7 +804,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2, persist cp0a - expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); // keep these 2 cp states at epoch 21 @@ -837,7 +837,7 @@ describe("PersistentCheckpointStateCache", () => { // | // 0a it("no reorg", async () => { - expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -846,7 +846,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root1a), state1a))).toEqual(0); // nothing change expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -861,7 +861,7 @@ describe("PersistentCheckpointStateCache", () => { // | \ | // 0a \------root1b it("reorg in same epoch", async () => { - expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await 
cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -870,7 +870,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root1a), state1a))).toEqual(0); // nothing change expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -886,7 +886,7 @@ describe("PersistentCheckpointStateCache", () => { state1b.slot = state1a.slot + 1; state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); // but no need to persist cp1b - expect(await cache.processState(toHexString(root1b), state1b)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root1b), state1b))).toEqual(1); // although states["cp0b"] is pruned expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -934,7 +934,7 @@ describe("PersistentCheckpointStateCache", () => { cache.add(cp0a, states["cp0a"]); // need to persist 2 checkpoint states - expect(await cache.processState(toHexString(root1b), state1b)).toEqual(2); + expect(countPrunedStates(await cache.processState(toRootHex(root1b), state1b))).toEqual(2); // both are persisited expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); expect(await cache.getStateOrBytes(cp0aHex)).toEqual(stateBytes["cp0a"]); @@ -949,7 +949,7 @@ describe("PersistentCheckpointStateCache", () => { // | | // 0a---------root1b it("reorg 1 epoch, processState twice", async () => { - expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -958,7 +958,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + expect(countPrunedStates(await cache.processState(toRootHex(root1a), state1a))).toEqual(0); // nothing change expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -970,7 +970,7 @@ describe("PersistentCheckpointStateCache", () => { state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); // regen should reload cp0a from disk cache.add(cp0a, states["cp0a"]); - expect(await cache.processState(toHexString(root1b), state1b)).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root1b), state1b))).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); // both cp0a and cp0b are persisted @@ -988,13 +988,13 @@ describe("PersistentCheckpointStateCache", () => { // ^ // {0a, 21}=cp1a it("reorg 2 epochs", async () => { - expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await 
cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); cache.add(cp1, states["cp1"]); - expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(1); await assertPersistedCheckpointState([cp0b, cp1], [stateBytes["cp0b"], stateBytes["cp1"]]); // regen should populate cp0a and cp1a checkpoint states @@ -1010,7 +1010,7 @@ describe("PersistentCheckpointStateCache", () => { const state2 = cp1aState.clone(); state2.slot = 21 * SLOTS_PER_EPOCH + 3; state2.blockRoots.set(state2.slot % SLOTS_PER_HISTORICAL_ROOT, root2); - expect(await cache.processState(toHexString(root2), state2)).toEqual(2); + expect(countPrunedStates(await cache.processState(toRootHex(root2), state2))).toEqual(2); // expect 4 cp states are persisted await assertPersistedCheckpointState( [cp0b, cp1, cp0a, cp1a], @@ -1021,7 +1021,7 @@ describe("PersistentCheckpointStateCache", () => { }); async function assertPersistedCheckpointState(cps: phase0.Checkpoint[], stateBytesArr: Uint8Array[]): Promise { - const persistedKeys = cps.map((cp) => toHexString(checkpointToDatastoreKey(cp))); + const persistedKeys = cps.map((cp) => toHex(checkpointToDatastoreKey(cp))); expect(Array.from(fileApisBuffer.keys())).toStrictEqual(persistedKeys); for (const [i, persistedKey] of persistedKeys.entries()) { expect(fileApisBuffer.get(persistedKey)).toStrictEqual(stateBytesArr[i]); @@ -1034,3 +1034,15 @@ describe("PersistentCheckpointStateCache", () => { } } }); + +function countPrunedStates(prunedStates: Map | null): number { + if (!prunedStates) { + return 0; + } + + let count = 0; + for (const states of prunedStates.values()) { + count += states.length; + } + return count; +} From cc5cfe1ea6661cbb27ded2e40376ebb3ebd2b624 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 17 Oct 2024 11:41:53 +0700 Subject: [PATCH 06/15] fix: only reuse balances tree in good network condition --- packages/beacon-node/src/chain/blocks/importBlock.ts | 4 +++- packages/beacon-node/src/chain/chain.ts | 8 +++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 38c54f63f69a..1c9f4e12a495 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -99,7 +99,9 @@ export async function importBlock( this.regen .processState(blockRootHex, postState) .then((prunedStates) => { - if (prunedStates) { + // if the node is syncing, we don't want to reuse balances tree + const isCurrentSlot = this.clock.currentSlot === blockSlot; + if (isCurrentSlot && prunedStates) { for (const states of prunedStates.values()) { // cp states on the same epoch shares the same balances seed tree so only need one of them this.balancesTreeCache.processUnusedState(states[0]); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 6553c48f11e5..87ed50b786d4 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -110,6 +110,11 @@ import {BalancesTreeCache} from "./balancesTreeCache.js"; */ const DEFAULT_MAX_CACHED_PRODUCED_ROOTS = 4; +/** + * The distance between the finalized epoch and the current epoch to consider safe. 
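Taken together with the `isCurrentSlot` check added to `importBlock` above, the constant defined below gates balances-tree reuse to healthy network conditions. The helper here is illustrative only, it does not exist in the patch: it combines both conditions in one place, whereas the patch applies them at two different call sites (`importBlock` and `pruneOnFinalized`).

```ts
// Illustrative sketch; only the constant value matches the one added to chain.ts.
const SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF = 3;

function isSafeToRecycleBalancesTree(
  currentSlot: number,
  blockSlot: number,
  currentEpoch: number,
  finalizedEpoch: number
): boolean {
  // importBlock: only recycle while processing the current slot, i.e. the node is not syncing
  const isCurrentSlot = currentSlot === blockSlot;
  // pruneOnFinalized: skip recycling during long periods of non-finality
  const finalityIsClose = currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF;
  return isCurrentSlot && finalityIsClose;
}
```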
+ */ +const SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF = 3; + export class BeaconChain implements IBeaconChain { readonly genesisTime: UintNum64; readonly genesisValidatorsRoot: Root; @@ -880,7 +885,8 @@ export class BeaconChain implements IBeaconChain { async pruneOnFinalized(finalizedEpoch: Epoch): Promise { const prunedStates = await this.regen.pruneOnFinalized(finalizedEpoch); - if (prunedStates) { + // if the node is syncing or unfinality time, we don't want to reuse balances tree + if (this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && prunedStates) { // cp states on the same epoch shares the same balances seed tree so only need one of them for (const states of prunedStates.values()) { this.balancesTreeCache.processUnusedState(states[0]); From 550c7007bb3bcf9ea20d8c3c13a79fcb56844d46 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 17 Oct 2024 12:22:29 +0700 Subject: [PATCH 07/15] fix: balancesTreeCache to store one balances tree per epoch --- .../src/chain/balancesTreeCache.ts | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/packages/beacon-node/src/chain/balancesTreeCache.ts b/packages/beacon-node/src/chain/balancesTreeCache.ts index 462ae860809e..6658ce66cf6a 100644 --- a/packages/beacon-node/src/chain/balancesTreeCache.ts +++ b/packages/beacon-node/src/chain/balancesTreeCache.ts @@ -1,16 +1,21 @@ import {ListBasicTreeViewDU, UintNumberType} from "@chainsafe/ssz"; import {IBalancesTreeCache, CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {Metrics} from "../metrics/index.js"; +import {Epoch} from "@lodestar/types"; const MAX_ITEMS = 2; +/** + * A cached of unused balances tree + * States in the same epoch share the same balances tree so we only want to cache max once per epoch + */ export class BalancesTreeCache implements IBalancesTreeCache { - private readonly unusedBalancesTrees: ListBasicTreeViewDU[] = []; + private readonly unusedBalancesTrees: Map> = new Map(); constructor(private readonly metrics: Metrics | null = null) { if (metrics) { metrics.balancesTreeCache.size.addCollect(() => { - metrics.balancesTreeCache.size.set(this.unusedBalancesTrees.length); + metrics.balancesTreeCache.size.set(this.unusedBalancesTrees.size); }); } } @@ -19,20 +24,28 @@ export class BalancesTreeCache implements IBalancesTreeCache { if (state === undefined) { return; } + const stateEpoch = state.epochCtx.epoch; + if (this.unusedBalancesTrees.has(stateEpoch)) { + return; + } - this.unusedBalancesTrees.push(state.balances); - while (this.unusedBalancesTrees.length > MAX_ITEMS) { - this.unusedBalancesTrees.shift(); + this.unusedBalancesTrees.set(stateEpoch, state.balances); + while (this.unusedBalancesTrees.size > MAX_ITEMS) { + const firstEpoch = Array.from(this.unusedBalancesTrees.keys())[0]; + this.unusedBalancesTrees.delete(firstEpoch); } } getUnusedBalances(): ListBasicTreeViewDU | undefined { - if (this.unusedBalancesTrees.length === 0) { + if (this.unusedBalancesTrees.size === 0) { this.metrics?.balancesTreeCache.miss.inc(); return undefined; } this.metrics?.balancesTreeCache.hit.inc(); - return this.unusedBalancesTrees.shift(); + const firstEpoch = Array.from(this.unusedBalancesTrees.keys())[0]; + const unusedBalances = this.unusedBalancesTrees.get(firstEpoch); + this.unusedBalancesTrees.delete(firstEpoch); + return unusedBalances; } } From f94ff92dbe054cc6a1f3e20d41a1682dfd1fa2dc Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 17 Oct 2024 13:16:29 +0700 Subject: [PATCH 08/15] 
chore: track source of cached balances tree --- packages/beacon-node/src/chain/balancesTreeCache.ts | 9 ++++++++- packages/beacon-node/src/chain/blocks/importBlock.ts | 3 ++- packages/beacon-node/src/chain/chain.ts | 4 ++-- packages/beacon-node/src/metrics/metrics/lodestar.ts | 6 ++++++ 4 files changed, 18 insertions(+), 4 deletions(-) diff --git a/packages/beacon-node/src/chain/balancesTreeCache.ts b/packages/beacon-node/src/chain/balancesTreeCache.ts index 6658ce66cf6a..2c70264b15df 100644 --- a/packages/beacon-node/src/chain/balancesTreeCache.ts +++ b/packages/beacon-node/src/chain/balancesTreeCache.ts @@ -5,6 +5,11 @@ import {Epoch} from "@lodestar/types"; const MAX_ITEMS = 2; +export enum BalancesTreeSource { + PRUNE_ON_FINALIZED = "pruned_on_finalized", + IMPORT_BLOCK = "import_block", +}; + /** * A cached of unused balances tree * States in the same epoch share the same balances tree so we only want to cache max once per epoch @@ -20,7 +25,7 @@ export class BalancesTreeCache implements IBalancesTreeCache { } } - processUnusedState(state: CachedBeaconStateAllForks | undefined): void { + processUnusedState(state: CachedBeaconStateAllForks | undefined, source: BalancesTreeSource): void { if (state === undefined) { return; } @@ -29,6 +34,8 @@ export class BalancesTreeCache implements IBalancesTreeCache { return; } + this.metrics?.balancesTreeCache.total.inc({source}); + this.unusedBalancesTrees.set(stateEpoch, state.balances); while (this.unusedBalancesTrees.size > MAX_ITEMS) { const firstEpoch = Array.from(this.unusedBalancesTrees.keys())[0]; diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 1c9f4e12a495..324c837b1edb 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -23,6 +23,7 @@ import {ForkchoiceCaller} from "../forkChoice/index.js"; import {FullyVerifiedBlock, ImportBlockOpts, AttestationImportOpt, BlockInputType} from "./types.js"; import {getCheckpointFromState} from "./utils/checkpoint.js"; import {writeBlockInputToDb} from "./writeBlockInputToDb.js"; +import {BalancesTreeSource} from "../balancesTreeCache.js"; /** * Fork-choice allows to import attestations from current (0) or past (1) epoch. 
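For reference, the per-epoch `Map` introduced in the previous commit gives the cache a simple policy: at most one balances tree per epoch, evicting the oldest epoch first. A self-contained sketch of that policy follows (simplified types, not patch code; `MAX_ITEMS` reflects its value at this point in the series, a later commit raises it).

```ts
const MAX_ITEMS = 2;
const unusedTrees = new Map<number, unknown>(); // epoch -> balances tree view

function rememberBalancesTree(epoch: number, balancesTree: unknown): void {
  // states of the same epoch share one balances tree, so a second insert for the epoch is a no-op
  if (unusedTrees.has(epoch)) return;
  unusedTrees.set(epoch, balancesTree);
  while (unusedTrees.size > MAX_ITEMS) {
    // JS Maps iterate in insertion order, so the first key is the oldest cached epoch
    const oldestEpoch = Array.from(unusedTrees.keys())[0];
    unusedTrees.delete(oldestEpoch);
  }
}
```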
@@ -104,7 +105,7 @@ export async function importBlock( if (isCurrentSlot && prunedStates) { for (const states of prunedStates.values()) { // cp states on the same epoch shares the same balances seed tree so only need one of them - this.balancesTreeCache.processUnusedState(states[0]); + this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.IMPORT_BLOCK); } } }) diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 87ed50b786d4..81e0affd6619 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -101,7 +101,7 @@ import {DbCPStateDatastore} from "./stateCache/datastore/db.js"; import {FileCPStateDatastore} from "./stateCache/datastore/file.js"; import {SyncCommitteeRewards, computeSyncCommitteeRewards} from "./rewards/syncCommitteeRewards.js"; import {AttestationsRewards, computeAttestationsRewards} from "./rewards/attestationsRewards.js"; -import {BalancesTreeCache} from "./balancesTreeCache.js"; +import {BalancesTreeCache, BalancesTreeSource} from "./balancesTreeCache.js"; /** * Arbitrary constants, blobs and payloads should be consumed immediately in the same slot @@ -889,7 +889,7 @@ export class BeaconChain implements IBeaconChain { if (this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && prunedStates) { // cp states on the same epoch shares the same balances seed tree so only need one of them for (const states of prunedStates.values()) { - this.balancesTreeCache.processUnusedState(states[0]); + this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.PRUNE_ON_FINALIZED); } } } diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index be9ffd79ef16..2664c33b8f4c 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -19,6 +19,7 @@ import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; import {OpSource} from "../validatorMonitor.js"; import {CacheItemType} from "../../chain/stateCache/types.js"; import {AllocSource} from "../../util/bufferPool.js"; +import {BalancesTreeSource} from "../../chain/balancesTreeCache.js"; export type LodestarMetrics = ReturnType; @@ -1337,6 +1338,11 @@ export function createLodestarMetrics( name: "lodestar_balances_tree_cache_size", help: "Balances tree cache size", }), + total: register.gauge<{source: BalancesTreeSource}>({ + name: "lodestar_balances_tree_cache_total", + help: "Total number of balances tree cache", + labelNames: ["source"], + }), hit: register.gauge({ name: "lodestar_balances_tree_cache_hit_total", help: "Total number of balances tree cache hits", From bceb911ad34cf91839e25a467b4e99c44a99ef08 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 17 Oct 2024 14:14:07 +0700 Subject: [PATCH 09/15] feat: add reuseBalancesTree option --- packages/beacon-node/src/chain/blocks/importBlock.ts | 2 +- packages/beacon-node/src/chain/chain.ts | 2 +- packages/beacon-node/src/chain/options.ts | 3 +++ packages/cli/src/options/beaconNodeOptions/chain.ts | 10 ++++++++++ .../cli/test/unit/options/beaconNodeOptions.test.ts | 2 ++ 5 files changed, 17 insertions(+), 2 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 324c837b1edb..89b86a88f063 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ 
b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -102,7 +102,7 @@ export async function importBlock( .then((prunedStates) => { // if the node is syncing, we don't want to reuse balances tree const isCurrentSlot = this.clock.currentSlot === blockSlot; - if (isCurrentSlot && prunedStates) { + if (this.opts.reuseBalancesTree && isCurrentSlot && prunedStates) { for (const states of prunedStates.values()) { // cp states on the same epoch shares the same balances seed tree so only need one of them this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.IMPORT_BLOCK); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 81e0affd6619..0bd9b09245c7 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -886,7 +886,7 @@ export class BeaconChain implements IBeaconChain { async pruneOnFinalized(finalizedEpoch: Epoch): Promise { const prunedStates = await this.regen.pruneOnFinalized(finalizedEpoch); // if the node is syncing or unfinality time, we don't want to reuse balances tree - if (this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && prunedStates) { + if (this.opts.reuseBalancesTree && this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && prunedStates) { // cp states on the same epoch shares the same balances seed tree so only need one of them for (const states of prunedStates.values()) { this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.PRUNE_ON_FINALIZED); diff --git a/packages/beacon-node/src/chain/options.ts b/packages/beacon-node/src/chain/options.ts index bc2b73256272..0668082266e6 100644 --- a/packages/beacon-node/src/chain/options.ts +++ b/packages/beacon-node/src/chain/options.ts @@ -38,6 +38,8 @@ export type IChainOptions = BlockProcessOpts & archiveBlobEpochs?: number; nHistoricalStates?: boolean; nHistoricalStatesFileDataStore?: boolean; + /** Reuse balances tree or not */ + reuseBalancesTree?: boolean; }; export type BlockProcessOpts = { @@ -115,4 +117,5 @@ export const defaultChainOptions: IChainOptions = { nHistoricalStatesFileDataStore: false, maxBlockStates: DEFAULT_MAX_BLOCK_STATES, maxCPStateEpochsInMemory: DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY, + reuseBalancesTree: false, }; diff --git a/packages/cli/src/options/beaconNodeOptions/chain.ts b/packages/cli/src/options/beaconNodeOptions/chain.ts index 78ffd47da8f4..145fc02984da 100644 --- a/packages/cli/src/options/beaconNodeOptions/chain.ts +++ b/packages/cli/src/options/beaconNodeOptions/chain.ts @@ -32,6 +32,7 @@ export type ChainArgs = { "chain.nHistoricalStatesFileDataStore"?: boolean; "chain.maxBlockStates"?: number; "chain.maxCPStateEpochsInMemory"?: number; + "chain.reuseBalancesTree"?: boolean; }; export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { @@ -66,6 +67,7 @@ export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { args["chain.nHistoricalStatesFileDataStore"] ?? defaultOptions.chain.nHistoricalStatesFileDataStore, maxBlockStates: args["chain.maxBlockStates"] ?? defaultOptions.chain.maxBlockStates, maxCPStateEpochsInMemory: args["chain.maxCPStateEpochsInMemory"] ?? defaultOptions.chain.maxCPStateEpochsInMemory, + reuseBalancesTree: args["chain.reuseBalancesTree"] ?? defaultOptions.chain.reuseBalancesTree, }; } @@ -273,4 +275,12 @@ Will double processing times. 
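Condensed view of the flag wiring in this commit (illustrative sketch, not the full `IChainOptions` or yargs definitions): the experimental `reuseBalancesTree` option defaults to `false`, and the hidden `chain.reuseBalancesTree` CLI arg only overrides it when explicitly set.

```ts
// Sketch: experimental option with a safe default, overridable via a hidden CLI arg.
type ChainOptions = {reuseBalancesTree?: boolean};
type ChainArgs = {"chain.reuseBalancesTree"?: boolean};

const defaultChainOptions: Required<ChainOptions> = {
  reuseBalancesTree: false, // experimental, off unless explicitly enabled
};

function parseChainArgs(args: ChainArgs): Required<ChainOptions> {
  return {
    // CLI value wins; otherwise fall back to the safe default
    reuseBalancesTree: args["chain.reuseBalancesTree"] ?? defaultChainOptions.reuseBalancesTree,
  };
}

console.log(parseChainArgs({})); // {reuseBalancesTree: false}
console.log(parseChainArgs({"chain.reuseBalancesTree": true})); // {reuseBalancesTree: true}
```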
Use only for debugging purposes.", default: defaultOptions.chain.maxCPStateEpochsInMemory, group: "chain", }, + + "chain.reuseBalancesTree": { + hidden: true, + description: "Reuse balances tree or not", + type: "boolean", + default: defaultOptions.chain.reuseBalancesTree, + group: "chain", + }, }; diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index 879b5bfa2fc9..3ae145de58fc 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -43,6 +43,7 @@ describe("options / beaconNodeOptions", () => { "chain.nHistoricalStatesFileDataStore": true, "chain.maxBlockStates": 100, "chain.maxCPStateEpochsInMemory": 100, + "chain.reuseBalancesTree": true, emitPayloadAttributes: false, eth1: true, @@ -151,6 +152,7 @@ describe("options / beaconNodeOptions", () => { nHistoricalStatesFileDataStore: true, maxBlockStates: 100, maxCPStateEpochsInMemory: 100, + reuseBalancesTree: true, }, eth1: { enabled: true, From b3a5341a67368632bf9260e532812e2b4500d78d Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 18 Oct 2024 05:47:07 +0700 Subject: [PATCH 10/15] fix: BalancesTreeCache - MAX_ITEMS=3 MIN_STATES_CACHE=2 --- .../src/chain/balancesTreeCache.ts | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/beacon-node/src/chain/balancesTreeCache.ts b/packages/beacon-node/src/chain/balancesTreeCache.ts index 2c70264b15df..2bff9926bc0e 100644 --- a/packages/beacon-node/src/chain/balancesTreeCache.ts +++ b/packages/beacon-node/src/chain/balancesTreeCache.ts @@ -3,16 +3,23 @@ import {IBalancesTreeCache, CachedBeaconStateAllForks} from "@lodestar/state-tra import {Metrics} from "../metrics/index.js"; import {Epoch} from "@lodestar/types"; -const MAX_ITEMS = 2; +const MAX_ITEMS = 3; +const MIN_STATES_CACHE = 2; export enum BalancesTreeSource { PRUNE_ON_FINALIZED = "pruned_on_finalized", IMPORT_BLOCK = "import_block", }; + /** - * A cached of unused balances tree - * States in the same epoch share the same balances tree so we only want to cache max once per epoch + * Experimental feature to reuse balances tree. Note that this is dangerous so should be disabled by default (check chain.reuseBalancesTree flag) + * In theory, all data should be immutable, however we never read/write pre-finalized states, we can use their + * balances tree for the next epoch transition. 
Some more constraints to make this safer: + * - don't do this when node is syncing + * - don't do this when network is not stable + * - enforce the age of balances tree through MIN_STATES_CACHE + * - given MAX_ITEMS = MIN_STATES_CACHE + 1, only 1 balances tree is reused at an epoch */ export class BalancesTreeCache implements IBalancesTreeCache { private readonly unusedBalancesTrees: Map> = new Map(); @@ -30,21 +37,17 @@ export class BalancesTreeCache implements IBalancesTreeCache { return; } const stateEpoch = state.epochCtx.epoch; - if (this.unusedBalancesTrees.has(stateEpoch)) { + // it's safer to reuse old balances tree + if (this.unusedBalancesTrees.has(stateEpoch) || this.unusedBalancesTrees.size >= MAX_ITEMS) { return; } this.metrics?.balancesTreeCache.total.inc({source}); - this.unusedBalancesTrees.set(stateEpoch, state.balances); - while (this.unusedBalancesTrees.size > MAX_ITEMS) { - const firstEpoch = Array.from(this.unusedBalancesTrees.keys())[0]; - this.unusedBalancesTrees.delete(firstEpoch); - } } getUnusedBalances(): ListBasicTreeViewDU | undefined { - if (this.unusedBalancesTrees.size === 0) { + if (this.unusedBalancesTrees.size <= MIN_STATES_CACHE) { this.metrics?.balancesTreeCache.miss.inc(); return undefined; } From a4badea02f42d88bfdc481898e48013e387363cf Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 18 Oct 2024 07:23:43 +0700 Subject: [PATCH 11/15] fix: check clock slot is same to head --- packages/beacon-node/src/chain/chain.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 0bd9b09245c7..fbc0ea531fb3 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -885,8 +885,12 @@ export class BeaconChain implements IBeaconChain { async pruneOnFinalized(finalizedEpoch: Epoch): Promise { const prunedStates = await this.regen.pruneOnFinalized(finalizedEpoch); + const head = this.forkChoice.getHead(); // if the node is syncing or unfinality time, we don't want to reuse balances tree - if (this.opts.reuseBalancesTree && this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && prunedStates) { + if (this.opts.reuseBalancesTree && + this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && + head.slot === this.clock.currentSlot && + prunedStates) { // cp states on the same epoch shares the same balances seed tree so only need one of them for (const states of prunedStates.values()) { this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.PRUNE_ON_FINALIZED); From 293417ab0534b31686a1cac8660f5eac725e5fe4 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 18 Oct 2024 09:42:46 +0700 Subject: [PATCH 12/15] fix: remove BalancesTreeCache --- .../beacon-node/src/chain/archiver/index.ts | 2 +- .../src/chain/balancesTreeCache.ts | 61 ---------- .../src/chain/blocks/importBlock.ts | 17 +-- packages/beacon-node/src/chain/chain.ts | 24 ---- packages/beacon-node/src/chain/interface.ts | 2 - packages/beacon-node/src/chain/options.ts | 3 - .../beacon-node/src/chain/regen/queued.ts | 22 +--- .../stateCache/inMemoryCheckpointsCache.ts | 25 +--- .../stateCache/persistentCheckpointsCache.ts | 72 ++++------- .../beacon-node/src/chain/stateCache/types.ts | 7 +- .../src/metrics/metrics/lodestar.ts | 21 ---- .../persistentCheckpointsCache.test.ts | 114 ++++++++---------- .../src/options/beaconNodeOptions/chain.ts | 10 -- 
.../unit/options/beaconNodeOptions.test.ts | 2 - .../state-transition/src/cache/epochCache.ts | 9 +- .../src/epoch/processRewardsAndPenalties.ts | 2 +- packages/state-transition/src/index.ts | 1 - 17 files changed, 91 insertions(+), 303 deletions(-) delete mode 100644 packages/beacon-node/src/chain/balancesTreeCache.ts diff --git a/packages/beacon-node/src/chain/archiver/index.ts b/packages/beacon-node/src/chain/archiver/index.ts index e12a6157e8ed..45169b2fa802 100644 --- a/packages/beacon-node/src/chain/archiver/index.ts +++ b/packages/beacon-node/src/chain/archiver/index.ts @@ -109,7 +109,7 @@ export class Archiver { // should be after ArchiveBlocksTask to handle restart cleanly await this.statesArchiver.maybeArchiveState(finalized, this.metrics); - await this.chain.pruneOnFinalized(finalizedEpoch); + this.chain.regen.pruneOnFinalized(finalizedEpoch); // tasks rely on extended fork choice const prunedBlocks = this.chain.forkChoice.prune(finalized.rootHex); diff --git a/packages/beacon-node/src/chain/balancesTreeCache.ts b/packages/beacon-node/src/chain/balancesTreeCache.ts deleted file mode 100644 index 2bff9926bc0e..000000000000 --- a/packages/beacon-node/src/chain/balancesTreeCache.ts +++ /dev/null @@ -1,61 +0,0 @@ -import {ListBasicTreeViewDU, UintNumberType} from "@chainsafe/ssz"; -import {IBalancesTreeCache, CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {Metrics} from "../metrics/index.js"; -import {Epoch} from "@lodestar/types"; - -const MAX_ITEMS = 3; -const MIN_STATES_CACHE = 2; - -export enum BalancesTreeSource { - PRUNE_ON_FINALIZED = "pruned_on_finalized", - IMPORT_BLOCK = "import_block", -}; - - -/** - * Experimental feature to reuse balances tree. Note that this is dangerous so should be disabled by default (check chain.reuseBalancesTree flag) - * In theory, all data should be immutable, however we never read/write pre-finalized states, we can use their - * balances tree for the next epoch transition. 
Some more constraints to make this safer: - * - don't do this when node is syncing - * - don't do this when network is not stable - * - enforce the age of balances tree through MIN_STATES_CACHE - * - given MAX_ITEMS = MIN_STATES_CACHE + 1, only 1 balances tree is reused at an epoch - */ -export class BalancesTreeCache implements IBalancesTreeCache { - private readonly unusedBalancesTrees: Map> = new Map(); - - constructor(private readonly metrics: Metrics | null = null) { - if (metrics) { - metrics.balancesTreeCache.size.addCollect(() => { - metrics.balancesTreeCache.size.set(this.unusedBalancesTrees.size); - }); - } - } - - processUnusedState(state: CachedBeaconStateAllForks | undefined, source: BalancesTreeSource): void { - if (state === undefined) { - return; - } - const stateEpoch = state.epochCtx.epoch; - // it's safer to reuse old balances tree - if (this.unusedBalancesTrees.has(stateEpoch) || this.unusedBalancesTrees.size >= MAX_ITEMS) { - return; - } - - this.metrics?.balancesTreeCache.total.inc({source}); - this.unusedBalancesTrees.set(stateEpoch, state.balances); - } - - getUnusedBalances(): ListBasicTreeViewDU | undefined { - if (this.unusedBalancesTrees.size <= MIN_STATES_CACHE) { - this.metrics?.balancesTreeCache.miss.inc(); - return undefined; - } - - this.metrics?.balancesTreeCache.hit.inc(); - const firstEpoch = Array.from(this.unusedBalancesTrees.keys())[0]; - const unusedBalances = this.unusedBalancesTrees.get(firstEpoch); - this.unusedBalancesTrees.delete(firstEpoch); - return unusedBalances; - } -} diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 89b86a88f063..596f01f391a4 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -23,7 +23,6 @@ import {ForkchoiceCaller} from "../forkChoice/index.js"; import {FullyVerifiedBlock, ImportBlockOpts, AttestationImportOpt, BlockInputType} from "./types.js"; import {getCheckpointFromState} from "./utils/checkpoint.js"; import {writeBlockInputToDb} from "./writeBlockInputToDb.js"; -import {BalancesTreeSource} from "../balancesTreeCache.js"; /** * Fork-choice allows to import attestations from current (0) or past (1) epoch. 
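For readers tracking what is being removed here: the cache kept at most `MAX_ITEMS` epoch-keyed balances trees and only handed one out once more than `MIN_STATES_CACHE` epochs were buffered, so the reused tree was always the oldest one cached. A standalone sketch of that age-guarded FIFO, with simplified value types (strings instead of SSZ tree views); it is not code from this patch:

```ts
// Sketch: age-guarded FIFO keyed by epoch, mirroring the removed BalancesTreeCache logic.
const MAX_ITEMS = 3;
const MIN_STATES_CACHE = 2;

class AgeGuardedCache<V> {
  private readonly byEpoch = new Map<number, V>();

  add(epoch: number, value: V): void {
    // at most one entry per epoch, never more than MAX_ITEMS entries overall
    if (this.byEpoch.has(epoch) || this.byEpoch.size >= MAX_ITEMS) return;
    this.byEpoch.set(epoch, value);
  }

  takeOldest(): V | undefined {
    // only hand out a value once enough epochs are buffered, so it is old enough to be safe
    if (this.byEpoch.size <= MIN_STATES_CACHE) return undefined;
    const oldestEpoch = this.byEpoch.keys().next().value as number;
    const value = this.byEpoch.get(oldestEpoch);
    this.byEpoch.delete(oldestEpoch);
    return value;
  }
}

// With MAX_ITEMS = MIN_STATES_CACHE + 1, at most one tree is reused per epoch.
const cache = new AgeGuardedCache<string>();
cache.add(20, "tree@20");
cache.add(21, "tree@21");
console.log(cache.takeOldest()); // undefined, only 2 epochs buffered
cache.add(22, "tree@22");
console.log(cache.takeOldest()); // "tree@20", the oldest entry
```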
@@ -97,21 +96,7 @@ export async function importBlock( // This adds the state necessary to process the next block // Some block event handlers require state being in state cache so need to do this before emitting EventType.block - this.regen - .processState(blockRootHex, postState) - .then((prunedStates) => { - // if the node is syncing, we don't want to reuse balances tree - const isCurrentSlot = this.clock.currentSlot === blockSlot; - if (this.opts.reuseBalancesTree && isCurrentSlot && prunedStates) { - for (const states of prunedStates.values()) { - // cp states on the same epoch shares the same balances seed tree so only need one of them - this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.IMPORT_BLOCK); - } - } - }) - .catch((e) => { - this.logger.error("Regen error to process state for block", {slot: blockSlot, root: blockRootHex}, e as Error); - }); + this.regen.processState(blockRootHex, postState); this.metrics?.importBlock.bySource.inc({source}); this.logger.verbose("Added block to forkchoice and state cache", {slot: blockSlot, root: blockRootHex}); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index fbc0ea531fb3..195b8736b2c3 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -101,7 +101,6 @@ import {DbCPStateDatastore} from "./stateCache/datastore/db.js"; import {FileCPStateDatastore} from "./stateCache/datastore/file.js"; import {SyncCommitteeRewards, computeSyncCommitteeRewards} from "./rewards/syncCommitteeRewards.js"; import {AttestationsRewards, computeAttestationsRewards} from "./rewards/attestationsRewards.js"; -import {BalancesTreeCache, BalancesTreeSource} from "./balancesTreeCache.js"; /** * Arbitrary constants, blobs and payloads should be consumed immediately in the same slot @@ -110,11 +109,6 @@ import {BalancesTreeCache, BalancesTreeSource} from "./balancesTreeCache.js"; */ const DEFAULT_MAX_CACHED_PRODUCED_ROOTS = 4; -/** - * The distance between the finalized epoch and the current epoch to consider safe. - */ -const SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF = 3; - export class BeaconChain implements IBeaconChain { readonly genesisTime: UintNum64; readonly genesisValidatorsRoot: Root; @@ -164,7 +158,6 @@ export class BeaconChain implements IBeaconChain { readonly beaconProposerCache: BeaconProposerCache; readonly checkpointBalancesCache: CheckpointBalancesCache; readonly shufflingCache: ShufflingCache; - readonly balancesTreeCache: BalancesTreeCache; /** Map keyed by executionPayload.blockHash of the block for those blobs */ readonly producedContentsCache = new Map(); @@ -253,7 +246,6 @@ export class BeaconChain implements IBeaconChain { this.beaconProposerCache = new BeaconProposerCache(opts); this.checkpointBalancesCache = new CheckpointBalancesCache(); - this.balancesTreeCache = new BalancesTreeCache(metrics); // Restore state caches // anchorState may already by a CachedBeaconState. 
If so, don't create the cache again, since deserializing all @@ -267,7 +259,6 @@ export class BeaconChain implements IBeaconChain { config, pubkey2index: new PubkeyIndexMap(), index2pubkey: [], - balancesTreeCache: this.balancesTreeCache, }); this.shufflingCache = cachedState.epochCtx.shufflingCache = new ShufflingCache(metrics, logger, this.opts, [ @@ -883,21 +874,6 @@ export class BeaconChain implements IBeaconChain { } } - async pruneOnFinalized(finalizedEpoch: Epoch): Promise { - const prunedStates = await this.regen.pruneOnFinalized(finalizedEpoch); - const head = this.forkChoice.getHead(); - // if the node is syncing or unfinality time, we don't want to reuse balances tree - if (this.opts.reuseBalancesTree && - this.clock.currentEpoch - finalizedEpoch <= SAFE_FINALIZED_EPOCH_TO_CURRENT_EPOCH_DIFF && - head.slot === this.clock.currentSlot && - prunedStates) { - // cp states on the same epoch shares the same balances seed tree so only need one of them - for (const states of prunedStates.values()) { - this.balancesTreeCache.processUnusedState(states[0], BalancesTreeSource.PRUNE_ON_FINALIZED); - } - } - } - /** * Regenerate state for attestation verification, this does not happen with default chain option of maxSkipSlots = 32 . * However, need to handle just in case. Lodestar doesn't support multiple regen state requests for attestation verification diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 16050a9becc2..3b44ffd594ae 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -242,8 +242,6 @@ export interface IBeaconChain { blockRef: BeaconBlock | BlindedBeaconBlock, validatorIds?: (ValidatorIndex | string)[] ): Promise; - - pruneOnFinalized(finalizedEpoch: Epoch): Promise; } export type SSZObjectType = diff --git a/packages/beacon-node/src/chain/options.ts b/packages/beacon-node/src/chain/options.ts index 0668082266e6..bc2b73256272 100644 --- a/packages/beacon-node/src/chain/options.ts +++ b/packages/beacon-node/src/chain/options.ts @@ -38,8 +38,6 @@ export type IChainOptions = BlockProcessOpts & archiveBlobEpochs?: number; nHistoricalStates?: boolean; nHistoricalStatesFileDataStore?: boolean; - /** Reuse balances tree or not */ - reuseBalancesTree?: boolean; }; export type BlockProcessOpts = { @@ -117,5 +115,4 @@ export const defaultChainOptions: IChainOptions = { nHistoricalStatesFileDataStore: false, maxBlockStates: DEFAULT_MAX_BLOCK_STATES, maxCPStateEpochsInMemory: DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY, - reuseBalancesTree: false, }; diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 5ae76c2d41a3..694e8635a3b7 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -148,26 +148,16 @@ export class QueuedStateRegenerator implements IStateRegenerator { this.blockStateCache.prune(headStateRoot); } - async pruneOnFinalized(finalizedEpoch: number): Promise | null> { - const prunedStates = await this.checkpointStateCache.pruneFinalized(finalizedEpoch); + pruneOnFinalized(finalizedEpoch: number): void { + this.checkpointStateCache.pruneFinalized(finalizedEpoch); this.blockStateCache.deleteAllBeforeEpoch(finalizedEpoch); - - return prunedStates; } - async processState( - blockRootHex: RootHex, - postState: CachedBeaconStateAllForks - ): Promise | null> { + processState(blockRootHex: RootHex, postState: CachedBeaconStateAllForks): void { 
this.blockStateCache.add(postState); - let prunedStates: Map | null = null; - try { - prunedStates = await this.checkpointStateCache.processState(blockRootHex, postState); - } catch (e) { - this.logger.debug("Error processing block state", {blockRootHex, slot: postState.slot}, e as Error); - } - - return prunedStates; + this.checkpointStateCache.processState(blockRootHex, postState).catch((e) => { + this.logger.debug("Error processing block state", {blockRootHex, slot: postState.slot}, e); + }); } addCheckpointState(cp: phase0.Checkpoint, item: CachedBeaconStateAllForks): void { diff --git a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts index 6c8074539d63..38aeabb97955 100644 --- a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts @@ -59,9 +59,9 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { return this.getLatest(rootHex, maxEpoch, opts); } - async processState(): Promise | null> { + async processState(): Promise { // do nothing, this class does not support prunning - return null; + return 0; } get(cp: CheckpointHex, opts?: StateCloneOpts): CachedBeaconStateAllForks | null { @@ -122,17 +122,12 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { return previousHits; } - async pruneFinalized(finalizedEpoch: Epoch): Promise> { - const result = new Map(); - + pruneFinalized(finalizedEpoch: Epoch): void { for (const epoch of this.epochIndex.keys()) { if (epoch < finalizedEpoch) { - const deletedStates = this.deleteAllEpochItems(epoch); - result.set(epoch, deletedStates); + this.deleteAllEpochItems(epoch); } } - - return result; } prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void { @@ -158,19 +153,11 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { } } - deleteAllEpochItems(epoch: Epoch): CachedBeaconStateAllForks[] { - const states = []; + deleteAllEpochItems(epoch: Epoch): void { for (const rootHex of this.epochIndex.get(epoch) || []) { - const key = toCheckpointKey({rootHex, epoch}); - const state = this.cache.get(key); - if (state) { - states.push(state); - } - this.cache.delete(key); + this.cache.delete(toCheckpointKey({rootHex, epoch})); } this.epochIndex.delete(epoch); - - return states; } clear(): void { diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index 4d3e50d78d7f..0719efcfd309 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -410,23 +410,14 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { /** * Prune all checkpoint states before the provided finalized epoch. 
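The `QueuedStateRegenerator` revert above goes back to fire-and-forget semantics: checkpoint-state persistence runs in the background and failures are only logged, never surfaced to block import. A minimal sketch of that pattern, with hypothetical helper names and a simplified logger type:

```ts
// Sketch: fire-and-forget async work whose errors are logged, never thrown to the caller.
type Logger = {debug: (msg: string, ctx?: Record<string, unknown>, e?: Error) => void};

// stand-in for checkpointStateCache.processState(); may reject
async function persistCheckpointStates(_blockRootHex: string, _slot: number): Promise<number> {
  return 0;
}

function processStateNonBlocking(blockRootHex: string, slot: number, logger: Logger): void {
  // the caller returns immediately; persistence happens on its own time
  persistCheckpointStates(blockRootHex, slot).catch((e) => {
    logger.debug("Error processing block state", {blockRootHex, slot}, e as Error);
  });
}
```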
*/ - async pruneFinalized(finalizedEpoch: Epoch): Promise | null> { - const result = new Map(); - + pruneFinalized(finalizedEpoch: Epoch): void { for (const epoch of this.epochIndex.keys()) { if (epoch < finalizedEpoch) { - try { - const prunedStates = await this.deleteAllEpochItems(epoch); - result.set(epoch, prunedStates); - } catch (e) { - this.logger.debug("Error prune finalized epoch", {epoch, finalizedEpoch}, e as Error); - } + this.deleteAllEpochItems(epoch).catch((e) => + this.logger.debug("Error delete all epoch items", {epoch, finalizedEpoch}, e as Error) + ); } } - - // we may persist states even before they are finalized - // in that case this return null - return result; } /** @@ -478,16 +469,13 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk, there are also 2 checkpoint states in memory at epoch n, same to the above (maxEpochsInMemory=1) * * As of Mar 2024, it takes <=350ms to persist a holesky state on fast server - * This function returns a map of pruned states for each epoch */ - async processState( - blockRootHex: RootHex, - state: CachedBeaconStateAllForks - ): Promise | null> { + async processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise { + let persistCount = 0; // it's important to sort the epochs in ascending order, in case of big reorg we always want to keep the most recent checkpoint states const sortedEpochs = Array.from(this.epochIndex.keys()).sort((a, b) => a - b); if (sortedEpochs.length <= this.maxEpochsInMemory) { - return null; + return 0; } const blockSlot = state.slot; @@ -503,19 +491,24 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { // normally the block persist happens at 2/3 of slot 0 of epoch, if it's already late then just skip to allow other tasks to run // there are plenty of chances in the same epoch to persist checkpoint states, also if block is late it could be reorged this.logger.verbose("Skip persist checkpoint states", {blockSlot, root: blockRootHex}); - return null; + return 0; } const persistEpochs = sortedEpochs.slice(0, sortedEpochs.length - this.maxEpochsInMemory); - - const result = new Map(); for (const lowestEpoch of persistEpochs) { // usually there is only 0 or 1 epoch to persist in this loop - const prunedStates = await this.processPastEpoch(blockRootHex, state, lowestEpoch); - result.set(lowestEpoch, prunedStates); + persistCount += await this.processPastEpoch(blockRootHex, state, lowestEpoch); } - return result; + if (persistCount > 0) { + this.logger.verbose("Persisted checkpoint states", { + slot: blockSlot, + root: blockRootHex, + persistCount, + persistEpochs: persistEpochs.length, + }); + } + return persistCount; } /** @@ -641,7 +634,6 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { * - PRCS is the checkpoint state that could be justified/finalized later based on the view of the state * - unknown root checkpoint state is persisted to handle the reorg back to that branch later * - * This returns pruned states in the epoch * Performance note: * - In normal condition, we persist 1 checkpoint state per epoch. * - In reorged condition, we may persist multiple (most likely 2) checkpoint states per epoch. 
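A worked example of the epoch-selection step in `processState` above: cached epochs are sorted ascending and only those beyond the most recent `maxEpochsInMemory` are persisted, which is usually zero or one epoch per call. Sketch with plain numbers, not code from this patch:

```ts
// Sketch: keep the most recent `maxEpochsInMemory` epochs in memory, persist the rest.
function selectEpochsToPersist(cachedEpochs: number[], maxEpochsInMemory: number): number[] {
  // ascending sort so that, on a deep reorg, the most recent checkpoints stay in memory
  const sorted = [...cachedEpochs].sort((a, b) => a - b);
  if (sorted.length <= maxEpochsInMemory) return [];
  return sorted.slice(0, sorted.length - maxEpochsInMemory);
}

console.log(selectEpochsToPersist([22, 20, 21], 2)); // [20]
console.log(selectEpochsToPersist([21, 22], 2)); // []
```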
@@ -650,9 +642,8 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { blockRootHex: RootHex, state: CachedBeaconStateAllForks, epoch: Epoch - ): Promise { + ): Promise { let persistCount = 0; - const prunedStates: CachedBeaconStateAllForks[] = []; const epochBoundarySlot = computeStartSlotAtEpoch(epoch); const epochBoundaryRoot = epochBoundarySlot === state.slot ? fromHex(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot); @@ -740,41 +731,24 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { this.metrics?.cpStateCache.statePruneFromMemoryCount.inc(); this.logger.verbose("Pruned checkpoint state from memory", logMeta); } - - prunedStates.push(state); } } - if (persistCount > 0) { - this.logger.verbose("Persisted checkpoint states", { - stateSlot: state.slot, - blockRoot: blockRootHex, - persistCount, - }); - } - - return prunedStates; + return persistCount; } /** * Delete all items of an epoch from disk and memory */ - private async deleteAllEpochItems(epoch: Epoch): Promise { + private async deleteAllEpochItems(epoch: Epoch): Promise { let persistCount = 0; const rootHexes = this.epochIndex.get(epoch) || []; - const prunedStates: CachedBeaconStateAllForks[] = []; for (const rootHex of rootHexes) { const key = toCacheKey({rootHex, epoch}); const cacheItem = this.cache.get(key); if (cacheItem) { - let persistedKey: Uint8Array | undefined = undefined; - if (isPersistedCacheItem(cacheItem)) { - persistedKey = cacheItem.value; - } else { - persistedKey = cacheItem.persistedKey; - prunedStates.push(cacheItem.state); - } + const persistedKey = isPersistedCacheItem(cacheItem) ? cacheItem.value : cacheItem.persistedKey; if (persistedKey) { await this.datastore.remove(persistedKey); persistCount++; @@ -789,8 +763,6 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { persistCount, rootHexes: Array.from(rootHexes).join(","), }); - - return prunedStates; } /** diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts index 3d2d153d4610..1e8d6bd1bd62 100644 --- a/packages/beacon-node/src/chain/stateCache/types.ts +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -72,11 +72,8 @@ export interface CheckpointStateCache { ): Promise; updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void; - pruneFinalized(finalizedEpoch: Epoch): Promise | null>; - processState( - blockRootHex: RootHex, - state: CachedBeaconStateAllForks - ): Promise | null>; + pruneFinalized(finalizedEpoch: Epoch): void; + processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise; clear(): void; dumpSummary(): routes.lodestar.StateCacheItem[]; /** Expose beacon states stored in cache. 
Use with caution */ diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index 2664c33b8f4c..f15e195faa20 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -19,7 +19,6 @@ import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; import {OpSource} from "../validatorMonitor.js"; import {CacheItemType} from "../../chain/stateCache/types.js"; import {AllocSource} from "../../util/bufferPool.js"; -import {BalancesTreeSource} from "../../chain/balancesTreeCache.js"; export type LodestarMetrics = ReturnType; @@ -1333,26 +1332,6 @@ export function createLodestarMetrics( }), }, - balancesTreeCache: { - size: register.gauge({ - name: "lodestar_balances_tree_cache_size", - help: "Balances tree cache size", - }), - total: register.gauge<{source: BalancesTreeSource}>({ - name: "lodestar_balances_tree_cache_total", - help: "Total number of balances tree cache", - labelNames: ["source"], - }), - hit: register.gauge({ - name: "lodestar_balances_tree_cache_hit_total", - help: "Total number of balances tree cache hits", - }), - miss: register.gauge({ - name: "lodestar_balances_tree_cache_miss_total", - help: "Total number of balances tree cache misses", - }), - }, - seenCache: { aggregatedAttestations: { superSetCheckTotal: register.histogram({ diff --git a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts index 132e36e4eed4..f98b180fa983 100644 --- a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts +++ b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts @@ -1,8 +1,8 @@ import {describe, it, expect, beforeAll, beforeEach} from "vitest"; import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import {CachedBeaconStateAllForks, computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; -import {Epoch, RootHex, phase0} from "@lodestar/types"; -import {mapValues, toHex, toRootHex} from "@lodestar/utils"; +import {RootHex, phase0} from "@lodestar/types"; +import {mapValues, toHexString} from "@lodestar/utils"; import {PersistentCheckpointStateCache} from "../../../../src/chain/stateCache/persistentCheckpointsCache.js"; import {checkpointToDatastoreKey} from "../../../../src/chain/stateCache/datastore/index.js"; import {generateCachedState} from "../../../utils/state.js"; @@ -45,7 +45,7 @@ describe("PersistentCheckpointStateCache", () => { cp1 = {epoch: 21, root: root1}; cp2 = {epoch: 22, root: root2}; [cp0aHex, cp0bHex, cp1Hex, cp2Hex] = [cp0a, cp0b, cp1, cp2].map((cp) => toCheckpointHex(cp)); - persistent0bKey = toHex(checkpointToDatastoreKey(cp0b)); + persistent0bKey = toHexString(checkpointToDatastoreKey(cp0b)); const allStates = [cp0a, cp0b, cp1, cp2] .map((cp) => generateCachedState({slot: cp.epoch * SLOTS_PER_EPOCH})) .map((state, i) => { @@ -117,7 +117,7 @@ describe("PersistentCheckpointStateCache", () => { it("getOrReloadLatest", async () => { cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); // cp0b is persisted expect(fileApisBuffer.size).toEqual(1); @@ -140,7 +140,7 @@ describe("PersistentCheckpointStateCache", () => { expect(((await 
cache.getStateOrBytes(cp0bHex)) as CachedBeaconStateAllForks).hashTreeRoot()).toEqual( states["cp0b"].hashTreeRoot() ); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); // cp0 is persisted expect(fileApisBuffer.size).toEqual(1); expect(Array.from(fileApisBuffer.keys())).toEqual([persistent0bKey]); @@ -150,7 +150,7 @@ describe("PersistentCheckpointStateCache", () => { // cp2 is in memory expect(cache.get(cp2Hex)).not.toBeNull(); // finalize epoch cp2 - await cache.pruneFinalized(cp2.epoch); + cache.pruneFinalized(cp2.epoch); expect(fileApisBuffer.size).toEqual(0); expect(cache.get(cp1Hex)).toBeNull(); expect(cache.get(cp2Hex)).not.toBeNull(); @@ -184,7 +184,7 @@ describe("PersistentCheckpointStateCache", () => { // 0a it("single state at lowest memory epoch", async () => { cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); }); @@ -200,7 +200,7 @@ describe("PersistentCheckpointStateCache", () => { // cp1a={0a, 21} {0a, 22}=cp2a it("multiple states at lowest memory epoch", async () => { cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); const cp1a = {epoch: 21, root: root0a}; const cp1aState = states["cp0a"].clone(); @@ -220,7 +220,7 @@ describe("PersistentCheckpointStateCache", () => { const state3 = cp2aState.clone(); state3.slot = 22 * SLOTS_PER_EPOCH + 3; state3.commit(); - expect(countPrunedStates(await cache.processState(toRootHex(root3), state3))).toEqual(0); + await cache.processState(toHexString(root3), state3); // state of {0a, 21} is choosen because it was built from cp0a expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(cp1aState.hashTreeRoot()); @@ -228,7 +228,7 @@ describe("PersistentCheckpointStateCache", () => { expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); const randomRoot = Buffer.alloc(32, 101); // for other random root it'll pick the first state of epoch 21 which is states["cp1"] - expect(cache.findSeedStateToReload({epoch: 20, rootHex: toRootHex(randomRoot)})?.hashTreeRoot()).toEqual( + expect(cache.findSeedStateToReload({epoch: 20, rootHex: toHexString(randomRoot)})?.hashTreeRoot()).toEqual( states["cp1"].hashTreeRoot() ); }); @@ -262,7 +262,7 @@ describe("PersistentCheckpointStateCache", () => { it("no reorg", async () => { expect(fileApisBuffer.size).toEqual(0); cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -271,7 +271,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; const root3 = Buffer.alloc(32, 100); // process state of 
root3 - await cache.processState(toRootHex(root3), blockStateRoot3); + await cache.processState(toHexString(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 22 has 1 checkpoint state @@ -297,7 +297,7 @@ describe("PersistentCheckpointStateCache", () => { // mostly the same to the above test expect(fileApisBuffer.size).toEqual(0); cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -306,14 +306,14 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; const root3 = Buffer.alloc(32, 100); // process state of root3 - await cache.processState(toRootHex(root3), blockStateRoot3); + await cache.processState(toHexString(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); const blockStateRoot4 = states["cp2"].clone(); blockStateRoot4.slot = 22 * SLOTS_PER_EPOCH + 4; const root4 = Buffer.alloc(32, 101); // process state of root4 - await cache.processState(toRootHex(root4), blockStateRoot4); + await cache.processState(toHexString(root4), blockStateRoot4); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 22 has 1 checkpoint state @@ -341,7 +341,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 1 epoch", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // regen generates cp2a @@ -360,7 +360,7 @@ describe("PersistentCheckpointStateCache", () => { const root3 = Buffer.alloc(32, 101); // process state of root3 - await cache.processState(toRootHex(root3), blockStateRoot3); + await cache.processState(toHexString(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 22 has 2 checkpoint states expect(cache.get(cp2Hex)).not.toBeNull(); @@ -385,7 +385,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 2 epochs", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // reload cp0b from disk @@ -413,7 +413,7 @@ describe("PersistentCheckpointStateCache", () => { const root3 = Buffer.alloc(32, 101); // process state of root3 - await cache.processState(toRootHex(root3), blockStateRoot3); + await cache.processState(toHexString(root3), blockStateRoot3); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // epoch 21 and 22 have 2 checkpoint states expect(cache.get(cp1Hex)).not.toBeNull(); @@ -438,7 +438,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 3 epochs, persist cp 0a", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + 
expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // cp0a was pruned from memory and not in disc expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -469,7 +469,7 @@ describe("PersistentCheckpointStateCache", () => { const root3 = Buffer.alloc(32, 100); // process state of root3 - expect(countPrunedStates(await cache.processState(toRootHex(root3), blockStateRoot3))).toEqual(1); + expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); // epoch 21 and 22 have 2 checkpoint states expect(cache.get(cp1Hex)).not.toBeNull(); @@ -494,7 +494,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 3 epochs, prune but no persist", async () => { // process root2 state cache.add(cp2, states["cp2"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp2.root), states["cp2"]))).toEqual(2); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); // cp0a was pruned from memory and not in disc expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -524,7 +524,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; const root3 = Buffer.alloc(32, 100); // process state of root3, nothing is persisted - expect(countPrunedStates(await cache.processState(toRootHex(root3), blockStateRoot3))).toEqual(1); + expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(0); // but state of cp0b is pruned from memory expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -567,7 +567,7 @@ describe("PersistentCheckpointStateCache", () => { it("no reorg", async () => { expect(fileApisBuffer.size).toEqual(0); cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -576,7 +576,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2 - await cache.processState(toRootHex(root2), blockStateRoot2); + await cache.processState(toHexString(root2), blockStateRoot2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); @@ -602,7 +602,7 @@ describe("PersistentCheckpointStateCache", () => { // almost the same to "no reorg" test expect(fileApisBuffer.size).toEqual(0); cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -611,14 +611,14 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; 
const root2 = Buffer.alloc(32, 100); // process state of root2 - await cache.processState(toRootHex(root2), blockStateRoot2); + await cache.processState(toHexString(root2), blockStateRoot2); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); const blockStateRoot3 = states["cp1"].clone(); blockStateRoot3.slot = 21 * SLOTS_PER_EPOCH + 4; const root3 = Buffer.alloc(32, 101); // process state of root3 - await cache.processState(toRootHex(root3), blockStateRoot3); + await cache.processState(toHexString(root3), blockStateRoot3); // epoch 21 has 1 checkpoint state expect(cache.get(cp1Hex)).not.toBeNull(); @@ -646,13 +646,13 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(0); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); expect(fileApisBuffer.size).toEqual(0); await assertPersistedCheckpointState([], []); // cp1 cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -667,7 +667,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2 - expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(0); + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); // keep these 2 cp states at epoch 21 @@ -686,7 +686,7 @@ describe("PersistentCheckpointStateCache", () => { expect(fileApisBuffer.size).toEqual(0); // cp1 cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -706,7 +706,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2, nothing is persisted - expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(1); + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); // but cp0b in-memory state is pruned expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -731,7 +731,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0a"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(0); + expect(await cache.processState(toHexString(cp1.root), 
states["cp1"])).toEqual(0); expect(fileApisBuffer.size).toEqual(0); // at epoch 20, there should be 2 cps in memory expect(cache.get(cp0aHex)).not.toBeNull(); @@ -740,7 +740,7 @@ describe("PersistentCheckpointStateCache", () => { // cp1 cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -762,7 +762,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2, persist cp0a - expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(1); + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); // keep these 2 cp states at epoch 21 @@ -782,7 +782,7 @@ describe("PersistentCheckpointStateCache", () => { it("reorg 2 epochs", async () => { // cp1 cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(2); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); expect(fileApisBuffer.size).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); @@ -804,7 +804,7 @@ describe("PersistentCheckpointStateCache", () => { blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; const root2 = Buffer.alloc(32, 100); // process state of root2, persist cp0a - expect(countPrunedStates(await cache.processState(toRootHex(root2), blockStateRoot2))).toEqual(1); + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); // keep these 2 cp states at epoch 21 @@ -837,7 +837,7 @@ describe("PersistentCheckpointStateCache", () => { // | // 0a it("no reorg", async () => { - expect(countPrunedStates(await cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -846,7 +846,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(countPrunedStates(await cache.processState(toRootHex(root1a), state1a))).toEqual(0); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); // nothing change expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -861,7 +861,7 @@ describe("PersistentCheckpointStateCache", () => { // | \ | // 0a \------root1b it("reorg in same epoch", async () => { - expect(countPrunedStates(await cache.processState(toRootHex(root0b), 
states["cp0b"]))).toEqual(2); + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -870,7 +870,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(countPrunedStates(await cache.processState(toRootHex(root1a), state1a))).toEqual(0); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); // nothing change expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -886,7 +886,7 @@ describe("PersistentCheckpointStateCache", () => { state1b.slot = state1a.slot + 1; state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); // but no need to persist cp1b - expect(countPrunedStates(await cache.processState(toRootHex(root1b), state1b))).toEqual(1); + expect(await cache.processState(toHexString(root1b), state1b)).toEqual(0); // although states["cp0b"] is pruned expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -934,7 +934,7 @@ describe("PersistentCheckpointStateCache", () => { cache.add(cp0a, states["cp0a"]); // need to persist 2 checkpoint states - expect(countPrunedStates(await cache.processState(toRootHex(root1b), state1b))).toEqual(2); + expect(await cache.processState(toHexString(root1b), state1b)).toEqual(2); // both are persisited expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); expect(await cache.getStateOrBytes(cp0aHex)).toEqual(stateBytes["cp0a"]); @@ -949,7 +949,7 @@ describe("PersistentCheckpointStateCache", () => { // | | // 0a---------root1b it("reorg 1 epoch, processState twice", async () => { - expect(countPrunedStates(await cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); @@ -958,7 +958,7 @@ describe("PersistentCheckpointStateCache", () => { const state1a = states["cp0b"].clone(); state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); - expect(countPrunedStates(await cache.processState(toRootHex(root1a), state1a))).toEqual(0); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); // nothing change expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); @@ -970,7 +970,7 @@ describe("PersistentCheckpointStateCache", () => { state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); // regen should reload cp0a from disk cache.add(cp0a, states["cp0a"]); - expect(countPrunedStates(await cache.processState(toRootHex(root1b), state1b))).toEqual(1); + expect(await cache.processState(toHexString(root1b), state1b)).toEqual(1); await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); // both cp0a and cp0b are persisted @@ -988,13 +988,13 @@ describe("PersistentCheckpointStateCache", () => { // ^ // {0a, 21}=cp1a it("reorg 2 epochs", async () => { - expect(countPrunedStates(await 
cache.processState(toRootHex(root0b), states["cp0b"]))).toEqual(2); + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); expect(await cache.getStateOrBytes(cp0aHex)).toBeNull(); expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); cache.add(cp1, states["cp1"]); - expect(countPrunedStates(await cache.processState(toRootHex(cp1.root), states["cp1"]))).toEqual(1); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); await assertPersistedCheckpointState([cp0b, cp1], [stateBytes["cp0b"], stateBytes["cp1"]]); // regen should populate cp0a and cp1a checkpoint states @@ -1010,7 +1010,7 @@ describe("PersistentCheckpointStateCache", () => { const state2 = cp1aState.clone(); state2.slot = 21 * SLOTS_PER_EPOCH + 3; state2.blockRoots.set(state2.slot % SLOTS_PER_HISTORICAL_ROOT, root2); - expect(countPrunedStates(await cache.processState(toRootHex(root2), state2))).toEqual(2); + expect(await cache.processState(toHexString(root2), state2)).toEqual(2); // expect 4 cp states are persisted await assertPersistedCheckpointState( [cp0b, cp1, cp0a, cp1a], @@ -1021,7 +1021,7 @@ describe("PersistentCheckpointStateCache", () => { }); async function assertPersistedCheckpointState(cps: phase0.Checkpoint[], stateBytesArr: Uint8Array[]): Promise { - const persistedKeys = cps.map((cp) => toHex(checkpointToDatastoreKey(cp))); + const persistedKeys = cps.map((cp) => toHexString(checkpointToDatastoreKey(cp))); expect(Array.from(fileApisBuffer.keys())).toStrictEqual(persistedKeys); for (const [i, persistedKey] of persistedKeys.entries()) { expect(fileApisBuffer.get(persistedKey)).toStrictEqual(stateBytesArr[i]); @@ -1034,15 +1034,3 @@ describe("PersistentCheckpointStateCache", () => { } } }); - -function countPrunedStates(prunedStates: Map | null): number { - if (!prunedStates) { - return 0; - } - - let count = 0; - for (const states of prunedStates.values()) { - count += states.length; - } - return count; -} diff --git a/packages/cli/src/options/beaconNodeOptions/chain.ts b/packages/cli/src/options/beaconNodeOptions/chain.ts index 145fc02984da..78ffd47da8f4 100644 --- a/packages/cli/src/options/beaconNodeOptions/chain.ts +++ b/packages/cli/src/options/beaconNodeOptions/chain.ts @@ -32,7 +32,6 @@ export type ChainArgs = { "chain.nHistoricalStatesFileDataStore"?: boolean; "chain.maxBlockStates"?: number; "chain.maxCPStateEpochsInMemory"?: number; - "chain.reuseBalancesTree"?: boolean; }; export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { @@ -67,7 +66,6 @@ export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { args["chain.nHistoricalStatesFileDataStore"] ?? defaultOptions.chain.nHistoricalStatesFileDataStore, maxBlockStates: args["chain.maxBlockStates"] ?? defaultOptions.chain.maxBlockStates, maxCPStateEpochsInMemory: args["chain.maxCPStateEpochsInMemory"] ?? defaultOptions.chain.maxCPStateEpochsInMemory, - reuseBalancesTree: args["chain.reuseBalancesTree"] ?? defaultOptions.chain.reuseBalancesTree, }; } @@ -275,12 +273,4 @@ Will double processing times. 
Use only for debugging purposes.", default: defaultOptions.chain.maxCPStateEpochsInMemory, group: "chain", }, - - "chain.reuseBalancesTree": { - hidden: true, - description: "Reuse balances tree or not", - type: "boolean", - default: defaultOptions.chain.reuseBalancesTree, - group: "chain", - }, }; diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index 3ae145de58fc..879b5bfa2fc9 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -43,7 +43,6 @@ describe("options / beaconNodeOptions", () => { "chain.nHistoricalStatesFileDataStore": true, "chain.maxBlockStates": 100, "chain.maxCPStateEpochsInMemory": 100, - "chain.reuseBalancesTree": true, emitPayloadAttributes: false, eth1: true, @@ -152,7 +151,6 @@ describe("options / beaconNodeOptions", () => { nHistoricalStatesFileDataStore: true, maxBlockStates: 100, maxCPStateEpochsInMemory: 100, - reuseBalancesTree: true, }, eth1: { enabled: true, diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index 4c77c4137db0..66ce12b82d18 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -67,7 +67,6 @@ import { SyncCommitteeCacheEmpty, } from "./syncCommitteeCache.js"; import {CachedBeaconStateAllForks} from "./stateCache.js"; -import {IBalancesTreeCache} from "./balancesTreeCache.js"; /** `= PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT)` */ export const PROPOSER_WEIGHT_FACTOR = PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT); @@ -77,7 +76,6 @@ export type EpochCacheImmutableData = { pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; shufflingCache?: IShufflingCache; - balancesTreeCache?: IBalancesTreeCache; }; export type EpochCacheOpts = { @@ -137,7 +135,6 @@ export class EpochCache { * Unique pubkey registry shared in the same fork. There should only exist one for the fork. */ unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; - balancesTreeCache?: IBalancesTreeCache; /** * ShufflingCache is passed in from `beacon-node` so should be available at runtime but may not be * present during testing. @@ -276,7 +273,6 @@ export class EpochCache { pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; - balancesTreeCache?: IBalancesTreeCache; shufflingCache?: IShufflingCache; proposers: number[]; proposersPrevEpoch: number[] | null; @@ -310,7 +306,6 @@ export class EpochCache { this.pubkey2index = data.pubkey2index; this.index2pubkey = data.index2pubkey; this.unfinalizedPubkey2index = data.unfinalizedPubkey2index; - this.balancesTreeCache = data.balancesTreeCache; this.shufflingCache = data.shufflingCache; this.proposers = data.proposers; this.proposersPrevEpoch = data.proposersPrevEpoch; @@ -349,7 +344,7 @@ export class EpochCache { */ static createFromState( state: BeaconStateAllForks, - {config, pubkey2index, index2pubkey, balancesTreeCache, shufflingCache}: EpochCacheImmutableData, + {config, pubkey2index, index2pubkey, shufflingCache}: EpochCacheImmutableData, opts?: EpochCacheOpts ): EpochCache { const currentEpoch = computeEpochAtSlot(state.slot); @@ -558,7 +553,6 @@ export class EpochCache { index2pubkey, // `createFromFinalizedState()` creates cache with empty unfinalizedPubkey2index. 
Be cautious to only pass in finalized state unfinalizedPubkey2index: newUnfinalizedPubkeyIndexMap(), - balancesTreeCache, shufflingCache, proposers, // On first epoch, set to null to prevent unnecessary work since this is only used for metrics @@ -605,7 +599,6 @@ export class EpochCache { index2pubkey: this.index2pubkey, // No need to clone this reference. On each mutation the `unfinalizedPubkey2index` reference is replaced, @see `addPubkey` unfinalizedPubkey2index: this.unfinalizedPubkey2index, - balancesTreeCache: this.balancesTreeCache, shufflingCache: this.shufflingCache, // Immutable data proposers: this.proposers, diff --git a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts index 5b42f4175a04..ef074dfd6820 100644 --- a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts +++ b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts @@ -39,7 +39,7 @@ export function processRewardsAndPenalties( // important: do not change state one balance at a time. Set them all at once, constructing the tree in one go // cache the balances array, too - state.balances = ssz.phase0.Balances.toViewDU(balances, state.epochCtx.balancesTreeCache?.getUnusedBalances()); + state.balances = ssz.phase0.Balances.toViewDU(balances); // For processEffectiveBalanceUpdates() to prevent having to re-compute the balances array. // For validator metrics diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index ac6fa8b3d1fc..600bbf173462 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -43,7 +43,6 @@ export { } from "./cache/epochCache.js"; export {toMemoryEfficientHexStr} from "./cache/pubkeyCache.js"; export {type EpochTransitionCache, beforeProcessEpoch} from "./cache/epochTransitionCache.js"; -export type {IBalancesTreeCache} from "./cache/balancesTreeCache.js"; // Aux data-structures export { From f26181217df7c4a051caf2770a0942a23c4ce7e3 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 18 Oct 2024 09:45:30 +0700 Subject: [PATCH 13/15] fix: check types --- packages/beacon-node/test/spec/utils/runValidSszTest.ts | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/beacon-node/test/spec/utils/runValidSszTest.ts b/packages/beacon-node/test/spec/utils/runValidSszTest.ts index 748a7770b19c..32c3b91d5f60 100644 --- a/packages/beacon-node/test/spec/utils/runValidSszTest.ts +++ b/packages/beacon-node/test/spec/utils/runValidSszTest.ts @@ -81,14 +81,11 @@ export function runValidSszTest(type: Type, testData: ValidTestCaseData // 0x0000000000000000000000000000000000000000000000000000000000000000 if (process.env.RENDER_ROOTS) { if (type.isBasic) { - console.log("ROOTS Basic", toHexString(type.serialize(testDataValue))); + console.log("Chunk Basic", toHexString(type.serialize(testDataValue))); } else { // biome-ignore lint/complexity/useLiteralKeys: The `getRoots` is a protected attribute - const roots = (type as CompositeType)["getRoots"](testDataValue); - console.log( - "ROOTS Composite", - roots.map((root) => toHexString(root)) - ); + const chunkBytes = (type as CompositeType)["getChunkBytes"](testDataValue); + console.log("Chunk Bytes Composite", toHexString(chunkBytes)); } } From f752f36974898ea47b3a5d15a5e103007f7c0cbe Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 18 Oct 2024 10:21:03 +0700 Subject: [PATCH 14/15] fix: remove BalancesTreeCache interface --- 
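For readers following the RENDER_ROOTS debug-output change in runValidSszTest above: with the batch-hashing ssz changes this series links to, a composite type exposes its leaf data as one contiguous buffer of 32-byte chunks ("Chunk Bytes Composite") rather than an array of per-leaf roots, and the hash tree root is obtained by hashing adjacent sibling chunks level by level. The sketch below is a simplified, hypothetical illustration of that merkleization, not the library's implementation: it uses node:crypto instead of @chainsafe/as-sha256, ignores SSZ list-length mix-ins and fixed chunk limits, and merkleizeChunkBytes / sha256Pair are made-up helper names.

import {createHash} from "node:crypto";

function sha256Pair(left: Uint8Array, right: Uint8Array): Uint8Array {
  return new Uint8Array(createHash("sha256").update(left).update(right).digest());
}

// Merkleize a contiguous buffer of 32-byte chunks (the "Chunk Bytes Composite"
// debug output above): hash sibling pairs level by level, padding an odd level
// with the zero-subtree hash of that level.
function merkleizeChunkBytes(chunkBytes: Uint8Array): Uint8Array {
  let level: Uint8Array[] = [];
  for (let i = 0; i < chunkBytes.length; i += 32) {
    level.push(chunkBytes.subarray(i, i + 32));
  }
  let zero = new Uint8Array(32);
  if (level.length === 0) level.push(zero);
  while (level.length > 1) {
    if (level.length % 2 === 1) level.push(zero);
    const next: Uint8Array[] = [];
    // sibling chunks sit next to each other in one buffer, which is what lets
    // a batch implementation hash many pairs per pass
    for (let i = 0; i < level.length; i += 2) {
      next.push(sha256Pair(level[i], level[i + 1]));
    }
    zero = sha256Pair(zero, zero); // zero hash for the next level up
    level = next;
  }
  return level[0];
}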
packages/state-transition/src/cache/balancesTreeCache.ts | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 packages/state-transition/src/cache/balancesTreeCache.ts diff --git a/packages/state-transition/src/cache/balancesTreeCache.ts b/packages/state-transition/src/cache/balancesTreeCache.ts deleted file mode 100644 index 0466824e490d..000000000000 --- a/packages/state-transition/src/cache/balancesTreeCache.ts +++ /dev/null @@ -1,5 +0,0 @@ -import {UintNumberType, ListBasicTreeViewDU} from "@chainsafe/ssz"; - -export interface IBalancesTreeCache { - getUnusedBalances(): ListBasicTreeViewDU | undefined; -} From 4c31f2e9c495949a65f305a5d1a353ef6b62dd32 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 19 Oct 2024 15:47:43 +0700 Subject: [PATCH 15/15] feat: beacon node to compute post state roto in batch --- .../blocks/verifyBlocksStateTransitionOnly.ts | 9 ++++++++- .../chain/produceBlock/computeNewStateRoot.ts | 8 +++++++- .../state-transition/src/stateTransition.ts | 20 ++++++++++--------- 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts index 49cd46220008..53f4e5411cf0 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts @@ -12,6 +12,12 @@ import {BlockProcessOpts} from "../options.js"; import {byteArrayEquals} from "../../util/bytes.js"; import {nextEventLoop} from "../../util/eventLoop.js"; import {BlockInput, ImportBlockOpts} from "./types.js"; +import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; + +/** + * Data in a BeaconBlock is bounded so we can use a single HashComputationGroup for all blocks + */ +const blockHCGroup = new HashComputationGroup(); /** * Verifies 1 or more blocks are fully valid running the full state transition; from a linear sequence of blocks. 
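As a usage note for the blockHCGroup declared in the hunk above: the pattern this patch relies on is a module-level HashComputationGroup passed to batchHashTreeRoot() whenever a post-state root is needed, so the group's internal buffers are recycled across blocks instead of reallocated per call. The sketch below only mirrors the call pattern visible in this patch; batchHashTreeRoot() and HashComputationGroup come from the locally linked ssz packages, and computeStateRoot is a hypothetical helper, not code from the PR.

import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
import {CachedBeaconStateAllForks} from "@lodestar/state-transition";

// One group for all blocks: per-block hashing work is bounded, so the arrays
// the group holds internally can be reused across calls (same reasoning as
// blockHCGroup above).
const hcGroup = new HashComputationGroup();

function computeStateRoot(postState: CachedBeaconStateAllForks) {
  // batchHashTreeRoot() commits pending view changes and then hashes the dirty
  // nodes level by level, allowing sibling hashes to be computed in batches.
  return postState.batchHashTreeRoot(hcGroup);
}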
@@ -63,7 +69,8 @@ export async function verifyBlocksStateTransitionOnly( const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ source: StateHashTreeRootSource.blockTransition, }); - const stateRoot = postState.hashTreeRoot(); + // state root is computed inside stateTransition(), so it should take no time here + const stateRoot = postState.batchHashTreeRoot(blockHCGroup); hashTreeRootTimer?.(); // Check state root matches diff --git a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts index 483a521401ed..55362957f9bb 100644 --- a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts +++ b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts @@ -8,6 +8,12 @@ import { import {BeaconBlock, BlindedBeaconBlock, Gwei, Root} from "@lodestar/types"; import {ZERO_HASH} from "../../constants/index.js"; import {Metrics} from "../../metrics/index.js"; +import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; + +/** + * Data in a BeaconBlock is bounded so we can use a single HashComputationGroup for all blocks + */ +const blockHCGroup = new HashComputationGroup(); /** * Instead of running fastStateTransition(), only need to process block since @@ -49,7 +55,7 @@ export function computeNewStateRoot( source: StateHashTreeRootSource.computeNewStateRoot, }); // state root is computed inside stateTransition(), so it should take no time here - const newStateRoot = postState.hashTreeRoot(); + const newStateRoot = postState.batchHashTreeRoot(blockHCGroup); hashTreeRootTimer?.(); return {newStateRoot, proposerReward}; diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index 7adf872e42f3..7b14727e9970 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -114,15 +114,9 @@ export function stateTransition( // Note: time only on success. This does not include hashTreeRoot() time processBlockTimer?.(); - // TODO - batch: remove processBlockCommitTime? - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ - source: StateHashTreeRootSource.stateTransition, - }); - // commit() is done inside batchHashTreeRoot() - // with batchHashTreeRoot(), we're not able to measure commit() time separately - // note that at commit() phase, we batch hash validators via ListValidatorTreeViewDU so this metric is a little bit confusing - const stateRoot = postState.batchHashTreeRoot(hcGroup); - hashTreeRootTimer?.(); + const processBlockCommitTimer = metrics?.processBlockCommitTime.startTimer(); + postState.commit(); + processBlockCommitTimer?.(); if (metrics) { onPostStateMetrics(postState, metrics); @@ -130,6 +124,14 @@ export function stateTransition( // Verify state root if (verifyStateRoot) { + const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.stateTransition, + }); + // commit() is done inside batchHashTreeRoot() + // with batchHashTreeRoot(), we're not able to measure commit() time separately + // note that at commit() phase, we batch hash validators via ListValidatorTreeViewDU so this metric is a little bit confusing + const stateRoot = postState.batchHashTreeRoot(hcGroup); + hashTreeRootTimer?.(); if (!ssz.Root.equals(block.stateRoot, stateRoot)) { throw new Error( `Invalid state root at slot ${block.slot}, expected=${toRootHex(block.stateRoot)}, actual=${toRootHex(