chore: v1.12.1 release (#6144)
philknows authored Dec 12, 2023
2 parents 7000473 + a488374 commit 85e44ef
Showing 38 changed files with 393 additions and 179 deletions.
2 changes: 1 addition & 1 deletion lerna.json
@@ -4,7 +4,7 @@
],
"npmClient": "yarn",
"useNx": true,
"version": "1.12.0",
"version": "1.12.1",
"stream": true,
"command": {
"version": {
10 changes: 5 additions & 5 deletions packages/api/package.json
@@ -11,7 +11,7 @@
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
"version": "1.12.0",
"version": "1.12.1",
"type": "module",
"exports": {
".": {
@@ -71,10 +71,10 @@
"dependencies": {
"@chainsafe/persistent-merkle-tree": "^0.6.1",
"@chainsafe/ssz": "^0.14.0",
"@lodestar/config": "^1.12.0",
"@lodestar/params": "^1.12.0",
"@lodestar/types": "^1.12.0",
"@lodestar/utils": "^1.12.0",
"@lodestar/config": "^1.12.1",
"@lodestar/params": "^1.12.1",
"@lodestar/types": "^1.12.1",
"@lodestar/utils": "^1.12.1",
"eventsource": "^2.0.2",
"qs": "^6.11.1"
},
1 change: 0 additions & 1 deletion packages/api/src/beacon/routes/beacon/block.ts
@@ -52,7 +52,6 @@ export type BlockHeaderResponse = {
};

export enum BroadcastValidation {
none = "none",
gossip = "gossip",
consensus = "consensus",
consensusAndEquivocation = "consensus_and_equivocation",
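For reference, the enum as it reads after this deletion, reconstructed from the context above. With none gone, API callers either request an explicit validation level or fall back to the server-side default, which the beacon-node change further down in this commit switches to gossip.

export enum BroadcastValidation {
  gossip = "gossip",
  consensus = "consensus",
  consensusAndEquivocation = "consensus_and_equivocation",
}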
4 changes: 2 additions & 2 deletions packages/api/test/unit/beacon/testData/beacon.ts
@@ -59,15 +59,15 @@ export const testData: GenericServerTestCases<Api> = {
res: undefined,
},
publishBlockV2: {
args: [ssz.phase0.SignedBeaconBlock.defaultValue(), {broadcastValidation: BroadcastValidation.none}],
args: [ssz.phase0.SignedBeaconBlock.defaultValue(), {broadcastValidation: BroadcastValidation.consensus}],
res: undefined,
},
publishBlindedBlock: {
args: [getDefaultBlindedBlock(64)],
res: undefined,
},
publishBlindedBlockV2: {
args: [getDefaultBlindedBlock(64), {broadcastValidation: BroadcastValidation.none}],
args: [getDefaultBlindedBlock(64), {broadcastValidation: BroadcastValidation.consensus}],
res: undefined,
},
getBlobSidecars: {
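The test data above fixes the call shape for publishBlockV2: a signed block plus an options bag carrying broadcastValidation. A hypothetical client-side sketch of that call is shown below; getClient, the default config import, and the response shape are assumptions about the @lodestar/api surface rather than something this diff shows.

import {getClient, routes} from "@lodestar/api";
import {config} from "@lodestar/config/default";
import {ssz} from "@lodestar/types";

// Assumed: a beacon node serving the REST API on Lodestar's default port.
const api = getClient({baseUrl: "http://localhost:9596"}, {config});

// Placeholder block, used only to show the call shape; a default-valued phase0 block would
// of course be rejected by a live node.
const signedBlock = ssz.phase0.SignedBeaconBlock.defaultValue();

const res = await api.beacon.publishBlockV2(signedBlock, {
  broadcastValidation: routes.beacon.BroadcastValidation.consensus,
});
// Client routes report failures through the returned status rather than throwing (shape hedged).
if (!res.ok) {
  console.error("publishBlockV2 rejected", res.status);
}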
28 changes: 14 additions & 14 deletions packages/beacon-node/package.json
@@ -11,7 +11,7 @@
"bugs": {
"url": "https://github.com/ChainSafe/lodestar/issues"
},
"version": "1.12.0",
"version": "1.12.1",
"type": "module",
"exports": {
".": {
@@ -100,7 +100,7 @@
"@chainsafe/bls": "7.1.1",
"@chainsafe/blst": "^0.2.9",
"@chainsafe/discv5": "^5.1.0",
"@chainsafe/libp2p-gossipsub": "^10.1.0",
"@chainsafe/libp2p-gossipsub": "^10.1.1",
"@chainsafe/libp2p-noise": "^13.0.1",
"@chainsafe/persistent-merkle-tree": "^0.6.1",
"@chainsafe/prometheus-gc-stats": "^1.0.0",
@@ -119,18 +119,18 @@
"@libp2p/peer-id-factory": "^3.0.4",
"@libp2p/prometheus-metrics": "^2.0.7",
"@libp2p/tcp": "8.0.8",
"@lodestar/api": "^1.12.0",
"@lodestar/config": "^1.12.0",
"@lodestar/db": "^1.12.0",
"@lodestar/fork-choice": "^1.12.0",
"@lodestar/light-client": "^1.12.0",
"@lodestar/logger": "^1.12.0",
"@lodestar/params": "^1.12.0",
"@lodestar/reqresp": "^1.12.0",
"@lodestar/state-transition": "^1.12.0",
"@lodestar/types": "^1.12.0",
"@lodestar/utils": "^1.12.0",
"@lodestar/validator": "^1.12.0",
"@lodestar/api": "^1.12.1",
"@lodestar/config": "^1.12.1",
"@lodestar/db": "^1.12.1",
"@lodestar/fork-choice": "^1.12.1",
"@lodestar/light-client": "^1.12.1",
"@lodestar/logger": "^1.12.1",
"@lodestar/params": "^1.12.1",
"@lodestar/reqresp": "^1.12.1",
"@lodestar/state-transition": "^1.12.1",
"@lodestar/types": "^1.12.1",
"@lodestar/utils": "^1.12.1",
"@lodestar/validator": "^1.12.1",
"@multiformats/multiaddr": "^12.1.3",
"@types/datastore-level": "^3.0.0",
"buffer-xor": "^2.0.2",
83 changes: 73 additions & 10 deletions packages/beacon-node/src/api/impl/beacon/blocks/index.ts
@@ -4,6 +4,7 @@ import {
computeTimeAtSlot,
parseSignedBlindedBlockOrContents,
reconstructFullBlockOrContents,
DataAvailableStatus,
} from "@lodestar/state-transition";
import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params";
import {sleep, toHex} from "@lodestar/utils";
@@ -15,6 +16,9 @@ import {BlockError, BlockErrorCode} from "../../../../chain/errors/index.js";
import {OpSource} from "../../../../metrics/validatorMonitor.js";
import {NetworkEvent} from "../../../../network/index.js";
import {ApiModules} from "../../types.js";
import {validateGossipBlock} from "../../../../chain/validation/block.js";
import {verifyBlocksInEpoch} from "../../../../chain/blocks/verifyBlock.js";
import {BeaconChain} from "../../../../chain/chain.js";
import {resolveBlockId, toBeaconHeaderResponse} from "./utils.js";

type PublishBlockOpts = ImportBlockOpts & {broadcastValidation?: routes.beacon.BroadcastValidation};
@@ -64,29 +68,86 @@ export function getBeaconBlockApi({

// check what validations have been requested before broadcasting and publishing the block
// TODO: add validation time to metrics
const broadcastValidation = opts.broadcastValidation ?? routes.beacon.BroadcastValidation.none;
const broadcastValidation = opts.broadcastValidation ?? routes.beacon.BroadcastValidation.gossip;
// if block is locally produced, full or blinded, it already is 'consensus' validated as it went through
// state transition to produce the stateRoot
const slot = signedBlock.message.slot;
const fork = config.getForkName(slot);
const blockRoot = toHex(chain.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(signedBlock.message));
// bodyRoot should be the same to produced block
const bodyRoot = toHex(chain.config.getForkTypes(slot).BeaconBlockBody.hashTreeRoot(signedBlock.message.body));
const blockLocallyProduced =
chain.producedBlockRoot.has(blockRoot) || chain.producedBlindedBlockRoot.has(blockRoot);
const valLogMeta = {broadcastValidation, blockRoot, blockLocallyProduced, slot};
const valLogMeta = {broadcastValidation, blockRoot, bodyRoot, blockLocallyProduced, slot};

switch (broadcastValidation) {
case routes.beacon.BroadcastValidation.none: {
if (blockLocallyProduced) {
chain.logger.debug("No broadcast validation requested for the block", valLogMeta);
} else {
chain.logger.warn("No broadcast validation requested for the block", valLogMeta);
case routes.beacon.BroadcastValidation.gossip: {
if (!blockLocallyProduced) {
try {
await validateGossipBlock(config, chain, signedBlock, fork);
} catch (error) {
chain.logger.error("Gossip validations failed while publishing the block", valLogMeta, error as Error);
chain.persistInvalidSszValue(
chain.config.getForkTypes(slot).SignedBeaconBlock,
signedBlock,
"api_reject_gossip_failure"
);
throw error;
}
}
chain.logger.debug("Gossip checks validated while publishing the block", valLogMeta);
break;
}

case routes.beacon.BroadcastValidation.consensusAndEquivocation:
case routes.beacon.BroadcastValidation.consensus: {
// check if this beacon node produced the block else run validations
if (!blockLocallyProduced) {
// error or log warning that we support consensus val on blocks produced via this beacon node
const message = "Consensus validation not implemented yet for block not produced by this beacon node";
const parentBlock = chain.forkChoice.getBlock(signedBlock.message.parentRoot);
if (parentBlock === null) {
network.events.emit(NetworkEvent.unknownBlockParent, {
blockInput: blockForImport,
peer: IDENTITY_PEER_ID,
});
chain.persistInvalidSszValue(
chain.config.getForkTypes(slot).SignedBeaconBlock,
signedBlock,
"api_reject_parent_unknown"
);
throw new BlockError(signedBlock, {
code: BlockErrorCode.PARENT_UNKNOWN,
parentRoot: toHexString(signedBlock.message.parentRoot),
});
}

try {
await verifyBlocksInEpoch.call(
chain as BeaconChain,
parentBlock,
[blockForImport],
[DataAvailableStatus.available],
{
...opts,
verifyOnly: true,
skipVerifyBlockSignatures: true,
skipVerifyExecutionPayload: true,
}
);
} catch (error) {
chain.logger.error("Consensus checks failed while publishing the block", valLogMeta, error as Error);
chain.persistInvalidSszValue(
chain.config.getForkTypes(slot).SignedBeaconBlock,
signedBlock,
"api_reject_consensus_failure"
);
throw error;
}
}

chain.logger.debug("Consensus validated while publishing block", valLogMeta);

if (broadcastValidation === routes.beacon.BroadcastValidation.consensusAndEquivocation) {
const message = `Equivocation checks not yet implemented for broadcastValidation=${broadcastValidation}`;
if (chain.opts.broadcastValidationStrictness === "error") {
throw Error(message);
} else {
Expand All @@ -102,7 +163,7 @@ export function getBeaconBlockApi({
if (chain.opts.broadcastValidationStrictness === "error") {
throw Error(message);
} else {
chain.logger.warn(message);
chain.logger.warn(message, valLogMeta);
}
}
}
@@ -163,6 +224,8 @@
: undefined;
const blobs = blobSidecars ? blobSidecars.map((blobSidecar) => blobSidecar.blob) : null;

chain.logger.debug("Assembling blinded block for publishing", {source, blockRoot, slot});

const signedBlockOrContents =
source === ProducedBlockSource.engine
? reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs})
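Because added and removed lines are interleaved above, here is a condensed sketch of the control flow this hunk introduces. The helper names, option flags, and most import paths are taken from the diff; the parameter types, error handling, and blinded-block plumbing are simplified assumptions, not the exact implementation.

import {allForks} from "@lodestar/types";
import {DataAvailableStatus} from "@lodestar/state-transition";
import {toHex} from "@lodestar/utils";
import {routes} from "@lodestar/api";
import {validateGossipBlock} from "../../../../chain/validation/block.js";
import {verifyBlocksInEpoch} from "../../../../chain/blocks/verifyBlock.js";
import {BeaconChain} from "../../../../chain/chain.js";
import {BlockInput} from "../../../../chain/blocks/types.js"; // path assumed from the package layout

async function validateBeforeBroadcast(
  chain: BeaconChain,
  signedBlock: allForks.SignedBeaconBlock,
  blockForImport: BlockInput,
  broadcastValidation = routes.beacon.BroadcastValidation.gossip // the new default after this commit
): Promise<void> {
  const slot = signedBlock.message.slot;
  const blockRoot = toHex(chain.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(signedBlock.message));
  // Locally produced blocks already ran the state transition, so they count as consensus-validated.
  if (chain.producedBlockRoot.has(blockRoot) || chain.producedBlindedBlockRoot.has(blockRoot)) return;

  switch (broadcastValidation) {
    case routes.beacon.BroadcastValidation.gossip:
      // Same checks a gossip peer would apply before forwarding the block.
      await validateGossipBlock(chain.config, chain, signedBlock, chain.config.getForkName(slot));
      return;

    case routes.beacon.BroadcastValidation.consensus:
    case routes.beacon.BroadcastValidation.consensusAndEquivocation: {
      const parentBlock = chain.forkChoice.getBlock(signedBlock.message.parentRoot);
      // The real code also emits NetworkEvent.unknownBlockParent and persists the rejected block.
      if (parentBlock === null) throw Error("Unknown parent block");
      // Verify-only state transition: signature and execution payload checks are skipped here
      // and run again on the regular import path after broadcasting.
      await verifyBlocksInEpoch.call(chain, parentBlock, [blockForImport], [DataAvailableStatus.available], {
        verifyOnly: true,
        skipVerifyBlockSignatures: true,
        skipVerifyExecutionPayload: true,
      });
      // Equivocation checks are still unimplemented; the real code warns or throws depending on
      // chain.opts.broadcastValidationStrictness.
      return;
    }
  }
}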
16 changes: 13 additions & 3 deletions packages/beacon-node/src/api/impl/validator/index.ts
@@ -309,6 +309,9 @@ export function getValidatorApi({
});

const version = config.getForkName(block.slot);
if (chain.opts.persistProducedBlocks) {
void chain.persistBlock(block, "produced_builder_block");
}
if (isForkBlobs(version)) {
const blockHash = toHex((block as bellatrix.BlindedBeaconBlock).body.executionPayloadHeader.blockHash);
const blindedBlobSidecars = chain.producedBlindedBlobSidecarsCache.get(blockHash);
@@ -377,6 +380,9 @@
executionPayloadValue,
root: toHexString(config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block)),
});
if (chain.opts.persistProducedBlocks) {
void chain.persistBlock(block, "produced_engine_block");
}
if (isForkBlobs(version)) {
const blockHash = toHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash);
const blobSidecars = chain.producedBlobSidecarsCache.get(blockHash);
@@ -478,16 +484,17 @@
delayMs,
cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS,
timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS,
slot,
});
}
);
if (blindedBlock instanceof Error) {
// error here means race cutoff exceeded
logger.error("Failed to produce builder block", {}, blindedBlock);
logger.error("Failed to produce builder block", {slot}, blindedBlock);
blindedBlock = null;
}
if (fullBlock instanceof Error) {
logger.error("Failed to produce execution block", {}, fullBlock);
logger.error("Failed to produce execution block", {slot}, fullBlock);
fullBlock = null;
}
} else if (blindedBlockPromise !== null && fullBlockPromise === null) {
@@ -535,23 +542,26 @@
// winston logger doesn't like bigint
enginePayloadValue: `${enginePayloadValue}`,
builderPayloadValue: `${builderPayloadValue}`,
slot,
});
} else if (fullBlock && !blindedBlock) {
selectedSource = ProducedBlockSource.engine;
logger.verbose("Selected engine block: no builder block produced", {
// winston logger doesn't like bigint
enginePayloadValue: `${enginePayloadValue}`,
slot,
});
} else if (blindedBlock && !fullBlock) {
selectedSource = ProducedBlockSource.builder;
logger.verbose("Selected builder block: no engine block produced", {
// winston logger doesn't like bigint
builderPayloadValue: `${builderPayloadValue}`,
slot,
});
}

if (selectedSource === null) {
throw Error("Failed to produce engine or builder block");
throw Error(`Failed to produce engine or builder block for slot=${slot}`);
}

if (selectedSource === ProducedBlockSource.engine) {
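The two hunks above add the same opt-in guard around builder and engine block production. Isolated, the pattern looks like the sketch below; the minimal interface is assumed for illustration, while chain.opts.persistProducedBlocks and chain.persistBlock are the names the diff itself uses.

// Minimal shapes for illustration only; the real chain object carries far more members.
type ProducedBlockTag = "produced_builder_block" | "produced_engine_block";

interface ChainWithDebugPersistence {
  opts: {persistProducedBlocks?: boolean};
  persistBlock(block: unknown, suffix: string): Promise<void>;
}

function maybePersistProducedBlock(chain: ChainWithDebugPersistence, block: unknown, tag: ProducedBlockTag): void {
  if (chain.opts.persistProducedBlocks) {
    // `void` drops the promise on purpose: persistence is a best-effort debugging aid and must
    // not delay returning the freshly produced block to the validator client.
    void chain.persistBlock(block, tag);
  }
}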
59 changes: 36 additions & 23 deletions packages/beacon-node/src/chain/blocks/verifyBlock.ts
@@ -7,7 +7,7 @@ import {
} from "@lodestar/state-transition";
import {bellatrix} from "@lodestar/types";
import {ForkName} from "@lodestar/params";
import {ProtoBlock} from "@lodestar/fork-choice";
import {ProtoBlock, ExecutionStatus} from "@lodestar/fork-choice";
import {ChainForkConfig} from "@lodestar/config";
import {Logger} from "@lodestar/utils";
import {BlockError, BlockErrorCode} from "../errors/index.js";
@@ -89,7 +89,14 @@ export async function verifyBlocksInEpoch(
// batch all I/O operations to reduce overhead
const [segmentExecStatus, {postStates, proposerBalanceDeltas}] = await Promise.all([
// Execution payloads
verifyBlocksExecutionPayload(this, parentBlock, blocks, preState0, abortController.signal, opts),
opts.skipVerifyExecutionPayload !== true
? verifyBlocksExecutionPayload(this, parentBlock, blocks, preState0, abortController.signal, opts)
: Promise.resolve({
execAborted: null,
executionStatuses: blocks.map((_blk) => ExecutionStatus.Syncing),
mergeBlockFound: null,
} as SegmentExecStatus),

// Run state transition only
// TODO: Ensure it yields to allow flushing to workers and engine API
verifyBlocksStateTransitionOnly(
@@ -103,37 +110,43 @@
),

// All signatures at once
verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts),
opts.skipVerifyBlockSignatures !== true
? verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts)
: Promise.resolve(),

// ideally we want to only persist blocks after verifying them however the reality is there are
// rarely invalid blocks we'll batch all I/O operation here to reduce the overhead if there's
// an error, we'll remove blocks not in forkchoice
opts.eagerPersistBlock ? writeBlockInputToDb.call(this, blocksInput) : Promise.resolve(),
opts.verifyOnly !== true && opts.eagerPersistBlock
? writeBlockInputToDb.call(this, blocksInput)
: Promise.resolve(),
]);

if (segmentExecStatus.execAborted === null && segmentExecStatus.mergeBlockFound !== null) {
// merge block found and is fully valid = state transition + signatures + execution payload.
// TODO: Will this banner be logged during syncing?
logOnPowBlock(this.logger, this.config, segmentExecStatus.mergeBlockFound);
}
if (opts.verifyOnly !== true) {
if (segmentExecStatus.execAborted === null && segmentExecStatus.mergeBlockFound !== null) {
// merge block found and is fully valid = state transition + signatures + execution payload.
// TODO: Will this banner be logged during syncing?
logOnPowBlock(this.logger, this.config, segmentExecStatus.mergeBlockFound);
}

const fromFork = this.config.getForkName(parentBlock.slot);
const toFork = this.config.getForkName(blocks[blocks.length - 1].message.slot);
const fromFork = this.config.getForkName(parentBlock.slot);
const toFork = this.config.getForkName(blocks[blocks.length - 1].message.slot);

// If transition through toFork, note won't happen if ${toFork}_EPOCH = 0, will log double on re-org
if (toFork !== fromFork) {
switch (toFork) {
case ForkName.capella:
this.logger.info(CAPELLA_OWL_BANNER);
this.logger.info("Activating withdrawals", {epoch: this.config.CAPELLA_FORK_EPOCH});
break;
// If transition through toFork, note won't happen if ${toFork}_EPOCH = 0, will log double on re-org
if (toFork !== fromFork) {
switch (toFork) {
case ForkName.capella:
this.logger.info(CAPELLA_OWL_BANNER);
this.logger.info("Activating withdrawals", {epoch: this.config.CAPELLA_FORK_EPOCH});
break;

case ForkName.deneb:
this.logger.info(DENEB_BLOWFISH_BANNER);
this.logger.info("Activating blobs", {epoch: this.config.DENEB_FORK_EPOCH});
break;
case ForkName.deneb:
this.logger.info(DENEB_BLOWFISH_BANNER);
this.logger.info("Activating blobs", {epoch: this.config.DENEB_FORK_EPOCH});
break;

default:
default:
}
}
}

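Taken together, the new flags let the publish path added earlier in this commit call this function for read-only verification. A hedged summary of their semantics, as inferred from this hunk, is sketched below; the real option type lives elsewhere in the package and may carry more fields.

interface VerifyOnlyBlockOpts {
  /** Run verification without side effects: skip the eager DB write and fork-transition banners/logs. */
  verifyOnly?: boolean;
  /** Skip batch BLS signature verification (the caller has verified, or will verify, separately). */
  skipVerifyBlockSignatures?: boolean;
  /** Skip execution payload verification; execution statuses are then reported as Syncing. */
  skipVerifyExecutionPayload?: boolean;
}

// As used by the block publish path earlier in this commit:
const publishVerifyOpts: VerifyOnlyBlockOpts = {
  verifyOnly: true,
  skipVerifyBlockSignatures: true,
  skipVerifyExecutionPayload: true,
};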