diff --git a/config/eslint.cjs b/config/eslint.cjs index ce8c6e484b..d2f97618a8 100644 --- a/config/eslint.cjs +++ b/config/eslint.cjs @@ -19,7 +19,6 @@ module.exports = { 'benchmarks', 'coverage', 'dist', - 'examples', 'node_modules', 'prettier.config.js', 'recipes', @@ -116,11 +115,11 @@ module.exports = { parserOptions: { extraFileExtensions: ['.json'], sourceType: 'module', - project: './tsconfig.json', + project: './tsconfig.lint.json', }, overrides: [ { - files: ['test/**/*.ts', 'tests/**/*.ts'], + files: ['test/**/*.ts', 'tests/**/*.ts', 'examples/**/*.ts'], rules: { 'implicit-dependencies/no-implicit': 'off', 'import/no-extraneous-dependencies': 'off', diff --git a/config/tsconfig.lint.json b/config/tsconfig.lint.json new file mode 100644 index 0000000000..574cc64e1f --- /dev/null +++ b/config/tsconfig.lint.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.json", + "include": [ + "../packages/**/src/**/*.ts", + "../packages/**/test/**/*.ts", + "../packages/**/examples/**/*.ts", + "../packages/**/examples/**/*.cjs", + "../packages/**/examples/**/*.js", + "../packages/**/benchmarks/**/*.ts", + "../packages/**/bin/**/*.ts" + ], + "compilerOptions": { + "noEmit": true + } +} diff --git a/package-lock.json b/package-lock.json index 191f1c226a..efe96a9f05 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,20 +19,20 @@ "@vitest/ui": "^v2.0.0-beta.12", "c8": "7.12.0", "embedme": "1.22.1", - "eslint": "8.45.0", - "eslint-config-prettier": "8.8.0", - "eslint-config-typestrict": "1.0.5", + "eslint": "8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-config-typestrict": "^1.0.5", "eslint-formatter-codeframe": "7.32.1", "eslint-plugin-ethereumjs": "file:./eslint", "eslint-plugin-github": "4.9.2", "eslint-plugin-implicit-dependencies": "1.1.1", "eslint-plugin-import": "2.26.0", - "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-prettier": "^5.2.1", "eslint-plugin-simple-import-sort": "7.0.0", "eslint-plugin-sonarjs": "0.19.0", "lint-staged": "13.0.3", "lockfile-lint-api": "^5.5.1", - "prettier": "2.7.1", + "prettier": "^3.3.3", "sort-package-json": "1.57.0", "tape": "5.6.0", "tsx": "^4.6.2", @@ -1011,9 +1011,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", - "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -3719,6 +3719,12 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, "node_modules/@vitest/browser": { "version": "2.0.0-beta.12", "resolved": "https://registry.npmjs.org/@vitest/browser/-/browser-2.0.0-beta.12.tgz", @@ -7069,27 +7075,28 @@ } }, "node_modules/eslint": { - "version": "8.45.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.45.0.tgz", - "integrity": "sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw==", + "version": "8.57.0", + "resolved": 
"https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.1.0", - "@eslint/js": "8.44.0", - "@humanwhocodes/config-array": "^0.11.10", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.0", - "eslint-visitor-keys": "^3.4.1", - "espree": "^9.6.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -7123,9 +7130,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz", - "integrity": "sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, "bin": { "eslint-config-prettier": "bin/cli.js" @@ -7492,36 +7499,6 @@ "balanced-match": "^1.0.0" } }, - "node_modules/eslint-plugin-github/node_modules/eslint-plugin-prettier": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.1.3.tgz", - "integrity": "sha512-C9GCVAs4Eq7ZC/XFQHITLiHJxQngdtraXaM+LoUFoFp/lHNl2Zn8f3WQbe9HvTBBQ9YnKFB0/2Ajdqwo5D1EAw==", - "dev": true, - "dependencies": { - "prettier-linter-helpers": "^1.0.0", - "synckit": "^0.8.6" - }, - "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint-plugin-prettier" - }, - "peerDependencies": { - "@types/eslint": ">=8.0.0", - "eslint": ">=8.0.0", - "eslint-config-prettier": "*", - "prettier": ">=3.0.0" - }, - "peerDependenciesMeta": { - "@types/eslint": { - "optional": true - }, - "eslint-config-prettier": { - "optional": true - } - } - }, "node_modules/eslint-plugin-github/node_modules/minimatch": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", @@ -7537,21 +7514,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/eslint-plugin-github/node_modules/prettier": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz", - "integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==", - "dev": true, - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, "node_modules/eslint-plugin-i18n-text": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/eslint-plugin-i18n-text/-/eslint-plugin-i18n-text-1.0.1.tgz", @@ -7674,21 +7636,30 @@ } }, "node_modules/eslint-plugin-prettier": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", - "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.1.tgz", + "integrity": "sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==", "dev": true, "dependencies": { - "prettier-linter-helpers": "^1.0.0" + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.9.1" }, "engines": { - "node": ">=12.0.0" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" }, "peerDependencies": { - "eslint": ">=7.28.0", - "prettier": ">=2.0.0" + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": "*", + "prettier": ">=3.0.0" }, "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, "eslint-config-prettier": { "optional": true } @@ -12812,15 +12783,15 @@ } }, "node_modules/prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", "dev": true, "bin": { - "prettier": "bin-prettier.js" + "prettier": "bin/prettier.cjs" }, "engines": { - "node": ">=10.13.0" + "node": ">=14" }, "funding": { "url": "https://github.com/prettier/prettier?sponsor=1" @@ -14719,9 +14690,9 @@ } }, "node_modules/synckit": { - "version": "0.8.8", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.8.tgz", - "integrity": "sha512-HwOKAP7Wc5aRGYdKH+dw0PRRpbO841v2DENBtjnR5HFWoiNByAl7vrx3p0G/rCyYXQsrxqtX48TImFtPcIHSpQ==", + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.1.tgz", + "integrity": "sha512-7gr8p9TQP6RAHusBOSLs46F4564ZrjV8xFmw5zCmgmhGUcw2hxsShhJ6CEiHQMgPDwAQ1fWHPM0ypc4RMAig4A==", "dev": true, "dependencies": { "@pkgr/core": "^0.1.0", diff --git a/package.json b/package.json index 356faf9b30..59d32a1b05 100644 --- a/package.json +++ b/package.json @@ -34,20 +34,20 @@ "@vitest/ui": "^v2.0.0-beta.12", "c8": "7.12.0", "embedme": "1.22.1", - "eslint": "8.45.0", - "eslint-config-prettier": "8.8.0", - "eslint-config-typestrict": "1.0.5", + "eslint": "8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-config-typestrict": "^1.0.5", "eslint-formatter-codeframe": "7.32.1", "eslint-plugin-ethereumjs": "file:./eslint", "eslint-plugin-github": "4.9.2", "eslint-plugin-implicit-dependencies": "1.1.1", "eslint-plugin-import": "2.26.0", - "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-prettier": "^5.2.1", "eslint-plugin-simple-import-sort": "7.0.0", "eslint-plugin-sonarjs": "0.19.0", "lint-staged": "13.0.3", "lockfile-lint-api": "^5.5.1", - "prettier": "2.7.1", + "prettier": "^3.3.3", "sort-package-json": "1.57.0", "tape": "5.6.0", "tsx": "^4.6.2", diff --git a/packages/block/.eslintrc.cjs b/packages/block/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/block/.eslintrc.cjs +++ b/packages/block/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { 
+ files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/block/examples/1559.ts b/packages/block/examples/1559.ts index a77afbc323..ed836dec4f 100644 --- a/packages/block/examples/1559.ts +++ b/packages/block/examples/1559.ts @@ -1,4 +1,4 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlockFromBlockData } from '@ethereumjs/block' import { Chain, Common, Hardfork } from '@ethereumjs/common' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) @@ -10,7 +10,7 @@ const block = createBlockFromBlockData( gasUsed: BigInt(60), }, }, - { common } + { common }, ) // Base fee will increase for next block since the @@ -27,7 +27,7 @@ const blockWithMatchingBaseFee = createBlockFromBlockData( gasUsed: BigInt(60), }, }, - { common } + { common }, ) console.log(Number(blockWithMatchingBaseFee.header.baseFeePerGas)) // 11 diff --git a/packages/block/examples/4844.ts b/packages/block/examples/4844.ts index a73cb682ba..5331a68e20 100644 --- a/packages/block/examples/4844.ts +++ b/packages/block/examples/4844.ts @@ -17,7 +17,7 @@ const main = async () => { }) const blobTx = create4844BlobTx( { blobsData: ['myFirstBlob'], to: Address.fromPrivateKey(randomBytes(32)) }, - { common } + { common }, ) const block = createBlockFromBlockData( @@ -30,14 +30,14 @@ const main = async () => { { common, skipConsensusFormatValidation: true, - } + }, ) console.log( `4844 block header with excessBlobGas=${block.header.excessBlobGas} created and ${ block.transactions.filter((tx) => tx.type === 3).length - } blob transactions` + } blob transactions`, ) } -main() +void main() diff --git a/packages/block/examples/clique.ts b/packages/block/examples/clique.ts index ba32b58cc1..2caa565d64 100644 --- a/packages/block/examples/clique.ts +++ b/packages/block/examples/clique.ts @@ -1,4 +1,4 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlockFromBlockData } from '@ethereumjs/block' import { Chain, Common, Hardfork } from '@ethereumjs/common' const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) diff --git a/packages/block/examples/pos.ts b/packages/block/examples/pos.ts index 1096669d55..c05220b05c 100644 --- a/packages/block/examples/pos.ts +++ b/packages/block/examples/pos.ts @@ -1,4 +1,4 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlockFromBlockData } from '@ethereumjs/block' import { Chain, Common } from '@ethereumjs/common' const common = new Common({ chain: Chain.Mainnet }) @@ -7,7 +7,7 @@ const block = createBlockFromBlockData( { // Provide your block data here or use default values }, - { common } + { common }, ) console.log(`Proof-of-Stake (default) block created with hardfork=${block.common.hardfork()}`) diff --git a/packages/block/examples/pow.ts b/packages/block/examples/pow.ts index 997e47c26c..8f2c1f4075 100644 --- a/packages/block/examples/pow.ts +++ b/packages/block/examples/pow.ts @@ -1,4 +1,4 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlockFromBlockData } from '@ethereumjs/block' import { Chain, Common, Hardfork } from '@ethereumjs/common' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) diff --git a/packages/block/examples/withdrawals.ts b/packages/block/examples/withdrawals.ts index 73e5e7dfc1..5229db9dfa 100644 --- 
a/packages/block/examples/withdrawals.ts +++ b/packages/block/examples/withdrawals.ts @@ -1,6 +1,7 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { Common, Chain } from '@ethereumjs/common' +import { createBlockFromBlockData } from '@ethereumjs/block' +import { Chain, Common } from '@ethereumjs/common' import { Address, hexToBytes } from '@ethereumjs/util' + import type { WithdrawalData } from '@ethereumjs/util' const common = new Common({ chain: Chain.Mainnet }) @@ -16,14 +17,14 @@ const block = createBlockFromBlockData( { header: { withdrawalsRoot: hexToBytes( - '0x69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357' + '0x69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357', ), }, withdrawals: [withdrawal], }, { common, - } + }, ) console.log(`Block with ${block.withdrawals!.length} withdrawal(s) created`) diff --git a/packages/block/src/block.ts b/packages/block/src/block.ts index 725e14879a..82b8597d94 100644 --- a/packages/block/src/block.ts +++ b/packages/block/src/block.ts @@ -98,7 +98,7 @@ export class Block { withdrawals?: Withdrawal[], opts: BlockOptions = {}, requests?: CLRequest[], - executionWitness?: VerkleExecutionWitness | null + executionWitness?: VerkleExecutionWitness | null, ) { this.header = header ?? BlockHeader.fromHeaderData({}, opts) this.common = this.header.common @@ -132,13 +132,13 @@ export class Block { this.validateUncles() if (this.common.consensusType() === ConsensusType.ProofOfAuthority) { const msg = this._errorMsg( - 'Block initialization with uncleHeaders on a PoA network is not allowed' + 'Block initialization with uncleHeaders on a PoA network is not allowed', ) throw new Error(msg) } if (this.common.consensusType() === ConsensusType.ProofOfStake) { const msg = this._errorMsg( - 'Block initialization with uncleHeaders on a PoS network is not allowed' + 'Block initialization with uncleHeaders on a PoS network is not allowed', ) throw new Error(msg) } @@ -181,7 +181,7 @@ export class Block { const bytesArray = [ this.header.raw(), this.transactions.map((tx) => - tx.supports(Capability.EIP2718TypedTransaction) ? tx.serialize() : tx.raw() + tx.supports(Capability.EIP2718TypedTransaction) ? 
tx.serialize() : tx.raw(), ) as Uint8Array[], this.uncleHeaders.map((uh) => uh.raw()), ] @@ -301,7 +301,7 @@ export class Block { blobGasUsed += BigInt(tx.numBlobs()) * blobGasPerBlob if (blobGasUsed > blobGasLimit) { errs.push( - `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}` + `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}`, ) } } @@ -357,7 +357,7 @@ export class Block { for (const [index, tx] of this.transactions.entries()) { if (!tx.isSigned()) { const msg = this._errorMsg( - `invalid transactions: transaction at index ${index} is unsigned` + `invalid transactions: transaction at index ${index} is unsigned`, ) throw new Error(msg) } @@ -406,7 +406,7 @@ export class Block { const expectedExcessBlobGas = parentHeader.calcNextExcessBlobGas() if (this.header.excessBlobGas !== expectedExcessBlobGas) { throw new Error( - `block excessBlobGas mismatch: have ${this.header.excessBlobGas}, want ${expectedExcessBlobGas}` + `block excessBlobGas mismatch: have ${this.header.excessBlobGas}, want ${expectedExcessBlobGas}`, ) } @@ -419,7 +419,7 @@ export class Block { throw new Error( `blob transaction maxFeePerBlobGas ${ tx.maxFeePerBlobGas - } < than block blob gas price ${blobGasPrice} - ${this.errorStr()}` + } < than block blob gas price ${blobGasPrice} - ${this.errorStr()}`, ) } @@ -427,7 +427,7 @@ export class Block { if (blobGasUsed > blobGasLimit) { throw new Error( - `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}` + `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}`, ) } } @@ -435,7 +435,7 @@ export class Block { if (this.header.blobGasUsed !== blobGasUsed) { throw new Error( - `block blobGasUsed mismatch: have ${this.header.blobGasUsed}, want ${blobGasUsed}` + `block blobGasUsed mismatch: have ${this.header.blobGasUsed}, want ${blobGasUsed}`, ) } } @@ -472,7 +472,7 @@ export class Block { if (this.cache.withdrawalsTrieRoot === undefined) { this.cache.withdrawalsTrieRoot = await genWithdrawalsTrieRoot( this.withdrawals!, - new Trie({ common: this.common }) + new Trie({ common: this.common }), ) } result = equalsBytes(this.cache.withdrawalsTrieRoot, this.header.withdrawalsRoot!) 
diff --git a/packages/block/src/constructors.ts b/packages/block/src/constructors.ts index bf409c6ec5..dee8d97578 100644 --- a/packages/block/src/constructors.ts +++ b/packages/block/src/constructors.ts @@ -108,7 +108,7 @@ export function createBlockFromBlockData(blockData: BlockData = {}, opts?: Block withdrawals, opts, clRequests, - executionWitness + executionWitness, ) } @@ -146,7 +146,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio (withdrawalBytes === undefined || !Array.isArray(withdrawalBytes)) ) { throw new Error( - 'Invalid serialized block input: EIP-4895 is active, and no withdrawals were provided as array' + 'Invalid serialized block input: EIP-4895 is active, and no withdrawals were provided as array', ) } @@ -155,13 +155,13 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio (requestBytes === undefined || !Array.isArray(requestBytes)) ) { throw new Error( - 'Invalid serialized block input: EIP-7685 is active, and no requestBytes were provided as array' + 'Invalid serialized block input: EIP-7685 is active, and no requestBytes were provided as array', ) } if (header.common.isActivatedEIP(6800) && executionWitnessBytes === undefined) { throw new Error( - 'Invalid serialized block input: EIP-6800 is active, and execution witness is undefined' + 'Invalid serialized block input: EIP-6800 is active, and execution witness is undefined', ) } @@ -173,7 +173,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio ...opts, // Use header common in case of setHardfork being activated common: header.common, - }) + }), ) } @@ -206,7 +206,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio let requests if (header.common.isActivatedEIP(7685)) { requests = (requestBytes as RequestBytes[]).map((bytes) => - CLRequestFactory.fromSerializedRequest(bytes) + CLRequestFactory.fromSerializedRequest(bytes), ) } // executionWitness are not part of the EL fetched blocks via eth_ bodies method @@ -231,7 +231,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio withdrawals, opts, requests, - executionWitness + executionWitness, ) } @@ -272,7 +272,7 @@ export function createBlockFromRPC(blockData: JsonRpcBlock, uncles?: any[], opts export const createBlockFromJsonRpcProvider = async ( provider: string | EthersProvider, blockTag: string | bigint, - opts: BlockOptions + opts: BlockOptions, ) => { let blockData const providerUrl = getProvider(provider) @@ -301,7 +301,7 @@ export const createBlockFromJsonRpcProvider = async ( }) } else { throw new Error( - `expected blockTag to be block hash, bigint, hex prefixed string, or earliest/latest/pending; got ${blockTag}` + `expected blockTag to be block hash, bigint, hex prefixed string, or earliest/latest/pending; got ${blockTag}`, ) } @@ -331,7 +331,7 @@ export const createBlockFromJsonRpcProvider = async ( */ export async function createBlockFromExecutionPayload( payload: ExecutionPayload, - opts?: BlockOptions + opts?: BlockOptions, ): Promise { const { blockNumber: number, @@ -409,7 +409,7 @@ export async function createBlockFromExecutionPayload( // we are not setting setHardfork as common is already set to the correct hf const block = createBlockFromBlockData( { header, transactions: txs, withdrawals, executionWitness, requests }, - opts + opts, ) if ( block.common.isActivatedEIP(6800) && @@ -436,7 +436,7 @@ export async function createBlockFromExecutionPayload( */ export async function 
createBlockFromBeaconPayloadJson( payload: BeaconPayloadJson, - opts?: BlockOptions + opts?: BlockOptions, ): Promise { const executionPayload = executionPayloadFromBeaconPayload(payload) return createBlockFromExecutionPayload(executionPayload, opts) diff --git a/packages/block/src/from-beacon-payload.ts b/packages/block/src/from-beacon-payload.ts index b72679d8a5..ec9c4d275f 100644 --- a/packages/block/src/from-beacon-payload.ts +++ b/packages/block/src/from-beacon-payload.ts @@ -185,7 +185,7 @@ export function executionPayloadFromBeaconPayload(payload: BeaconPayloadJson): E payload.execution_witness.verkleProof !== undefined ? payload.execution_witness : parseExecutionWitnessFromSnakeJson( - payload.execution_witness as unknown as VerkleExecutionWitnessSnakeJson + payload.execution_witness as unknown as VerkleExecutionWitnessSnakeJson, ) } diff --git a/packages/block/src/from-rpc.ts b/packages/block/src/from-rpc.ts index c0105afac9..513e7b402c 100644 --- a/packages/block/src/from-rpc.ts +++ b/packages/block/src/from-rpc.ts @@ -46,7 +46,7 @@ function normalizeTxParams(_txParams: any) { export function createBlockFromRpc( blockParams: JsonRpcBlock, uncles: any[] = [], - options?: BlockOptions + options?: BlockOptions, ) { const header = blockHeaderFromRpc(blockParams, options) @@ -66,6 +66,6 @@ export function createBlockFromRpc( }) return createBlockFromBlockData( { header, transactions, uncleHeaders, withdrawals: blockParams.withdrawals, requests }, - options + options, ) } diff --git a/packages/block/src/header-from-rpc.ts b/packages/block/src/header-from-rpc.ts index a4ba8f3d45..a22678b574 100644 --- a/packages/block/src/header-from-rpc.ts +++ b/packages/block/src/header-from-rpc.ts @@ -58,7 +58,7 @@ export function blockHeaderFromRpc(blockParams: JsonRpcBlock, options?: BlockOpt parentBeaconBlockRoot, requestsRoot, }, - options + options, ) return blockHeader diff --git a/packages/block/src/header.ts b/packages/block/src/header.ts index c07740a173..9a8939c001 100644 --- a/packages/block/src/header.ts +++ b/packages/block/src/header.ts @@ -79,7 +79,7 @@ export class BlockHeader { get prevRandao() { if (!this.common.isActivatedEIP(4399)) { const msg = this._errorMsg( - 'The prevRandao parameter can only be accessed when EIP-4399 is activated' + 'The prevRandao parameter can only be accessed when EIP-4399 is activated', ) throw new Error(msg) } @@ -192,7 +192,7 @@ export class BlockHeader { const parentHash = toType(headerData.parentHash, TypeOutput.Uint8Array) ?? defaults.parentHash const uncleHash = toType(headerData.uncleHash, TypeOutput.Uint8Array) ?? defaults.uncleHash const coinbase = new Address( - toType(headerData.coinbase ?? defaults.coinbase, TypeOutput.Uint8Array) + toType(headerData.coinbase ?? defaults.coinbase, TypeOutput.Uint8Array), ) const stateRoot = toType(headerData.stateRoot, TypeOutput.Uint8Array) ?? 
defaults.stateRoot const transactionsTrie = @@ -257,7 +257,7 @@ export class BlockHeader { if (!this.common.isActivatedEIP(4895) && withdrawalsRoot !== undefined) { throw new Error( - 'A withdrawalsRoot for a header can only be provided with EIP4895 being activated' + 'A withdrawalsRoot for a header can only be provided with EIP4895 being activated', ) } @@ -273,7 +273,7 @@ export class BlockHeader { if (!this.common.isActivatedEIP(4788) && parentBeaconBlockRoot !== undefined) { throw new Error( - 'A parentBeaconBlockRoot for a header can only be provided with EIP4788 being activated' + 'A parentBeaconBlockRoot for a header can only be provided with EIP4788 being activated', ) } @@ -352,13 +352,13 @@ export class BlockHeader { } if (transactionsTrie.length !== 32) { const msg = this._errorMsg( - `transactionsTrie must be 32 bytes, received ${transactionsTrie.length} bytes` + `transactionsTrie must be 32 bytes, received ${transactionsTrie.length} bytes`, ) throw new Error(msg) } if (receiptTrie.length !== 32) { const msg = this._errorMsg( - `receiptTrie must be 32 bytes, received ${receiptTrie.length} bytes` + `receiptTrie must be 32 bytes, received ${receiptTrie.length} bytes`, ) throw new Error(msg) } @@ -375,7 +375,7 @@ export class BlockHeader { // check if the block used too much gas if (this.gasUsed > this.gasLimit) { const msg = this._errorMsg( - `Invalid block: too much gas used. Used: ${this.gasUsed}, gas limit: ${this.gasLimit}` + `Invalid block: too much gas used. Used: ${this.gasUsed}, gas limit: ${this.gasLimit}`, ) throw new Error(msg) } @@ -407,7 +407,7 @@ export class BlockHeader { } if (this.withdrawalsRoot?.length !== 32) { const msg = this._errorMsg( - `withdrawalsRoot must be 32 bytes, received ${this.withdrawalsRoot!.length} bytes` + `withdrawalsRoot must be 32 bytes, received ${this.withdrawalsRoot!.length} bytes`, ) throw new Error(msg) } @@ -422,7 +422,7 @@ export class BlockHeader { const msg = this._errorMsg( `parentBeaconBlockRoot must be 32 bytes, received ${ this.parentBeaconBlockRoot!.length - } bytes` + } bytes`, ) throw new Error(msg) } @@ -459,7 +459,7 @@ export class BlockHeader { // ExtraData length on epoch transition if (this.extraData.length !== minLength) { const msg = this._errorMsg( - `extraData must be ${minLength} bytes on non-epoch transition blocks, received ${this.extraData.length} bytes` + `extraData must be ${minLength} bytes on non-epoch transition blocks, received ${this.extraData.length} bytes`, ) throw new Error(msg) } @@ -467,14 +467,14 @@ export class BlockHeader { const signerLength = this.extraData.length - minLength if (signerLength % 20 !== 0) { const msg = this._errorMsg( - `invalid signer list length in extraData, received signer length of ${signerLength} (not divisible by 20)` + `invalid signer list length in extraData, received signer length of ${signerLength} (not divisible by 20)`, ) throw new Error(msg) } // coinbase (beneficiary) on epoch transition if (!this.coinbase.isZero()) { const msg = this._errorMsg( - `coinbase must be filled with zeros on epoch transition blocks, received ${this.coinbase}` + `coinbase must be filled with zeros on epoch transition blocks, received ${this.coinbase}`, ) throw new Error(msg) } @@ -492,7 +492,7 @@ export class BlockHeader { if (!equalsBytes(uncleHash, KECCAK256_RLP_ARRAY)) { errorMsg += `, uncleHash: ${bytesToHex(uncleHash)} (expected: ${bytesToHex( - KECCAK256_RLP_ARRAY + KECCAK256_RLP_ARRAY, )})` error = true } @@ -504,7 +504,7 @@ export class BlockHeader { } if (extraData.length > 32) 
{ errorMsg += `, extraData: ${bytesToHex( - extraData + extraData, )} (cannot exceed 32 bytes length, received ${extraData.length} bytes)` error = true } @@ -547,14 +547,14 @@ export class BlockHeader { if (gasLimit >= maxGasLimit) { const msg = this._errorMsg( - `gas limit increased too much. Gas limit: ${gasLimit}, max gas limit: ${maxGasLimit}` + `gas limit increased too much. Gas limit: ${gasLimit}, max gas limit: ${maxGasLimit}`, ) throw new Error(msg) } if (gasLimit <= minGasLimit) { const msg = this._errorMsg( - `gas limit decreased too much. Gas limit: ${gasLimit}, min gas limit: ${minGasLimit}` + `gas limit decreased too much. Gas limit: ${gasLimit}, min gas limit: ${minGasLimit}`, ) throw new Error(msg) } @@ -562,8 +562,8 @@ export class BlockHeader { if (gasLimit < this.common.param('minGasLimit')) { const msg = this._errorMsg( `gas limit decreased below minimum gas limit. Gas limit: ${gasLimit}, minimum gas limit: ${this.common.param( - 'minGasLimit' - )}` + 'minGasLimit', + )}`, ) throw new Error(msg) } @@ -575,7 +575,7 @@ export class BlockHeader { public calcNextBaseFee(): bigint { if (!this.common.isActivatedEIP(1559)) { const msg = this._errorMsg( - 'calcNextBaseFee() can only be called with EIP1559 being activated' + 'calcNextBaseFee() can only be called with EIP1559 being activated', ) throw new Error(msg) } @@ -625,7 +625,7 @@ export class BlockHeader { return fakeExponential( this.common.param('minBlobGas'), excessBlobGas, - this.common.param('blobGasPriceUpdateFraction') + this.common.param('blobGasPriceUpdateFraction'), ) } @@ -740,7 +740,7 @@ export class BlockHeader { protected _requireClique(name: string) { if (this.common.consensusAlgorithm() !== ConsensusAlgorithm.Clique) { const msg = this._errorMsg( - `BlockHeader.${name}() call only supported for clique PoA networks` + `BlockHeader.${name}() call only supported for clique PoA networks`, ) throw new Error(msg) } @@ -758,7 +758,7 @@ export class BlockHeader { } if (this.common.consensusAlgorithm() !== ConsensusAlgorithm.Ethash) { const msg = this._errorMsg( - 'difficulty calculation currently only supports the ethash algorithm' + 'difficulty calculation currently only supports the ethash algorithm', ) throw new Error(msg) } @@ -873,7 +873,7 @@ export class BlockHeader { const extraDataWithoutSeal = this.extraData.subarray( 0, - this.extraData.length - CLIQUE_EXTRA_SEAL + this.extraData.length - CLIQUE_EXTRA_SEAL, ) const extraData = concatBytes(extraDataWithoutSeal, signatureB) return extraData @@ -1004,8 +1004,8 @@ export class BlockHeader { if (drift <= DAO_ForceExtraDataRange && !equalsBytes(this.extraData, DAO_ExtraData)) { const msg = this._errorMsg( `extraData should be 'dao-hard-fork', got ${bytesToUtf8(this.extraData)} (hex: ${bytesToHex( - this.extraData - )})` + this.extraData, + )})`, ) throw new Error(msg) } diff --git a/packages/block/src/helpers.ts b/packages/block/src/helpers.ts index 145d051bb7..87d80e7883 100644 --- a/packages/block/src/helpers.ts +++ b/packages/block/src/helpers.ts @@ -51,12 +51,12 @@ export function valuesArrayToHeaderData(values: BlockHeaderBytes): HeaderData { if (values.length > 21) { throw new Error( - `invalid header. More values than expected were received. Max: 20, got: ${values.length}` + `invalid header. More values than expected were received. Max: 20, got: ${values.length}`, ) } if (values.length < 15) { throw new Error( - `invalid header. Less values than expected were received. Min: 15, got: ${values.length}` + `invalid header. 
Less values than expected were received. Min: 15, got: ${values.length}`, ) } diff --git a/packages/block/src/types.ts b/packages/block/src/types.ts index e1fa075796..5935fc6c97 100644 --- a/packages/block/src/types.ts +++ b/packages/block/src/types.ts @@ -139,7 +139,7 @@ export type BlockBytes = UncleHeadersBytes, WithdrawalsBytes, RequestsBytes, - ExecutionWitnessBytes + ExecutionWitnessBytes, ] /** @@ -150,7 +150,7 @@ export type BlockBodyBytes = [ TransactionsBytes, UncleHeadersBytes, WithdrawalsBytes?, - RequestBytes? + RequestBytes?, ] /** * TransactionsBytes can be an array of serialized txs for Typed Transactions or an array of Uint8Array Arrays for legacy transactions. diff --git a/packages/block/test/block.spec.ts b/packages/block/test/block.spec.ts index 9cf52e6f24..f58ee611c4 100644 --- a/packages/block/test/block.spec.ts +++ b/packages/block/test/block.spec.ts @@ -43,7 +43,7 @@ describe('[Block]: block functions', () => { block = createBlockFromBlockData({}, { freeze: false }) assert.ok( !Object.isFrozen(block), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) const rlpBlock = block.serialize() @@ -53,7 +53,7 @@ describe('[Block]: block functions', () => { block = createBlockFromRLPSerializedBlock(rlpBlock, { freeze: false }) assert.ok( !Object.isFrozen(block), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) const zero = new Uint8Array(0) @@ -79,7 +79,7 @@ describe('[Block]: block functions', () => { block = createBlockFromValuesArray(valuesArray, { common, freeze: false }) assert.ok( !Object.isFrozen(block), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) }) @@ -98,7 +98,7 @@ describe('[Block]: block functions', () => { extraData: new Uint8Array(97), }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) assert.equal(block.common.hardfork(), Hardfork.Berlin, 'should use setHardfork option') @@ -108,12 +108,12 @@ describe('[Block]: block functions', () => { number: 20, // Future block }, }, - { common, setHardfork: 5001 } + { common, setHardfork: 5001 }, ) assert.equal( block.common.hardfork(), Hardfork.Paris, - 'should use setHardfork option (td > threshold)' + 'should use setHardfork option (td > threshold)', ) block = createBlockFromBlockData( @@ -123,12 +123,12 @@ describe('[Block]: block functions', () => { extraData: new Uint8Array(97), }, }, - { common, setHardfork: 3000 } + { common, setHardfork: 3000 }, ) assert.equal( block.common.hardfork(), Hardfork.Berlin, - 'should work with setHardfork option (td < threshold)' + 'should work with setHardfork option (td < threshold)', ) }) @@ -150,7 +150,7 @@ describe('[Block]: block functions', () => { const common = new Common({ chain: Chain.Mainnet }) const uncleBlock = createBlockFromBlockData( { header: { extraData: new Uint8Array(117) } }, - { common } + { common }, ) assert.throws(function () { createBlockFromBlockData({ uncleHeaders: [uncleBlock.header] }, { common }) @@ -233,7 +233,7 @@ describe('[Block]: block functions', () => { const result = block.getTransactionsValidationErrors() assert.ok( result[0].includes('tx unable to pay base fee (non EIP-1559 tx)'), - 'should throw when legacy tx is unable to pay base fee' + 'should throw when legacy tx is unable to pay base fee', ) }) @@ -293,7 +293,7 @@ describe('[Block]: block functions', () => { 
uncleHash: KECCAK256_RLP_ARRAY, }, }, - { common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) } + { common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) }, ) await checkThrowsAsync(block.validateData(false, false), 'invalid withdrawals trie') @@ -304,7 +304,7 @@ describe('[Block]: block functions', () => { uncleHash: zeroRoot, }, }, - { common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) } + { common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) }, ) await checkThrowsAsync(block.validateData(false, false), 'invalid uncle hash') @@ -315,7 +315,7 @@ describe('[Block]: block functions', () => { block = createBlockFromBlockData({ executionWitness: null }, { common }) await checkThrowsAsync( block.validateData(false, false), - 'Invalid block: ethereumjs stateless client needs executionWitness' + 'Invalid block: ethereumjs stateless client needs executionWitness', ) }) @@ -361,7 +361,7 @@ describe('[Block]: block functions', () => { }, undefined, undefined, - 'input must be array' + 'input must be array', ) assert.throws( () => { @@ -369,7 +369,7 @@ describe('[Block]: block functions', () => { }, undefined, undefined, - 'input length must be 3 or less' + 'input length must be 3 or less', ) }) @@ -379,7 +379,7 @@ describe('[Block]: block functions', () => { toBytes(testDataPreLondon2.blocks[2].rlp as PrefixedHexString), { common, - } + }, ) const createBlockFromRaw = createBlockFromValuesArray(block.raw(), { common }) assert.ok(equalsBytes(block.hash(), createBlockFromRaw.hash())) @@ -391,14 +391,14 @@ describe('[Block]: block functions', () => { toBytes(testDataPreLondon2.blocks[2].rlp as PrefixedHexString), { common, - } + }, ) assert.equal(typeof block.toJSON(), 'object') }) it('DAO hardfork', () => { const blockData = RLP.decode( - testDataPreLondon2.blocks[0].rlp as PrefixedHexString + testDataPreLondon2.blocks[0].rlp as PrefixedHexString, ) as NestedUint8Array // Set block number from test block to mainnet DAO fork block 1920000 blockData[0][8] = hexToBytes('0x1D4C00') @@ -410,7 +410,7 @@ describe('[Block]: block functions', () => { }, /extraData should be 'dao-hard-fork/, undefined, - 'should throw on DAO HF block with wrong extra data' + 'should throw on DAO HF block with wrong extra data', ) // eslint-disable-line // Set extraData to dao-hard-fork @@ -435,14 +435,14 @@ describe('[Block]: block functions', () => { { header: nextBlockHeaderData, }, - { common } + { common }, ) // test if difficulty defaults to 0 assert.equal( blockWithoutDifficultyCalculation.header.difficulty, BigInt(0), - 'header difficulty should default to 0' + 'header difficulty should default to 0', ) // test if we set difficulty if we have a "difficulty header" in options; also verify this is equal to reported canonical difficulty. 
@@ -453,17 +453,17 @@ describe('[Block]: block functions', () => { { common, calcDifficultyFromHeader: genesis.header, - } + }, ) assert.ok( blockWithDifficultyCalculation.header.difficulty > BigInt(0), - 'header difficulty should be set if difficulty header is given' + 'header difficulty should be set if difficulty header is given', ) assert.ok( blockWithDifficultyCalculation.header.ethashCanonicalDifficulty(genesis.header) === blockWithDifficultyCalculation.header.difficulty, - 'header difficulty is canonical difficulty if difficulty header is given' + 'header difficulty is canonical difficulty if difficulty header is given', ) // test if we can provide a block which is too far ahead to still calculate difficulty @@ -479,12 +479,12 @@ describe('[Block]: block functions', () => { { common, calcDifficultyFromHeader: genesis.header, - } + }, ) assert.ok( block_farAhead.header.difficulty > BigInt(0), - 'should allow me to provide a bogus next block to calculate difficulty on when providing a difficulty header' + 'should allow me to provide a bogus next block to calculate difficulty on when providing a difficulty header', ) }) diff --git a/packages/block/test/clique.spec.ts b/packages/block/test/clique.spec.ts index bd5803ee0b..2c000d2bb3 100644 --- a/packages/block/test/clique.spec.ts +++ b/packages/block/test/clique.spec.ts @@ -15,29 +15,29 @@ describe('[Header]: Clique PoA Functionality', () => { }, undefined, undefined, - 'cliqueIsEpochTransition() -> should throw on PoW networks' + 'cliqueIsEpochTransition() -> should throw on PoW networks', ) header = BlockHeader.fromHeaderData({ extraData: new Uint8Array(97) }, { common }) assert.ok( header.cliqueIsEpochTransition(), - 'cliqueIsEpochTransition() -> should indicate an epoch transition for the genesis block' + 'cliqueIsEpochTransition() -> should indicate an epoch transition for the genesis block', ) header = BlockHeader.fromHeaderData({ number: 1, extraData: new Uint8Array(97) }, { common }) assert.notOk( header.cliqueIsEpochTransition(), - 'cliqueIsEpochTransition() -> should correctly identify a non-epoch block' + 'cliqueIsEpochTransition() -> should correctly identify a non-epoch block', ) assert.deepEqual( header.cliqueExtraVanity(), new Uint8Array(32), - 'cliqueExtraVanity() -> should return correct extra vanity value' + 'cliqueExtraVanity() -> should return correct extra vanity value', ) assert.deepEqual( header.cliqueExtraSeal(), new Uint8Array(65), - 'cliqueExtraSeal() -> should return correct extra seal value' + 'cliqueExtraSeal() -> should return correct extra seal value', ) assert.throws( () => { @@ -45,26 +45,26 @@ describe('[Header]: Clique PoA Functionality', () => { }, undefined, undefined, - 'cliqueEpochTransitionSigners() -> should throw on non-epch block' + 'cliqueEpochTransitionSigners() -> should throw on non-epch block', ) header = BlockHeader.fromHeaderData( { number: 60000, extraData: new Uint8Array(137) }, - { common } + { common }, ) assert.ok( header.cliqueIsEpochTransition(), - 'cliqueIsEpochTransition() -> should correctly identify an epoch block' + 'cliqueIsEpochTransition() -> should correctly identify an epoch block', ) assert.deepEqual( header.cliqueExtraVanity(), new Uint8Array(32), - 'cliqueExtraVanity() -> should return correct extra vanity value' + 'cliqueExtraVanity() -> should return correct extra vanity value', ) assert.deepEqual( header.cliqueExtraSeal(), new Uint8Array(65), - 'cliqueExtraSeal() -> should return correct extra seal value' + 'cliqueExtraSeal() -> should return correct extra seal 
value', ) const msg = 'cliqueEpochTransitionSigners() -> should return the correct epoch transition signer list on epoch block' @@ -81,7 +81,7 @@ describe('[Header]: Clique PoA Functionality', () => { address: new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), privateKey: hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), publicKey: hexToBytes( - '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195' + '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195', ), } @@ -90,7 +90,7 @@ describe('[Header]: Clique PoA Functionality', () => { let header = BlockHeader.fromHeaderData( { number: 1, extraData: new Uint8Array(97) }, - { common, freeze: false, cliqueSigner } + { common, freeze: false, cliqueSigner }, ) assert.equal(header.extraData.length, 97) @@ -100,7 +100,7 @@ describe('[Header]: Clique PoA Functionality', () => { header = BlockHeader.fromHeaderData({ extraData: new Uint8Array(97) }, { common }) assert.ok( header.cliqueSigner().equals(Address.zero()), - 'should return zero address on default block' + 'should return zero address on default block', ) }) }) diff --git a/packages/block/test/difficulty.spec.ts b/packages/block/test/difficulty.spec.ts index 945d69fc03..0fe8987844 100644 --- a/packages/block/test/difficulty.spec.ts +++ b/packages/block/test/difficulty.spec.ts @@ -30,7 +30,7 @@ const hardforkTestData: TestData = { muirGlacier: Object.assign( difficultyEIP2384.difficultyEIP2384.Berlin, difficultyEIP2384_random.difficultyEIP2384_random.Berlin, - difficultyEIP2384_random_to20M.difficultyEIP2384_random_to20M.Berlin + difficultyEIP2384_random_to20M.difficultyEIP2384_random_to20M.Berlin, ), arrowGlacier: difficultyArrowGlacier.difficultyArrowGlacier.ArrowGlacier, grayGlacier: difficultyGrayGlacier.difficultyGrayGlacier.GrayGlacier, @@ -65,7 +65,7 @@ describe('[Header]: difficulty tests', () => { uncleHash, }, }, - blockOpts + blockOpts, ) const block = createBlockFromBlockData( @@ -76,7 +76,7 @@ describe('[Header]: difficulty tests', () => { number: test.currentBlockNumber, }, }, - blockOpts + blockOpts, ) runDifficultyTests(test, parentBlock, block, `fork determination by hardfork (${hardfork})`) @@ -101,7 +101,7 @@ describe('[Header]: difficulty tests', () => { uncleHash, }, }, - blockOpts + blockOpts, ) const block = createBlockFromBlockData( @@ -112,14 +112,14 @@ describe('[Header]: difficulty tests', () => { number: test.currentBlockNumber, }, }, - blockOpts + blockOpts, ) runDifficultyTests( test, parentBlock, block, - `fork determination by block number (${test.currentBlockNumber})` + `fork determination by block number (${test.currentBlockNumber})`, ) } } diff --git a/packages/block/test/eip1559block.spec.ts b/packages/block/test/eip1559block.spec.ts index 878a5907f5..0bf4b8bed5 100644 --- a/packages/block/test/eip1559block.spec.ts +++ b/packages/block/test/eip1559block.spec.ts @@ -45,12 +45,12 @@ describe('EIP1559 tests', () => { }, { common, - } + }, ) }, undefined, undefined, - 'should throw when setting baseFeePerGas with EIP1559 not being activated' + 'should throw when setting baseFeePerGas with EIP1559 not being activated', ) }) @@ -68,14 +68,14 @@ describe('EIP1559 tests', () => { calcDifficultyFromHeader: genesis.header, common, freeze: false, - } + }, ) assert.fail('should throw when baseFeePerGas is not set to initial base fee') } catch (e: any) { const 
expectedError = 'Initial EIP1559 block does not have initial base fee' assert.ok( e.message.includes(expectedError), - 'should throw if base fee is not set to initial value' + 'should throw if base fee is not set to initial value', ) } @@ -91,7 +91,7 @@ describe('EIP1559 tests', () => { calcDifficultyFromHeader: genesis.header, common, freeze: false, - } + }, ) ;(header as any).baseFeePerGas = undefined await (header as any)._genericFormatValidation() @@ -99,7 +99,7 @@ describe('EIP1559 tests', () => { const expectedError = 'EIP1559 block has no base fee field' assert.ok( e.message.includes(expectedError), - 'should throw with no base fee field when EIP1559 is activated' + 'should throw with no base fee field when EIP1559 is activated', ) } }) @@ -118,7 +118,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'Valid initial EIP1559 header should be valid') @@ -137,7 +137,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.fail('should throw') } catch (e: any) { @@ -159,7 +159,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) createBlockFromBlockData( { @@ -174,7 +174,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) assert.ok(true, 'should correctly validate subsequent EIP-1559 blocks') }) @@ -195,7 +195,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.fail('should throw') } catch (e: any) { @@ -216,7 +216,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'should not throw when elasticity is exactly matched') @@ -235,7 +235,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) it('Header -> validate() -> gasLimit -> success cases', async () => { @@ -251,7 +251,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (HF transition block)') @@ -267,7 +267,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (HF transition block)') @@ -284,7 +284,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (post-HF transition block)') @@ -300,7 +300,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (post-HF transition block)') @@ -319,7 +319,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) try { header.validateGasLimit(genesis.header) @@ -327,7 +327,7 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit increased too much'), - 'should throw if gas limit is increased too much (HF transition block)' + 'should throw if gas limit is increased too much (HF transition block)', ) } @@ -343,7 +343,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) try { header.validateGasLimit(block1.header) @@ -351,7 +351,7 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit increased too much'), 
- 'should throw if gas limit is increased too much (post-HF transition block)' + 'should throw if gas limit is increased too much (post-HF transition block)', ) } }) @@ -369,7 +369,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) try { header.validateGasLimit(genesis.header) @@ -377,7 +377,7 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit decreased too much'), - 'should throw if gas limit is decreased too much (HF transition block)' + 'should throw if gas limit is decreased too much (HF transition block)', ) } @@ -393,7 +393,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) try { header.validateGasLimit(block1.header) @@ -401,7 +401,7 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit decreased too much'), - 'should throw if gas limit is decreased too much (post-HF transition block)' + 'should throw if gas limit is decreased too much (post-HF transition block)', ) } }) @@ -412,7 +412,7 @@ describe('EIP1559 tests', () => { maxFeePerGas: BigInt(0), maxPriorityFeePerGas: BigInt(0), }, - { common } + { common }, ).sign(hexToBytes(`0x${'46'.repeat(32)}`)) const block = createBlockFromBlockData( { @@ -438,13 +438,13 @@ describe('EIP1559 tests', () => { { common, calcDifficultyFromHeader: genesis.header, - } + }, ) const errs = block.getTransactionsValidationErrors() assert.ok( errs[0].includes('unable to pay base fee'), - 'should throw if transaction is unable to pay base fee' + 'should throw if transaction is unable to pay base fee', ) }) @@ -457,7 +457,7 @@ describe('EIP1559 tests', () => { gasUsed: BigInt(item.parentGasUsed), gasLimit: BigInt(item.parentTargetGasUsed) * BigInt(2), }, - { common } + { common }, ).calcNextBaseFee() const expected = BigInt(item.expectedBaseFee) assert.equal(expected, result, 'base fee correct') @@ -475,7 +475,7 @@ describe('EIP1559 tests', () => { }, { common, - } + }, ) assert.equal(header.toJSON().baseFeePerGas, '0x5') }) diff --git a/packages/block/test/eip4788block.spec.ts b/packages/block/test/eip4788block.spec.ts index 7f294c832b..7b7a15f00a 100644 --- a/packages/block/test/eip4788block.spec.ts +++ b/packages/block/test/eip4788block.spec.ts @@ -18,12 +18,12 @@ describe('EIP4788 header tests', () => { }, { common: earlyCommon, - } + }, ) }, 'A parentBeaconBlockRoot for a header can only be provided with EIP4788 being activated', undefined, - 'should throw when setting parentBeaconBlockRoot with EIP4788 not being activated' + 'should throw when setting parentBeaconBlockRoot with EIP4788 not being activated', ) assert.throws( @@ -34,12 +34,12 @@ describe('EIP4788 header tests', () => { }, { common: earlyCommon, - } + }, ) }, 'blob gas used can only be provided with EIP4844 activated', undefined, - 'should throw when setting blobGasUsed with EIP4844 not being activated' + 'should throw when setting blobGasUsed with EIP4844 not being activated', ) assert.doesNotThrow(() => { BlockHeader.fromHeaderData( @@ -51,7 +51,7 @@ describe('EIP4788 header tests', () => { { common, skipConsensusFormatValidation: true, - } + }, ) }, 'correctly instantiates an EIP4788 block header') @@ -59,12 +59,12 @@ describe('EIP4788 header tests', () => { { header: BlockHeader.fromHeaderData({}, { common }), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) assert.equal( block.toJSON().header?.parentBeaconBlockRoot, 
bytesToHex(zeros(32)), - 'JSON output includes excessBlobGas' + 'JSON output includes excessBlobGas', ) }) }) diff --git a/packages/block/test/eip4844block.spec.ts b/packages/block/test/eip4844block.spec.ts index c3c316218e..de2a01234a 100644 --- a/packages/block/test/eip4844block.spec.ts +++ b/packages/block/test/eip4844block.spec.ts @@ -42,12 +42,12 @@ describe('EIP4844 header tests', () => { }, { common: earlyCommon, - } + }, ) }, 'excess blob gas can only be provided with EIP4844 activated', undefined, - 'should throw when setting excessBlobGas with EIP4844 not being activated' + 'should throw when setting excessBlobGas with EIP4844 not being activated', ) assert.throws( @@ -58,22 +58,22 @@ describe('EIP4844 header tests', () => { }, { common: earlyCommon, - } + }, ) }, 'blob gas used can only be provided with EIP4844 activated', undefined, - 'should throw when setting blobGasUsed with EIP4844 not being activated' + 'should throw when setting blobGasUsed with EIP4844 not being activated', ) const excessBlobGas = BlockHeader.fromHeaderData( {}, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ).excessBlobGas assert.equal( excessBlobGas, 0n, - 'instantiates block with reasonable default excess blob gas value when not provided' + 'instantiates block with reasonable default excess blob gas value when not provided', ) assert.doesNotThrow(() => { BlockHeader.fromHeaderData( @@ -83,7 +83,7 @@ describe('EIP4844 header tests', () => { { common, skipConsensusFormatValidation: true, - } + }, ) }, 'correctly instantiates an EIP4844 block header') @@ -91,7 +91,7 @@ describe('EIP4844 header tests', () => { { header: BlockHeader.fromHeaderData({}, { common, skipConsensusFormatValidation: true }), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) assert.equal(block.toJSON().header?.excessBlobGas, '0x0', 'JSON output includes excessBlobGas') }) @@ -116,19 +116,19 @@ describe('blob gas tests', () => { assert.equal( excessBlobGas, 0n, - 'excess blob gas where 4844 is not active on header should be 0' + 'excess blob gas where 4844 is not active on header should be 0', ) assert.throws( () => preShardingHeader.calcDataFee(1), 'header must have excessBlobGas field', undefined, - 'calcDataFee throws when header has no excessBlobGas field' + 'calcDataFee throws when header has no excessBlobGas field', ) const lowGasHeader = BlockHeader.fromHeaderData( { number: 1, excessBlobGas: 5000 }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) excessBlobGas = lowGasHeader.calcNextExcessBlobGas() @@ -137,7 +137,7 @@ describe('blob gas tests', () => { assert.equal(blobGasPrice, 1n, 'blob gas price should be 1n when low or no excess blob gas') const highGasHeader = BlockHeader.fromHeaderData( { number: 1, excessBlobGas: 6291456, blobGasUsed: BigInt(6) * blobGasPerBlob }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) excessBlobGas = highGasHeader.calcNextExcessBlobGas() blobGasPrice = highGasHeader.getBlobGasPrice() @@ -180,7 +180,7 @@ describe('transaction validation tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ).sign(randomBytes(32)) const tx2 = create4844BlobTx( { @@ -191,12 +191,12 @@ describe('transaction validation tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ).sign(randomBytes(32)) const parentHeader = 
BlockHeader.fromHeaderData( { number: 1n, excessBlobGas: 4194304, blobGasUsed: 0 }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const excessBlobGas = parentHeader.calcNextExcessBlobGas() @@ -211,11 +211,11 @@ describe('transaction validation tests', () => { excessBlobGas, blobGasUsed: BigInt(blobs) * blobGasPerBlob, }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const block = createBlockFromBlockData( { header: blockHeader, transactions }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) return block } @@ -228,7 +228,7 @@ describe('transaction validation tests', () => { assert.doesNotThrow( () => blockWithValidTx.validateBlobTransactions(parentHeader), - 'does not throw when all tx maxFeePerBlobGas are >= to block blob gas fee' + 'does not throw when all tx maxFeePerBlobGas are >= to block blob gas fee', ) const blockJson = blockWithValidTx.toJSON() blockJson.header!.blobGasUsed = '0x0' @@ -237,26 +237,26 @@ describe('transaction validation tests', () => { () => blockWithInvalidHeader.validateBlobTransactions(parentHeader), 'block blobGasUsed mismatch', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.throws( () => blockWithInvalidTx.validateBlobTransactions(parentHeader), 'than block blob gas price', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.throws( () => blockWithInvalidTx.validateBlobTransactions(parentHeader), 'than block blob gas price', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.throws( () => blockWithTooManyBlobs.validateBlobTransactions(parentHeader), 'exceed maximum blob gas per block', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.ok( @@ -264,7 +264,7 @@ describe('transaction validation tests', () => { .getTransactionsValidationErrors() .join(' ') .includes('exceed maximum blob gas per block'), - 'tx erros includes correct error message when too many blobs in a block' + 'tx erros includes correct error message when too many blobs in a block', ) }) }) @@ -292,7 +292,7 @@ describe('fake exponential', () => { assert.equal( fakeExponential(BigInt(input[0]), BigInt(input[1]), BigInt(input[2])), BigInt(input[3]), - 'fake exponential produced expected output' + 'fake exponential produced expected output', ) } }) diff --git a/packages/block/test/eip4895block.spec.ts b/packages/block/test/eip4895block.spec.ts index 2159b51bbc..fec9262db0 100644 --- a/packages/block/test/eip4895block.spec.ts +++ b/packages/block/test/eip4895block.spec.ts @@ -41,14 +41,14 @@ describe('EIP4895 tests', () => { // get withdwalsArray const gethBlockBytesArray = RLP.decode(hexToBytes(`0x${gethWithdrawals8BlockRlp}`)) const withdrawals = (gethBlockBytesArray[3] as WithdrawalBytes[]).map((wa) => - Withdrawal.fromValuesArray(wa) + Withdrawal.fromValuesArray(wa), ) assert.equal(withdrawals.length, 8, '8 withdrawals 
should have been found') const gethWitdrawalsRoot = (gethBlockBytesArray[0] as Uint8Array[])[16] as Uint8Array assert.deepEqual( await genWithdrawalsTrieRoot(withdrawals), gethWitdrawalsRoot, - 'withdrawalsRoot should be valid' + 'withdrawalsRoot should be valid', ) }) @@ -62,19 +62,19 @@ describe('EIP4895 tests', () => { }, { common: earlyCommon, - } + }, ) }, undefined, undefined, - 'should throw when setting withdrawalsRoot with EIP4895 not being activated' + 'should throw when setting withdrawalsRoot with EIP4895 not being activated', ) assert.doesNotThrow(() => { BlockHeader.fromHeaderData( {}, { common, - } + }, ) }, 'should not throw when withdrawalsRoot is undefined with EIP4895 being activated') assert.doesNotThrow(() => { @@ -84,7 +84,7 @@ describe('EIP4895 tests', () => { }, { common, - } + }, ) }, 'correctly instantiates an EIP4895 block header') }) @@ -98,19 +98,19 @@ describe('EIP4895 tests', () => { }, { common: earlyCommon, - } + }, ) }, undefined, undefined, - 'should throw when setting withdrawals with EIP4895 not being activated' + 'should throw when setting withdrawals with EIP4895 not being activated', ) assert.doesNotThrow(() => { createBlockFromBlockData( {}, { common, - } + }, ) }, 'should not throw when withdrawals is undefined with EIP4895 being activated') assert.doesNotThrow(() => { @@ -123,7 +123,7 @@ describe('EIP4895 tests', () => { }, { common, - } + }, ) }) const block = createBlockFromBlockData( @@ -135,17 +135,17 @@ describe('EIP4895 tests', () => { }, { common, - } + }, ) assert.notOk( await block.withdrawalsTrieIsValid(), - 'should invalidate the empty withdrawals root' + 'should invalidate the empty withdrawals root', ) const validHeader = BlockHeader.fromHeaderData( { withdrawalsRoot: KECCAK256_RLP, }, - { common } + { common }, ) const validBlock = createBlockFromBlockData( { @@ -154,7 +154,7 @@ describe('EIP4895 tests', () => { }, { common, - } + }, ) assert.ok(await validBlock.withdrawalsTrieIsValid(), 'should validate empty withdrawals root') @@ -169,18 +169,18 @@ describe('EIP4895 tests', () => { { header: { withdrawalsRoot: hexToBytes( - '0x897ca49edcb278aecab2688bcc2b7b7ee43524cc489672534fee332a172f1718' + '0x897ca49edcb278aecab2688bcc2b7b7ee43524cc489672534fee332a172f1718', ), }, withdrawals: [withdrawal], }, { common, - } + }, ) assert.ok( await validBlockWithWithdrawal.withdrawalsTrieIsValid(), - 'should validate withdrawals root' + 'should validate withdrawals root', ) const withdrawal2 = { @@ -194,18 +194,18 @@ describe('EIP4895 tests', () => { { header: { withdrawalsRoot: hexToBytes( - '0x3b514862c42008079d461392e29d5b6775dd5ed370a6c4441ccb8ab742bf2436' + '0x3b514862c42008079d461392e29d5b6775dd5ed370a6c4441ccb8ab742bf2436', ), }, withdrawals: [withdrawal, withdrawal2], }, { common, - } + }, ) assert.ok( await validBlockWithWithdrawal2.withdrawalsTrieIsValid(), - 'should validate withdrawals root' + 'should validate withdrawals root', ) assert.doesNotThrow(() => { validBlockWithWithdrawal.hash() @@ -229,7 +229,7 @@ describe('EIP4895 tests', () => { }, undefined, undefined, - 'should provide withdrawals array when 4895 is active' + 'should provide withdrawals array when 4895 is active', ) }) @@ -239,7 +239,7 @@ describe('EIP4895 tests', () => { block['cache'].withdrawalsTrieRoot = randomBytes(32) assert.ok( await block.withdrawalsTrieIsValid(), - 'correctly executed code path where withdrawals length is 0' + 'correctly executed code path where withdrawals length is 0', ) }) }) diff --git a/packages/block/test/eip7685block.spec.ts 
b/packages/block/test/eip7685block.spec.ts index f51f129bfd..251d172c8a 100644 --- a/packages/block/test/eip7685block.spec.ts +++ b/packages/block/test/eip7685block.spec.ts @@ -59,7 +59,7 @@ describe('7685 tests', () => { requests: [request], header: { requestsRoot }, }, - { common } + { common }, ) assert.equal(block.requests?.length, 1) assert.deepEqual(block.header.requestsRoot, requestsRoot) @@ -71,7 +71,7 @@ describe('7685 tests', () => { requests: [request], header: { requestsRoot: randomBytes(32) }, }, - { common } + { common }, ) assert.equal(await block.requestsTrieIsValid(), false) @@ -90,7 +90,7 @@ describe('7685 tests', () => { requests, header: { requestsRoot }, }, - { common } + { common }, ) assert.ok(await block.requestsTrieIsValid()) @@ -102,8 +102,8 @@ describe('7685 tests', () => { requests: [request1, request3, request2], header: { requestsRoot }, }, - { common } - ) + { common }, + ), ).rejects.toThrow('ascending order') }) }) @@ -114,7 +114,7 @@ describe('fromValuesArray tests', () => { [BlockHeader.fromHeaderData({}, { common }).raw(), [], [], [], []], { common, - } + }, ) assert.deepEqual(block.header.requestsRoot, KECCAK256_RLP) }) @@ -136,7 +136,7 @@ describe('fromValuesArray tests', () => { ], { common, - } + }, ) assert.deepEqual(block.header.requestsRoot, requestsRoot) assert.equal(block.requests?.length, 3) @@ -162,7 +162,7 @@ describe('fromRPC tests', () => { ], { common, - } + }, ) const jsonBlock = block.toJSON() const rpcBlock: any = { ...jsonBlock.header, requests: jsonBlock.requests } diff --git a/packages/block/test/from-beacon-payload.spec.ts b/packages/block/test/from-beacon-payload.spec.ts index 519c649adc..778a5c816b 100644 --- a/packages/block/test/from-beacon-payload.spec.ts +++ b/packages/block/test/from-beacon-payload.spec.ts @@ -37,7 +37,7 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { }) const parentHeader = BlockHeader.fromHeaderData( { excessBlobGas: BigInt(0), blobGasUsed: block.header.excessBlobGas! 
+ BigInt(393216) }, - { common } + { common }, ) block.validateBlobTransactions(parentHeader) assert.ok(true, `successfully constructed block=${block.header.number}`) @@ -55,7 +55,7 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { ...payload87335, block_hash: payload87475.block_hash, } as BeaconPayloadJson, - { common } + { common }, ) assert.fail(`should have failed constructing the block`) } catch (e) { @@ -72,7 +72,7 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { ...payload87475, block_hash: '0x573714bdd0ca5e47bc32008751c4fc74237f8cb354fbc1475c1d0ece38236ea4', } as BeaconPayloadJson, - { common } + { common }, ) const parentHeader = BlockHeader.fromHeaderData({ excessBlobGas: BigInt(0) }, { common }) block.validateBlobTransactions(parentHeader) @@ -102,7 +102,7 @@ describe('[fromExecutionPayloadJson]: kaustinen', () => { assert.deepEqual( block.executionWitness, payloadKaustinen.execution_witness as VerkleExecutionWitness, - 'execution witness should match' + 'execution witness should match', ) }) }) diff --git a/packages/block/test/from-rpc.spec.ts b/packages/block/test/from-rpc.spec.ts index 23c255fbf2..1aecba0621 100644 --- a/packages/block/test/from-rpc.spec.ts +++ b/packages/block/test/from-rpc.spec.ts @@ -48,11 +48,11 @@ describe('[fromRPC]:', () => { const createBlockFromTransactionValueAsInteger = createBlockFromRpc( blockDataTransactionValueAsInteger as JsonRpcBlock, undefined, - { common } + { common }, ) assert.equal( createBlockFromTransactionValueAsInteger.transactions[0].value.toString(), - valueAsIntegerString + valueAsIntegerString, ) }) @@ -64,13 +64,13 @@ describe('[fromRPC]:', () => { const createBlockFromTransactionGasPriceAsInteger = createBlockFromRpc( blockDataTransactionGasPriceAsInteger as JsonRpcBlock, undefined, - { common } + { common }, ) assert.equal( ( createBlockFromTransactionGasPriceAsInteger.transactions[0] as LegacyTransaction ).gasPrice.toString(), - gasPriceAsIntegerString + gasPriceAsIntegerString, ) }) @@ -81,11 +81,11 @@ describe('[fromRPC]:', () => { undefined, { common, - } + }, ) assert.equal( blockDifficultyAsInteger.header.difficulty.toString(), - blockDataDifficultyAsInteger.difficulty + blockDataDifficultyAsInteger.difficulty, ) }) @@ -94,7 +94,7 @@ describe('[fromRPC]:', () => { const block = createBlockFromRpc(testDataFromRpcGoerliLondon as JsonRpcBlock, [], { common }) assert.equal( `0x${block.header.baseFeePerGas?.toString(16)}`, - testDataFromRpcGoerliLondon.baseFeePerGas + testDataFromRpcGoerliLondon.baseFeePerGas, ) assert.equal(bytesToHex(block.hash()), testDataFromRpcGoerliLondon.hash) }) @@ -137,13 +137,13 @@ describe('[fromRPC] - Alchemy/Infura API block responses', () => { assert.equal( bytesToHex(block.hash()), infura2000004woTxs.hash, - 'created premerge block w/o txns' + 'created premerge block w/o txns', ) block = createBlockFromRpc(infura2000004wTxs as JsonRpcBlock, [], { common, setHardfork: true }) assert.equal( bytesToHex(block.hash()), infura2000004wTxs.hash, - 'created premerge block with txns' + 'created premerge block with txns', ) block = createBlockFromRpc(infura15571241woTxs as JsonRpcBlock, [], { common, @@ -152,7 +152,7 @@ describe('[fromRPC] - Alchemy/Infura API block responses', () => { assert.equal( bytesToHex(block.hash()), infura15571241woTxs.hash, - 'created post merge block without txns' + 'created post merge block without txns', ) block = createBlockFromRpc(infura15571241wTxs as JsonRpcBlock, [], { @@ -162,7 +162,7 @@ describe('[fromRPC] - Alchemy/Infura API block 
responses', () => { assert.equal( bytesToHex(block.hash()), infura15571241wTxs.hash, - 'created post merge block with txns' + 'created post merge block with txns', ) }) @@ -212,7 +212,7 @@ describe('[fromJsonRpcProvider]', () => { assert.equal( bytesToHex(block.hash()), blockHash, - 'assembled a block from blockdata from a provider' + 'assembled a block from blockdata from a provider', ) try { await createBlockFromJsonRpcProvider(provider, bytesToHex(randomBytes(32)), {}) @@ -220,7 +220,7 @@ describe('[fromJsonRpcProvider]', () => { } catch (err: any) { assert.ok( err.message.includes('No block data returned from provider'), - 'returned correct error message' + 'returned correct error message', ) } global.fetch = realFetch diff --git a/packages/block/test/header.spec.ts b/packages/block/test/header.spec.ts index 4d4cafc066..92d4d11c75 100644 --- a/packages/block/test/header.spec.ts +++ b/packages/block/test/header.spec.ts @@ -57,14 +57,14 @@ describe('[Block]: Header functions', () => { assert.equal( header.common.hardfork(), 'chainstart', - 'should initialize with correct HF provided' + 'should initialize with correct HF provided', ) common.setHardfork(Hardfork.Byzantium) assert.equal( header.common.hardfork(), 'chainstart', - 'should stay on correct HF if outer common HF changes' + 'should stay on correct HF if outer common HF changes', ) header = BlockHeader.fromHeaderData({}, { common }) @@ -78,7 +78,7 @@ describe('[Block]: Header functions', () => { header = BlockHeader.fromHeaderData({}, { freeze: false }) assert.ok( !Object.isFrozen(header), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) }) @@ -98,7 +98,7 @@ describe('[Block]: Header functions', () => { }) assert.ok( !Object.isFrozen(header), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) assert.throws( @@ -110,19 +110,19 @@ describe('[Block]: Header functions', () => { }), 'A base fee', undefined, - 'throws when RLP serialized block with no base fee on default hardfork (london) and setHardfork left undefined' + 'throws when RLP serialized block with no base fee on default hardfork (london) and setHardfork left undefined', ) header = BlockHeader.fromRLPSerializedHeader( hexToBytes( - '0xf90214a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000850400000000808213888080a011bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82faa00000000000000000000000000000000000000000000000000000000000000000880000000000000042' + 
'0xf90214a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000850400000000808213888080a011bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82faa00000000000000000000000000000000000000000000000000000000000000000880000000000000042', ), - { common, setHardfork: false } + { common, setHardfork: false }, ) assert.equal( bytesToHex(header.hash()), '0xf0f936910ebf101b7b168bbe08e3f166ce1e75e16f513dd5a97af02fbe7de7c0', - 'genesis block should produce incorrect hash since default hardfork is london' + 'genesis block should produce incorrect hash since default hardfork is london', ) }) @@ -158,7 +158,7 @@ describe('[Block]: Header functions', () => { header = BlockHeader.fromValuesArray(headerArray, { common, freeze: false }) assert.ok( !Object.isFrozen(header), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) }) @@ -269,9 +269,9 @@ describe('[Block]: Header functions', () => { } catch (error: any) { assert.ok( (error.message as string).includes( - 'extraData must be 97 bytes on non-epoch transition blocks, received 32 bytes' + 'extraData must be 97 bytes on non-epoch transition blocks, received 32 bytes', ), - testCase + testCase, ) } @@ -281,7 +281,7 @@ describe('[Block]: Header functions', () => { new Uint8Array(32), new Uint8Array(65), new Uint8Array(20), - new Uint8Array(21) + new Uint8Array(21), ) const epoch = BigInt((common.consensusConfig() as CliqueConfig).epoch) try { @@ -290,9 +290,9 @@ describe('[Block]: Header functions', () => { } catch (error: any) { assert.ok( (error.message as string).includes( - 'invalid signer list length in extraData, received signer length of 41 (not divisible by 20)' + 'invalid signer list length in extraData, received signer length of 41 (not divisible by 20)', ), - testCase + testCase, ) } }) @@ -305,7 +305,7 @@ describe('[Block]: Header functions', () => { BlockHeader.fromHeaderData({ extraData }, { common, skipConsensusFormatValidation: true }) assert.ok( true, - 'should instantiate header with invalid extraData when skipConsensusFormatValidation === true' + 'should instantiate header with invalid extraData when skipConsensusFormatValidation === true', ) } catch (error: any) { assert.fail('should not throw') @@ -319,26 +319,26 @@ describe('[Block]: Header functions', () => { () => BlockHeader.fromHeaderData({ parentHash: badHash }), 'parentHash must be 32 bytes', undefined, - 'throws on invalid parent hash length' + 'throws on invalid parent hash length', ) assert.throws( () => BlockHeader.fromHeaderData({ stateRoot: badHash }), 'stateRoot must be 32 bytes', undefined, - 'throws on invalid state root hash length' + 'throws on invalid state root 
hash length', ) assert.throws( () => BlockHeader.fromHeaderData({ transactionsTrie: badHash }), 'transactionsTrie must be 32 bytes', undefined, - 'throws on invalid transactionsTrie root hash length' + 'throws on invalid transactionsTrie root hash length', ) assert.throws( () => BlockHeader.fromHeaderData({ nonce: new Uint8Array(5) }), 'nonce must be 8 bytes', undefined, - 'contains nonce length error message' + 'contains nonce length error message', ) }) /* @@ -460,12 +460,12 @@ describe('[Block]: Header functions', () => { for (const key of Object.keys(bcBlockGasLimitTestData)) { const genesisRlp = hexToBytes( bcBlockGasLimitTestData[key as keyof typeof bcBlockGasLimitTestData] - .genesisRLP as PrefixedHexString + .genesisRLP as PrefixedHexString, ) const parentBlock = createBlockFromRLPSerializedBlock(genesisRlp, { common }) const blockRlp = hexToBytes( bcBlockGasLimitTestData[key as keyof typeof bcBlockGasLimitTestData].blocks[0] - .rlp as PrefixedHexString + .rlp as PrefixedHexString, ) const block = createBlockFromRLPSerializedBlock(blockRlp, { common }) assert.doesNotThrow(() => block.validateGasLimit(parentBlock)) @@ -486,7 +486,7 @@ describe('[Block]: Header functions', () => { assert.equal( bytesToHex(header.hash()), '0x88e96d4537bea4d9c05d12549907b32561d3bf31f45aae734cdc119f13406cb6', - 'correct PoW hash (mainnet block 1)' + 'correct PoW hash (mainnet block 1)', ) common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) @@ -494,7 +494,7 @@ describe('[Block]: Header functions', () => { assert.equal( bytesToHex(header.hash()), '0x8f5bab218b6bb34476f51ca588e9f4553a3a7ce5e13a66c660a5283e97e9a85a', - 'correct PoA clique hash (goerli block 1)' + 'correct PoA clique hash (goerli block 1)', ) }) @@ -506,7 +506,7 @@ describe('[Block]: Header functions', () => { assert.deepEqual( header.withdrawalsRoot, KECCAK256_RLP, - 'withdrawalsRoot should be set to KECCAK256_RLP' + 'withdrawalsRoot should be set to KECCAK256_RLP', ) }) }) diff --git a/packages/block/test/mergeBlock.spec.ts b/packages/block/test/mergeBlock.spec.ts index f9275d225a..3a0ac43597 100644 --- a/packages/block/test/mergeBlock.spec.ts +++ b/packages/block/test/mergeBlock.spec.ts @@ -110,7 +110,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { undefined, { common, - } + }, ) assert.fail('should have thrown') } catch (e: any) { @@ -123,7 +123,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { let block = createBlockFromBlockData({ header: { mixHash } }, { common }) assert.ok( equalsBytes(block.header.prevRandao, mixHash), - 'prevRandao should return mixHash value' + 'prevRandao should return mixHash value', ) const commonLondon = common.copy() diff --git a/packages/block/test/util.ts b/packages/block/test/util.ts index 1383be87a0..c6d39e9847 100644 --- a/packages/block/test/util.ts +++ b/packages/block/test/util.ts @@ -17,7 +17,7 @@ function createBlock( parentBlock: Block, extraData: string, uncles?: BlockHeader[], - common?: Common + common?: Common, ): Block { uncles = uncles ?? [] common = common ?? 
new Common({ chain: Chain.Mainnet }) @@ -53,7 +53,7 @@ function createBlock( { common, calcDifficultyFromHeader: parentBlock.header, - } + }, ) } diff --git a/packages/block/tsconfig.lint.json b/packages/block/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/block/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/blockchain/.eslintrc.cjs b/packages/blockchain/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/blockchain/.eslintrc.cjs +++ b/packages/blockchain/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/blockchain/examples/clique.ts b/packages/blockchain/examples/clique.ts index 92a822a03f..4933f25081 100644 --- a/packages/blockchain/examples/clique.ts +++ b/packages/blockchain/examples/clique.ts @@ -1,6 +1,8 @@ -import { createBlockchain, CliqueConsensus, ConsensusDict } from '@ethereumjs/blockchain' +import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' import { Chain, Common, ConsensusAlgorithm, Hardfork } from '@ethereumjs/common' +import type { ConsensusDict } from '@ethereumjs/blockchain' + const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.London }) const consensusDict: ConsensusDict = {} diff --git a/packages/blockchain/examples/gethGenesis.ts b/packages/blockchain/examples/gethGenesis.ts index 97b7df1db4..b435181136 100644 --- a/packages/blockchain/examples/gethGenesis.ts +++ b/packages/blockchain/examples/gethGenesis.ts @@ -1,6 +1,7 @@ import { createBlockchain } from '@ethereumjs/blockchain' -import { Common, createCommonFromGethGenesis, parseGethGenesis } from '@ethereumjs/common' +import { createCommonFromGethGenesis } from '@ethereumjs/common' import { bytesToHex, parseGethGenesisState } from '@ethereumjs/util' + import gethGenesisJson from './genesisData/post-merge.json' const main = async () => { @@ -14,8 +15,8 @@ const main = async () => { const genesisBlockHash = blockchain.genesisBlock.hash() common.setForkHashes(genesisBlockHash) console.log( - `Genesis hash from geth genesis parameters - ${bytesToHex(blockchain.genesisBlock.hash())}` + `Genesis hash from geth genesis parameters - ${bytesToHex(blockchain.genesisBlock.hash())}`, ) } -main() +void main() diff --git a/packages/blockchain/examples/simple.ts b/packages/blockchain/examples/simple.ts index 38c4024bee..cd45156323 100644 --- a/packages/blockchain/examples/simple.ts +++ b/packages/blockchain/examples/simple.ts @@ -1,4 +1,4 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlockFromBlockData } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' import { Common, Hardfork } from '@ethereumjs/common' import { bytesToHex } from '@ethereumjs/util' @@ -21,7 +21,7 @@ const main = async () => { difficulty: blockchain.genesisBlock.header.difficulty + 1n, }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block2 = createBlockFromBlockData( { @@ -31,7 +31,7 @@ const main = async () => { difficulty: block.header.difficulty + 1n, }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) // See @ethereumjs/block 
for more details on how to create a block await blockchain.putBlock(block) @@ -47,4 +47,4 @@ const main = async () => { // Block 1: 0xa1a061528d74ba81f560e1ebc4f29d6b58171fc13b72b876cdffe6e43b01bdc5 // Block 2: 0x5583be91cf9fb14f5dbeb03ad56e8cef19d1728f267c35a25ba5a355a528f602 } -main() +void main() diff --git a/packages/blockchain/src/blockchain.ts b/packages/blockchain/src/blockchain.ts index 8e81b04404..a9a45d740e 100644 --- a/packages/blockchain/src/blockchain.ts +++ b/packages/blockchain/src/blockchain.ts @@ -141,7 +141,7 @@ export class Blockchain implements BlockchainInterface { private _consensusCheck() { if (this._validateConsensus && this.consensus === undefined) { throw new Error( - `Consensus object for ${this.common.consensusAlgorithm()} must be passed (see consensusDict option) if consensus validation is activated` + `Consensus object for ${this.common.consensusAlgorithm()} must be passed (see consensusDict option) if consensus validation is activated`, ) } } @@ -168,7 +168,7 @@ export class Blockchain implements BlockchainInterface { shallowCopy(): Blockchain { const copiedBlockchain = Object.create( Object.getPrototypeOf(this), - Object.getOwnPropertyDescriptors(this) + Object.getOwnPropertyDescriptors(this), ) copiedBlockchain.common = this.common.copy() return copiedBlockchain @@ -366,7 +366,7 @@ export class Blockchain implements BlockchainInterface { return } throw new Error( - 'Cannot put a different genesis block than current blockchain genesis: create a new Blockchain' + 'Cannot put a different genesis block than current blockchain genesis: create a new Blockchain', ) } @@ -379,7 +379,7 @@ export class Blockchain implements BlockchainInterface { if (block.common.chainId() !== this.common.chainId()) { throw new Error( - `Chain mismatch while trying to put block or header. Chain ID of block: ${block.common.chainId}, chain ID of blockchain : ${this.common.chainId}` + `Chain mismatch while trying to put block or header. 
Chain ID of block: ${block.common.chainId()}, chain ID of blockchain: ${this.common.chainId()}`, ) } @@ -515,7 +515,7 @@ export class Blockchain implements BlockchainInterface { if (!(dif < BIGINT_8 && dif > BIGINT_1)) { throw new Error( - `uncle block has a parent that is too old or too young ${header.errorStr()}` + `uncle block has a parent that is too old or too young ${header.errorStr()}`, ) } } @@ -637,7 +637,7 @@ export class Blockchain implements BlockchainInterface { if (!canonicalChainHashes[parentHash]) { throw new Error( - `The parent hash of the uncle header is not part of the canonical chain ${block.errorStr()}` + `The parent hash of the uncle header is not part of the canonical chain ${block.errorStr()}`, ) } @@ -712,7 +712,7 @@ export class Blockchain implements BlockchainInterface { blockId: Uint8Array | bigint | number, maxBlocks: number, skip: number, - reverse: boolean + reverse: boolean, ): Promise { return this.runWithLock(async () => { const blocks: Block[] = [] @@ -853,7 +853,7 @@ export class Blockchain implements BlockchainInterface { blockHash: Uint8Array, blockNumber: bigint, headHash: Uint8Array | null, - ops: DBOp[] + ops: DBOp[], ) { // delete header, body, hash to number mapping and td ops.push(DBOp.del(DBTarget.Header, { blockHash, blockNumber })) @@ -901,7 +901,7 @@ export class Blockchain implements BlockchainInterface { name: string, onBlock: OnBlock, maxBlocks?: number, - releaseLockOnCallback?: boolean + releaseLockOnCallback?: boolean, ): Promise { return this.runWithLock(async (): Promise => { let headHash = this._heads[name] ?? this.genesisBlock.hash() @@ -943,7 +943,7 @@ export class Blockchain implements BlockchainInterface { await this._lock.acquire() // If lock was released check if reorg occured const nextBlockMayBeReorged = await this.getBlock(nextBlockNumber).catch( - (_e) => null + (_e) => null, ) reorgWhileOnBlock = nextBlockMayBeReorged ? !equalsBytes(nextBlockMayBeReorged.hash(), nextBlock.hash()) @@ -1041,7 +1041,7 @@ export class Blockchain implements BlockchainInterface { private async _deleteCanonicalChainReferences( blockNumber: bigint, headHash: Uint8Array, - ops: DBOp[] + ops: DBOp[], ) { try { let hash: Uint8Array | false @@ -1177,7 +1177,7 @@ export class Blockchain implements BlockchainInterface { // LevelDB doesn't handle Uint8Arrays properly when they are part // of a JSON object being stored as a value in the DB const hexHeads = Object.fromEntries( - Object.entries(this._heads).map((entry) => [entry[0], bytesToUnprefixedHex(entry[1])]) + Object.entries(this._heads).map((entry) => [entry[0], bytesToUnprefixedHex(entry[1])]), ) return [ DBOp.set(DBTarget.Heads, hexHeads), @@ -1212,7 +1212,7 @@ export class Blockchain implements BlockchainInterface { async checkAndTransitionHardForkByNumber( number: BigIntLike, td?: BigIntLike, - timestamp?: BigIntLike + timestamp?: BigIntLike, ): Promise { this.common.setHardforkBy({ blockNumber: number, @@ -1284,7 +1284,7 @@ export class Blockchain implements BlockchainInterface { } return createBlockFromBlockData( { header, withdrawals: common.isActivatedEIP(4895) ?
[] : undefined }, - { common } + { common }, ) } } diff --git a/packages/blockchain/src/consensus/clique.ts b/packages/blockchain/src/consensus/clique.ts index 9f59a7a95b..8d8a163142 100644 --- a/packages/blockchain/src/consensus/clique.ts +++ b/packages/blockchain/src/consensus/clique.ts @@ -42,7 +42,7 @@ type CliqueLatestSignerStates = CliqueSignerState[] // Clique Vote type CliqueVote = [ blockNumber: bigint, - vote: [signer: Address, beneficiary: Address, cliqueNonce: Uint8Array] + vote: [signer: Address, beneficiary: Address, cliqueNonce: Uint8Array], ] type CliqueLatestVotes = CliqueVote[] @@ -154,7 +154,7 @@ export class CliqueConsensus implements Consensus { for (const [i, cSigner] of checkpointSigners.entries()) { if (activeSigners[i]?.equals(cSigner) !== true) { throw new Error( - `checkpoint signer not found in active signers list at index ${i}: ${cSigner}` + `checkpoint signer not found in active signers list at index ${i}: ${cSigner}`, ) } } @@ -178,7 +178,7 @@ export class CliqueConsensus implements Consensus { throw new Error(`${msg} ${header.errorStr()}`) } const signerIndex = signers.findIndex((address: Address) => - address.equals(header.cliqueSigner()) + address.equals(header.cliqueSigner()), ) const inTurn = header.number % BigInt(signers.length) === BigInt(signerIndex) if ( @@ -336,7 +336,7 @@ export class CliqueConsensus implements Consensus { }) // Discard votes for added signer this._cliqueLatestVotes = this._cliqueLatestVotes.filter( - (vote) => !vote[1][1].equals(beneficiary) + (vote) => !vote[1][1].equals(beneficiary), ) debug(`[Block ${header.number}] Clique majority consensus (AUTH ${beneficiary})`) } @@ -370,7 +370,7 @@ export class CliqueConsensus implements Consensus { activeSigners = activeSigners.filter((signer) => !signer.equals(beneficiary)) this._cliqueLatestVotes = this._cliqueLatestVotes.filter( // Discard votes from removed signer and for removed signer - (vote) => !vote[1][0].equals(beneficiary) && !vote[1][1].equals(beneficiary) + (vote) => !vote[1][0].equals(beneficiary) && !vote[1][1].equals(beneficiary), ) debug(`[Block ${header.number}] Clique majority consensus (DROP ${beneficiary})`) } @@ -381,17 +381,17 @@ export class CliqueConsensus implements Consensus { debug( `[Block ${header.number}] New clique vote: ${signer} -> ${beneficiary} ${ equalsBytes(nonce, CLIQUE_NONCE_AUTH) ? 
'AUTH' : 'DROP' - }` + }`, ) } if (consensus) { if (round === 1) { debug( - `[Block ${header.number}] Clique majority consensus on existing votes -> update signer states` + `[Block ${header.number}] Clique majority consensus on existing votes -> update signer states`, ) } else { debug( - `[Block ${header.number}] Clique majority consensus on new vote -> update signer states` + `[Block ${header.number}] Clique majority consensus on new vote -> update signer states`, ) } const newSignerState: CliqueSignerState = [header.number, activeSigners] @@ -483,7 +483,7 @@ export class CliqueConsensus implements Consensus { // remove blockNumber from clique snapshots // (latest signer states, latest votes, latest block signers) this._cliqueLatestSignerStates = this._cliqueLatestSignerStates.filter( - (s) => s[0] <= blockNumber + (s) => s[0] <= blockNumber, ) await this.cliqueUpdateSignerStates() @@ -491,7 +491,7 @@ export class CliqueConsensus implements Consensus { await this.cliqueUpdateVotes() this._cliqueLatestBlockSigners = this._cliqueLatestBlockSigners.filter( - (s) => s[0] <= blockNumber + (s) => s[0] <= blockNumber, ) await this.cliqueUpdateLatestBlockSigners() } @@ -518,7 +518,7 @@ export class CliqueConsensus implements Consensus { if (length > limit) { this._cliqueLatestBlockSigners = this._cliqueLatestBlockSigners.slice( length - limit, - length + length, ) } } @@ -555,7 +555,7 @@ export class CliqueConsensus implements Consensus { if (signerVotes === undefined) return [] const votes = RLP.decode(signerVotes as Uint8Array) as [ Uint8Array, - [Uint8Array, Uint8Array, Uint8Array] + [Uint8Array, Uint8Array, Uint8Array], ] return votes.map((vote) => { const blockNum = bytesToBigInt(vote[0] as Uint8Array) diff --git a/packages/blockchain/src/constructors.ts b/packages/blockchain/src/constructors.ts index 75c3cc2925..8be3ba2426 100644 --- a/packages/blockchain/src/constructors.ts +++ b/packages/blockchain/src/constructors.ts @@ -26,7 +26,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { } else { stateRoot = await getGenesisStateRoot( Number(blockchain.common.chainId()) as Chain, - blockchain.common + blockchain.common, ) } } @@ -42,7 +42,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { // DB is indeed the Genesis block generated or assigned. if (dbGenesisBlock !== undefined && !equalsBytes(genesisBlock.hash(), dbGenesisBlock.hash())) { throw new Error( - 'The genesis block in the DB has a different hash than the provided genesis block.' 
+ 'The genesis block in the DB has a different hash than the provided genesis block.', ) } @@ -82,7 +82,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { await blockchain.checkAndTransitionHardForkByNumber( latestHeader.number, td, - latestHeader.timestamp + latestHeader.timestamp, ) } @@ -98,7 +98,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { */ export async function createBlockchainFromBlocksData( blocksData: BlockData[], - opts: BlockchainOptions = {} + opts: BlockchainOptions = {}, ) { const blockchain = await createBlockchain(opts) for (const blockData of blocksData) { diff --git a/packages/blockchain/src/db/helpers.ts b/packages/blockchain/src/db/helpers.ts index 141f9e7ce3..29685634d2 100644 --- a/packages/blockchain/src/db/helpers.ts +++ b/packages/blockchain/src/db/helpers.ts @@ -38,7 +38,7 @@ function DBSetBlockOrHeader(blockBody: Block | BlockHeader): DBOp[] { DBOp.set(DBTarget.Header, headerValue, { blockNumber, blockHash, - }) + }), ) const isGenesis = header.number === BIGINT_0 @@ -49,7 +49,7 @@ function DBSetBlockOrHeader(blockBody: Block | BlockHeader): DBOp[] { DBOp.set(DBTarget.Body, bodyValue, { blockNumber, blockHash, - }) + }), ) } @@ -73,7 +73,7 @@ function DBSaveLookups(blockHash: Uint8Array, blockNumber: bigint, skipNumIndex? ops.push( DBOp.set(DBTarget.HashToNumber, blockNumber8Bytes, { blockHash, - }) + }), ) return ops } diff --git a/packages/blockchain/src/db/manager.ts b/packages/blockchain/src/db/manager.ts index c66310ea5b..d983fc5fc0 100644 --- a/packages/blockchain/src/db/manager.ts +++ b/packages/blockchain/src/db/manager.ts @@ -249,8 +249,8 @@ export class DBManager { op.baseDBOp.type !== undefined ? op.baseDBOp.type : op.baseDBOp.value !== undefined - ? 'put' - : 'del' + ? 
'put' + : 'del' const convertedOp = { key: op.baseDBOp.key, value: op.baseDBOp.value, diff --git a/packages/blockchain/src/db/operation.ts b/packages/blockchain/src/db/operation.ts index f1b66a7b28..a861b9605f 100644 --- a/packages/blockchain/src/db/operation.ts +++ b/packages/blockchain/src/db/operation.ts @@ -114,7 +114,7 @@ export class DBOp { public static set( operationTarget: DBTarget, value: Uint8Array | object, - key?: DatabaseKey + key?: DatabaseKey, ): DBOp { const dbOperation = new DBOp(operationTarget, key) dbOperation.baseDBOp.value = value diff --git a/packages/blockchain/src/helpers.ts b/packages/blockchain/src/helpers.ts index 2d4d6c3046..f654b314a7 100644 --- a/packages/blockchain/src/helpers.ts +++ b/packages/blockchain/src/helpers.ts @@ -19,7 +19,7 @@ import type { GenesisState } from '@ethereumjs/util' */ export async function genGenesisStateRoot( genesisState: GenesisState, - common: Common + common: Common, ): Promise { const genCommon = common.copy() genCommon.setHardforkBy({ diff --git a/packages/blockchain/src/types.ts b/packages/blockchain/src/types.ts index 10599b6964..c46debba28 100644 --- a/packages/blockchain/src/types.ts +++ b/packages/blockchain/src/types.ts @@ -44,7 +44,7 @@ export interface BlockchainInterface { name: string, onBlock: OnBlock, maxBlocks?: number, - releaseLockOnCallback?: boolean + releaseLockOnCallback?: boolean, ): Promise /** @@ -243,7 +243,7 @@ export interface Consensus { newBlock( block: Block, commonAncestor?: BlockHeader, - ancientHeaders?: BlockHeader[] + ancientHeaders?: BlockHeader[], ): Promise } diff --git a/packages/blockchain/test/blockValidation.spec.ts b/packages/blockchain/test/blockValidation.spec.ts index a2ea9d0717..85736948eb 100644 --- a/packages/blockchain/test/blockValidation.spec.ts +++ b/packages/blockchain/test/blockValidation.spec.ts @@ -35,7 +35,7 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('uncle is already included'), - 'block throws if uncle is already included' + 'block throws if uncle is already included', ) } }) @@ -62,7 +62,7 @@ describe('[Blockchain]: Block validation tests', () => { } catch (err: any) { assert.ok( err.message.includes('not found in DB'), - 'block throws if uncle parent hash is not part of the canonical chain' + 'block throws if uncle parent hash is not part of the canonical chain', ) } }) @@ -86,7 +86,7 @@ describe('[Blockchain]: Block validation tests', () => { lastBlock, 'too-old-uncle', [uncleBlock.header], - common + common, ) try { @@ -95,7 +95,7 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('uncle block has a parent that is too old'), - 'block throws uncle is too old' + 'block throws uncle is too old', ) } }) @@ -117,7 +117,7 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('uncle block has a parent that is too old or too young'), - 'block throws uncle is too young' + 'block throws uncle is too young', ) } }) @@ -139,7 +139,7 @@ describe('[Blockchain]: Block validation tests', () => { gasLimit: BigInt(5000), }, }, - { common } + { common }, ) const block1 = createBlock(genesis, 'block1', [], common) @@ -153,7 +153,7 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('invalid difficulty block header number=1 '), - 'block throws when uncle header is invalid' + 'block throws when uncle header is invalid', ) } }) @@ -176,7 +176,7 @@ 
describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('The uncle is a canonical block'), - 'block throws if an uncle is a canonical block' + 'block throws if an uncle is a canonical block', ) } }) @@ -198,7 +198,7 @@ describe('[Blockchain]: Block validation tests', () => { assert.deepEqual( (await blockchain.getCanonicalHeadHeader()).uncleHash, block2.header.uncleHash, - 'uncle blocks validated successfully' + 'uncle blocks validated successfully', ) }) @@ -236,7 +236,7 @@ describe('[Blockchain]: Block validation tests', () => { calcDifficultyFromHeader: genesis.header, common, freeze: false, - } + }, ) const block = createBlockFromBlockData({ header }, { common }) @@ -253,7 +253,7 @@ describe('[Blockchain]: Block validation tests', () => { { calcDifficultyFromHeader: block.header, common, - } + }, ) const block2 = createBlockFromBlockData({ header }, { common }) await blockchain.putBlock(block2) @@ -261,7 +261,7 @@ describe('[Blockchain]: Block validation tests', () => { const expectedError = 'Invalid block: base fee not correct' assert.ok( (e.message as string).includes(expectedError), - 'should throw when base fee is not correct' + 'should throw when base fee is not correct', ) } }) @@ -319,7 +319,7 @@ describe('[Blockchain]: Block validation tests', () => { gasLimit: BigInt(5000), }, }, - { common } + { common }, ) await blockchain.putBlock(rootBlock) @@ -332,7 +332,7 @@ describe('[Blockchain]: Block validation tests', () => { assert.deepEqual( (await blockchain.getCanonicalHeadHeader()).uncleHash, preForkBlock.header.uncleHash, - 'able to put pre-london block in chain with pre-london uncles' + 'able to put pre-london block in chain with pre-london uncles', ) common.setHardfork(Hardfork.London) const forkBlock = createBlock(preForkBlock, 'forkBlock', [], common) @@ -357,13 +357,13 @@ describe('[Blockchain]: Block validation tests', () => { { common, setHardfork: false, - } + }, ) assert.deepEqual( forkBlock_ValidCommon.uncleHeaders[0].hash(), uncleHeader.hash(), - 'successfully validated a pre-london uncle on a london block' + 'successfully validated a pre-london uncle on a london block', ) assert.equal(common.hardfork(), Hardfork.London, 'validation did not change common hardfork') @@ -377,9 +377,9 @@ describe('[Blockchain]: Block validation tests', () => { { common, setHardfork: false, - } + }, ), - 'should create block even with pre-London uncle and common evaluated with london since uncle is given default base fee' + 'should create block even with pre-London uncle and common evaluated with london since uncle is given default base fee', ) assert.equal(common.hardfork(), Hardfork.London, 'validation did not change common hardfork') }) @@ -405,7 +405,7 @@ describe('EIP 7685: requests field validation tests', () => { gasLimit: 5000, }, }, - { common } + { common }, ) await expect(async () => blockchain.putBlock(block)).rejects.toThrow('invalid requestsRoot') @@ -422,10 +422,10 @@ describe('EIP 7685: requests field validation tests', () => { }, requests: [{ type: 0x1, bytes: randomBytes(12), serialize: () => randomBytes(32) } as any], }, - { common } + { common }, ) await expect(async () => blockchain.putBlock(blockWithRequest)).rejects.toThrow( - 'invalid requestsRoot' + 'invalid requestsRoot', ) }) }) diff --git a/packages/blockchain/test/clique.spec.ts b/packages/blockchain/test/clique.spec.ts index 2b05a42cb3..544f6bf093 100644 --- a/packages/blockchain/test/clique.spec.ts +++ b/packages/blockchain/test/clique.spec.ts @@ -31,7 
+31,7 @@ const A: Signer = { address: new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), privateKey: hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), publicKey: hexToBytes( - '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195' + '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195', ), } @@ -39,7 +39,7 @@ const B: Signer = { address: new Address(hexToBytes('0x6f62d8382bf2587361db73ceca28be91b2acb6df')), privateKey: hexToBytes('0x2a6e9ad5a6a8e4f17149b8bc7128bf090566a11dbd63c30e5a0ee9f161309cd6'), publicKey: hexToBytes( - '0xca0a55f6e81cb897aee6a1c390aa83435c41048faa0564b226cfc9f3df48b73e846377fb0fd606df073addc7bd851f22547afbbdd5c3b028c91399df802083a2' + '0xca0a55f6e81cb897aee6a1c390aa83435c41048faa0564b226cfc9f3df48b73e846377fb0fd606df073addc7bd851f22547afbbdd5c3b028c91399df802083a2', ), } @@ -47,7 +47,7 @@ const C: Signer = { address: new Address(hexToBytes('0x83c30730d1972baa09765a1ac72a43db27fedce5')), privateKey: hexToBytes('0xf216ddcf276079043c52b5dd144aa073e6b272ad4bfeaf4fbbc044aa478d1927'), publicKey: hexToBytes( - '0x555b19a5cbe6dd082a4a1e1e0520dd52a82ba24fd5598ea31f0f31666c40905ed319314c5fb06d887b760229e1c0e616294e7b1cb5dfefb71507c9112132ce56' + '0x555b19a5cbe6dd082a4a1e1e0520dd52a82ba24fd5598ea31f0f31666c40905ed319314c5fb06d887b760229e1c0e616294e7b1cb5dfefb71507c9112132ce56', ), } @@ -55,7 +55,7 @@ const D: Signer = { address: new Address(hexToBytes('0x8458f408106c4875c96679f3f556a511beabe138')), privateKey: hexToBytes('0x159e95d07a6c64ddbafa6036cdb7b8114e6e8cdc449ca4b0468a6d0c955f991b'), publicKey: hexToBytes( - '0xf02724341e2df54cf53515f079b1354fa8d437e79c5b091b8d8cc7cbcca00fd8ad854cb3b3a85b06c44ecb7269404a67be88b561f2224c94d133e5fc21be915c' + '0xf02724341e2df54cf53515f079b1354fa8d437e79c5b091b8d8cc7cbcca00fd8ad854cb3b3a85b06c44ecb7269404a67be88b561f2224c94d133e5fc21be915c', ), } @@ -63,7 +63,7 @@ const E: Signer = { address: new Address(hexToBytes('0xab80a948c661aa32d09952d2a6c4ad77a4c947be')), privateKey: hexToBytes('0x48ec5a6c4a7fc67b10a9d4c8a8f594a81ae42e41ed061fa5218d96abb6012344'), publicKey: hexToBytes( - '0xadefb82b9f54e80aa3532263e4478739de16fcca6828f4ae842f8a07941c347fa59d2da1300569237009f0f122dc1fd6abb0db8fcb534280aa94948a5cc95f94' + '0xadefb82b9f54e80aa3532263e4478739de16fcca6828f4ae842f8a07941c347fa59d2da1300569237009f0f122dc1fd6abb0db8fcb534280aa94948a5cc95f94', ), } @@ -71,7 +71,7 @@ const F: Signer = { address: new Address(hexToBytes('0xdc7bc81ddf67d037d7439f8e6ff12f3d2a100f71')), privateKey: hexToBytes('0x86b0ff7b6cf70786f29f297c57562905ab0b6c32d69e177a46491e56da9e486e'), publicKey: hexToBytes( - '0xd3e3d2b722e325bfc085ff5638a112b4e7e88ff13f92fc7f6cfc14b5a25e8d1545a2f27d8537b96e8919949d5f8c139ae7fc81aea7cf7fe5d43d7faaa038e35b' + '0xd3e3d2b722e325bfc085ff5638a112b4e7e88ff13f92fc7f6cfc14b5a25e8d1545a2f27d8537b96e8919949d5f8c139ae7fc81aea7cf7fe5d43d7faaa038e35b', ), } @@ -82,11 +82,11 @@ const initWithSigners = async (signers: Signer[], common?: Common) => { const extraData = concatBytes( new Uint8Array(32), ...signers.map((s) => s.address.toBytes()), - new Uint8Array(65) + new Uint8Array(65), ) const genesisBlock = createBlockFromBlockData( { header: { gasLimit: GAS_LIMIT, extraData } }, - { common } + { common }, ) blocks.push(genesisBlock) @@ -109,7 +109,7 @@ function getBlock( signer: Signer, beneficiary?: [Signer, boolean], checkpointSigners?: 
Signer[], - common?: Common + common?: Common, ) { common = common ?? COMMON const number = lastBlock.header.number + BigInt(1) @@ -126,7 +126,7 @@ function getBlock( extraData = concatBytes( new Uint8Array(32), ...checkpointSigners.map((s) => s.address.toBytes()), - new Uint8Array(65) + new Uint8Array(65), ) } @@ -162,7 +162,7 @@ const addNextBlockReorg = async ( signer: Signer, beneficiary?: [Signer, boolean], checkpointSigners?: Signer[], - common?: Common + common?: Common, ) => { const block = getBlock(blockchain, forkBlock, signer, beneficiary, checkpointSigners, common) await blockchain.putBlock(block) @@ -176,7 +176,7 @@ const addNextBlock = async ( signer: Signer, beneficiary?: [Signer, boolean], checkpointSigners?: Signer[], - common?: Common + common?: Common, ) => { const block = getBlock( blockchain, @@ -184,7 +184,7 @@ const addNextBlock = async ( signer, beneficiary, checkpointSigners, - common + common, ) await blockchain.putBlock(block) blocks.push(block) @@ -204,7 +204,7 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners(head.header.number + BigInt(1)), head.header.cliqueEpochTransitionSigners(), - 'correct genesis signers' + 'correct genesis signers', ) }) @@ -219,11 +219,11 @@ describe('Clique: Initialization', () => { new Uint8Array(32), A.address.toBytes(), unauthorizedSigner.toBytes(), - new Uint8Array(65) + new Uint8Array(65), ) const block = createBlockFromBlockData( { header: { number, extraData } }, - { common: COMMON, cliqueSigner: A.privateKey } + { common: COMMON, cliqueSigner: A.privateKey }, ) try { await blockchain.putBlock(block) @@ -231,7 +231,7 @@ describe('Clique: Initialization', () => { } catch (error: any) { assert.ok( error.message.includes('checkpoint signer not found in active signers list'), - 'correct error' + 'correct error', ) } }) @@ -253,7 +253,7 @@ describe('Clique: Initialization', () => { timestamp: parentHeader.timestamp + BigInt(10000), }, }, - { common: COMMON } + { common: COMMON }, ) try { @@ -262,7 +262,7 @@ describe('Clique: Initialization', () => { } catch (error: any) { assert.ok( error.message.includes('difficulty for clique block must be INTURN (2) or NOTURN (1)'), - 'correct error' + 'correct error', ) } @@ -278,7 +278,7 @@ describe('Clique: Initialization', () => { timestamp: parentHeader.timestamp + BigInt(10000), }, }, - { common: COMMON, cliqueSigner } + { common: COMMON, cliqueSigner }, ) try { @@ -307,9 +307,9 @@ describe('Clique: Initialization', () => { assert.equal(block.header.number, BigInt(1)) assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - block.header.number + BigInt(1) + block.header.number + BigInt(1), ), - [A.address] + [A.address], ) }) @@ -320,10 +320,10 @@ describe('Clique: Initialization', () => { await addNextBlock(blockchain, blocks, A, [C, true]) assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'only accept first, second needs 2 votes' + 'only accept first, second needs 2 votes', ) }) @@ -339,10 +339,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address, C.address, D.address], - 'only accept first two, third needs 3 votes 
already' + 'only accept first two, third needs 3 votes already', ) }) @@ -368,10 +368,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [], - 'weird, but one less cornercase by explicitly allowing this' + 'weird, but one less cornercase by explicitly allowing this', ) }) @@ -381,10 +381,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'not fulfilled' + 'not fulfilled', ) }) @@ -395,10 +395,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address], - 'fulfilled' + 'fulfilled', ) }) @@ -409,9 +409,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -422,9 +422,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address, D.address] + [A.address, B.address, C.address, D.address], ) }) @@ -436,9 +436,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address] + [A.address, B.address, C.address], ) }) @@ -452,9 +452,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -471,9 +471,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address, D.address] + [A.address, B.address, C.address, D.address], ) }) @@ -487,9 +487,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -509,9 +509,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -524,10 +524,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - 
blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'deauth votes' + 'deauth votes', ) }) @@ -540,10 +540,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'auth votes' + 'auth votes', ) }) @@ -563,9 +563,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -585,9 +585,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address] + [A.address, B.address, C.address], ) }) @@ -613,9 +613,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [B.address, C.address, D.address, E.address, F.address] + [B.address, C.address, D.address, E.address, F.address], ) }) @@ -634,7 +634,7 @@ describe('Clique: Initialization', () => { { baseChain: Chain.Goerli, hardfork: Hardfork.Chainstart, - } + }, ) const { blocks, blockchain } = await initWithSigners([A, B], common) await addNextBlock(blockchain, blocks, A, [C, true], undefined, common) @@ -644,9 +644,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -659,7 +659,7 @@ describe('Clique: Initialization', () => { } catch (error: any) { assert.ok( error.message.includes('invalid PoA block signature (clique)'), - 'correct error thrown' + 'correct error thrown', ) } }) @@ -690,7 +690,7 @@ describe('Clique: Initialization', () => { { baseChain: Chain.Goerli, hardfork: Hardfork.Chainstart, - } + }, ) const { blocks, blockchain } = await initWithSigners([A, B, C], common) await addNextBlock(blockchain, blocks, A, undefined, undefined, common) @@ -711,80 +711,80 @@ describe('Clique: Initialization', () => { assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) // block 2: C, next signer: A await addNextBlock(blockchain, blocks, C) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as 
CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) // block 3: A, next signer: B await addNextBlock(blockchain, blocks, A) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) // block 4: B, next signer: C await addNextBlock(blockchain, blocks, B) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) }) }) @@ -797,10 +797,10 @@ describe('clique: reorgs', () => { const headBlockUnforked = await addNextBlock(blockchain, blocks, B, [C, true]) assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address, C.address], - 'address C added to signers' + 'address C added to signers', ) assert.deepEqual((await blockchain.getCanonicalHeadBlock()).hash(), headBlockUnforked.hash()) await addNextBlockReorg(blockchain, blocks, genesis, B) @@ -811,10 +811,10 @@ describe('clique: reorgs', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'address C not added to signers' + 'address C not added to signers', ) }) diff --git a/packages/blockchain/test/customConsensus.spec.ts b/packages/blockchain/test/customConsensus.spec.ts index 1a82b93e50..8775375827 100644 --- a/packages/blockchain/test/customConsensus.spec.ts +++ b/packages/blockchain/test/customConsensus.spec.ts @@ -24,7 +24,7 @@ class fibonacciConsensus implements Consensus { validateConsensus(_block: Block): Promise { if (bytesToHex(_block.header.extraData) !== '0x12358d') { throw new Error( - 'header contains invalid extradata - must match first 6 elements of fibonacci sequence' + 'header contains invalid extradata - must match first 6 elements of fibonacci sequence', ) } return new Promise((resolve) => resolve()) @@ -55,7 +55,7 @@ describe('Optional consensus parameter in blockchain constructor', () => { assert.equal( (blockchain.consensus as fibonacciConsensus).algorithm, 'fibonacciConsensus', - 'consensus algorithm matches' + 'consensus algorithm matches', ) } catch (err) { assert.fail('blockchain should instantiate successfully') @@ -78,7 +78,7 
@@ describe('Custom consensus validation rules', () => { gasLimit: blockchain.genesisBlock.header.gasLimit + 1n, }, }, - { common } + { common }, ) try { @@ -86,7 +86,7 @@ describe('Custom consensus validation rules', () => { assert.deepEqual( (await blockchain.getBlock(block.header.number)).header.hash(), block.header.hash(), - 'put block with valid difficulty and extraData' + 'put block with valid difficulty and extraData', ) } catch { assert.fail('should have put block with valid difficulty and extraData') @@ -102,7 +102,7 @@ describe('Custom consensus validation rules', () => { timestamp: block.header.timestamp + 1n, }, }, - { common } + { common }, ) try { await blockchain.putBlock(blockWithBadDifficulty) @@ -110,7 +110,7 @@ describe('Custom consensus validation rules', () => { } catch (err: any) { assert.ok( err.message.includes('invalid difficulty'), - 'failed to put block with invalid difficulty' + 'failed to put block with invalid difficulty', ) } @@ -125,7 +125,7 @@ describe('Custom consensus validation rules', () => { gasLimit: block.header.gasLimit + 1n, }, }, - { common } + { common }, ) try { await blockchain.putBlock(blockWithBadExtraData) @@ -134,7 +134,7 @@ describe('Custom consensus validation rules', () => { assert.ok( err.message === 'header contains invalid extradata - must match first 6 elements of fibonacci sequence', - 'failed to put block with invalid extraData' + 'failed to put block with invalid extraData', ) } }) @@ -150,7 +150,7 @@ describe('consensus transition checks', () => { assert.ok('checkAndTransitionHardForkByNumber does not throw with custom consensus') } catch (err: any) { assert.fail( - `checkAndTransitionHardForkByNumber should not throw with custom consensus, error=${err.message}` + `checkAndTransitionHardForkByNumber should not throw with custom consensus, error=${err.message}`, ) } @@ -159,7 +159,7 @@ describe('consensus transition checks', () => { try { await blockchain.checkAndTransitionHardForkByNumber(5n) assert.fail( - 'checkAndTransitionHardForkByNumber should throw when using standard consensus (ethash, clique, casper) but consensus algorithm defined in common is different' + 'checkAndTransitionHardForkByNumber should throw when using standard consensus (ethash, clique, casper) but consensus algorithm defined in common is different', ) } catch (err: any) { assert.ok(err.message.includes('Consensus object for ethash must be passed')) diff --git a/packages/blockchain/test/index.spec.ts b/packages/blockchain/test/index.spec.ts index 937ff6a187..7192f92852 100644 --- a/packages/blockchain/test/index.spec.ts +++ b/packages/blockchain/test/index.spec.ts @@ -34,14 +34,14 @@ describe('blockchain test', () => { assert.deepEqual( iteratorHead.hash(), blockchain.genesisBlock.hash(), - 'correct genesis hash (getIteratorHead())' + 'correct genesis hash (getIteratorHead())', ) blockchain = await createBlockchain({ common, hardforkByHeadBlockNumber: true }) assert.equal( common.hardfork(), 'chainstart', - 'correct HF setting with hardforkByHeadBlockNumber option' + 'correct HF setting with hardforkByHeadBlockNumber option', ) }) @@ -94,7 +94,7 @@ describe('blockchain test', () => { assert.deepEqual( genesisBlock.hash(), (await blockchain.getCanonicalHeadHeader()).hash(), - 'genesis block hash should be correct' + 'genesis block hash should be correct', ) }) @@ -223,7 +223,7 @@ describe('blockchain test', () => { } catch (e: any) { assert.ok( e.message.includes('not found in DB'), - `should throw for non-existing block-by-number request` + `should throw 
for non-existing block-by-number request`, ) } @@ -233,7 +233,7 @@ describe('blockchain test', () => { } catch (e: any) { assert.ok( e.message.includes('not found in DB'), - `should throw for non-existing block-by-hash request` + `should throw for non-existing block-by-hash request`, ) } }) @@ -270,7 +270,7 @@ describe('blockchain test', () => { assert.equal( err.message, 'header with number 22 not found in canonical chain', - 'canonical references correctly deleted' + 'canonical references correctly deleted', ) } @@ -285,7 +285,7 @@ describe('blockchain test', () => { assert.equal( bytesToHex(newblock22.hash()), bytesToHex(newheader22.hash()), - 'fetched block should match' + 'fetched block should match', ) }) @@ -308,7 +308,7 @@ describe('blockchain test', () => { assert.equal( getBlocks![1].header.number, blocks[3].header.number, - 'should skip two blocks apart' + 'should skip two blocks apart', ) assert.ok(!isConsecutive(getBlocks!), 'blocks should not be consecutive') }) @@ -541,7 +541,7 @@ describe('blockchain test', () => { assert.equal( e.message, 'uncle hash should be equal to hash of empty array', - 'block not constructed from empty bodies' + 'block not constructed from empty bodies', ) } }) @@ -775,7 +775,7 @@ describe('initialization tests', () => { assert.deepEqual( (await blockchain.getIteratorHead()).hash(), genesisHash, - 'head hash should equal expected mainnet genesis hash' + 'head hash should equal expected mainnet genesis hash', ) const db = blockchain.db @@ -785,7 +785,7 @@ describe('initialization tests', () => { assert.deepEqual( (await newBlockchain.getIteratorHead()).hash(), genesisHash, - 'head hash should be read from the provided db' + 'head hash should be read from the provided db', ) }) @@ -797,7 +797,7 @@ describe('initialization tests', () => { extraData: utf8ToBytes('custom extra data'), }, }, - { common } + { common }, ) const hash = genesisBlock.hash() const blockchain = await createBlockchain({ common, genesisBlock }) @@ -806,14 +806,14 @@ describe('initialization tests', () => { assert.deepEqual( (await blockchain.getIteratorHead()).hash(), hash, - 'blockchain should put custom genesis block' + 'blockchain should put custom genesis block', ) const newBlockchain = await createBlockchain({ db, genesisBlock }) assert.deepEqual( (await newBlockchain.getIteratorHead()).hash(), hash, - 'head hash should be read from the provided db' + 'head hash should be read from the provided db', ) }) @@ -825,7 +825,7 @@ describe('initialization tests', () => { extraData: utf8ToBytes('custom extra data'), }, }, - { common } + { common }, ) const hash = genesisBlock.hash() const blockchain = await createBlockchain({ common, genesisBlock }) @@ -837,7 +837,7 @@ describe('initialization tests', () => { extraData: utf8ToBytes('other extra data'), }, }, - { common } + { common }, ) // assert that this is a block with a new hash @@ -853,7 +853,7 @@ describe('initialization tests', () => { assert.equal( e.message, 'Cannot put a different genesis block than current blockchain genesis: create a new Blockchain', - 'putting a genesis block did throw (otherGenesisBlock not found in chain)' + 'putting a genesis block did throw (otherGenesisBlock not found in chain)', ) } @@ -865,7 +865,7 @@ describe('initialization tests', () => { assert.equal( e.message, 'The genesis block in the DB has a different hash than the provided genesis block.', - 'creating blockchain with different genesis block than in db throws' + 'creating blockchain with different genesis block than in db throws', ) } }) 
@@ -875,10 +875,10 @@ it('should correctly derive mainnet genesis block hash and stateRoot', async () const common = new Common({ chain: Chain.Mainnet }) const blockchain = await createBlockchain({ common }) const mainnetGenesisBlockHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) const mainnetGenesisStateRoot = hexToBytes( - '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544' + '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544', ) assert.deepEqual(blockchain.genesisBlock.hash(), mainnetGenesisBlockHash) assert.deepEqual(blockchain.genesisBlock.header.stateRoot, mainnetGenesisStateRoot) diff --git a/packages/blockchain/test/iterator.spec.ts b/packages/blockchain/test/iterator.spec.ts index 2e1690eed1..3edd1712ed 100644 --- a/packages/blockchain/test/iterator.spec.ts +++ b/packages/blockchain/test/iterator.spec.ts @@ -57,13 +57,13 @@ describe('blockchain test', () => { } }, undefined, - true + true, ) assert.equal(reorged, 1, 'should have reorged once') assert.equal( servedReorged, reorgedBlocks.length, - 'should have served all 21 reorged blocks with head resetting' + 'should have served all 21 reorged blocks with head resetting', ) assert.equal(iterated, 31, 'should have iterated 10 + 21 blocks in total') }) @@ -79,7 +79,7 @@ describe('blockchain test', () => { i++ } }, - 5 + 5, ) assert.equal(iterated, 5) assert.equal(i, 5) @@ -97,7 +97,7 @@ describe('blockchain test', () => { i++ } }, - 0 + 0, ) .catch(() => { assert.fail('Promise cannot throw when running 0 blocks') @@ -118,7 +118,7 @@ describe('blockchain test', () => { i++ } }, - -1 + -1, ) .catch(() => {}) // Note: if st.end() is not called (Promise did not throw), then this test fails, as it does not end. 
@@ -145,7 +145,7 @@ describe('blockchain test', () => { i++ } }, - 5 + 5, ) assert.equal(i, 1) @@ -186,7 +186,7 @@ describe('blockchain test', () => { assert.equal( bytesToHex((blockchain as any)._heads['head0']), '0xabcd', - 'should get state root heads' + 'should get state root heads', ) } else { assert.fail() diff --git a/packages/blockchain/test/pos.spec.ts b/packages/blockchain/test/pos.spec.ts index 406ebbe122..37c69f67a4 100644 --- a/packages/blockchain/test/pos.spec.ts +++ b/packages/blockchain/test/pos.spec.ts @@ -36,7 +36,7 @@ const buildChain = async (blockchain: Blockchain, common: Common, height: number calcDifficultyFromHeader: blocks[number - 1].header, common, setHardfork: await blockchain.getTotalDifficulty(blocks[number - 1].hash()), - } + }, ) blocks.push(block) await blockchain.putBlock(block) @@ -70,7 +70,7 @@ describe('Proof of Stake - inserting blocks into blockchain', () => { assert.equal( bytesToHex(genesisHeader.hash()), '0x1119dc5ff680bf7b4c3d9cd41168334dee127d46b3626482076025cdd498ed0b', - 'genesis hash matches' + 'genesis hash matches', ) await buildChain(blockchain, s.common, 15) @@ -80,13 +80,13 @@ describe('Proof of Stake - inserting blocks into blockchain', () => { assert.equal( (blockchain as any).common.hardfork(), 'paris', - 'HF should have been correctly updated' + 'HF should have been correctly updated', ) const td = await blockchain.getTotalDifficulty(latestHeader.hash()) assert.equal( td, BigInt(1313601), - 'should have calculated the correct post-Merge total difficulty' + 'should have calculated the correct post-Merge total difficulty', ) const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) @@ -100,7 +100,7 @@ describe('Proof of Stake - inserting blocks into blockchain', () => { gasLimit: BigInt(10000), }, }, - { common } + { common }, ) try { await blockchain.putBlock(powBlock) @@ -108,7 +108,7 @@ describe('Proof of Stake - inserting blocks into blockchain', () => { } catch (err: any) { assert.ok( err.message.includes('invalid difficulty'), - 'should throw with invalid difficulty message' + 'should throw with invalid difficulty message', ) } }) diff --git a/packages/blockchain/test/reorg.spec.ts b/packages/blockchain/test/reorg.spec.ts index fdbaf78ff6..b060838d3d 100644 --- a/packages/blockchain/test/reorg.spec.ts +++ b/packages/blockchain/test/reorg.spec.ts @@ -22,7 +22,7 @@ describe('reorg tests', () => { gasLimit: BigInt(8000000), }, }, - { common } + { common }, ) const blocks_lowTD: Block[] = [] @@ -39,7 +39,7 @@ describe('reorg tests', () => { while (TD_High < TD_Low) { blocks_lowTD.push(generateConsecutiveBlock(blocks_lowTD[blocks_lowTD.length - 1], 0)) blocks_highTD.push( - generateConsecutiveBlock(blocks_highTD[blocks_highTD.length - 1] ?? genesis, 1) + generateConsecutiveBlock(blocks_highTD[blocks_highTD.length - 1] ?? 
genesis, 1), ) TD_Low += blocks_lowTD[blocks_lowTD.length - 1].header.difficulty @@ -56,12 +56,12 @@ describe('reorg tests', () => { // ensure that the block difficulty is higher on the highTD chain when compared to the low TD chain assert.ok( number_lowTD > number_highTD, - 'low TD should have a lower TD than the reported high TD' + 'low TD should have a lower TD than the reported high TD', ) assert.ok( blocks_lowTD[blocks_lowTD.length - 1].header.number > blocks_highTD[blocks_highTD.length - 1].header.number, - 'low TD block should have a higher number than high TD block' + 'low TD block should have a higher number than high TD block', ) }) @@ -69,7 +69,7 @@ describe('reorg tests', () => { const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) const genesisBlock = createBlockFromBlockData( { header: { extraData: new Uint8Array(97) } }, - { common } + { common }, ) const consensusDict: ConsensusDict = {} @@ -83,7 +83,7 @@ describe('reorg tests', () => { }) const extraData = hexToBytes( - '0x506172697479205465636820417574686f7269747900000000000000000000002bbf886181970654ed46e3fae0ded41ee53fec702c47431988a7ae80e6576f3552684f069af80ba11d36327aaf846d470526e4a1c461601b2fd4ebdcdc2b734a01' + '0x506172697479205465636820417574686f7269747900000000000000000000002bbf886181970654ed46e3fae0ded41ee53fec702c47431988a7ae80e6576f3552684f069af80ba11d36327aaf846d470526e4a1c461601b2fd4ebdcdc2b734a01', ) // from goerli block 1 const { gasLimit } = genesisBlock.header const base = { extraData, gasLimit, difficulty: 1 } @@ -101,7 +101,7 @@ describe('reorg tests', () => { timestamp: genesisBlock.header.timestamp + BigInt(30), }, }, - { common } + { common }, ) const block2_low = createBlockFromBlockData( { @@ -114,7 +114,7 @@ describe('reorg tests', () => { coinbase: beneficiary1, }, }, - { common } + { common }, ) const block1_high = createBlockFromBlockData( @@ -126,7 +126,7 @@ describe('reorg tests', () => { timestamp: genesisBlock.header.timestamp + BigInt(15), }, }, - { common } + { common }, ) const block2_high = createBlockFromBlockData( { @@ -137,7 +137,7 @@ describe('reorg tests', () => { timestamp: block1_high.header.timestamp + BigInt(15), }, }, - { common } + { common }, ) const block3_high = createBlockFromBlockData( { @@ -150,7 +150,7 @@ describe('reorg tests', () => { coinbase: beneficiary2, }, }, - { common } + { common }, ) await blockchain.putBlocks([block1_low, block2_low]) @@ -161,9 +161,9 @@ describe('reorg tests', () => { assert.ok( !signerStates.find( - (s: any) => s[0] === BigInt(2) && s[1].find((a: Address) => a.equals(beneficiary1)) + (s: any) => s[0] === BigInt(2) && s[1].find((a: Address) => a.equals(beneficiary1)), ), - 'should not find reorged signer state' + 'should not find reorged signer state', ) let signerVotes = (blockchain.consensus as CliqueConsensus)._cliqueLatestVotes @@ -173,25 +173,25 @@ describe('reorg tests', () => { v[0] === BigInt(2) && v[1][0].equal(block1_low.header.cliqueSigner()) && v[1][1].equal(beneficiary1) && - equalsBytes(v[1][2], CLIQUE_NONCE_AUTH) + equalsBytes(v[1][2], CLIQUE_NONCE_AUTH), ), - 'should not find reorged clique vote' + 'should not find reorged clique vote', ) let blockSigners = (blockchain.consensus as CliqueConsensus)._cliqueLatestBlockSigners assert.ok( !blockSigners.find( - (s: any) => s[0] === BigInt(1) && s[1].equal(block1_low.header.cliqueSigner()) + (s: any) => s[0] === BigInt(1) && s[1].equal(block1_low.header.cliqueSigner()), ), - 'should not find reorged block signer' + 'should not find reorged block 
signer', ) signerStates = (blockchain.consensus as CliqueConsensus)._cliqueLatestSignerStates assert.ok( !!signerStates.find( - (s: any) => s[0] === BigInt(3) && s[1].find((a: Address) => a.equals(beneficiary2)) + (s: any) => s[0] === BigInt(3) && s[1].find((a: Address) => a.equals(beneficiary2)), ), - 'should find reorged signer state' + 'should find reorged signer state', ) signerVotes = (blockchain.consensus as CliqueConsensus)._cliqueLatestVotes @@ -200,9 +200,9 @@ describe('reorg tests', () => { blockSigners = (blockchain.consensus as CliqueConsensus)._cliqueLatestBlockSigners assert.ok( !!blockSigners.find( - (s: any) => s[0] === BigInt(3) && s[1].equals(block3_high.header.cliqueSigner()) + (s: any) => s[0] === BigInt(3) && s[1].equals(block3_high.header.cliqueSigner()), ), - 'should find reorged block signer' + 'should find reorged block signer', ) }) }) diff --git a/packages/blockchain/test/util.ts b/packages/blockchain/test/util.ts index bebc96fdce..aa1b972b40 100644 --- a/packages/blockchain/test/util.ts +++ b/packages/blockchain/test/util.ts @@ -76,7 +76,7 @@ export const generateBlockchain = async (numberOfBlocks: number, genesis?: Block export const generateConsecutiveBlock = ( parentBlock: Block, difficultyChangeFactor: number, - gasLimit: bigint = BigInt(8000000) + gasLimit: bigint = BigInt(8000000), ): Block => { if (difficultyChangeFactor > 1) { difficultyChangeFactor = 1 @@ -87,7 +87,7 @@ export const generateConsecutiveBlock = ( number: parentBlock.header.number + BigInt(1), timestamp: parentBlock.header.timestamp + BigInt(10 + -difficultyChangeFactor * 9), }, - { common } + { common }, ) const header = BlockHeader.fromHeaderData( { @@ -100,7 +100,7 @@ export const generateConsecutiveBlock = ( { common, calcDifficultyFromHeader: parentBlock.header, - } + }, ) const block = new Block(header, undefined, undefined, undefined, { common }, undefined) @@ -150,21 +150,21 @@ export const createTestDB = async (): Promise< { type: 'put', key: hexToBytes( - '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ), value: genesis.header.serialize(), }, { type: 'put', key: hexToBytes( - '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa374' + '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa374', ), value: RLP.encode(toBytes(17179869184)), }, { type: 'put', key: hexToBytes( - '0x620000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0x620000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ), value: RLP.encode(genesis.raw().slice(1)), }, @@ -187,7 +187,7 @@ function createBlock( parentBlock: Block, extraData: string, uncles?: BlockHeader[], - common?: Common + common?: Common, ): Block { uncles = uncles ?? [] common = common ?? 
new Common({ chain: Chain.Mainnet }) @@ -223,7 +223,7 @@ function createBlock( { common, calcDifficultyFromHeader: parentBlock.header, - } + }, ) } diff --git a/packages/blockchain/test/utils.spec.ts b/packages/blockchain/test/utils.spec.ts index 6900c1fb86..073dc314f9 100644 --- a/packages/blockchain/test/utils.spec.ts +++ b/packages/blockchain/test/utils.spec.ts @@ -27,7 +27,7 @@ describe('[Utils/Parse]', () => { assert.equal( bytesToHex(stateRoot), '0x52e628c7f35996ba5a0402d02b34535993c89ff7fc4c430b2763ada8554bee62', - 'kiln stateRoot matches' + 'kiln stateRoot matches', ) }) @@ -38,7 +38,7 @@ describe('[Utils/Parse]', () => { assert.equal( bytesToHex(genesisHash), '0x51c7fe41be669f69c45c33a56982cbde405313342d9e2b00d7c91a7b284dd4f8', - 'kiln genesis hash matches' + 'kiln genesis hash matches', ) }) }) diff --git a/packages/blockchain/tsconfig.lint.json b/packages/blockchain/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/blockchain/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/client/.eslintrc.cjs b/packages/client/.eslintrc.cjs index 974e754e63..43d2c22619 100644 --- a/packages/client/.eslintrc.cjs +++ b/packages/client/.eslintrc.cjs @@ -1,14 +1,11 @@ module.exports = { extends: '../../config/eslint.cjs', - rules: { - 'import/extensions': 'off', - }, parserOptions: { - project: ['./tsconfig.json', './tsconfig.browser.json', './tsconfig.eslint.json'], + project: ['./tsconfig.lint.json'], }, overrides: [ { - files: ['bin/**.ts', 'test/sim/**.ts'], + files: ['bin/**.ts', 'test/sim/**.ts', 'examples/**/*.ts'], rules: { 'no-console': 'off', }, diff --git a/packages/client/bin/cli.ts b/packages/client/bin/cli.ts index ee1eccb7be..7488a7d1d6 100755 --- a/packages/client/bin/cli.ts +++ b/packages/client/bin/cli.ts @@ -553,7 +553,7 @@ async function executeBlocks(client: EthereumClient) { } } catch (e: any) { client.config.logger.error( - 'Wrong input format for block execution, allowed format types: 5, 5-10, 5[0xba4b5fd92a26badad3cad22eb6f7c7e745053739b5f5d1e8a3afb00f8fb2a280,[TX_HASH_2],...], 5[*] (all txs in verbose mode)' + 'Wrong input format for block execution, allowed format types: 5, 5-10, 5[0xba4b5fd92a26badad3cad22eb6f7c7e745053739b5f5d1e8a3afb00f8fb2a280,[TX_HASH_2],...], 5[*] (all txs in verbose mode)', ) process.exit() } @@ -597,7 +597,7 @@ async function startExecutionFrom(client: EthereumClient) { const startExecutionParent = await client.chain.getBlock(startExecutionBlock.header.parentHash) const startExecutionParentTd = await client.chain.getTd( startExecutionParent.hash(), - startExecutionParent.header.number + startExecutionParent.header.number, ) const startExecutionHardfork = client.config.execCommon.getHardforkBy({ @@ -616,7 +616,7 @@ async function startExecutionFrom(client: EthereumClient) { await client.chain.blockchain.setIteratorHead('vm', startExecutionParent.hash()) await client.chain.update(false) logger.info( - `vmHead set to ${client.chain.headers.height} for starting stateless execution at hardfork=${startExecutionHardfork}` + `vmHead set to ${client.chain.headers.height} for starting stateless execution at hardfork=${startExecutionHardfork}`, ) } catch (err: any) { logger.error(`Error setting vmHead for starting stateless execution: ${err}`) @@ -634,7 +634,7 @@ async function startExecutionFrom(client: EthereumClient) { */ async function startClient( config: Config, - genesisMeta: { genesisState?: GenesisState; genesisStateRoot?: Uint8Array } = {} 
+ genesisMeta: { genesisState?: GenesisState; genesisStateRoot?: Uint8Array } = {}, ) { config.logger.info(`Data directory: ${config.datadir}`) if (config.lightserv) { @@ -691,11 +691,11 @@ async function startClient( config.logger.info( `Preloading block hash=0x${short(bytesToHex(block.header.hash()))} number=${ block.header.number - }` + }`, ) } catch (err: any) { config.logger.info( - `Encountered error while while preloading chain data error=${err.message}` + `Encountered error while while preloading chain data error=${err.message}`, ) break } @@ -837,7 +837,7 @@ async function inputAccounts() { for (const addressString of addresses) { const address = Address.fromString(addressString) const inputKey = (await question( - `Please enter the 0x-prefixed private key to unlock ${address}:\n` + `Please enter the 0x-prefixed private key to unlock ${address}:\n`, )) as PrefixedHexString ;(rl as any).history = (rl as any).history.slice(1) const privKey = hexToBytes(inputKey) @@ -846,7 +846,7 @@ async function inputAccounts() { accounts.push([address, privKey]) } else { console.error( - `Private key does not match for ${address} (address derived: ${derivedAddress})` + `Private key does not match for ${address} (address derived: ${derivedAddress})`, ) process.exit() } @@ -890,7 +890,7 @@ const stopClient = async ( clientStartPromise: Promise<{ client: EthereumClient servers: (RPCServer | http.Server)[] - } | null> + } | null>, ) => { config.logger.info('Caught interrupt signal. Obtaining client handle for clean shutdown...') config.logger.info('(This might take a little longer if client not yet fully started)') @@ -945,14 +945,14 @@ async function run() { v: bigint, r: Uint8Array, s: Uint8Array, - chainID?: bigint + chainID?: bigint, ) => secp256k1Expand( secp256k1Recover( msgHash, concatBytes(setLengthLeft(r, 32), setLengthLeft(s, 32)), - Number(calculateSigRecovery(v, chainID)) - ) + Number(calculateSigRecovery(v, chainID)), + ), ).slice(1) cryptoFunctions.sha256 = wasmSha256 cryptoFunctions.ecsign = (msg: Uint8Array, pk: Uint8Array, chainId?: bigint) => { @@ -1040,7 +1040,7 @@ async function run() { if (args.mine === true && accounts.length === 0) { console.error( - 'Please provide an account to mine blocks with `--unlock [address]` or use `--dev` to generate' + 'Please provide an account to mine blocks with `--unlock [address]` or use `--dev` to generate', ) process.exit() } diff --git a/packages/client/bin/startRpc.ts b/packages/client/bin/startRpc.ts index 7f28bc7b8b..b12dcbd55f 100644 --- a/packages/client/bin/startRpc.ts +++ b/packages/client/bin/startRpc.ts @@ -101,7 +101,7 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { if ((rpc || rpcEngine) && !config.saveReceipts) { logger?.warn( - `Starting client without --saveReceipts might lead to interop issues with a CL especially if the CL intends to propose blocks, omitting methods=${saveReceiptsMethods}` + `Starting client without --saveReceipts might lead to interop issues with a CL especially if the CL intends to propose blocks, omitting methods=${saveReceiptsMethods}`, ) } @@ -136,12 +136,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { logger.info( `Started JSON RPC Server address=http://${rpcAddr}:${rpcPort} namespaces=${namespaces}${ withEngineMethods ? 
' rpcEngineAuth=' + rpcEngineAuth.toString() : '' - }` + }`, ) logger.debug( `Methods available at address=http://${rpcAddr}:${rpcPort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) } if (ws) { @@ -160,12 +160,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { logger.info( `Started JSON RPC Server address=ws://${wsAddr}:${wsPort} namespaces=${namespaces}${ withEngineMethods ? ` rpcEngineAuth=${rpcEngineAuth}` : '' - }` + }`, ) logger.debug( `Methods available at address=ws://${wsAddr}:${wsPort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) } } @@ -189,12 +189,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { }) rpcHttpServer.listen(rpcEnginePort, rpcEngineAddr) logger.info( - `Started JSON RPC server address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}` + `Started JSON RPC server address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}`, ) logger.debug( `Methods available at address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) if (ws) { @@ -212,12 +212,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { const rpcWsServer = createWsRPCServerListener(opts) if (rpcWsServer) rpcWsServer.listen(wsEnginePort, wsEngineAddr) logger.info( - `Started JSON RPC Server address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}` + `Started JSON RPC Server address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}`, ) logger.debug( `Methods available at address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) } } diff --git a/packages/client/src/blockchain/chain.ts b/packages/client/src/blockchain/chain.ts index 63f03235dd..0164e6bce2 100644 --- a/packages/client/src/blockchain/chain.ts +++ b/packages/client/src/blockchain/chain.ts @@ -266,7 +266,7 @@ export class Chain { this.config.chainCommon.events.on('hardforkChanged', async (hardfork: string) => { const block = this.config.chainCommon.hardforkBlock() this.config.superMsg( - `New hardfork reached 🪢 ! hardfork=${hardfork} ${block !== null ? `block=${block}` : ''}` + `New hardfork reached 🪢 ! hardfork=${hardfork} ${block !== null ? `block=${block}` : ''}`, ) }) } @@ -353,17 +353,17 @@ export class Chain { if (this.config.chainCommon.hardforkGteHardfork(nextBlockHf, Hardfork.Paris)) { this.config.logger.info('*'.repeat(85)) this.config.logger.info( - `Paris (Merge) hardfork reached 🐼 👉 👈 🐼 ! block=${headers.height} td=${headers.td}` + `Paris (Merge) hardfork reached 🐼 👉 👈 🐼 ! 
block=${headers.height} td=${headers.td}`, ) this.config.logger.info('-'.repeat(85)) this.config.logger.info(' ') this.config.logger.info('Consensus layer client (CL) needed for continued sync:') this.config.logger.info( - 'https://ethereum.org/en/developers/docs/nodes-and-clients/#consensus-clients' + 'https://ethereum.org/en/developers/docs/nodes-and-clients/#consensus-clients', ) this.config.logger.info(' ') this.config.logger.info( - 'Make sure to have the JSON RPC (--rpc) and Engine API (--rpcEngine) endpoints exposed' + 'Make sure to have the JSON RPC (--rpc) and Engine API (--rpcEngine) endpoints exposed', ) this.config.logger.info('and JWT authentication configured (see client README).') this.config.logger.info(' ') @@ -371,7 +371,7 @@ export class Chain { this.config.logger.info( `Transitioning to PoS! First block for CL-framed execution: block=${ headers.height + BIGINT_1 - }` + }`, ) } } @@ -393,7 +393,7 @@ export class Chain { block: Uint8Array | bigint, max = 1, skip = 0, - reverse = false + reverse = false, ): Promise { if (!this.opened) throw new Error('Chain closed') return this.blockchain.getBlocks(block, max, skip, reverse) @@ -427,7 +427,7 @@ export class Chain { const canonicalBlock = await this.getBlock(block.header.number) if (!equalsBytes(canonicalBlock.hash(), block.hash())) { throw Error( - `Invalid putBlock for block=${block.header.number} before finalized=${this.headers.finalized.number}` + `Invalid putBlock for block=${block.header.number} before finalized=${this.headers.finalized.number}`, ) } } else { @@ -449,7 +449,7 @@ export class Chain { await this.blockchain.checkAndTransitionHardForkByNumber( b.header.number, td, - b.header.timestamp + b.header.timestamp, ) await this.blockchain.consensus?.setup({ blockchain: this.blockchain }) } @@ -479,7 +479,7 @@ export class Chain { block: Uint8Array | bigint, max: number, skip: number, - reverse: boolean + reverse: boolean, ): Promise { const blocks = await this.getBlocks(block, max, skip, reverse) return blocks.map((b) => b.header) diff --git a/packages/client/src/client.ts b/packages/client/src/client.ts index 09113ed96b..0bdc05a320 100644 --- a/packages/client/src/client.ts +++ b/packages/client/src/client.ts @@ -129,11 +129,11 @@ export class EthereumClient { const packageJson = JSON.parse( readFileSync( '/' + import.meta.url.split('client')[0].split('file:///')[1] + 'client/package.json', - 'utf-8' - ) + 'utf-8', + ), ) this.config.logger.info( - `Initializing Ethereumjs client version=v${packageJson.version} network=${name} chainId=${chainId}` + `Initializing Ethereumjs client version=v${packageJson.version} network=${name} chainId=${chainId}`, ) this.config.events.on(Event.SERVER_ERROR, (error) => { @@ -141,7 +141,7 @@ export class EthereumClient { }) this.config.events.on(Event.SERVER_LISTENING, (details) => { this.config.logger.info( - `Server listener up transport=${details.transport} url=${details.url}` + `Server listener up transport=${details.transport} url=${details.url}`, ) }) diff --git a/packages/client/src/config.ts b/packages/client/src/config.ts index 7d0386d0ae..c20cb6809f 100644 --- a/packages/client/src/config.ts +++ b/packages/client/src/config.ts @@ -608,7 +608,7 @@ export class Config { this.synchronized = true // Log to console the sync status this.superMsg( - `Synchronized blockchain at height=${height} hash=${short(latest.hash())} 🎉` + `Synchronized blockchain at height=${height} hash=${short(latest.hash())} 🎉`, ) } @@ -623,7 +623,7 @@ export class Config { if (diff >= 
this.syncedStateRemovalPeriod) { this.synchronized = false this.logger.info( - `Sync status reset (no chain updates for ${Math.round(diff / 1000)} seconds).` + `Sync status reset (no chain updates for ${Math.round(diff / 1000)} seconds).`, ) } } @@ -635,7 +635,7 @@ export class Config { latest !== null && latest !== undefined ? ' height=' + latest.number : '' } syncTargetHeight=${this.syncTargetHeight} lastSyncDate=${ (Date.now() - this.lastSyncDate) / 1000 - } secs ago` + } secs ago`, ) this.lastsyncronized = this.synchronized } diff --git a/packages/client/src/execution/level.ts b/packages/client/src/execution/level.ts index 3f80ac704b..dbebf46699 100644 --- a/packages/client/src/execution/level.ts +++ b/packages/client/src/execution/level.ts @@ -41,7 +41,7 @@ const getEncodings = (opts: EncodingOpts = {}) => { */ export class LevelDB< TKey extends Uint8Array | string = Uint8Array | string, - TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject + TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject, > implements DB { _leveldb: AbstractLevel @@ -52,7 +52,7 @@ export class LevelDB< * @param leveldb - An abstract-leveldown compliant store */ constructor( - leveldb?: AbstractLevel + leveldb?: AbstractLevel, ) { this._leveldb = leveldb ?? new MemoryLevel() } diff --git a/packages/client/src/execution/receipt.ts b/packages/client/src/execution/receipt.ts index 2c1f4c0904..b560b60c0f 100644 --- a/packages/client/src/execution/receipt.ts +++ b/packages/client/src/execution/receipt.ts @@ -38,7 +38,7 @@ type GetReceiptByTxHashReturn = [ receipt: TxReceipt, blockHash: Uint8Array, txIndex: number, - logIndex: number + logIndex: number, ] type GetLogsReturn = { log: Log @@ -121,17 +121,17 @@ export class ReceiptsManager extends MetaDBManager { async getReceipts( blockHash: Uint8Array, calcBloom?: boolean, - includeTxType?: true + includeTxType?: true, ): Promise async getReceipts( blockHash: Uint8Array, calcBloom?: boolean, - includeTxType?: false + includeTxType?: false, ): Promise async getReceipts( blockHash: Uint8Array, calcBloom = false, - includeTxType = false + includeTxType = false, ): Promise { const encoded = await this.get(DBKey.Receipts, blockHash) if (!encoded) return [] @@ -176,7 +176,7 @@ export class ReceiptsManager extends MetaDBManager { from: Block, to: Block, addresses?: Uint8Array[], - topics: (Uint8Array | Uint8Array[] | null)[] = [] + topics: (Uint8Array | Uint8Array[] | null)[] = [], ): Promise { const returnedLogs: GetLogsReturn = [] let returnedLogsSize = 0 @@ -194,7 +194,7 @@ export class ReceiptsManager extends MetaDBManager { tx: block.transactions[receiptIndex], txIndex: receiptIndex, logIndex: logIndex++, - })) + })), ) } if (addresses && addresses.length > 0) { @@ -245,7 +245,7 @@ export class ReceiptsManager extends MetaDBManager { private async updateIndex( operation: IndexOperation, type: IndexType.TxHash, - value: Block + value: Block, ): Promise private async updateIndex(operation: IndexOperation, type: IndexType, value: any): Promise { switch (type) { @@ -309,14 +309,14 @@ export class ReceiptsManager extends MetaDBManager { private rlp( conversion: RlpConvert.Decode, type: RlpType.Receipts, - values: Uint8Array + values: Uint8Array, ): TxReceipt[] private rlp(conversion: RlpConvert.Decode, type: RlpType.Logs, value: rlpLog[]): Log[] private rlp(conversion: RlpConvert.Decode, type: RlpType.TxHash, value: Uint8Array): TxHashIndex private rlp( conversion: RlpConvert, type: RlpType, - value: Uint8Array | rlpOut + 
value: Uint8Array | rlpOut, ): Uint8Array | rlpOut { switch (type) { case RlpType.Receipts: @@ -328,7 +328,7 @@ export class ReceiptsManager extends MetaDBManager { intToBytes((r as PostByzantiumTxReceipt).status), bigIntToBytes(r.cumulativeBlockGasUsed), this.rlp(RlpConvert.Encode, RlpType.Logs, r.logs), - ]) + ]), ) } else { const decoded = RLP.decode(value as Uint8Array) as unknown as rlpReceipt[] diff --git a/packages/client/src/execution/vmexecution.ts b/packages/client/src/execution/vmexecution.ts index 17ea600cd6..87e73cd7e3 100644 --- a/packages/client/src/execution/vmexecution.ts +++ b/packages/client/src/execution/vmexecution.ts @@ -129,7 +129,7 @@ export class VMExecution extends Execution { if (resolve !== undefined) { resolve() } - } + }, ) } if (this.config.savePreimages) { @@ -236,7 +236,7 @@ export class VMExecution extends Execution { const verkleStateRoot = await verkleStateManager.getTransitionStateRoot( merkleStateManager, - merkleStateRoot + merkleStateRoot, ) await verkleStateManager.setStateRoot(verkleStateRoot) @@ -285,7 +285,7 @@ export class VMExecution extends Execution { this.vm = this.verkleVM! } else { this.config.logger.info( - `Initializing VM merkle statemanager genesis hardfork=${this.hardfork}` + `Initializing VM merkle statemanager genesis hardfork=${this.hardfork}`, ) await this.setupMerkleVM() this.vm = this.merkleVM! @@ -369,7 +369,7 @@ export class VMExecution extends Execution { opts: RunBlockOpts & { parentBlock?: Block }, receipts?: TxReceipt[], blocking: boolean = false, - skipBlockchain: boolean = false + skipBlockchain: boolean = false, ): Promise { // if its not blocking request then return early if its already running else wait to grab the lock if ((!blocking && this.running) || !this.started || this.config.shutdown) return false @@ -508,7 +508,7 @@ export class VMExecution extends Execution { */ async setHead( blocks: Block[], - { finalizedBlock, safeBlock }: { finalizedBlock?: Block; safeBlock?: Block } = {} + { finalizedBlock, safeBlock }: { finalizedBlock?: Block; safeBlock?: Block } = {}, ): Promise { if (!this.started || this.config.shutdown) return false @@ -524,8 +524,8 @@ export class VMExecution extends Execution { // execution run will always fail throw Error( `vmHeadBlock's stateRoot not found number=${vmHeadBlock.header.number} root=${short( - vmHeadBlock.header.stateRoot - )}` + vmHeadBlock.header.stateRoot, + )}`, ) } @@ -563,7 +563,7 @@ export class VMExecution extends Execution { const td = await this.chain.getTd( vmHeadBlock.header.parentHash, - vmHeadBlock.header.number - BIGINT_1 + vmHeadBlock.header.number - BIGINT_1, ) const hardfork = this.config.execCommon.setHardforkBy({ blockNumber: vmHeadBlock.header.number, @@ -596,7 +596,7 @@ export class VMExecution extends Execution { return this.runWithLock(async () => { // check if the block is canonical in chain this.config.logger.warn( - `Setting execution head to hash=${short(jumpToHash)} number=${jumpToNumber}` + `Setting execution head to hash=${short(jumpToHash)} number=${jumpToNumber}`, ) await this.vm.blockchain.setIteratorHead('vm', jumpToHash) }) @@ -626,13 +626,13 @@ export class VMExecution extends Execution { if (typeof blockchain.getCanonicalHeadBlock !== 'function') { throw new Error( - 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function' + 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function', ) } let canonicalHead = await blockchain.getCanonicalHeadBlock() this.config.logger.debug( - `Running execution 
startHeadBlock=${startHeadBlock?.header.number} canonicalHead=${canonicalHead?.header.number} loop=${loop}` + `Running execution startHeadBlock=${startHeadBlock?.header.number} canonicalHead=${canonicalHead?.header.number} loop=${loop}`, ) let headBlock: Block | undefined @@ -672,7 +672,7 @@ export class VMExecution extends Execution { if (reorg) { clearCache = true this.config.logger.info( - `VM run: Chain reorged, setting new head to block number=${headBlock.header.number} clearCache=${clearCache}.` + `VM run: Chain reorged, setting new head to block number=${headBlock.header.number} clearCache=${clearCache}.`, ) } else { const prevVMStateRoot = await this.vm.stateManager.getStateRoot() @@ -688,7 +688,7 @@ export class VMExecution extends Execution { const { number, timestamp } = block.header if (typeof blockchain.getTotalDifficulty !== 'function') { throw new Error( - 'cannot get iterator head: blockchain has no getTotalDifficulty function' + 'cannot get iterator head: blockchain has no getTotalDifficulty function', ) } const td = await blockchain.getTotalDifficulty(block.header.parentHash) @@ -702,7 +702,7 @@ export class VMExecution extends Execution { const wasPrePrague = !this.config.execCommon.gteHardfork(Hardfork.Osaka) const hash = short(block.hash()) this.config.superMsg( - `Execution hardfork switch on block number=${number} hash=${hash} old=${this.hardfork} new=${hardfork}` + `Execution hardfork switch on block number=${number} hash=${hash} old=${this.hardfork} new=${hardfork}`, ) this.hardfork = this.config.execCommon.setHardforkBy({ blockNumber: number, @@ -724,7 +724,7 @@ export class VMExecution extends Execution { throw Error( `Invalid vm stateManager type=${typeof this.vm.stateManager} for fork=${ this.hardfork - }` + }`, ) } @@ -785,7 +785,7 @@ export class VMExecution extends Execution { }, this.config.numBlocksPerIteration, // release lock on this callback so other blockchain ops can happen while this block is being executed - true + true, ) // Ensure to catch and not throw as this would lead to unCaughtException with process exit .catch(async (error) => { @@ -812,7 +812,7 @@ export class VMExecution extends Execution { hasParentStateRoot = false if (headBlock !== undefined) { hasParentStateRoot = await this.vm.stateManager.hasStateRoot( - headBlock.header.stateRoot + headBlock.header.stateRoot, ) backStepTo = headBlock.header.number ?? BIGINT_0 - BIGINT_1 backStepToHash = headBlock.header.parentHash @@ -822,17 +822,17 @@ export class VMExecution extends Execution { if (hasParentStateRoot === true && backStepToHash !== undefined) { this.config.logger.warn( `${errorMsg}, backStepping vmHead to number=${backStepTo} hash=${short( - backStepToHash ?? 'na' - )} hasParentStateRoot=${short(backStepToRoot ?? 'na')}:\n${error}` + backStepToHash ?? 'na', + )} hasParentStateRoot=${short(backStepToRoot ?? 'na')}:\n${error}`, ) await this.vm.blockchain.setIteratorHead('vm', backStepToHash) } else { this.config.logger.error( `${errorMsg}, couldn't back step to vmHead number=${backStepTo} hash=${short( - backStepToHash ?? 'na' + backStepToHash ?? 'na', )} hasParentStateRoot=${hasParentStateRoot} backStepToRoot=${short( - backStepToRoot ?? 'na' - )}:\n${error}` + backStepToRoot ?? 
'na', + )}:\n${error}`, ) } } else { @@ -869,7 +869,7 @@ export class VMExecution extends Execution { } this.config.events.emit(Event.SYNC_EXECUTION_VM_ERROR, error) const actualExecuted = Number( - errorBlock.header.number - startHeadBlock.header.number + errorBlock.header.number - startHeadBlock.header.number, ) return actualExecuted } else { @@ -885,7 +885,7 @@ export class VMExecution extends Execution { endHeadBlock = await this.vm.blockchain.getIteratorHead('vm') } else { throw new Error( - 'cannot get iterator head: blockchain has no getIteratorHead function' + 'cannot get iterator head: blockchain has no getIteratorHead function', ) } @@ -904,7 +904,7 @@ export class VMExecution extends Execution { ;(this.config.execCommon.gteHardfork(Hardfork.Paris) ? this.config.logger.debug : this.config.logger.info)( - `Executed blocks count=${numExecuted} first=${firstNumber} hash=${firstHash} ${tdAdd}${baseFeeAdd}hardfork=${this.hardfork} last=${lastNumber} hash=${lastHash} txs=${txCounter}` + `Executed blocks count=${numExecuted} first=${firstNumber} hash=${firstHash} ${tdAdd}${baseFeeAdd}hardfork=${this.hardfork} last=${lastNumber} hash=${lastHash} txs=${txCounter}`, ) await this.chain.update(false) @@ -912,13 +912,13 @@ export class VMExecution extends Execution { this.config.logger.debug( `No blocks executed past chain head hash=${short(endHeadBlock.hash())} number=${ endHeadBlock.header.number - }` + }`, ) } startHeadBlock = endHeadBlock if (typeof this.vm.blockchain.getCanonicalHeadBlock !== 'function') { throw new Error( - 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function' + 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function', ) } canonicalHead = await this.vm.blockchain.getCanonicalHeadBlock() @@ -939,7 +939,7 @@ export class VMExecution extends Execution { this._statsInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this.stats.bind(this), - this.STATS_INTERVAL + this.STATS_INTERVAL, ) const { blockchain } = this.vm @@ -1075,7 +1075,7 @@ export class VMExecution extends Execution { if (allTxs || txHashes.includes(txHash)) { const res = await runTx(vm, { block, tx }) this.config.logger.info( - `Executed tx hash=${txHash} gasUsed=${res.totalGasSpent} from block num=${blockNumber}` + `Executed tx hash=${txHash} gasUsed=${res.totalGasSpent} from block num=${blockNumber}`, ) count += 1 } @@ -1099,22 +1099,22 @@ export class VMExecution extends Execution { // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions stats = !sm._accountCacheSettings.deactivate ? sm._accountCache.stats() : disactivatedStats this.config.logger.info( - `Account cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}` + `Account cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions stats = !sm._storageCacheSettings.deactivate ? sm._storageCache.stats() : disactivatedStats this.config.logger.info( - `Storage cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}` + `Storage cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions stats = !sm._codeCacheSettings.deactivate ? 
sm._codeCache.stats() : disactivatedStats this.config.logger.info( - `Code cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}` + `Code cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) const tStats = (sm._trie as Trie).database().stats() this.config.logger.info( `Trie cache stats size=${tStats.size} reads=${tStats.cache.reads} hits=${tStats.cache.hits} ` + - `writes=${tStats.cache.writes} readsDB=${tStats.db.reads} hitsDB=${tStats.db.hits} writesDB=${tStats.db.writes}` + `writes=${tStats.cache.writes} readsDB=${tStats.db.reads} hitsDB=${tStats.db.hits} writesDB=${tStats.db.writes}`, ) } } diff --git a/packages/client/src/ext/jwt-simple.ts b/packages/client/src/ext/jwt-simple.ts index 45ce840259..a33be6c7dc 100644 --- a/packages/client/src/ext/jwt-simple.ts +++ b/packages/client/src/ext/jwt-simple.ts @@ -102,7 +102,7 @@ const decode = function jwt_decode( token: string, key: string, noVerify: boolean = false, - algorithm: string = '' + algorithm: string = '', ) { // check token if (!token) { @@ -168,7 +168,7 @@ const encode = function jwt_encode( payload: any, key: string, algorithm: string = '', - options: any = undefined + options: any = undefined, ) { // Check key if (!key) { diff --git a/packages/client/src/ext/qheap.ts b/packages/client/src/ext/qheap.ts index 346245b232..dc83e56f23 100644 --- a/packages/client/src/ext/qheap.ts +++ b/packages/client/src/ext/qheap.ts @@ -63,10 +63,10 @@ export class Heap { // @ts-ignore return opts!.compar!(a, b) < 0 } - : opts.comparBefore ?? + : (opts.comparBefore ?? function (a: any, b: any): boolean { return a < b - } + }) this._sortBefore = opts.compar ?? diff --git a/packages/client/src/logging.ts b/packages/client/src/logging.ts index defa127bc7..b62749ead9 100644 --- a/packages/client/src/logging.ts +++ b/packages/client/src/logging.ts @@ -86,7 +86,7 @@ function logFormat(colors = false) { const msg = `[${info.timestamp}] ${level} ${CLLog}${HFLog}${info.message}` return msg - } + }, ) } @@ -99,7 +99,7 @@ function formatConfig(colors = false) { format.splat(), label({ label: 'ethereumjs' }), timestamp({ format: 'MM-DD|HH:mm:ss' }), - logFormat(colors) + logFormat(colors), ) } diff --git a/packages/client/src/miner/miner.ts b/packages/client/src/miner/miner.ts index a9bed0341d..a5ff3f719a 100644 --- a/packages/client/src/miner/miner.ts +++ b/packages/client/src/miner/miner.ts @@ -110,11 +110,11 @@ export class Miner { const number = parentBlock.header.number + BIGINT_1 const inTurn = await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( signerAddress, - number + number, ) if (inTurn === false) { const signerCount = (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - number + number, ).length timeout += Math.random() * signerCount * 500 } @@ -159,7 +159,7 @@ export class Miner { this.config.logger.debug( `Miner: Chain updated with block ${ latestBlockHeader.number - }. Queuing next block assembly in ${Math.round(timeout / 1000)}s` + }. 
Queuing next block assembly in ${Math.round(timeout / 1000)}s`, ) await this.queueNextAssembly(timeout) } @@ -211,7 +211,7 @@ export class Miner { const cliqueSigner = this.config.accounts[0][1] const header = BlockHeader.fromHeaderData( { number }, - { common: this.config.chainCommon, cliqueSigner } + { common: this.config.chainCommon, cliqueSigner }, ) if ( (this.service.chain.blockchain as any).consensus.cliqueCheckRecentlySigned(header) === true @@ -247,7 +247,7 @@ export class Miner { // Determine if signer is INTURN (2) or NOTURN (1) inTurn = await (vmCopy.blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( signerAddress, - number + number, ) difficulty = inTurn ? 2 : 1 } @@ -297,7 +297,7 @@ export class Miner { typeof baseFeePerGas === 'bigint' && baseFeePerGas !== BIGINT_0 ? `(baseFee: ${baseFeePerGas})` : '' - }` + }`, ) let index = 0 let blockFull = false @@ -319,14 +319,14 @@ export class Miner { // If block has less than 21000 gas remaining, consider it full blockFull = true this.config.logger.info( - `Miner: Assembled block full (gasLeft: ${gasLimit - blockBuilder.gasUsed})` + `Miner: Assembled block full (gasLeft: ${gasLimit - blockBuilder.gasUsed})`, ) } } else { // If there is an error adding a tx, it will be skipped const hash = bytesToHex(txs[index].hash()) this.config.logger.debug( - `Skipping tx ${hash}, error encountered when trying to add tx:\n${error}` + `Skipping tx ${hash}, error encountered when trying to add tx:\n${error}`, ) } } @@ -343,7 +343,7 @@ export class Miner { this.config.chainCommon.consensusType() === ConsensusType.ProofOfWork ? `(difficulty: ${block.header.difficulty})` : `(${inTurn === true ? 'in turn' : 'not in turn'})` - }` + }`, ) this.assembling = false if (interrupt) return diff --git a/packages/client/src/miner/pendingBlock.ts b/packages/client/src/miner/pendingBlock.ts index e5978359fd..0d08db6e76 100644 --- a/packages/client/src/miner/pendingBlock.ts +++ b/packages/client/src/miner/pendingBlock.ts @@ -98,7 +98,7 @@ export class PendingBlock { vm: VM, parentBlock: Block, headerData: Partial = {}, - withdrawals?: WithdrawalData[] + withdrawals?: WithdrawalData[], ) { const number = parentBlock.header.number + BIGINT_1 const { timestamp, mixHash, parentBeaconBlockRoot, coinbase } = headerData @@ -138,7 +138,7 @@ export class PendingBlock { for (const withdrawal of withdrawals) { const indexBuf = bigIntToUnpaddedBytes(toType(withdrawal.index ?? 0, TypeOutput.BigInt)) const validatorIndex = bigIntToUnpaddedBytes( - toType(withdrawal.validatorIndex ?? 0, TypeOutput.BigInt) + toType(withdrawal.validatorIndex ?? 0, TypeOutput.BigInt), ) const address = toType(withdrawal.address ?? Address.zero(), TypeOutput.Uint8Array) const amount = bigIntToUnpaddedBytes(toType(withdrawal.amount ?? 
0, TypeOutput.BigInt)) @@ -158,9 +158,9 @@ export class PendingBlock { gasLimitBuf, parentBeaconBlockRootBuf, coinbaseBuf, - withdrawalsBuf - ) - ).subarray(0, 8) + withdrawalsBuf, + ), + ).subarray(0, 8), ) const payloadId = bytesToHex(payloadIdBytes) @@ -210,12 +210,12 @@ export class PendingBlock { allowedBlobs, }) this.config.logger.info( - `Pending: Assembling block from ${txs.length} eligible txs (baseFee: ${baseFeePerGas})` + `Pending: Assembling block from ${txs.length} eligible txs (baseFee: ${baseFeePerGas})`, ) const { addedTxs, skippedByAddErrors, blobTxs } = await this.addTransactions(builder, txs) this.config.logger.info( - `Pending: Added txs=${addedTxs} skippedByAddErrors=${skippedByAddErrors} from total=${txs.length} tx candidates` + `Pending: Added txs=${addedTxs} skippedByAddErrors=${skippedByAddErrors} from total=${txs.length} tx candidates`, ) // Construct initial blobs bundle when payload is constructed @@ -244,7 +244,7 @@ export class PendingBlock { * Returns the completed block */ async build( - payloadIdBytes: Uint8Array | string + payloadIdBytes: Uint8Array | string, ): Promise { const payloadId = typeof payloadIdBytes !== 'string' ? bytesToHex(payloadIdBytes) : payloadIdBytes @@ -283,8 +283,8 @@ export class PendingBlock { ).filter( (tx) => (builder as any).transactions.some((t: TypedTransaction) => - equalsBytes(t.hash(), tx.hash()) - ) === false + equalsBytes(t.hash(), tx.hash()), + ) === false, ) const { skippedByAddErrors, blobTxs } = await this.addTransactions(builder, txs) @@ -303,8 +303,8 @@ export class PendingBlock { `Pending: Built block number=${block.header.number} txs=${ block.transactions.length }${withdrawalsStr}${blobsStr} skippedByAddErrors=${skippedByAddErrors} hash=${bytesToHex( - block.hash() - )}` + block.hash(), + )}`, ) return [block, builder.transactionReceipts, builder.minerValue, blobs] @@ -365,15 +365,15 @@ export class PendingBlock { // Remove the blob tx which doesn't has blobs bundled this.txPool.removeByHash(bytesToHex(tx.hash()), tx) this.config.logger.error( - `Pending: Removed from txPool a blob tx ${bytesToHex(tx.hash())} with missing blobs` + `Pending: Removed from txPool a blob tx ${bytesToHex(tx.hash())} with missing blobs`, ) addTxResult = AddTxResult.RemovedByErrors } else { // If there is an error adding a tx, it will be skipped this.config.logger.debug( `Pending: Skipping tx ${bytesToHex( - tx.hash() - )}, error encountered when trying to add tx:\n${error}` + tx.hash(), + )}, error encountered when trying to add tx:\n${error}`, ) addTxResult = AddTxResult.SkippedByErrors } diff --git a/packages/client/src/net/peer/rlpxpeer.ts b/packages/client/src/net/peer/rlpxpeer.ts index 92277a2d3a..d2016f20f8 100644 --- a/packages/client/src/net/peer/rlpxpeer.ts +++ b/packages/client/src/net/peer/rlpxpeer.ts @@ -176,19 +176,19 @@ export class RlpxPeer extends Peer { const snapProtocol = snapRlpxProtocol !== undefined ? 
this.protocols.find( - (p) => p.name === snapRlpxProtocol?.constructor.name.toLowerCase() + (p) => p.name === snapRlpxProtocol?.constructor.name.toLowerCase(), ) : undefined if (snapProtocol !== undefined) { const snapSender = new RlpxSender( - snapRlpxProtocol as Devp2pETH | Devp2pLES | Devp2pSNAP + snapRlpxProtocol as Devp2pETH | Devp2pLES | Devp2pSNAP, ) return this.addProtocol(snapSender, snapProtocol) } } }) } - }) + }), ) this.connected = true } diff --git a/packages/client/src/net/peerpool.ts b/packages/client/src/net/peerpool.ts index f48a397405..c02daa800e 100644 --- a/packages/client/src/net/peerpool.ts +++ b/packages/client/src/net/peerpool.ts @@ -91,13 +91,13 @@ export class PeerPool { this._statusCheckInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this._statusCheck.bind(this), - this.DEFAULT_STATUS_CHECK_INTERVAL + this.DEFAULT_STATUS_CHECK_INTERVAL, ) this._peerBestHeaderUpdateInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this._peerBestHeaderUpdate.bind(this), - this.DEFAULT_PEER_BEST_HEADER_UPDATE_INTERVAL + this.DEFAULT_PEER_BEST_HEADER_UPDATE_INTERVAL, ) this.running = true diff --git a/packages/client/src/net/protocol/boundprotocol.ts b/packages/client/src/net/protocol/boundprotocol.ts index 5198d86dfa..299d18c5d2 100644 --- a/packages/client/src/net/protocol/boundprotocol.ts +++ b/packages/client/src/net/protocol/boundprotocol.ts @@ -84,7 +84,7 @@ export class BoundProtocol { } }) this.sender.on('error', (error: Error) => - this.config.events.emit(Event.PROTOCOL_ERROR, error, this.peer) + this.config.events.emit(Event.PROTOCOL_ERROR, error, this.peer), ) } @@ -137,7 +137,7 @@ export class BoundProtocol { Event.PROTOCOL_MESSAGE, { name: message.name, data }, this.protocol.name, - this.peer + this.peer, ) } } diff --git a/packages/client/src/net/protocol/ethprotocol.ts b/packages/client/src/net/protocol/ethprotocol.ts index b0cbdebd2e..ed8f811776 100644 --- a/packages/client/src/net/protocol/ethprotocol.ts +++ b/packages/client/src/net/protocol/ethprotocol.ts @@ -175,7 +175,7 @@ export class EthProtocol extends Protocol { // to correct hardfork choice const header = BlockHeader.fromValuesArray( h, - difficulty > 0 ? { common, setHardfork: true } : { common, setHardfork: this.chainTTD } + difficulty > 0 ? 
{ common, setHardfork: true } : { common, setHardfork: this.chainTTD }, ) return header }), @@ -231,7 +231,7 @@ export class EthProtocol extends Protocol { ] }, decode: ( - params: Uint8Array[] | [types: PrefixedHexString, sizes: number[], hashes: Uint8Array[]] + params: Uint8Array[] | [types: PrefixedHexString, sizes: number[], hashes: Uint8Array[]], ) => { if (isNestedUint8Array(params) === true) { return params @@ -337,7 +337,7 @@ export class EthProtocol extends Protocol { Uint8Array, Uint8Array, Uint8Array, - Log[] + Log[], ] const receipt = { cumulativeBlockGasUsed: bytesToBigInt(cumulativeGasUsed), diff --git a/packages/client/src/net/protocol/flowcontrol.ts b/packages/client/src/net/protocol/flowcontrol.ts index 4330244e76..54414961dd 100644 --- a/packages/client/src/net/protocol/flowcontrol.ts +++ b/packages/client/src/net/protocol/flowcontrol.ts @@ -27,7 +27,7 @@ export class FlowControl { readonly bl: number readonly mrc: Mrc readonly mrr: number - readonly out: Map; + readonly out: Map readonly in: Map constructor(options?: FlowControlOptions) { diff --git a/packages/client/src/net/protocol/lesprotocol.ts b/packages/client/src/net/protocol/lesprotocol.ts index b64324b774..bf624f2c2b 100644 --- a/packages/client/src/net/protocol/lesprotocol.ts +++ b/packages/client/src/net/protocol/lesprotocol.ts @@ -41,7 +41,7 @@ type GetBlockHeadersOpts = { */ export interface LesProtocolMethods { getBlockHeaders: ( - opts: GetBlockHeadersOpts + opts: GetBlockHeadersOpts, ) => Promise<{ reqId: bigint; bv: bigint; headers: BlockHeader[] }> } @@ -112,7 +112,7 @@ export class LesProtocol extends Protocol { BlockHeader.fromValuesArray(h, { setHardfork: true, common: this.config.chainCommon, // eslint-disable-line no-invalid-this - }) + }), ), }), }, @@ -186,10 +186,10 @@ export class LesProtocol extends Protocol { const forkHash = this.config.chainCommon.forkHash( this.config.chainCommon.hardfork(), - this.chain.genesis.hash() + this.chain.genesis.hash(), ) const nextFork = this.config.chainCommon.nextHardforkBlockOrTimestamp( - this.config.chainCommon.hardfork() + this.config.chainCommon.hardfork(), ) const forkID = [hexToBytes(forkHash), bigIntToUnpaddedBytes(nextFork ?? 0n)] diff --git a/packages/client/src/net/protocol/snapprotocol.ts b/packages/client/src/net/protocol/snapprotocol.ts index db70510457..614ae8aaa7 100644 --- a/packages/client/src/net/protocol/snapprotocol.ts +++ b/packages/client/src/net/protocol/snapprotocol.ts @@ -81,7 +81,7 @@ type GetTrieNodesOpts = { */ export interface SnapProtocolMethods { getAccountRange: ( - opts: GetAccountRangeOpts + opts: GetAccountRangeOpts, ) => Promise<{ reqId: bigint; accounts: AccountData[]; proof: Uint8Array[] }> getStorageRanges: (opts: GetStorageRangesOpts) => Promise<{ reqId: bigint @@ -158,7 +158,7 @@ export class SnapProtocol extends Protocol { ({ hash, body: this.convertSlimBody === true ? accountBodyFromSlim(body) : body, - } as AccountData) + }) as AccountData, ), proof, } @@ -206,7 +206,7 @@ export class SnapProtocol extends Protocol { return [ bigIntToUnpaddedBytes(reqId ?? 
++this.nextReqId), slots.map((accSlots) => - accSlots.map((slotData) => [setLengthLeft(slotData.hash, 32), slotData.body]) + accSlots.map((slotData) => [setLengthLeft(slotData.hash, 32), slotData.body]), ), proof, ] @@ -215,7 +215,7 @@ export class SnapProtocol extends Protocol { return { reqId: bytesToBigInt(reqId), slots: slots.map((accSlots: any) => - accSlots.map(([hash, body]: any) => ({ hash, body } as StorageData)) + accSlots.map(([hash, body]: any) => ({ hash, body }) as StorageData), ), proof, } diff --git a/packages/client/src/net/server/rlpxserver.ts b/packages/client/src/net/server/rlpxserver.ts index 11de958b9a..f041ff7a5c 100644 --- a/packages/client/src/net/server/rlpxserver.ts +++ b/packages/client/src/net/server/rlpxserver.ts @@ -41,7 +41,7 @@ const ignoredErrors = new RegExp( // Client 'Handshake timed out', // Protocol handshake 'Server already destroyed', // Bootstrap retrigger - ].join('|') + ].join('|'), ) /** @@ -243,7 +243,7 @@ export class RlpxServer extends Server { this.dpt.bind(this.config.port, '0.0.0.0') } this.config.logger.info( - `Started discovery service discV4=${this.config.discV4} dns=${this.config.discDns} refreshInterval=${this.refreshInterval}` + `Started discovery service discV4=${this.config.discV4} dns=${this.config.discDns} refreshInterval=${this.refreshInterval}`, ) }) } @@ -291,14 +291,14 @@ export class RlpxServer extends Server { if (peer) { this.peers.delete(peer.id) this.config.logger.debug( - `Peer disconnected (${rlpxPeer.getDisconnectPrefix(reason)}): ${peer}` + `Peer disconnected (${rlpxPeer.getDisconnectPrefix(reason)}): ${peer}`, ) this.config.events.emit(Event.PEER_DISCONNECTED, peer) } }) this.rlpx.events.on('peer:error', (rlpxPeer: Devp2pRLPxPeer, error: Error) => - this.error(error) + this.error(error), ) this.rlpx.events.on('error', (e: Error) => { diff --git a/packages/client/src/rpc/index.ts b/packages/client/src/rpc/index.ts index fc18973587..58ddd1972e 100644 --- a/packages/client/src/rpc/index.ts +++ b/packages/client/src/rpc/index.ts @@ -32,7 +32,7 @@ export class RPCManager { getMethods(engine = false, rpcDebug = false) { const methods: { [key: string]: Function } = {} const mods = modules.list.filter((name: string) => - engine ? name === 'Engine' : name !== 'Engine' + engine ? 
name === 'Engine' : name !== 'Engine', ) for (const modName of mods) { const mod = new (modules as any)[modName](this._client, rpcDebug) @@ -64,7 +64,7 @@ export class RPCManager { */ static getMethodNames(mod: Object): string[] { const methodNames = Object.getOwnPropertyNames((mod as any).prototype).filter( - (methodName: string) => methodName !== 'constructor' + (methodName: string) => methodName !== 'constructor', ) return methodNames } diff --git a/packages/client/src/rpc/modules/debug.ts b/packages/client/src/rpc/modules/debug.ts index e8e63b8c9e..729889f199 100644 --- a/packages/client/src/rpc/modules/debug.ts +++ b/packages/client/src/rpc/modules/debug.ts @@ -94,7 +94,7 @@ export class Debug { this.traceTransaction = middleware( callWithStackTrace(this.traceTransaction.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.traceCall = middleware(callWithStackTrace(this.traceCall.bind(this), this._rpcDebug), 2, [ [validators.transaction()], @@ -109,27 +109,27 @@ export class Debug { [validators.address], [validators.uint256], [validators.unsignedInteger], - ] + ], ) this.getRawBlock = middleware( callWithStackTrace(this.getRawBlock.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getRawHeader = middleware( callWithStackTrace(this.getRawHeader.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getRawReceipts = middleware( callWithStackTrace(this.getRawReceipts.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getRawTransaction = middleware( callWithStackTrace(this.getRawTransaction.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) } @@ -153,7 +153,7 @@ export class Debug { const opts = validateTracerConfig(config) const result = await this.service.execution.receiptsManager.getReceiptByTxHash( - hexToBytes(txHash) + hexToBytes(txHash), ) if (!result) return null const [_, blockHash, txIndex] = result @@ -318,7 +318,7 @@ export class Debug { * The object will also contain `nextKey`, the next (hashed) storage key after the range included in `storage`. */ async storageRangeAt( - params: [PrefixedHexString, number, PrefixedHexString, PrefixedHexString, number] + params: [PrefixedHexString, number, PrefixedHexString, PrefixedHexString, number], ) { const [blockHash, txIndex, account, startKey, limit] = params @@ -357,7 +357,7 @@ export class Debug { // Validator already verified that `account` and `startKey` are properly formatted. 
Address.fromString(account), BigInt(startKey), - limit + limit, ) } /** @@ -390,7 +390,7 @@ export class Debug { const receipts = await this.service.execution.receiptsManager.getReceipts( block.hash(), true, - true + true, ) return receipts.map((r) => bytesToHex(encodeReceipt(r, r.txType))) } @@ -402,7 +402,7 @@ export class Debug { const [txHash] = params if (!this.service.execution.receiptsManager) throw new Error('missing receiptsManager') const result = await this.service.execution.receiptsManager.getReceiptByTxHash( - hexToBytes(txHash) + hexToBytes(txHash), ) if (!result) return null const [_receipt, blockHash, txIndex] = result diff --git a/packages/client/src/rpc/modules/engine/CLConnectionManager.ts b/packages/client/src/rpc/modules/engine/CLConnectionManager.ts index 148772f1e0..da6e5c0776 100644 --- a/packages/client/src/rpc/modules/engine/CLConnectionManager.ts +++ b/packages/client/src/rpc/modules/engine/CLConnectionManager.ts @@ -137,15 +137,15 @@ export class CLConnectionManager { this._connectionCheckInterval = setInterval( // eslint-disable @typescript-eslint/await-thenable this.connectionCheck.bind(this), - this.DEFAULT_CONNECTION_CHECK_INTERVAL + this.DEFAULT_CONNECTION_CHECK_INTERVAL, ) this._payloadLogInterval = setInterval( this.lastPayloadLog.bind(this), - this.DEFAULT_PAYLOAD_LOG_INTERVAL + this.DEFAULT_PAYLOAD_LOG_INTERVAL, ) this._forkchoiceLogInterval = setInterval( this.lastForkchoiceLog.bind(this), - this.DEFAULT_FORKCHOICE_LOG_INTERVAL + this.DEFAULT_FORKCHOICE_LOG_INTERVAL, ) } @@ -166,11 +166,11 @@ export class CLConnectionManager { private _getPayloadLogMsg(payload: NewPayload) { let msg = `number=${Number(payload.payload.blockNumber)} hash=${short( - payload.payload.blockHash + payload.payload.blockHash, )} parentHash=${short(payload.payload.parentHash)} status=${ payload.response ? payload.response.status : '-' } gasUsed=${this.compactNum(Number(payload.payload.gasUsed))} baseFee=${Number( - payload.payload.baseFeePerGas + payload.payload.baseFeePerGas, )} txs=${payload.payload.transactions.length}` if ('withdrawals' in payload.payload && payload.payload.withdrawals !== null) { @@ -190,7 +190,7 @@ export class CLConnectionManager { msg += `number=${Number(update.headBlock.header.number)} ` } msg += `head=${short(update.state.headBlockHash)} finalized=${short( - update.state.finalizedBlockHash + update.state.finalizedBlockHash, )} response=${update.response ? 
update.response.payloadStatus.status : '-'}` if (update.headBlock) { msg += ` timestampDiff=${this.timeDiffStr(update.headBlock)}` @@ -217,7 +217,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `Initial consensus forkchoice update ${this._getForkchoiceUpdateLogMsg(update)}`, - logLevel.INFO + logLevel.INFO, ) } this._lastForkchoiceUpdate = update @@ -230,7 +230,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `Initial consensus payload received ${this._getPayloadLogMsg(payload)}`, - logLevel.INFO + logLevel.INFO, ) } this._lastPayload = payload @@ -319,12 +319,12 @@ export class CLConnectionManager { logCLStatus( this.config.logger, 'CL client connection is needed, Merge HF happening soon', - logLevel.WARN + logLevel.WARN, ) logCLStatus( this.config.logger, '(no CL <-> EL communication yet, connection might be in a workable state though)', - logLevel.WARN + logLevel.WARN, ) } } @@ -337,12 +337,12 @@ export class CLConnectionManager { logCLStatus( this.config.logger, 'Paris (Merge) HF activated, CL client connection is needed for continued block processing', - logLevel.INFO + logLevel.INFO, ) logCLStatus( this.config.logger, '(note that CL client might need to be synced up to beacon chain Merge transition slot until communication starts)', - logLevel.INFO + logLevel.INFO, ) } this.oneTimeMergeCLConnectionCheck = true @@ -365,7 +365,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `Last consensus payload received ${payloadMsg}`, - logLevel.INFO + logLevel.INFO, ) const count = this._payloadToPayloadStats['blockCount'] const min = this._payloadToPayloadStats['minBlockNumber'] @@ -381,7 +381,7 @@ export class CLConnectionManager { `Payload stats blocks count=${count} minBlockNum=${min} maxBlockNum=${max} txsPerType=${ txsMsg.length > 0 ? 
txsMsg.join('|') : '0' }`, - logLevel.DEBUG + logLevel.DEBUG, ) this.clearPayloadStats() } @@ -398,7 +398,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `New consensus payload received ${payloadMsg}`, - logLevel.INFO + logLevel.INFO, ) } } @@ -415,15 +415,15 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `No consensus forkchoice update received yet`, - logLevel.INFO + logLevel.INFO, ) } else { logCLStatus( this.config.logger, `Last consensus forkchoice update ${this._getForkchoiceUpdateLogMsg( - this._lastForkchoiceUpdate + this._lastForkchoiceUpdate, )}`, - logLevel.INFO + logLevel.INFO, ) } } @@ -437,9 +437,9 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `New chain head set (forkchoice update) ${this._getForkchoiceUpdateLogMsg( - this._lastForkchoiceUpdate + this._lastForkchoiceUpdate, )}`, - logLevel.INFO + logLevel.INFO, ) } } @@ -451,7 +451,7 @@ export class CLConnectionManager { */ export function middleware( methodFn: (params: any[]) => Promise, - handler: (params: any[], response: any, errormsg: any) => void + handler: (params: any[], response: any, errormsg: any) => void, ): any { return function (params: any[] = []) { return methodFn(params) diff --git a/packages/client/src/rpc/modules/engine/engine.ts b/packages/client/src/rpc/modules/engine/engine.ts index d9094eba22..7eddb70f0c 100644 --- a/packages/client/src/rpc/modules/engine/engine.ts +++ b/packages/client/src/rpc/modules/engine/engine.ts @@ -178,7 +178,7 @@ export class Engine { middleware(callWithStackTrace(this.newPayloadV1.bind(this), this._rpcDebug), 1, [ [validators.object(executionPayloadV1FieldValidators)], ]), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) this.newPayloadV2 = cmMiddleware( @@ -186,11 +186,11 @@ export class Engine { [ validators.either( validators.object(executionPayloadV1FieldValidators), - validators.object(executionPayloadV2FieldValidators) + validators.object(executionPayloadV2FieldValidators), ), ], ]), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) this.newPayloadV3 = cmMiddleware( @@ -202,9 +202,9 @@ export class Engine { [validators.array(validators.bytes32)], [validators.bytes32], ], - ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'] + ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'], ), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) this.newPayloadV4 = cmMiddleware( @@ -216,9 +216,9 @@ export class Engine { [validators.array(validators.bytes32)], [validators.bytes32], ], - ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'] + ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'], ), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) /** @@ -227,7 +227,7 @@ export class Engine { const forkchoiceUpdatedResponseCMHandler = ( [state]: ForkchoiceStateV1[], response?: ForkchoiceResponseV1 & { headBlock?: Block }, - error?: string + error?: string, ) => { this.connectionManager.lastForkchoiceUpdate({ state, @@ -244,21 +244,21 @@ export 
class Engine { [validators.object(forkchoiceFieldValidators)], [validators.optional(validators.object(payloadAttributesFieldValidatorsV1))], ]), - forkchoiceUpdatedResponseCMHandler + forkchoiceUpdatedResponseCMHandler, ) this.forkchoiceUpdatedV2 = cmMiddleware( middleware(callWithStackTrace(this.forkchoiceUpdatedV2.bind(this), this._rpcDebug), 1, [ [validators.object(forkchoiceFieldValidators)], [validators.optional(validators.object(payloadAttributesFieldValidatorsV2))], ]), - forkchoiceUpdatedResponseCMHandler + forkchoiceUpdatedResponseCMHandler, ) this.forkchoiceUpdatedV3 = cmMiddleware( middleware(callWithStackTrace(this.forkchoiceUpdatedV3.bind(this), this._rpcDebug), 1, [ [validators.object(forkchoiceFieldValidators)], [validators.optional(validators.object(payloadAttributesFieldValidatorsV3))], ]), - forkchoiceUpdatedResponseCMHandler + forkchoiceUpdatedResponseCMHandler, ) /** @@ -268,28 +268,28 @@ export class Engine { middleware(callWithStackTrace(this.getPayloadV1.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) this.getPayloadV2 = cmMiddleware( middleware(callWithStackTrace(this.getPayloadV2.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) this.getPayloadV3 = cmMiddleware( middleware(callWithStackTrace(this.getPayloadV3.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) this.getPayloadV4 = cmMiddleware( middleware(callWithStackTrace(this.getPayloadV4.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -307,9 +307,9 @@ export class Engine { terminalBlockNumber: validators.uint64, }), ], - ] + ], ), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -317,7 +317,7 @@ export class Engine { */ this.exchangeCapabilities = cmMiddleware( middleware(callWithStackTrace(this.exchangeCapabilities.bind(this), this._rpcDebug), 0, []), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -327,7 +327,7 @@ export class Engine { middleware(callWithStackTrace(this.getPayloadBodiesByHashV1.bind(this), this._rpcDebug), 1, [ [validators.array(validators.bytes32)], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -338,7 +338,7 @@ export class Engine { [validators.bytes8], [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) } @@ -360,7 +360,7 @@ export class Engine { * 3. validationError: String|null - validation error message */ private async newPayload( - params: [ExecutionPayload, (Bytes32[] | null)?, (Bytes32 | null)?] + params: [ExecutionPayload, (Bytes32[] | null)?, (Bytes32 | null)?], ): Promise { const [payload, blobVersionedHashes, parentBeaconBlockRoot] = params if (this.config.synchronized) { @@ -386,7 +386,7 @@ export class Engine { parentBeaconBlockRoot: parentBeaconBlockRoot ?? 
undefined, }, this.chain, - this.chainCache + this.chainCache, ) if (headBlock === undefined || error !== undefined) { let response = error @@ -396,7 +396,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) response = { status: Status.INVALID, latestValidHash, validationError } } @@ -421,7 +421,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) const response = { status: Status.INVALID, latestValidHash, validationError } // skip marking the block invalid as this is more of a data issue from CL @@ -432,7 +432,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) const response = { status: Status.INVALID, latestValidHash, validationError } // skip marking the block invalid as this is more of a data issue from CL @@ -447,8 +447,8 @@ export class Engine { if (hardfork !== this.lastNewPayloadHF && this.lastNewPayloadHF !== '') { this.config.logger.info( `Hardfork change along new payload block number=${headBlock.header.number} hash=${short( - headBlock.hash() - )} old=${this.lastNewPayloadHF} new=${hardfork}` + headBlock.hash(), + )} old=${this.lastNewPayloadHF} new=${hardfork}`, ) } this.lastNewPayloadHF = hardfork @@ -474,7 +474,7 @@ export class Engine { } this.invalidBlocks.set( blockHash.slice(2), - new Error(response.validationError ?? 'Terminal block validation failed') + new Error(response.validationError ?? 'Terminal block validation failed'), ) return response } @@ -492,7 +492,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) const response = { status: Status.INVALID, latestValidHash, validationError } // skip marking the block invalid as this is more of a data issue from CL @@ -546,17 +546,17 @@ export class Engine { // if the invalid block is canonical along the current chain return invalid const invalidBlock = await this.skeleton.getBlockByHash( this.execution.chainStatus.hash, - true + true, ) if (invalidBlock !== undefined) { // hard luck: block along canonical chain is invalid const latestValidHash = await validHash( invalidBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const validationError = `Block number=${invalidBlock.header.number} hash=${short( - invalidBlock.hash() + invalidBlock.hash(), )} root=${short(invalidBlock.header.stateRoot)} along the canonical chain is invalid` const response = { @@ -633,10 +633,10 @@ export class Engine { const latestValidHash = await validHash( invalidBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const validationError = `Block number=${invalidBlock.header.number} hash=${short( - invalidBlock.hash() + invalidBlock.hash(), )} root=${short(invalidBlock.header.stateRoot)} along the canonical chain is invalid` const response = { @@ -695,9 +695,9 @@ export class Engine { const blockParent = i > 0 ? blocks[i - 1] - : this.chainCache.remoteBlocks.get( - bytesToHex(block.header.parentHash).slice(2) - ) ?? (await this.chain.getBlock(block.header.parentHash)) + : (this.chainCache.remoteBlocks.get( + bytesToHex(block.header.parentHash).slice(2), + ) ?? 
(await this.chain.getBlock(block.header.parentHash))) const blockExecuted = await this.execution.runWithoutSetHead({ block, root: blockParent.header.stateRoot, @@ -711,12 +711,12 @@ export class Engine { if (!executed) { this.config.logger.debug( `Skipping block(s) execution for headBlock=${headBlock.header.number} hash=${short( - headBlock.hash() + headBlock.hash(), )} : pendingBlocks=${blocks.length - i}(limit=${ this.chain.config.engineNewpayloadMaxExecute }) transactions=${block.transactions.length}(limit=${ this.chain.config.engineNewpayloadMaxTxsExecute - }) executionBusy=${this.execution.running}` + }) executionBusy=${this.execution.running}`, ) // determind status to be returned depending on if block could extend chain or not const status = optimisticLookup === true ? Status.SYNCING : Status.ACCEPTED @@ -731,7 +731,7 @@ export class Engine { const latestValidHash = await validHash( headBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const errorMsg = `${error}`.toLowerCase() @@ -922,7 +922,7 @@ export class Engine { * 3. headBlock: Block|undefined - Block corresponding to headBlockHash if found */ private async forkchoiceUpdated( - params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributes | undefined] + params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributes | undefined], ): Promise { const { headBlockHash, finalizedBlockHash, safeBlockHash } = params[0] const payloadAttributes = params[1] @@ -974,7 +974,7 @@ export class Engine { (await this.chain.getBlock(head)) } catch (error) { this.config.logger.debug( - `Forkchoice announced head block unknown to EL hash=${short(headBlockHash)}` + `Forkchoice announced head block unknown to EL hash=${short(headBlockHash)}`, ) const payloadStatus = { status: Status.SYNCING, @@ -993,7 +993,7 @@ export class Engine { this.config.logger.info( `Hardfork change along forkchoice head block update number=${ headBlock.header.number - } hash=${short(headBlock.hash())} old=${this.lastForkchoiceUpdatedHF} new=${hardfork}` + } hash=${short(headBlock.hash())} old=${this.lastForkchoiceUpdatedHF} new=${hardfork}`, ) } this.lastForkchoiceUpdatedHF = hardfork @@ -1002,8 +1002,8 @@ export class Engine { // requirements that might come later because of reorg or CL restarts this.config.logger.debug( `Forkchoice requested update to new head number=${headBlock.header.number} hash=${short( - headBlock.hash() - )}` + headBlock.hash(), + )}`, ) /** @@ -1064,17 +1064,17 @@ export class Engine { // see if the invalid block is canonical along the current skeleton/chain return invalid const invalidBlock = await this.skeleton.getBlockByHash( this.execution.chainStatus.hash, - true + true, ) if (invalidBlock !== undefined) { // hard luck: block along canonical chain is invalid const latestValidHash = await validHash( invalidBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const validationError = `Block number=${invalidBlock.header.number} hash=${short( - invalidBlock.hash() + invalidBlock.hash(), )} root=${short(invalidBlock.header.stateRoot)} along the canonical chain is invalid` const payloadStatus = { @@ -1094,11 +1094,11 @@ export class Engine { ) { // jump the vm head to failing block so that next block can be executed this.config.logger.debug( - `Jumping the stalled vmHead forward to hash=${this.execution.chainStatus.hash} height=${this.execution.chainStatus.height} to continue the execution` + `Jumping the stalled vmHead forward to 
hash=${this.execution.chainStatus.hash} height=${this.execution.chainStatus.height} to continue the execution`, ) await this.execution.jumpVmHead( this.execution.chainStatus.hash, - this.execution.chainStatus.height + this.execution.chainStatus.height, ) } @@ -1129,7 +1129,7 @@ export class Engine { parentBlocks = await recursivelyFindParents( vmHeadHash, headBlock.header.parentHash, - this.chain + this.chain, ) } catch (error) { const payloadStatus = { @@ -1213,7 +1213,7 @@ export class Engine { coinbase: suggestedFeeRecipient, parentBeaconBlockRoot, }, - withdrawals + withdrawals, ) const latestValidHash = await validHash(headBlock.hash(), this.chain, this.chainCache) const payloadStatus = { status: Status.VALID, latestValidHash, validationError: null } @@ -1240,7 +1240,10 @@ export class Engine { * @returns */ private async forkchoiceUpdatedV1( - params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributesV1 | undefined] + params: [ + forkchoiceState: ForkchoiceStateV1, + payloadAttributes: PayloadAttributesV1 | undefined, + ], ): Promise { const payloadAttributes = params[1] if (payloadAttributes !== undefined && payloadAttributes !== null) { @@ -1258,7 +1261,7 @@ export class Engine { 1, null, Hardfork.Paris, - BigInt(payloadAttributes.timestamp) + BigInt(payloadAttributes.timestamp), ) } @@ -1274,8 +1277,8 @@ export class Engine { private async forkchoiceUpdatedV2( params: [ forkchoiceState: ForkchoiceStateV1, - payloadAttributes: PayloadAttributesV1 | PayloadAttributesV2 | undefined - ] + payloadAttributes: PayloadAttributesV1 | PayloadAttributesV2 | undefined, + ], ): Promise { const payloadAttributes = params[1] if (payloadAttributes !== undefined && payloadAttributes !== null) { @@ -1294,7 +1297,7 @@ export class Engine { 2, null, Hardfork.Shanghai, - BigInt(payloadAttributes.timestamp) + BigInt(payloadAttributes.timestamp), ) const shanghaiTimestamp = this.chain.config.chainCommon.hardforkTimestamp(Hardfork.Shanghai) @@ -1335,7 +1338,10 @@ export class Engine { * @returns */ private async forkchoiceUpdatedV3( - params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributesV3 | undefined] + params: [ + forkchoiceState: ForkchoiceStateV1, + payloadAttributes: PayloadAttributesV3 | undefined, + ], ): Promise { const payloadAttributes = params[1] if (payloadAttributes !== undefined && payloadAttributes !== null) { @@ -1355,7 +1361,7 @@ export class Engine { Hardfork.Cancun, // this could be valid post cancun as well, if not then update the valid till hf here null, - BigInt(payloadAttributes.timestamp) + BigInt(payloadAttributes.timestamp), ) } @@ -1432,7 +1438,7 @@ export class Engine { payloadVersion, checkNotBeforeHf, checkNotAfterHf, - BigInt(executionPayload.executionPayload.timestamp) + BigInt(executionPayload.executionPayload.timestamp), ) return executionPayload } catch (error: any) { @@ -1492,7 +1498,7 @@ export class Engine { * @returns Instance of {@link TransitionConfigurationV1} or an error */ async exchangeTransitionConfigurationV1( - params: [TransitionConfigurationV1] + params: [TransitionConfigurationV1], ): Promise { const { terminalTotalDifficulty, terminalBlockHash, terminalBlockNumber } = params[0] const ttd = this.chain.config.chainCommon.hardforkTTD(Hardfork.Paris) @@ -1506,7 +1512,7 @@ export class Engine { throw { code: INVALID_PARAMS, message: `terminalTotalDifficulty set to ${ttd}, received ${parseInt( - terminalTotalDifficulty + terminalTotalDifficulty, )}`, } } @@ -1535,7 +1541,7 @@ export class Engine { * @returns 
an array of ExecutionPayloadBodyV1 objects or null if a given execution payload isn't stored locally */ private async getPayloadBodiesByHashV1( - params: [[Bytes32]] + params: [[Bytes32]], ): Promise<(ExecutionPayloadBodyV1 | null)[]> { if (params[0].length > 32) { throw { @@ -1567,7 +1573,7 @@ export class Engine { * @returns an array of ExecutionPayloadBodyV1 objects or null if a given execution payload isn't stored locally */ private async getPayloadBodiesByRangeV1( - params: [Bytes8, Bytes8] + params: [Bytes8, Bytes8], ): Promise<(ExecutionPayloadBodyV1 | null)[]> { const start = BigInt(params[0]) let count = BigInt(params[1]) diff --git a/packages/client/src/rpc/modules/engine/util/generic.ts b/packages/client/src/rpc/modules/engine/util/generic.ts index f36d5805c8..618e06434d 100644 --- a/packages/client/src/rpc/modules/engine/util/generic.ts +++ b/packages/client/src/rpc/modules/engine/util/generic.ts @@ -15,7 +15,7 @@ import type { PrefixedHexString } from '@ethereumjs/util' export const recursivelyFindParents = async ( vmHeadHash: Uint8Array, parentHash: Uint8Array, - chain: Chain + chain: Chain, ) => { if (equalsBytes(parentHash, vmHeadHash) || equalsBytes(parentHash, new Uint8Array(32))) { return [] @@ -28,7 +28,7 @@ export const recursivelyFindParents = async ( while (!equalsBytes(parentBlocks[parentBlocks.length - 1].hash(), vmHeadHash)) { const block: Block = await chain.getBlock( - parentBlocks[parentBlocks.length - 1].header.parentHash + parentBlocks[parentBlocks.length - 1].header.parentHash, ) parentBlocks.push(block) @@ -50,7 +50,7 @@ export const recursivelyFindParents = async ( */ export const validExecutedChainBlock = async ( blockOrHash: Uint8Array | Block, - chain: Chain + chain: Chain, ): Promise => { try { const block = blockOrHash instanceof Block ? blockOrHash : await chain.getBlock(blockOrHash) @@ -77,7 +77,7 @@ export const validExecutedChainBlock = async ( export const validHash = async ( hash: Uint8Array, chain: Chain, - chainCache: ChainCache + chainCache: ChainCache, ): Promise => { const { remoteBlocks, executedBlocks, invalidBlocks, skeleton } = chainCache const maxDepth = chain.config.engineParentLookupMaxDepth @@ -136,7 +136,7 @@ export function validateHardforkRange( methodVersion: number, checkNotBeforeHf: Hardfork | null, checkNotAfterHf: Hardfork | null, - timestamp: bigint + timestamp: bigint, ) { if (checkNotBeforeHf !== null) { const hfTimeStamp = chainCommon.hardforkTimestamp(checkNotBeforeHf) diff --git a/packages/client/src/rpc/modules/engine/util/newPayload.ts b/packages/client/src/rpc/modules/engine/util/newPayload.ts index f566fb2361..efe07b16c4 100644 --- a/packages/client/src/rpc/modules/engine/util/newPayload.ts +++ b/packages/client/src/rpc/modules/engine/util/newPayload.ts @@ -20,7 +20,7 @@ import type { PrefixedHexString } from '@ethereumjs/util' export const assembleBlock = async ( payload: ExecutionPayload, chain: Chain, - chainCache: ChainCache + chainCache: ChainCache, ): Promise<{ block?: Block; error?: PayloadStatusV1 }> => { const { blockNumber, timestamp } = payload const { config } = chain @@ -44,7 +44,7 @@ export const assembleBlock = async ( const latestValidHash = await validHash( hexToBytes(payload.parentHash as PrefixedHexString), chain, - chainCache + chainCache, ) const response = { status: `${error}`.includes('Invalid blockHash') ? 
Status.INVALID_BLOCK_HASH : Status.INVALID, @@ -57,7 +57,7 @@ export const assembleBlock = async ( export const validate4844BlobVersionedHashes = ( headBlock: Block, - blobVersionedHashes: PrefixedHexString[] + blobVersionedHashes: PrefixedHexString[], ): string | null => { let validationError: string | null = null @@ -79,7 +79,7 @@ export const validate4844BlobVersionedHashes = ( // if mismatch, record error and break if (!equalsBytes(hexToBytes(blobVersionedHashes[vIndex]), txVersionedHashes[vIndex])) { validationError = `Error verifying blobVersionedHashes: mismatch at index=${vIndex} expected=${short( - txVersionedHashes[vIndex] + txVersionedHashes[vIndex], )} received=${short(blobVersionedHashes[vIndex])}` break } diff --git a/packages/client/src/rpc/modules/eth.ts b/packages/client/src/rpc/modules/eth.ts index 9afdf2c69b..afb2a8ca51 100644 --- a/packages/client/src/rpc/modules/eth.ts +++ b/packages/client/src/rpc/modules/eth.ts @@ -108,12 +108,12 @@ type JsonRpcLog = { const jsonRpcBlock = async ( block: Block, chain: Chain, - includeTransactions: boolean + includeTransactions: boolean, ): Promise => { const json = block.toJSON() const header = json!.header! const transactions = block.transactions.map((tx, txIndex) => - includeTransactions ? jsonRpcTx(tx, block, txIndex) : bytesToHex(tx.hash()) + includeTransactions ? jsonRpcTx(tx, block, txIndex) : bytesToHex(tx.hash()), ) const withdrawalsAttr = header.withdrawalsRoot !== undefined @@ -162,7 +162,7 @@ const jsonRpcLog = async ( block?: Block, tx?: TypedTransaction, txIndex?: number, - logIndex?: number + logIndex?: number, ): Promise => ({ removed: false, // TODO implement logIndex: logIndex !== undefined ? intToHex(logIndex) : null, @@ -188,7 +188,7 @@ const jsonRpcReceipt = async ( logIndex: number, contractAddress?: Address, blobGasUsed?: bigint, - blobGasPrice?: bigint + blobGasPrice?: bigint, ): Promise => ({ transactionHash: bytesToHex(tx.hash()), transactionIndex: intToHex(txIndex), @@ -201,7 +201,7 @@ const jsonRpcReceipt = async ( gasUsed: bigIntToHex(gasUsed), contractAddress: contractAddress?.toString() ?? 
null, logs: await Promise.all( - receipt.logs.map((l, i) => jsonRpcLog(l, block, tx, txIndex, logIndex + i)) + receipt.logs.map((l, i) => jsonRpcLog(l, block, tx, txIndex, logIndex + i)), ), logsBloom: bytesToHex(receipt.bitvector), root: @@ -220,7 +220,7 @@ const jsonRpcReceipt = async ( const calculateRewards = async ( block: Block, receiptsManager: ReceiptsManager, - priorityFeePercentiles: number[] + priorityFeePercentiles: number[], ) => { if (priorityFeePercentiles.length === 0) { return [] @@ -315,7 +315,7 @@ export class Eth { this.blockNumber = middleware( callWithStackTrace(this.blockNumber.bind(this), this._rpcDebug), - 0 + 0, ) this.call = middleware(callWithStackTrace(this.call.bind(this), this._rpcDebug), 2, [ @@ -328,13 +328,13 @@ export class Eth { this.estimateGas = middleware( callWithStackTrace(this.estimateGas.bind(this), this._rpcDebug), 1, - [[validators.transaction()], [validators.blockOption]] + [[validators.transaction()], [validators.blockOption]], ) this.getBalance = middleware( callWithStackTrace(this.getBalance.bind(this), this._rpcDebug), 2, - [[validators.address], [validators.blockOption]] + [[validators.address], [validators.blockOption]], ) this.coinbase = middleware(callWithStackTrace(this.coinbase.bind(this), this._rpcDebug), 0, []) @@ -342,19 +342,19 @@ export class Eth { this.getBlockByNumber = middleware( callWithStackTrace(this.getBlockByNumber.bind(this), this._rpcDebug), 2, - [[validators.blockOption], [validators.bool]] + [[validators.blockOption], [validators.bool]], ) this.getBlockByHash = middleware( callWithStackTrace(this.getBlockByHash.bind(this), this._rpcDebug), 2, - [[validators.hex, validators.blockHash], [validators.bool]] + [[validators.hex, validators.blockHash], [validators.bool]], ) this.getBlockTransactionCountByHash = middleware( callWithStackTrace(this.getBlockTransactionCountByHash.bind(this), this._rpcDebug), 1, - [[validators.hex, validators.blockHash]] + [[validators.hex, validators.blockHash]], ) this.getCode = middleware(callWithStackTrace(this.getCode.bind(this), this._rpcDebug), 2, [ @@ -365,54 +365,54 @@ export class Eth { this.getUncleCountByBlockNumber = middleware( callWithStackTrace(this.getUncleCountByBlockNumber.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getStorageAt = middleware( callWithStackTrace(this.getStorageAt.bind(this), this._rpcDebug), 3, - [[validators.address], [validators.hex], [validators.blockOption]] + [[validators.address], [validators.hex], [validators.blockOption]], ) this.getTransactionByBlockHashAndIndex = middleware( callWithStackTrace(this.getTransactionByBlockHashAndIndex.bind(this), this._rpcDebug), 2, - [[validators.hex, validators.blockHash], [validators.hex]] + [[validators.hex, validators.blockHash], [validators.hex]], ) this.getTransactionByBlockNumberAndIndex = middleware( callWithStackTrace(this.getTransactionByBlockNumberAndIndex.bind(this), this._rpcDebug), 2, - [[validators.hex, validators.blockOption], [validators.hex]] + [[validators.hex, validators.blockOption], [validators.hex]], ) this.getTransactionByHash = middleware( callWithStackTrace(this.getTransactionByHash.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getTransactionCount = middleware( callWithStackTrace(this.getTransactionCount.bind(this), this._rpcDebug), 2, - [[validators.address], [validators.blockOption]] + [[validators.address], [validators.blockOption]], ) this.getBlockReceipts = middleware( 
callWithStackTrace(this.getBlockReceipts.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getTransactionReceipt = middleware( callWithStackTrace(this.getTransactionReceipt.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getUncleCountByBlockNumber = middleware( callWithStackTrace(this.getUncleCountByBlockNumber.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getLogs = middleware(callWithStackTrace(this.getLogs.bind(this), this._rpcDebug), 1, [ @@ -421,14 +421,14 @@ export class Eth { fromBlock: validators.optional(validators.blockOption), toBlock: validators.optional(validators.blockOption), address: validators.optional( - validators.either(validators.array(validators.address), validators.address) + validators.either(validators.array(validators.address), validators.address), ), topics: validators.optional( validators.array( validators.optional( - validators.either(validators.hex, validators.array(validators.hex)) - ) - ) + validators.either(validators.hex, validators.array(validators.hex)), + ), + ), ), blockHash: validators.optional(validators.blockHash), }), @@ -438,13 +438,13 @@ export class Eth { this.sendRawTransaction = middleware( callWithStackTrace(this.sendRawTransaction.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.protocolVersion = middleware( callWithStackTrace(this.protocolVersion.bind(this), this._rpcDebug), 0, - [] + [], ) this.syncing = middleware(callWithStackTrace(this.syncing.bind(this), this._rpcDebug), 0, []) @@ -458,7 +458,7 @@ export class Eth { this.getBlockTransactionCountByNumber = middleware( callWithStackTrace(this.getBlockTransactionCountByNumber.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.gasPrice = middleware(callWithStackTrace(this.gasPrice.bind(this), this._rpcDebug), 0, []) @@ -470,13 +470,13 @@ export class Eth { [validators.either(validators.hex, validators.integer)], [validators.either(validators.hex, validators.blockOption)], [validators.rewardPercentiles], - ] + ], ) this.blobBaseFee = middleware( callWithStackTrace(this.blobBaseFee.bind(this), this._rpcDebug), 0, - [] + [], ) } @@ -596,7 +596,7 @@ export class Eth { : undefined, }, }, - { common: vm.common, setHardfork: true } + { common: vm.common, setHardfork: true }, ) vm.common.setHardforkBy({ @@ -886,7 +886,7 @@ export class Eth { // Add pending txns to nonce if blockOpt is 'pending' if (blockOpt === 'pending') { pendingTxsCount = BigInt( - (this.service as FullEthereumService).txPool.pool.get(addressHex.slice(2))?.length ?? 0 + (this.service as FullEthereumService).txPool.pool.get(addressHex.slice(2))?.length ?? 0, ) } return bigIntToHex(account.nonce + pendingTxsCount) @@ -973,9 +973,9 @@ export class Eth { i, createdAddress, blobGasUsed, - blobGasPrice + blobGasPrice, ) - }) + }), ) return receipts } @@ -1035,7 +1035,7 @@ export class Eth { logIndex, createdAddress, blobGasUsed, - blobGasPrice + blobGasPrice, ) } @@ -1124,8 +1124,8 @@ export class Eth { const logs = await this.receiptsManager.getLogs(from, to, addressBytes, formattedTopics) return Promise.all( logs.map(({ log, block, tx, txIndex, logIndex }) => - jsonRpcLog(log, block, tx, txIndex, logIndex) - ) + jsonRpcLog(log, block, tx, txIndex, logIndex), + ), ) } @@ -1171,7 +1171,7 @@ export class Eth { throw Error( `tx blobs=${(tx.blobs ?? 
[]).length} exceeds block limit=${ blobGasLimit / blobGasPerBlob - }` + }`, ) } } else { @@ -1229,7 +1229,7 @@ export class Eth { * @returns The {@link Proof} */ async getProof( - params: [PrefixedHexString, PrefixedHexString[], PrefixedHexString] + params: [PrefixedHexString, PrefixedHexString[], PrefixedHexString], ): Promise { const [addressHex, slotsHex, blockOpt] = params const block = await getBlockByOption(blockOpt, this._chain) @@ -1386,11 +1386,11 @@ export class Eth { const requestedBlockNumbers = Array.from( { length: Number(blockCount) }, - (_, i) => oldestBlockNumber + BigInt(i) + (_, i) => oldestBlockNumber + BigInt(i), ) const requestedBlocks = await Promise.all( - requestedBlockNumbers.map((n) => getBlockByOption(n.toString(), this._chain)) + requestedBlockNumbers.map((n) => getBlockByOption(n.toString(), this._chain)), ) const [baseFees, gasUsedRatios, baseFeePerBlobGas, blobGasUsedRatio] = requestedBlocks.reduce( @@ -1414,7 +1414,7 @@ export class Eth { return [prevBaseFees, prevGasUsedRatios, prevBaseFeesPerBlobGas, prevBlobGasUsedRatio] }, - [[], [], [], []] as [bigint[], number[], bigint[], number[]] + [[], [], [], []] as [bigint[], number[], bigint[], number[]], ) const londonHardforkBlockNumber = this._chain.blockchain.common.hardforkBlock(Hardfork.London)! @@ -1426,7 +1426,7 @@ export class Eth { if (this._chain.blockchain.common.isActivatedEIP(4844)) { baseFeePerBlobGas.push( - requestedBlocks[requestedBlocks.length - 1].header.calcNextBlobGasPrice() + requestedBlocks[requestedBlocks.length - 1].header.calcNextBlobGasPrice(), ) } else { // TODO (?): known bug @@ -1440,8 +1440,8 @@ export class Eth { if (this.receiptsManager && priorityFeePercentiles) { rewards = await Promise.all( requestedBlocks.map((b) => - calculateRewards(b, this.receiptsManager!, priorityFeePercentiles) - ) + calculateRewards(b, this.receiptsManager!, priorityFeePercentiles), + ), ) } diff --git a/packages/client/src/rpc/modules/net.ts b/packages/client/src/rpc/modules/net.ts index 54639921af..f53c10ca5f 100644 --- a/packages/client/src/rpc/modules/net.ts +++ b/packages/client/src/rpc/modules/net.ts @@ -33,12 +33,12 @@ export class Net { this.listening = middleware( callWithStackTrace(this.listening.bind(this), this._rpcDebug), 0, - [] + [], ) this.peerCount = middleware( callWithStackTrace(this.peerCount.bind(this), this._rpcDebug), 0, - [] + [], ) } diff --git a/packages/client/src/rpc/validation.ts b/packages/client/src/rpc/validation.ts index af2e2f30fe..4b150cdc85 100644 --- a/packages/client/src/rpc/validation.ts +++ b/packages/client/src/rpc/validation.ts @@ -11,7 +11,7 @@ export function middleware( method: any, requiredParamsCount: number, validators: any[] = [], - names: string[] = [] + names: string[] = [], ): any { return function (params: any[] = []) { return new Promise((resolve, reject) => { @@ -430,7 +430,13 @@ export const validators = { get depositRequest() { return ( - requiredFields: string[] = ['pubkey', 'withdrawalCredentials', 'amount', 'signature', 'index'] + requiredFields: string[] = [ + 'pubkey', + 'withdrawalCredentials', + 'amount', + 'signature', + 'index', + ], ) => { return (params: any[], index: number) => { if (typeof params[index] !== 'object') { diff --git a/packages/client/src/service/fullethereumservice.ts b/packages/client/src/service/fullethereumservice.ts index 94268bd9d4..684398ee3a 100644 --- a/packages/client/src/service/fullethereumservice.ts +++ b/packages/client/src/service/fullethereumservice.ts @@ -159,7 +159,7 @@ export class 
FullEthereumService extends Service { this.synchronizer instanceof BeaconSynchronizer ? 'BeaconSynchronizer' : 'FullSynchronizer' - }.` + }.`, ) } else { this.config.logger.info('Starting FullEthereumService with no syncing.') @@ -197,13 +197,13 @@ export class FullEthereumService extends Service { throw Error(`Currently stateful verkle execution not supported`) } this.execution.config.logger.info( - `Skipping VM verkle statemanager genesis hardfork=${this.execution.hardfork}` + `Skipping VM verkle statemanager genesis hardfork=${this.execution.hardfork}`, ) await this.execution.setupVerkleVM() this.execution.vm = this.execution.verkleVM! } else { this.execution.config.logger.info( - `Initializing VM merkle statemanager genesis hardfork=${this.execution.hardfork}` + `Initializing VM merkle statemanager genesis hardfork=${this.execution.hardfork}`, ) await this.execution.setupMerkleVM() this.execution.vm = this.execution.merkleVM! @@ -262,12 +262,12 @@ export class FullEthereumService extends Service { } } else { this.config.logger.debug( - `skipping snapsync since cl (skeleton) synchronized=${this.skeleton?.synchronized}` + `skipping snapsync since cl (skeleton) synchronized=${this.skeleton?.synchronized}`, ) } } else { this.config.logger.warn( - 'skipping building head state as neither execution is started nor snapsync is available' + 'skipping building head state as neither execution is started nor snapsync is available', ) } } catch (error) { @@ -329,7 +329,7 @@ export class FullEthereumService extends Service { chain: this.chain, flow: this.flow, timeout: this.timeout, - }) + }), ) } return protocols @@ -379,7 +379,7 @@ export class FullEthereumService extends Service { case 'GetBlockBodies': { const { reqId, hashes } = message.data const blocks: Block[] = await Promise.all( - hashes.map((hash: Uint8Array) => this.chain.getBlock(hash)) + hashes.map((hash: Uint8Array) => this.chain.getBlock(hash)), ) const bodies = blocks.map((block) => block.raw().slice(1)) peer.eth!.send('BlockBodies', { reqId, bodies }) @@ -388,7 +388,7 @@ export class FullEthereumService extends Service { case 'NewBlockHashes': { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { this.config.logger.debug( - `Dropping peer ${peer.id} for sending NewBlockHashes after merge (EIP-3675)` + `Dropping peer ${peer.id} for sending NewBlockHashes after merge (EIP-3675)`, ) this.pool.ban(peer, 9000000) } else if (this.synchronizer instanceof FullSynchronizer) { @@ -403,7 +403,7 @@ export class FullEthereumService extends Service { case 'NewBlock': { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { this.config.logger.debug( - `Dropping peer ${peer.id} for sending NewBlock after merge (EIP-3675)` + `Dropping peer ${peer.id} for sending NewBlock after merge (EIP-3675)`, ) this.pool.ban(peer, 9000000) } else if (this.synchronizer instanceof FullSynchronizer) { diff --git a/packages/client/src/service/service.ts b/packages/client/src/service/service.ts index aa1a9da8d3..5b9aee525a 100644 --- a/packages/client/src/service/service.ts +++ b/packages/client/src/service/service.ts @@ -86,7 +86,7 @@ export class Service { await this.handle(message, protocol, peer) } catch (error: any) { this.config.logger.debug( - `Error handling message (${protocol}:${message.name}): ${error.message}` + `Error handling message (${protocol}:${message.name}): ${error.message}`, ) } } @@ -126,13 +126,13 @@ export class Service { this.config.server && this.config.server.addProtocols(protocols) this.config.events.on(Event.POOL_PEER_BANNED, 
(peer) => - this.config.logger.debug(`Peer banned: ${peer}`) + this.config.logger.debug(`Peer banned: ${peer}`), ) this.config.events.on(Event.POOL_PEER_ADDED, (peer) => - this.config.logger.debug(`Peer added: ${peer}`) + this.config.logger.debug(`Peer added: ${peer}`), ) this.config.events.on(Event.POOL_PEER_REMOVED, (peer) => - this.config.logger.debug(`Peer removed: ${peer}`) + this.config.logger.debug(`Peer removed: ${peer}`), ) await this.pool.open() @@ -168,7 +168,7 @@ export class Service { this._statsInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this.stats.bind(this), - this.STATS_INTERVAL + this.STATS_INTERVAL, ) this.running = true this.config.logger.info(`Started ${this.name} service.`) diff --git a/packages/client/src/service/skeleton.ts b/packages/client/src/service/skeleton.ts index e31815d85c..346dfe407e 100644 --- a/packages/client/src/service/skeleton.ts +++ b/packages/client/src/service/skeleton.ts @@ -216,7 +216,7 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `Canonical subchain linked with main, removing junked chains ${junkedSubChains .map((s) => `[tail=${s.tail} head=${s.head} next=${short(s.next)}]`) - .join(',')}` + .join(',')}`, ) await this.writeSyncStatus() } @@ -262,7 +262,7 @@ export class Skeleton extends MetaDBManager { } lastchain.head = headBlock.header.number this.config.logger.debug( - `lastchain head fast forwarded from=${head} to=${lastchain.head} tail=${lastchain.tail}` + `lastchain head fast forwarded from=${head} to=${lastchain.head} tail=${lastchain.tail}`, ) } @@ -282,8 +282,8 @@ export class Skeleton extends MetaDBManager { if (!equalsBytes(this.chain.genesis.hash(), head.hash())) { throw Error( `Invalid genesis setHead announcement number=${number} hash=${short( - head.hash() - )} genesisHash=${short(this.chain.genesis.hash())}` + head.hash(), + )} genesisHash=${short(this.chain.genesis.hash())}`, ) } // genesis announcement @@ -302,14 +302,14 @@ export class Skeleton extends MetaDBManager { // Not a noop / double head announce, abort with a reorg if (force) { this.config.logger.warn( - `Skeleton setHead before tail, resetting skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}` + `Skeleton setHead before tail, resetting skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}`, ) lastchain.head = number lastchain.tail = number lastchain.next = head.header.parentHash } else { this.config.logger.debug( - `Skeleton announcement before tail, will reset skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}` + `Skeleton announcement before tail, will reset skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}`, ) } return true @@ -321,7 +321,7 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `Skeleton duplicate ${force ? 'setHead' : 'announcement'} tail=${lastchain.tail} head=${ lastchain.head - } number=${number} hash=${short(head.hash())}` + } number=${number} hash=${short(head.hash())}`, ) return false } else { @@ -332,12 +332,12 @@ export class Skeleton extends MetaDBManager { `Skeleton head reorg tail=${lastchain.tail} head=${ lastchain.head } number=${number} expected=${short( - mayBeDupBlock?.hash() ?? zeroBlockHash - )} actual=${short(head.hash())}` + mayBeDupBlock?.hash() ?? 
zeroBlockHash, + )} actual=${short(head.hash())}`, ) } else { this.config.logger.debug( - `Skeleton differing announcement tail=${lastchain.tail} head=${lastchain.head} number=${number}` + `Skeleton differing announcement tail=${lastchain.tail} head=${lastchain.head} number=${number}`, ) } return true @@ -348,13 +348,13 @@ export class Skeleton extends MetaDBManager { // If its still less than number then its gapped head if (lastchain.head + BIGINT_1 < number) { this.config.logger.debug( - `Beacon chain gapped setHead head=${lastchain.head} newHead=${number}` + `Beacon chain gapped setHead head=${lastchain.head} newHead=${number}`, ) return true } } else { this.config.logger.debug( - `Beacon chain gapped announcement head=${lastchain.head} newHead=${number}` + `Beacon chain gapped announcement head=${lastchain.head} newHead=${number}`, ) return true } @@ -364,8 +364,8 @@ export class Skeleton extends MetaDBManager { if (force) { this.config.logger.warn( `Beacon chain forked ancestor=${parent?.header.number} hash=${short( - parent?.hash() ?? 'NA' - )} want=${short(head.header.parentHash)}` + parent?.hash() ?? 'NA', + )} want=${short(head.header.parentHash)}`, ) } return true @@ -380,8 +380,8 @@ export class Skeleton extends MetaDBManager { } this.config.logger.debug( `Beacon chain extended new head=${lastchain.head} tail=${lastchain.tail} next=${short( - lastchain.next - )}` + lastchain.next, + )}`, ) } return false @@ -415,8 +415,8 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `New skeleton head announced number=${head.header.number} hash=${short( - head.hash() - )} force=${force}` + head.hash(), + )} force=${force}`, ) let [lastchain] = this.status.progress.subchains @@ -432,7 +432,7 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `Initing empty skeleton with current chain head tail=${lastchain.tail} head=${ lastchain.head - } next=${short(lastchain.next)}` + } next=${short(lastchain.next)}`, ) this.status.progress.subchains.push(lastchain) } @@ -473,12 +473,12 @@ export class Skeleton extends MetaDBManager { this.config.logger.info( `Truncated subchain0 with head=${subchain.head} to a new tail=${ subchain.tail - } next=${short(subchain.next)} before overlaying a new subchain` + } next=${short(subchain.next)} before overlaying a new subchain`, ) } else { // clear out this subchain this.config.logger.info( - `Dropping subchain0 with head=${subchain.head} before overlaying a new subchain as trucateTailToNumber=${trucateTailToNumber} block not available ` + `Dropping subchain0 with head=${subchain.head} before overlaying a new subchain as trucateTailToNumber=${trucateTailToNumber} block not available `, ) this.status.progress.subchains.splice(0, 1) } @@ -531,7 +531,7 @@ export class Skeleton extends MetaDBManager { subchain.tail } next=${short(subchain.next)} linked=${this.status.linked} canonicalHeadReset=${ this.status.canonicalHeadReset - }` + }`, ) } else { subchain.tail = trucateTailTo.header.number @@ -542,7 +542,7 @@ export class Skeleton extends MetaDBManager { subchain.tail } next=${short(subchain.next)} linked=${this.status.linked} canonicalHeadReset=${ this.status.canonicalHeadReset - }` + }`, ) } } @@ -617,7 +617,7 @@ export class Skeleton extends MetaDBManager { this.synchronized = true // Log to console the sync status this.config.superMsg( - `Synchronized cl (skeleton) at height=${height} hash=${short(latest.hash())} 🎉` + `Synchronized cl (skeleton) at height=${height} hash=${short(latest.hash())} 🎉`, ) } } @@ -629,8 
+629,8 @@ export class Skeleton extends MetaDBManager { this.synchronized = false this.config.logger.info( `Cl (skeleton) sync status reset (no chain updates for ${Math.round( - diff / 1000 - )} seconds).` + diff / 1000, + )} seconds).`, ) } } @@ -642,7 +642,7 @@ export class Skeleton extends MetaDBManager { latest !== null && latest !== undefined ? ' height=' + latest.number : '' } syncTargetHeight=${this.config.syncTargetHeight} lastSyncDate=${ (Date.now() - this.lastSyncDate) / 1000 - } secs ago` + } secs ago`, ) this.lastsyncronized = this.synchronized } @@ -653,7 +653,7 @@ export class Skeleton extends MetaDBManager { { safeBlockHash, finalizedBlockHash, - }: { safeBlockHash?: Uint8Array; finalizedBlockHash?: Uint8Array } = {} + }: { safeBlockHash?: Uint8Array; finalizedBlockHash?: Uint8Array } = {}, ): Promise<{ reorged: boolean; safeBlock?: Block; finalizedBlock?: Block }> { // setHead locks independently and between setHead unlocking and locking below there should // be no injected code as each of the async ops take the lock. so once setHead takes the @@ -665,7 +665,7 @@ export class Skeleton extends MetaDBManager { await this.blockingTailBackfillWithCutoff(this.chain.config.engineParentLookupMaxDepth).catch( (e) => { this.config.logger.debug(`blockingTailBackfillWithCutoff exited with error=${e}`) - } + }, ) } @@ -816,7 +816,7 @@ export class Skeleton extends MetaDBManager { return this.runWithLock(async () => { // check if the synced state's block is canonical and <= current safe and chain has synced till const syncedBlock = await this.getBlock( - syncedHeight + syncedHeight, // need to debug why this flag causes to return undefined when chain gets synced //, true ) @@ -883,7 +883,7 @@ export class Skeleton extends MetaDBManager { if (tail >= this.status.progress.subchains[0].tail) { // Fully overwritten, get rid of the subchain as a whole this.config.logger.debug( - `Previous subchain fully overwritten tail=${tail} head=${head} next=${short(next)}` + `Previous subchain fully overwritten tail=${tail} head=${head} next=${short(next)}`, ) this.status.progress.subchains.splice(1, 1) edited = true @@ -893,8 +893,8 @@ export class Skeleton extends MetaDBManager { this.status.progress.subchains[1].head = this.status.progress.subchains[0].tail - BIGINT_1 this.config.logger.debug( `Previous subchain partially overwritten tail=${tail} head=${head} next=${short( - next - )} with newHead=${this.status.progress.subchains[1].head}` + next, + )} with newHead=${this.status.progress.subchains[1].head}`, ) edited = true } @@ -913,7 +913,7 @@ export class Skeleton extends MetaDBManager { // if subChain1Head is not in the skeleton then all previous subchains are not useful // and better to junk this.config.logger.debug( - `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head} or its tail=${this.status.progress.subchains[1].tail}` + `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head} or its tail=${this.status.progress.subchains[1].tail}`, ) this.status.progress.subchains.splice(1, this.status.progress.subchains.length - 1) } else if ( @@ -923,7 +923,7 @@ export class Skeleton extends MetaDBManager { // to disruption of the block fetcher to start a fresh if (head - tail > this.config.skeletonSubchainMergeMinimum) { this.config.logger.debug( - `Previous subchain merged tail=${tail} head=${head} next=${short(next)}` + `Previous subchain merged tail=${tail} 
head=${head} next=${short(next)}`, ) this.status.progress.subchains[0].tail = tail this.status.progress.subchains[0].next = next @@ -933,7 +933,7 @@ export class Skeleton extends MetaDBManager { merged = true } else { this.config.logger.debug( - `Subchain ignored for merge tail=${tail} head=${head} count=${head - tail}` + `Subchain ignored for merge tail=${tail} head=${head} count=${head - tail}`, ) this.status.progress.subchains.splice(1, 1) } @@ -962,12 +962,12 @@ export class Skeleton extends MetaDBManager { let tailUpdated = false this.config.logger.debug( `Skeleton putBlocks start=${blocks[0]?.header.number} hash=${short( - blocks[0]?.hash() + blocks[0]?.hash(), )} fork=${blocks[0].common.hardfork()} end=${ blocks[blocks.length - 1]?.header.number } count=${blocks.length}, subchain head=${this.status.progress.subchains[0]?.head} tail = ${ this.status.progress.subchains[0].tail - } next=${short(this.status.progress.subchains[0]?.next)}` + } next=${short(this.status.progress.subchains[0]?.next)}`, ) for (const block of blocks) { const { number } = block.header @@ -979,8 +979,8 @@ export class Skeleton extends MetaDBManager { if (!equalsBytes(this.chain.genesis.hash(), block.hash())) { throw Error( `Skeleton pubBlocks with invalid genesis block number=${number} hash=${short( - block.hash() - )} genesisHash=${short(this.chain.genesis.hash())}` + block.hash(), + )} genesisHash=${short(this.chain.genesis.hash())}`, ) } continue @@ -1003,12 +1003,12 @@ export class Skeleton extends MetaDBManager { `Blocks don't extend canonical subchain tail=${ this.status.progress.subchains[0].tail } head=${this.status.progress.subchains[0].head} next=${short( - this.status.progress.subchains[0].next + this.status.progress.subchains[0].next, )} tailHash=${short( - tailBlock?.hash() ?? zeroBlockHash + tailBlock?.hash() ?? zeroBlockHash, )} tailFork=${tailBlock?.common.hardfork()}, block number=${number} tailparent=${short( - tailBlock?.header.parentHash ?? zeroBlockHash - )} hash=${short(block.hash())} fork=${block.common.hardfork()}` + tailBlock?.header.parentHash ?? zeroBlockHash, + )} hash=${short(block.hash())} fork=${block.common.hardfork()}`, ) throw Error(`Blocks don't extend canonical subchain`) } @@ -1035,7 +1035,7 @@ export class Skeleton extends MetaDBManager { // If the sync is finished, start filling the canonical chain. 
if (this.status.linked) { this.config.superMsg( - `Backfilling subchain completed, filling canonical chain=${!skipForwardFill}` + `Backfilling subchain completed, filling canonical chain=${!skipForwardFill}`, ) if (!skipForwardFill) { void this.fillCanonicalChain() @@ -1075,7 +1075,7 @@ export class Skeleton extends MetaDBManager { this.status.progress.subchains = [] await this.writeSyncStatus() this.config.logger.warn( - `Couldn't backStep subchain 0, dropping subchains for new head signal` + `Couldn't backStep subchain 0, dropping subchains for new head signal`, ) return null } @@ -1168,7 +1168,7 @@ export class Skeleton extends MetaDBManager { if (this.status.canonicalHeadReset) { if (subchain.tail > canonicalHead + BIGINT_1) { throw Error( - `Canonical head should already be on or ahead subchain tail canonicalHead=${canonicalHead} tail=${subchain.tail}` + `Canonical head should already be on or ahead subchain tail canonicalHead=${canonicalHead} tail=${subchain.tail}`, ) } let newHead = subchain.tail - BIGINT_1 @@ -1178,7 +1178,7 @@ export class Skeleton extends MetaDBManager { if (canonicalHead > BIGINT_0) { this.config.logger.debug( - `Resetting canonicalHead for fillCanonicalChain from=${canonicalHead} to=${newHead}` + `Resetting canonicalHead for fillCanonicalChain from=${canonicalHead} to=${newHead}`, ) canonicalHead = newHead await this.chain.resetCanonicalHead(canonicalHead) @@ -1192,7 +1192,7 @@ export class Skeleton extends MetaDBManager { const start = canonicalHead // This subchain is a reference to update the tail for the very subchain we are filling the data for this.config.logger.debug( - `Starting canonical chain fill canonicalHead=${canonicalHead} subchainHead=${subchain.head}` + `Starting canonical chain fill canonicalHead=${canonicalHead} subchainHead=${subchain.head}`, ) // run till it has not been determined that tail reset is required by concurrent setHead calls @@ -1210,7 +1210,7 @@ export class Skeleton extends MetaDBManager { // Else we should back step and fetch again as it indicates some concurrency/db errors if (!this.status.canonicalHeadReset) { this.config.logger.debug( - `fillCanonicalChain block number=${number} not found, backStepping...` + `fillCanonicalChain block number=${number} not found, backStepping...`, ) await this.runWithLock(async () => { // backstep the subchain from the block that was not found only if the canonicalHeadReset @@ -1219,7 +1219,7 @@ export class Skeleton extends MetaDBManager { }) } else { this.config.logger.debug( - `fillCanonicalChain block number=${number} not found canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...` + `fillCanonicalChain block number=${number} not found canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...`, ) } break @@ -1251,12 +1251,12 @@ export class Skeleton extends MetaDBManager { await this.runWithLock(async () => { if (!this.status.canonicalHeadReset) { this.config.logger.debug( - `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, backStepping...` + `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, backStepping...`, ) await this.backStep(number) } else { this.config.logger.debug( - `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...` + `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...`, ) } }) @@ -1274,8 +1274,8 @@ export class Skeleton extends MetaDBManager { if (numBlocksInserted !== 1) { this.config.logger.error( `Failed to put block 
number=${number} fork=${block.common.hardfork()} hash=${short( - block.hash() - )} parentHash=${short(block.header.parentHash)}from skeleton chain to canonical` + block.hash(), + )} parentHash=${short(block.header.parentHash)}from skeleton chain to canonical`, ) // Lets log some parent by number and parent by hash, that may help to understand whats going on let parent = null @@ -1283,8 +1283,8 @@ export class Skeleton extends MetaDBManager { parent = await this.chain.getBlock(number - BIGINT_1) this.config.logger.info( `ParentByNumber number=${parent?.header.number}, hash=${short( - parent?.hash() ?? 'undefined' - )} hf=${parent?.common.hardfork()}` + parent?.hash() ?? 'undefined', + )} hf=${parent?.common.hardfork()}`, ) } catch (e) { this.config.logger.error(`Failed to fetch parent of number=${number}`) @@ -1295,12 +1295,12 @@ export class Skeleton extends MetaDBManager { parentWithHash = await this.chain.getBlock(block.header.parentHash) this.config.logger.info( `parentByHash number=${parentWithHash?.header.number}, hash=${short( - parentWithHash?.hash() ?? 'undefined' - )} hf=${parentWithHash?.common.hardfork()} ` + parentWithHash?.hash() ?? 'undefined', + )} hf=${parentWithHash?.common.hardfork()} `, ) } catch (e) { this.config.logger.error( - `Failed to fetch parent with parentWithHash=${short(block.header.parentHash)}` + `Failed to fetch parent with parentWithHash=${short(block.header.parentHash)}`, ) } break @@ -1330,14 +1330,14 @@ export class Skeleton extends MetaDBManager { }) if (fillLogIndex >= this.config.numBlocksPerIteration) { this.config.logger.debug( - `Skeleton canonical chain fill status: canonicalHead=${canonicalHead} chainHead=${this.chain.blocks.height} subchainHead=${subchain.head}` + `Skeleton canonical chain fill status: canonicalHead=${canonicalHead} chainHead=${this.chain.blocks.height} subchainHead=${subchain.head}`, ) fillLogIndex = 0 } } this.filling = false this.config.logger.debug( - `Successfully put=${fillLogIndex} skipped (because already inserted)=${skippedLogIndex} blocks start=${start} end=${canonicalHead} skeletonHead=${subchain.head} from skeleton chain to canonical syncTargetHeight=${this.config.syncTargetHeight}` + `Successfully put=${fillLogIndex} skipped (because already inserted)=${skippedLogIndex} blocks start=${start} end=${canonicalHead} skeletonHead=${subchain.head} from skeleton chain to canonical syncTargetHeight=${this.config.syncTargetHeight}`, ) } @@ -1373,7 +1373,7 @@ export class Skeleton extends MetaDBManager { await this.put( DBKey.SkeletonBlockHashToNumber, block.hash(), - bigIntToBytes(block.header.number) + bigIntToBytes(block.header.number), ) } @@ -1418,7 +1418,7 @@ export class Skeleton extends MetaDBManager { */ async getBlockByHash( hash: Uint8Array, - onlyCanonical: boolean = false + onlyCanonical: boolean = false, ): Promise { const number = await this.get(DBKey.SkeletonBlockHashToNumber, hash) if (number) { @@ -1505,7 +1505,7 @@ export class Skeleton extends MetaDBManager { fetching?: boolean snapsync?: SnapFetcherDoneFlags peers?: number | string - } = {} + } = {}, ): string { const vmHead = this.chain.blocks.vm const subchain0 = this.status.progress.subchains[0] @@ -1534,10 +1534,10 @@ export class Skeleton extends MetaDBManager { const status = isValid ? 'VALID' : isSynced - ? vmexecution?.running === true - ? `EXECUTING` - : `SYNCED` - : `SYNCING` + ? vmexecution?.running === true + ? 
`EXECUTING` + : `SYNCED` + : `SYNCING` if (peers === undefined || peers === 0) { this.lastsyncedAt = 0 @@ -1645,7 +1645,7 @@ export class Skeleton extends MetaDBManager { extraStatus = '' } const chainHead = `el=${this.chain.blocks.latest?.header.number ?? 'na'} hash=${short( - this.chain.blocks.latest?.hash() ?? 'na' + this.chain.blocks.latest?.hash() ?? 'na', )}` forceShowInfo = forceShowInfo ?? false @@ -1662,7 +1662,7 @@ export class Skeleton extends MetaDBManager { const sinceStarted = (new Date().getTime() - this.started) / 1000 beaconSyncETA = `${timeDuration((sinceStarted / Number(this.pulled)) * Number(left))}` this.config.logger.debug( - `Syncing beacon headers downloaded=${this.pulled} left=${left} eta=${beaconSyncETA}` + `Syncing beacon headers downloaded=${this.pulled} left=${left} eta=${beaconSyncETA}`, ) } } @@ -1687,23 +1687,23 @@ export class Skeleton extends MetaDBManager { const { snapTargetHeight, snapTargetRoot, snapTargetHash } = snapsync if (snapsync.done === true) { snapLogInfo = `snapsync=synced height=${snapTargetHeight} hash=${short( - snapTargetHash ?? 'na' + snapTargetHash ?? 'na', )} root=${short(snapTargetRoot ?? 'na')}` } else if (snapsync.syncing) { const accountsDone = formatBigDecimal( snapsync.accountFetcher.first * BIGINT_100, BIGINT_2EXP256, - BIGINT_100 + BIGINT_100, ) const storageReqsDone = formatBigDecimal( snapsync.storageFetcher.first * BIGINT_100, snapsync.storageFetcher.count, - BIGINT_100 + BIGINT_100, ) const codeReqsDone = formatBigDecimal( snapsync.byteCodeFetcher.first * BIGINT_100, snapsync.byteCodeFetcher.count, - BIGINT_100 + BIGINT_100, ) const snapprogress = `accounts=${accountsDone}% storage=${storageReqsDone}% of ${snapsync.storageFetcher.count} codes=${codeReqsDone}% of ${snapsync.byteCodeFetcher.count}` @@ -1722,7 +1722,7 @@ export class Skeleton extends MetaDBManager { } snapLogInfo = `${stage} ${snapprogress} (hash=${short( - snapTargetHash ?? 'na' + snapTargetHash ?? 'na', )} root=${short(snapTargetRoot ?? 
'na')})` } else { if (this.synchronized) { @@ -1760,7 +1760,7 @@ export class Skeleton extends MetaDBManager { } else { // else break into two this.config.logger.info( - `${logPrefix} ${status}${extraStatus} synchronized=${this.config.synchronized} peers=${peers}` + `${logPrefix} ${status}${extraStatus} synchronized=${this.config.synchronized} peers=${peers}`, ) if (snapLogInfo !== undefined && snapLogInfo !== '') { this.config.logger.info(`${logPrefix} ${snapLogInfo}`) @@ -1778,7 +1778,7 @@ export class Skeleton extends MetaDBManager { this.status.linked } subchains=${this.status.progress.subchains .map((s) => `[tail=${s.tail} head=${s.head} next=${short(s.next)}]`) - .join(',')} reset=${this.status.canonicalHeadReset} ${chainHead}` + .join(',')} reset=${this.status.canonicalHeadReset} ${chainHead}`, ) } return status @@ -1848,7 +1848,7 @@ export class Skeleton extends MetaDBManager { Uint8Array, // safe and finalized Uint8Array, - Uint8Array + Uint8Array, ] const subchains: SkeletonSubchain[] = rawStatus[0].map((raw) => ({ head: bytesToBigInt(raw[0]), diff --git a/packages/client/src/service/txpool.ts b/packages/client/src/service/txpool.ts index 693b5e9231..47ba8d9727 100644 --- a/packages/client/src/service/txpool.ts +++ b/packages/client/src/service/txpool.ts @@ -200,7 +200,7 @@ export class TxPool { } this._cleanupInterval = setInterval( this.cleanup.bind(this), - this.POOLED_STORAGE_TIME_LIMIT * 1000 * 60 + this.POOLED_STORAGE_TIME_LIMIT * 1000 * 60, ) if (this.config.logger.isInfoEnabled()) { @@ -242,7 +242,7 @@ export class TxPool { (existingTxGasPrice.maxFee * BigInt(MIN_GAS_PRICE_BUMP_PERCENT)) / BigInt(100) if (newGasPrice.tip < minTipCap || newGasPrice.maxFee < minFeeCap) { throw new Error( - `replacement gas too low, got tip ${newGasPrice.tip}, min: ${minTipCap}, got fee ${newGasPrice.maxFee}, min: ${minFeeCap}` + `replacement gas too low, got tip ${newGasPrice.tip}, min: ${minTipCap}, got fee ${newGasPrice.maxFee}, min: ${minFeeCap}`, ) } @@ -252,7 +252,7 @@ export class TxPool { (existingTx.maxFeePerBlobGas * BigInt(MIN_GAS_PRICE_BUMP_PERCENT)) / BigInt(100) if (addedTx.maxFeePerBlobGas < minblobGasFee) { throw new Error( - `replacement blob gas too low, got: ${addedTx.maxFeePerBlobGas}, min: ${minblobGasFee}` + `replacement blob gas too low, got: ${addedTx.maxFeePerBlobGas}, min: ${minblobGasFee}`, ) } } @@ -268,7 +268,7 @@ export class TxPool { } if (tx.data.length > TX_MAX_DATA_SIZE) { throw new Error( - `Tx is too large (${tx.data.length} bytes) and exceeds the max data size of ${TX_MAX_DATA_SIZE} bytes` + `Tx is too large (${tx.data.length} bytes) and exceeds the max data size of ${TX_MAX_DATA_SIZE} bytes`, ) } const currentGasPrice = this.txGasPrice(tx) @@ -291,7 +291,7 @@ export class TxPool { if (inPool) { if (!isLocalTransaction && inPool.length >= MAX_TXS_PER_ACCOUNT) { throw new Error( - `Cannot add tx for ${senderAddress}: already have max amount of txs for this account` + `Cannot add tx for ${senderAddress}: already have max amount of txs for this account`, ) } // Replace pooled txs with the same nonce @@ -307,13 +307,13 @@ export class TxPool { if (typeof block.baseFeePerGas === 'bigint' && block.baseFeePerGas !== BIGINT_0) { if (currentGasPrice.maxFee < block.baseFeePerGas / BIGINT_2 && !isLocalTransaction) { throw new Error( - `Tx cannot pay basefee of ${block.baseFeePerGas}, have ${currentGasPrice.maxFee} (not within 50% range of current basefee)` + `Tx cannot pay basefee of ${block.baseFeePerGas}, have ${currentGasPrice.maxFee} (not within 50% range of 
current basefee)`, ) } } if (tx.gasLimit > block.gasLimit) { throw new Error( - `Tx gaslimit of ${tx.gasLimit} exceeds block gas limit of ${block.gasLimit} (exceeds last block gas limit)` + `Tx gaslimit of ${tx.gasLimit} exceeds block gas limit of ${block.gasLimit} (exceeds last block gas limit)`, ) } @@ -327,13 +327,13 @@ export class TxPool { } if (account.nonce > tx.nonce) { throw new Error( - `0x${sender} tries to send a tx with nonce ${tx.nonce}, but account has nonce ${account.nonce} (tx nonce too low)` + `0x${sender} tries to send a tx with nonce ${tx.nonce}, but account has nonce ${account.nonce} (tx nonce too low)`, ) } const minimumBalance = tx.value + currentGasPrice.maxFee * tx.gasLimit if (account.balance < minimumBalance) { throw new Error( - `0x${sender} does not have enough balance to cover transaction costs, need ${minimumBalance}, but have ${account.balance} (insufficient balance)` + `0x${sender} does not have enough balance to cover transaction costs, need ${minimumBalance}, but have ${account.balance} (insufficient balance)`, ) } } @@ -571,7 +571,7 @@ export class TxPool { this.config.logger.debug(`TxPool: received new transactions number=${txs.length}`) this.addToKnownByPeer( txs.map((tx) => tx.hash()), - peer + peer, ) const newTxHashes: [number[], number[], Uint8Array[]] = [] as any @@ -583,7 +583,7 @@ export class TxPool { newTxHashes[2].push(tx.hash()) } catch (error: any) { this.config.logger.debug( - `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})` + `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})`, ) } } @@ -621,7 +621,7 @@ export class TxPool { const reqHashesStr: UnprefixedHash[] = reqHashes.map(bytesToUnprefixedHex) this.pending = this.pending.concat(reqHashesStr) this.config.logger.debug( - `TxPool: requesting txs number=${reqHashes.length} pending=${this.pending.length}` + `TxPool: requesting txs number=${reqHashes.length} pending=${this.pending.length}`, ) const getPooledTxs = await peer.eth?.getPooledTransactions({ hashes: reqHashes.slice(0, this.TX_RETRIEVAL_LIMIT), @@ -642,7 +642,7 @@ export class TxPool { await this.add(tx) } catch (error: any) { this.config.logger.debug( - `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})` + `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})`, ) } newTxHashes[0].push(tx.type) @@ -767,7 +767,7 @@ export class TxPool { */ async txsByPriceAndNonce( vm: VM, - { baseFee, allowedBlobs }: { baseFee?: bigint; allowedBlobs?: number } = {} + { baseFee, allowedBlobs }: { baseFee?: bigint; allowedBlobs?: number } = {}, ) { const txs: TypedTransaction[] = [] // Separate the transactions by account and sort by nonce @@ -846,7 +846,7 @@ export class TxPool { } } this.config.logger.info( - `txsByPriceAndNonce selected txs=${txs.length}, skipped byNonce=${skippedStats.byNonce} byPrice=${skippedStats.byPrice} byBlobsLimit=${skippedStats.byBlobsLimit}` + `txsByPriceAndNonce selected txs=${txs.length}, skipped byNonce=${skippedStats.byNonce} byPrice=${skippedStats.byPrice} byBlobsLimit=${skippedStats.byBlobsLimit}`, ) return txs } @@ -908,13 +908,13 @@ export class TxPool { } } this.config.logger.info( - `TxPool Statistics txs=${this.txsInPool} senders=${this.pool.size} peers=${this.service.pool.peers.length}` + `TxPool Statistics txs=${this.txsInPool} senders=${this.pool.size} peers=${this.service.pool.peers.length}`, ) this.config.logger.info( - `TxPool Statistics broadcasts=${broadcasts}/tx/peer 
broadcasterrors=${broadcasterrors}/tx/peer knownpeers=${knownpeers} since minutes=${this.POOLED_STORAGE_TIME_LIMIT}` + `TxPool Statistics broadcasts=${broadcasts}/tx/peer broadcasterrors=${broadcasterrors}/tx/peer knownpeers=${knownpeers} since minutes=${this.POOLED_STORAGE_TIME_LIMIT}`, ) this.config.logger.info( - `TxPool Statistics successfuladds=${handledadds} failedadds=${handlederrors} since minutes=${this.HANDLED_CLEANUP_TIME_LIMIT}` + `TxPool Statistics successfuladds=${handledadds} failedadds=${handlederrors} since minutes=${this.HANDLED_CLEANUP_TIME_LIMIT}`, ) } } diff --git a/packages/client/src/sync/beaconsync.ts b/packages/client/src/sync/beaconsync.ts index c852723022..372764b640 100644 --- a/packages/client/src/sync/beaconsync.ts +++ b/packages/client/src/sync/beaconsync.ts @@ -81,8 +81,8 @@ export class BeaconSynchronizer extends Synchronizer { this.config.logger.info( `Latest local block number=${Number(number)} td=${td} hash=${bytesToHex( - hash - )} hardfork=${this.config.chainCommon.hardfork()}` + hash, + )} hardfork=${this.config.chainCommon.hardfork()}`, ) const subchain = this.skeleton.bounds() @@ -165,8 +165,8 @@ export class BeaconSynchronizer extends Synchronizer { await this.stop() this.config.logger.debug( `Beacon sync reorged, new head number=${block.header.number} hash=${short( - block.header.hash() - )}` + block.header.hash(), + )}`, ) void this.start() } @@ -251,7 +251,7 @@ export class BeaconSynchronizer extends Synchronizer { this.fetcher === null ? '' : 'previous fetcher errored=' + this.fetcher.syncErrored?.message - }` + }`, ) this.fetcher = new ReverseBlockFetcher({ config: this.config, @@ -281,7 +281,7 @@ export class BeaconSynchronizer extends Synchronizer { const hash = short(blocks[0].hash()) this.config.logger.debug( - `Imported skeleton blocks count=${blocks.length} first=${first} last=${last} hash=${hash} peers=${this.pool.size}` + `Imported skeleton blocks count=${blocks.length} first=${first} last=${last} hash=${hash} peers=${this.pool.size}`, ) } diff --git a/packages/client/src/sync/fetcher/accountfetcher.ts b/packages/client/src/sync/fetcher/accountfetcher.ts index 3baf8e965b..dcd5a22879 100644 --- a/packages/client/src/sync/fetcher/accountfetcher.ts +++ b/packages/client/src/sync/fetcher/accountfetcher.ts @@ -138,8 +138,8 @@ export class AccountFetcher extends Fetcher this.debug( `Account fetcher instantiated root=${short(this.root)} origin=${short(origin)} limit=${short( - limit - )} destroyWhenDone=${this.destroyWhenDone}` + limit, + )} destroyWhenDone=${this.destroyWhenDone}`, ) } @@ -171,7 +171,7 @@ export class AccountFetcher extends Fetcher () => this.snapFetchersCompleted(StorageFetcher), () => { throw Error('Snap fetcher failed to exit') - } + }, ) : null const codeFetch = !this.fetcherDoneFlags.byteCodeFetcher.done @@ -179,12 +179,12 @@ export class AccountFetcher extends Fetcher () => this.snapFetchersCompleted(ByteCodeFetcher), () => { throw Error('Snap fetcher failed to exit') - } + }, ) : null this.config.superMsg( - `Snapsync: running storageFetch=${storageFetch !== null} codeFetch=${codeFetch !== null}` + `Snapsync: running storageFetch=${storageFetch !== null} codeFetch=${codeFetch !== null}`, ) this.storageFetcher.setDestroyWhenDone() @@ -196,7 +196,7 @@ export class AccountFetcher extends Fetcher this.fetcherDoneFlags.byteCodeFetcher.done !== true ) { throw Error( - `storageFetch or codeFetch didn't complete storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} 
byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done}` + `storageFetch or codeFetch didn't complete storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done}`, ) } @@ -209,7 +209,7 @@ export class AccountFetcher extends Fetcher }, () => { throw Error('Snap fetcher failed to exit') - } + }, ) this.config.superMsg(`Snapsync: running trieNodeFetch=${trieNodeFetch !== null}`) this.trieNodeFetcher.setDestroyWhenDone() @@ -239,10 +239,10 @@ export class AccountFetcher extends Fetcher const fetcherProgress = formatBigDecimal( fetcherDoneFlags.accountFetcher.first * BIGINT_100, BIGINT_2EXP256, - BIGINT_100 + BIGINT_100, ) this.config.logger.warn( - `accountFetcher completed with pending range done=${fetcherProgress}%` + `accountFetcher completed with pending range done=${fetcherProgress}%`, ) } break @@ -253,10 +253,10 @@ export class AccountFetcher extends Fetcher const reqsDone = formatBigDecimal( fetcherDoneFlags.storageFetcher.first * BIGINT_100, fetcherDoneFlags.storageFetcher.count, - BIGINT_100 + BIGINT_100, ) this.config.logger.warn( - `storageFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.storageFetcher.count} queued=${this.storageFetcher.storageRequests.length}` + `storageFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.storageFetcher.count} queued=${this.storageFetcher.storageRequests.length}`, ) } @@ -268,10 +268,10 @@ export class AccountFetcher extends Fetcher const reqsDone = formatBigDecimal( fetcherDoneFlags.byteCodeFetcher.first * BIGINT_100, fetcherDoneFlags.byteCodeFetcher.count, - BIGINT_100 + BIGINT_100, ) this.config.logger.warn( - `byteCodeFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.byteCodeFetcher.count}` + `byteCodeFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.byteCodeFetcher.count}`, ) } break @@ -286,10 +286,10 @@ export class AccountFetcher extends Fetcher this.config.superMsg( `snapFetchersCompletion root=${short(this.root)} accountsRoot=${short( - fetcherDoneFlags.stateRoot ?? 'na' + fetcherDoneFlags.stateRoot ?? 
'na', )} done=${this.fetcherDoneFlags.done} accountsDone=${accountFetcher.done} storageDone=${ storageFetcher.done - } byteCodesDone=${byteCodeFetcher.done} trieNodesDone=${trieNodeFetcher.done}` + } byteCodesDone=${byteCodeFetcher.done} trieNodesDone=${trieNodeFetcher.done}`, ) if (this.fetcherDoneFlags.done) { @@ -300,12 +300,12 @@ export class AccountFetcher extends Fetcher private async verifyRangeProof( stateRoot: Uint8Array, origin: Uint8Array, - { accounts, proof }: { accounts: AccountData[]; proof: Uint8Array[] } + { accounts, proof }: { accounts: AccountData[]; proof: Uint8Array[] }, ): Promise { this.debug( `verifyRangeProof accounts:${accounts.length} first=${bytesToHex( - accounts[0].hash - )} last=${short(accounts[accounts.length - 1].hash)}` + accounts[0].hash, + )} last=${short(accounts[accounts.length - 1].hash)}`, ) for (let i = 0; i < accounts.length - 1; i++) { @@ -314,7 +314,7 @@ export class AccountFetcher extends Fetcher throw Error( `Account hashes not monotonically increasing: ${i} ${accounts[i].hash} vs ${i + 1} ${ accounts[i + 1].hash - }` + }`, ) } } @@ -347,7 +347,7 @@ export class AccountFetcher extends Fetcher private isMissingRightRange( limit: Uint8Array, - { accounts, proof: _proof }: { accounts: AccountData[]; proof: Uint8Array[] } + { accounts, proof: _proof }: { accounts: AccountData[]; proof: Uint8Array[] }, ): boolean { if ( accounts.length > 0 && @@ -369,7 +369,7 @@ export class AccountFetcher extends Fetcher * @param peer */ async request( - job: Job + job: Job, ): Promise { const { peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -410,7 +410,7 @@ export class AccountFetcher extends Fetcher [], [], rangeResult.proof, - { useKeyHashingFunction: keccak256 } + { useKeyHashingFunction: keccak256 }, ) // if proof is false, reject corrupt peer if (isMissingRightRange !== false) return undefined @@ -438,8 +438,8 @@ export class AccountFetcher extends Fetcher if (isMissingRightRange && this.isMissingRightRange(limit, rangeResult)) { this.debug( `Peer ${peerInfo} returned missing right range account=${bytesToHex( - rangeResult.accounts[rangeResult.accounts.length - 1].hash - )} limit=${bytesToHex(limit)}` + rangeResult.accounts[rangeResult.accounts.length - 1].hash, + )} limit=${bytesToHex(limit)}`, ) completed = false } else { @@ -460,7 +460,7 @@ export class AccountFetcher extends Fetcher */ process( job: Job, - result: AccountDataResponse + result: AccountDataResponse, ): AccountData[] | undefined { const fullResult = (job.partialResult ?? 
[]).concat(result) @@ -533,11 +533,11 @@ export class AccountFetcher extends Fetcher if (storageFetchRequests.size > 0) this.storageFetcher.enqueueByStorageRequestList( - Array.from(storageFetchRequests) as StorageRequest[] + Array.from(storageFetchRequests) as StorageRequest[], ) if (byteCodeFetchRequests.size > 0) this.byteCodeFetcher.enqueueByByteCodeRequestList( - Array.from(byteCodeFetchRequests) as Uint8Array[] + Array.from(byteCodeFetchRequests) as Uint8Array[], ) } @@ -577,7 +577,7 @@ export class AccountFetcher extends Fetcher } debugStr += ` limit=${short( - setLengthLeft(bigIntToBytes(startedWith + pushedCount - BIGINT_1), 32) + setLengthLeft(bigIntToBytes(startedWith + pushedCount - BIGINT_1), 32), )}` this.debug(`Created new tasks num=${tasks.length} ${debugStr}`) return tasks @@ -626,7 +626,7 @@ export class AccountFetcher extends Fetcher processStoreError( error: Error, - _task: JobTask + _task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { const stepBack = BIGINT_0 const destroyFetcher = diff --git a/packages/client/src/sync/fetcher/blockfetcher.ts b/packages/client/src/sync/fetcher/blockfetcher.ts index 7568dcd259..732cc36ba7 100644 --- a/packages/client/src/sync/fetcher/blockfetcher.ts +++ b/packages/client/src/sync/fetcher/blockfetcher.ts @@ -69,7 +69,7 @@ export class BlockFetcher extends BlockFetcherBase { } const bodies = bodiesResult[1] this.debug( - `Requested blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)` + `Requested blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)`, ) const blocks: Block[] = [] for (const [i, [txsData, unclesData, withdrawalsData]] of bodies.entries()) { @@ -82,7 +82,7 @@ export class BlockFetcher extends BlockFetcherBase { (withdrawalsData?.length ?? 
0) === 0) ) { this.debug( - `Requested block=${headers[i].number}} from peer ${peerInfo} missing non-empty txs=${txsData.length} or uncles=${unclesData.length} or withdrawals=${withdrawalsData?.length}` + `Requested block=${headers[i].number}} from peer ${peerInfo} missing non-empty txs=${txsData.length} or uncles=${unclesData.length} or withdrawals=${withdrawalsData?.length}`, ) return [] } @@ -100,7 +100,7 @@ export class BlockFetcher extends BlockFetcherBase { blocks.push(block) } this.debug( - `Returning blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)` + `Returning blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)`, ) return blocks } @@ -136,14 +136,14 @@ export class BlockFetcher extends BlockFetcherBase { this.debug( `Fetcher results stored in blockchain (blocks num=${blocks.length} first=${ blocks[0]?.header.number - } last=${blocks[blocks.length - 1]?.header.number})` + } last=${blocks[blocks.length - 1]?.header.number})`, ) this.config.events.emit(Event.SYNC_FETCHED_BLOCKS, blocks.slice(0, num)) } catch (e: any) { this.debug( `Error storing fetcher results in blockchain (blocks num=${blocks.length} first=${ blocks[0]?.header.number - } last=${blocks[blocks.length - 1]?.header.number}): ${e}` + } last=${blocks[blocks.length - 1]?.header.number}): ${e}`, ) throw e } diff --git a/packages/client/src/sync/fetcher/blockfetcherbase.ts b/packages/client/src/sync/fetcher/blockfetcherbase.ts index c84760d0bd..fc12a0d243 100644 --- a/packages/client/src/sync/fetcher/blockfetcherbase.ts +++ b/packages/client/src/sync/fetcher/blockfetcherbase.ts @@ -56,7 +56,7 @@ export abstract class BlockFetcherBase extends Fetcher< this.count = options.count this.reverse = options.reverse ?? 
false this.debug( - `Block fetcher instantiated interval=${this.interval} first=${this.first} count=${this.count} reverse=${this.reverse} destroyWhenDone=${this.destroyWhenDone}` + `Block fetcher instantiated interval=${this.interval} first=${this.first} count=${this.count} reverse=${this.reverse} destroyWhenDone=${this.destroyWhenDone}`, ) } @@ -105,7 +105,7 @@ export abstract class BlockFetcherBase extends Fetcher< this.processed - this.finished < this.config.maxFetcherRequests ) { this.debug( - `Fetcher pending with first=${this.first} count=${this.count} reverse=${this.reverse}` + `Fetcher pending with first=${this.first} count=${this.count} reverse=${this.reverse}`, ) const tasks = this.tasks(this.first, this.count) for (const task of tasks) { @@ -114,7 +114,7 @@ export abstract class BlockFetcherBase extends Fetcher< this.debug(`Enqueued num=${tasks.length} tasks`) } else { this.debug( - `No new tasks enqueued in=${this.in.length} count=${this.count} processed=${this.processed} finished=${this.finished}` + `No new tasks enqueued in=${this.in.length} count=${this.count} processed=${this.processed} finished=${this.finished}`, ) } } @@ -185,7 +185,7 @@ export abstract class BlockFetcherBase extends Fetcher< first: min, count: numBlocks, }, - true + true, ) } else { for (const first of numberList) { @@ -194,12 +194,12 @@ export abstract class BlockFetcherBase extends Fetcher< first, count: 1, }, - true + true, ) } } this.debug( - `Enqueued tasks by number list num=${numberList.length} min=${min} bulkRequest=${bulkRequest} ${updateHeightStr}` + `Enqueued tasks by number list num=${numberList.length} min=${min} bulkRequest=${bulkRequest} ${updateHeightStr}`, ) if (this.in.length === 0) { this.nextTasks() @@ -208,7 +208,7 @@ export abstract class BlockFetcherBase extends Fetcher< processStoreError( error: Error, - task: JobTask + task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { let stepBack = BIGINT_0 const destroyFetcher = !(error.message as string).includes('could not find parent header') diff --git a/packages/client/src/sync/fetcher/bytecodefetcher.ts b/packages/client/src/sync/fetcher/bytecodefetcher.ts index e0fdb957bc..d2f5c8207c 100644 --- a/packages/client/src/sync/fetcher/bytecodefetcher.ts +++ b/packages/client/src/sync/fetcher/bytecodefetcher.ts @@ -65,7 +65,7 @@ export class ByteCodeFetcher extends Fetcher if (this.hashes.length > 0) { const fullJob = { task: { hashes: this.hashes } } as Job this.debug( - `Bytecode fetcher instantiated ${fullJob.task.hashes.length} hash requests destroyWhenDone=${this.destroyWhenDone}` + `Bytecode fetcher instantiated ${fullJob.task.hashes.length} hash requests destroyWhenDone=${this.destroyWhenDone}`, ) } } @@ -82,7 +82,7 @@ export class ByteCodeFetcher extends Fetcher * @param peer */ async request( - job: Job + job: Job, ): Promise { const { task, peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -155,7 +155,7 @@ export class ByteCodeFetcher extends Fetcher */ process( job: Job, - result: ByteCodeDataResponse + result: ByteCodeDataResponse, ): Uint8Array[] | undefined { const fullResult = (job.partialResult ?? 
[]).concat(result) job.partialResult = undefined @@ -213,7 +213,7 @@ export class ByteCodeFetcher extends Fetcher this.fetcherDoneFlags.byteCodeFetcher.count = this.fetcherDoneFlags.byteCodeFetcher.first + BigInt(this.hashes.length) this.debug( - `Number of bytecode fetch requests added to fetcher queue: ${byteCodeRequestList.length}` + `Number of bytecode fetch requests added to fetcher queue: ${byteCodeRequestList.length}`, ) this.nextTasks() } @@ -269,7 +269,7 @@ export class ByteCodeFetcher extends Fetcher processStoreError( error: Error, - _task: JobTask + _task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { const stepBack = BIGINT_0 const destroyFetcher = diff --git a/packages/client/src/sync/fetcher/fetcher.ts b/packages/client/src/sync/fetcher/fetcher.ts index 6bc1e92d90..de527769ff 100644 --- a/packages/client/src/sync/fetcher/fetcher.ts +++ b/packages/client/src/sync/fetcher/fetcher.ts @@ -87,19 +87,19 @@ export abstract class Fetcher extends Readable this.maxQueue = options.maxQueue ?? 4 this.debug( - `Fetcher initialized timeout=${this.timeout} interval=${this.interval} banTime=${this.banTime} maxQueue=${this.maxQueue}` + `Fetcher initialized timeout=${this.timeout} interval=${this.interval} banTime=${this.banTime} maxQueue=${this.maxQueue}`, ) this.in = new Heap({ comparBefore: ( a: Job, - b: Job + b: Job, ) => a.index < b.index, }) as QHeap> this.out = new Heap({ comparBefore: ( a: Job, - b: Job + b: Job, ) => a.index < b.index, }) as QHeap> this.total = 0 @@ -119,7 +119,7 @@ export abstract class Fetcher extends Readable */ abstract request( _job?: Job, - _peer?: Peer + _peer?: Peer, ): Promise /** @@ -131,7 +131,7 @@ export abstract class Fetcher extends Readable */ abstract process( _job?: Job, - _result?: JobResult + _result?: JobResult, ): StorageItem[] | undefined /** @@ -146,7 +146,7 @@ export abstract class Fetcher extends Readable */ abstract processStoreError( _error: Error, - _task: JobTask | BlockFetcherJobTask + _task: JobTask | BlockFetcherJobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } abstract jobStr(job: Job, withIndex?: boolean): string @@ -259,8 +259,8 @@ export abstract class Fetcher extends Readable this.debug( `Re-enqueuing job ${jobStr} from peer id=${job.peer?.id?.substr( 0, - 8 - )} (${resultSet} result set returned).` + 8, + )} (${resultSet} result set returned).`, ) this.enqueue(job) void this.wait().then(() => { @@ -278,8 +278,8 @@ export abstract class Fetcher extends Readable this.debug( `Re-enqueuing job ${jobStr} from peer id=${job.peer?.id?.substr( 0, - 8 - )} (reply contains unexpected data).` + 8, + )} (reply contains unexpected data).`, ) this.enqueue(job) } @@ -297,7 +297,7 @@ export abstract class Fetcher extends Readable error?: Error, irrecoverable?: boolean, dequeued?: boolean, - banPeer?: boolean + banPeer?: boolean, ) { const jobItems = job instanceof Array ? 
job : [job] if (irrecoverable === true || banPeer === true) { @@ -314,8 +314,8 @@ export abstract class Fetcher extends Readable this.debug( `Failure - Re-enqueuing job ${jobStr} from peer id=${jobItem.peer?.id?.substr( 0, - 8 - )} (error: ${error}).` + 8, + )} (error: ${error}).`, ) // If the job has been dequeued, then the processed count needs to be decreased this.enqueue(jobItem, dequeued) @@ -339,7 +339,7 @@ export abstract class Fetcher extends Readable if (this.finished !== this.total) { // There are still jobs waiting to be processed out in the writer pipe this.debug( - `No job found as next task, skip next job execution processed=${this.processed} finished=${this.finished} total=${this.total}` + `No job found as next task, skip next job execution processed=${this.processed} finished=${this.finished} total=${this.total}`, ) } else { // There are no more jobs in the fetcher, so its better to resolve @@ -354,7 +354,7 @@ export abstract class Fetcher extends Readable this.debug( `Readable state length=${this._readableState!.length} exceeds max queue size=${ this.maxQueue - }, skip job ${jobStr} execution.` + }, skip job ${jobStr} execution.`, ) return false } @@ -403,7 +403,7 @@ export abstract class Fetcher extends Readable this.in.remove() } this.debug( - `Cleared out fetcher total=${this.total} processed=${this.processed} finished=${this.finished}` + `Cleared out fetcher total=${this.total} processed=${this.processed} finished=${this.finished}`, ) } @@ -435,7 +435,7 @@ export abstract class Fetcher extends Readable const _write = async ( job: Job | Job[], encoding: string | null, - cb: Function + cb: Function, ) => { const jobItems = job instanceof Array ? job : [job] this.debug(`Starting write for ${jobItems.length} jobs...`) @@ -449,7 +449,7 @@ export abstract class Fetcher extends Readable this.config.logger.warn(`Error storing received block or header result: ${error}`) const { destroyFetcher, banPeer, stepBack } = this.processStoreError( error, - jobItems[0].task + jobItems[0].task, ) if (!destroyFetcher) { // Non-fatal error: ban peer and re-enqueue job. 
@@ -475,12 +475,12 @@ export abstract class Fetcher extends Readable write: _write, writev: ( many: { chunk: Job; encoding: string }[], - cb: Function + cb: Function, ) => { const items = ([]>[]).concat( ...many.map( - (x: { chunk: Job; encoding: string }) => x.chunk - ) + (x: { chunk: Job; encoding: string }) => x.chunk, + ), ) return _write(items, null, cb) }, diff --git a/packages/client/src/sync/fetcher/headerfetcher.ts b/packages/client/src/sync/fetcher/headerfetcher.ts index 4c7aa0a281..0d75348487 100644 --- a/packages/client/src/sync/fetcher/headerfetcher.ts +++ b/packages/client/src/sync/fetcher/headerfetcher.ts @@ -92,14 +92,14 @@ export class HeaderFetcher extends BlockFetcherBase { try { this.debug( `verifyRangeProof slots:${slots.length} first=${short(slots[0].hash)} last=${short( - slots[slots.length - 1].hash - )}` + slots[slots.length - 1].hash, + )}`, ) const keys = slots.map((slot: any) => slot.hash) const values = slots.map((slot: any) => slot.body) @@ -136,7 +136,7 @@ export class StorageFetcher extends Fetcher 0 && @@ -214,7 +214,7 @@ export class StorageFetcher extends Fetcher + job: Job, ): Promise { const { task, peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -230,7 +230,7 @@ export class StorageFetcher extends Fetcher bytesToHex(req.accountHash))}` + `requested account hashes: ${task.storageRequests.map((req) => bytesToHex(req.accountHash))}`, ) this.debug(`request is multi: ${job.task.multi}`) @@ -239,7 +239,7 @@ export class StorageFetcher extends FetcherrangeResult.proof, - { useKeyHashingFunction: keccak256 } + { useKeyHashingFunction: keccak256 }, ) // if proof is false, reject corrupt peer @@ -324,7 +324,7 @@ export class StorageFetcher extends Fetcher 0) { this.debug( - `Number of ignored account requests due to fragmentation: ${ignoredRequests.length}` + `Number of ignored account requests due to fragmentation: ${ignoredRequests.length}`, ) this.storageRequests.push(...ignoredRequests) } @@ -415,7 +415,7 @@ export class StorageFetcher extends Fetcher, - result: StorageDataResponse + result: StorageDataResponse, ): StorageData[][] | undefined { const accountSlots = (result[0] as any)[0] const highestReceivedhash = accountSlots[accountSlots.length - 1].hash @@ -442,7 +442,7 @@ export class StorageFetcher extends Fetcher { try { if (JSON.stringify(result[0]) === JSON.stringify({ skipped: true })) { @@ -468,7 +468,7 @@ export class StorageFetcher extends Fetcher 0) { this.debug( - `Number of accounts requested as a part of a multi-account request: ${this.storageRequests.length}` + `Number of accounts requested as a part of a multi-account request: ${this.storageRequests.length}`, ) tasks.unshift({ storageRequests: this.storageRequests, // TODO limit max number of accounts per single fetch request @@ -618,7 +618,7 @@ export class StorageFetcher extends Fetcher this.debug( `Trie node fetcher instantiated with ${this.pathToNodeRequestData.size()} node requests destroyWhenDone=${ this.destroyWhenDone - }` + }`, ) } @@ -139,7 +139,7 @@ export class TrieNodeFetcher extends Fetcher * @param peer */ async request( - job: Job + job: Job, ): Promise { const { task, peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -193,7 +193,7 @@ export class TrieNodeFetcher extends Fetcher */ process( job: Job, - result: TrieNodesResponse + result: TrieNodesResponse, ): Uint8Array[] | undefined { const fullResult = (job.partialResult ?? 
[]).concat(result) job.partialResult = undefined @@ -295,7 +295,7 @@ export class TrieNodeFetcher extends Fetcher // if error is thrown, than the node is unknown and should be queued for fetching unknownChildNodeCount++ const { parentAccountHash } = this.pathToNodeRequestData.getElementByKey( - pathString + pathString, ) as NodeRequestData this.pathToNodeRequestData.setElement(childNode.path, { nodeHash: bytesToHex(childNode.nodeHash as Uint8Array), @@ -307,13 +307,13 @@ export class TrieNodeFetcher extends Fetcher // record new node for batched storing after all subtrie nodes have been received const { nodeParentHash, parentAccountHash } = this.pathToNodeRequestData.getElementByKey( - pathString + pathString, ) as NodeRequestData if (storagePath !== undefined) { // if fetched node has a storagePath, it's storage node data and should be stored with // account leaf node data from where it originates const { pathToStorageNode } = this.fetchedAccountNodes.get( - parentAccountHash as string + parentAccountHash as string, ) as unknown as FetchedNodeData pathToStorageNode!.set(storagePath, nodeData as unknown as Uint8Array) } else { @@ -369,8 +369,8 @@ export class TrieNodeFetcher extends Fetcher const a = createAccountFromRLP(node.value()) this.debug( `Stored storageTrie with root actual=${bytesToHex( - storageTrie.root() - )} expected=${bytesToHex(a.storageRoot)}` + storageTrie.root(), + )} expected=${bytesToHex(a.storageRoot)}`, ) } } @@ -379,8 +379,8 @@ export class TrieNodeFetcher extends Fetcher await this.accountTrie.persistRoot() this.debug( `Stored accountTrie with root actual=${bytesToHex( - this.accountTrie.root() - )} expected=${bytesToHex(this.root)}` + this.accountTrie.root(), + )} expected=${bytesToHex(this.root)}`, ) } } catch (e) { @@ -468,7 +468,7 @@ export class TrieNodeFetcher extends Fetcher processStoreError( error: Error, - _task: JobTask + _task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { const stepBack = BIGINT_0 const destroyFetcher = diff --git a/packages/client/src/sync/fullsync.ts b/packages/client/src/sync/fullsync.ts index 53f3452368..4f7f5051a5 100644 --- a/packages/client/src/sync/fullsync.ts +++ b/packages/client/src/sync/fullsync.ts @@ -104,8 +104,8 @@ export class FullSynchronizer extends Synchronizer { this.config.logger.info( `Latest local block number=${Number(number)} td=${td} hash=${short( - hash - )} hardfork=${this.config.chainCommon.hardfork()}` + hash, + )} hardfork=${this.config.chainCommon.hardfork()}`, ) } @@ -260,7 +260,7 @@ export class FullSynchronizer extends Synchronizer { } first=${first} last=${last} hash=${hash} ${baseFeeAdd}hardfork=${this.config.chainCommon.hardfork()} peers=${ this.pool.size }`, - { attentionHF } + { attentionHF }, ) this.txPool.removeNewBlockTxs(blocks) @@ -320,7 +320,7 @@ export class FullSynchronizer extends Synchronizer { this.config.logger.debug( `Error processing new block from peer ${ peer ? `id=${peer.id.slice(0, 8)}` : '(no peer)' - } hash=${short(block.hash())}` + } hash=${short(block.hash())}`, ) this.config.logger.debug(err) return diff --git a/packages/client/src/sync/lightsync.ts b/packages/client/src/sync/lightsync.ts index 946ce20058..2b487080ea 100644 --- a/packages/client/src/sync/lightsync.ts +++ b/packages/client/src/sync/lightsync.ts @@ -146,7 +146,7 @@ export class LightSynchronizer extends Synchronizer { ? 
`baseFee=${headers[0].baseFeePerGas} ` : '' this.config.logger.info( - `Imported headers count=${headers.length} number=${first} hash=${hash} ${baseFeeAdd}peers=${this.pool.size}` + `Imported headers count=${headers.length} number=${first} hash=${hash} ${baseFeeAdd}peers=${this.pool.size}`, ) } diff --git a/packages/client/src/sync/snapsync.ts b/packages/client/src/sync/snapsync.ts index 69162ed194..97ae243777 100644 --- a/packages/client/src/sync/snapsync.ts +++ b/packages/client/src/sync/snapsync.ts @@ -62,7 +62,7 @@ export class SnapSynchronizer extends Synchronizer { await this.pool.open() this.config.logger.info( - `Opened SnapSynchronizer syncTargetHeight=${this.config.syncTargetHeight ?? 'NA'}` + `Opened SnapSynchronizer syncTargetHeight=${this.config.syncTargetHeight ?? 'NA'}`, ) } @@ -132,7 +132,7 @@ export class SnapSynchronizer extends Synchronizer { if (!this.fetcherDoneFlags.done) { throw Error( - `snap sync fetchers didn't sync complete state accountFetcherDone=${this.fetcherDoneFlags.accountFetcher.done} storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done} trieNodeFetcherDone=${this.fetcherDoneFlags.trieNodeFetcher.done}` + `snap sync fetchers didn't sync complete state accountFetcherDone=${this.fetcherDoneFlags.accountFetcher.done} storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done} trieNodeFetcherDone=${this.fetcherDoneFlags.trieNodeFetcher.done}`, ) } @@ -144,8 +144,8 @@ export class SnapSynchronizer extends Synchronizer { ) { throw Error( `Invalid synced data by snapsync snapTargetHeight=${snapTargetHeight} snapTargetRoot=${short( - snapTargetRoot ?? 'na' - )} snapTargetHash=${short(snapTargetHash ?? 'na')}` + snapTargetRoot ?? 'na', + )} snapTargetHash=${short(snapTargetHash ?? 'na')}`, ) } @@ -154,8 +154,8 @@ export class SnapSynchronizer extends Synchronizer { if (!equalsBytes(syncedRoot, snapTargetRoot)) { throw Error( `Invalid snap syncedRoot=${short(syncedRoot)} targetRoot=${short( - snapTargetRoot - )} for target height=${snapTargetHeight} hash=${short(snapTargetHash)}` + snapTargetRoot, + )} for target height=${snapTargetHeight} hash=${short(snapTargetHash)}`, ) // TODO: figure out what needs to be reinited // this.fetcherDoneFlags.accountFetcher.done = false; @@ -165,7 +165,7 @@ export class SnapSynchronizer extends Synchronizer { } const snapDoneMsg = `snapsync complete!!! height=${snapTargetHeight} root=${short( - snapTargetRoot + snapTargetRoot, )} hash=${short(snapTargetHash)}` if (fetchingAlreadyDone) { this.config.logger.debug(snapDoneMsg) @@ -223,7 +223,7 @@ export class SnapSynchronizer extends Synchronizer { this.fetcher === null ? 
'' : 'previous fetcher errored=' + this.fetcher.syncErrored?.message - }` + }`, ) this.fetcher = new AccountFetcher({ config: this.config, diff --git a/packages/client/src/sync/sync.ts b/packages/client/src/sync/sync.ts index 65741f07e9..53ce2d018c 100644 --- a/packages/client/src/sync/sync.ts +++ b/packages/client/src/sync/sync.ts @@ -123,7 +123,7 @@ export abstract class Synchronizer { this._syncedStatusCheckInterval = setInterval( this._syncedStatusCheck.bind(this), - this.SYNCED_STATE_REMOVAL_PERIOD + this.SYNCED_STATE_REMOVAL_PERIOD, ) const timeout = setTimeout(() => { @@ -161,7 +161,7 @@ export abstract class Synchronizer { return this.resolveSync() } catch (error: any) { this.config.logger.error( - `Received sync error, stopping sync and clearing fetcher: ${error.message ?? error}` + `Received sync error, stopping sync and clearing fetcher: ${error.message ?? error}`, ) this.clearFetcher() throw error diff --git a/packages/client/src/util/debug.ts b/packages/client/src/util/debug.ts index c4a549e83d..2226c849cc 100644 --- a/packages/client/src/util/debug.ts +++ b/packages/client/src/util/debug.ts @@ -38,7 +38,7 @@ const main = async () => { execution.hardfork }' }) const block = createBlockFromRLPSerializedBlock(hexToBytes('${bytesToHex( - block.serialize() + block.serialize(), )}'), { common }) const stateDB = new Level('${execution.config.getDataDirectory(DataDirectory.State)}') @@ -46,7 +46,7 @@ const main = async () => { const stateManager = new DefaultStateManager({ trie, common }) // Ensure we run on the right root stateManager.setStateRoot(hexToBytes('${bytesToHex( - await execution.vm.stateManager.getStateRoot() + await execution.vm.stateManager.getStateRoot(), )}')) diff --git a/packages/client/src/util/index.ts b/packages/client/src/util/index.ts index cf0326fb53..a3e42ade2d 100644 --- a/packages/client/src/util/index.ts +++ b/packages/client/src/util/index.ts @@ -22,8 +22,8 @@ export function getClientVersion() { const packageJson = JSON.parse( readFileSync( '/' + import.meta.url.split('client')[0].split('file:///')[1] + 'client/package.json', - 'utf-8' - ) + 'utf-8', + ), ) const { version } = process return `EthereumJS/${packageJson.version}/${platform()}/node${version.substring(1)}` diff --git a/packages/client/src/util/parse.ts b/packages/client/src/util/parse.ts index 28d27ac6dc..061f0157ae 100644 --- a/packages/client/src/util/parse.ts +++ b/packages/client/src/util/parse.ts @@ -7,7 +7,7 @@ import type { Multiaddr } from '@multiformats/multiaddr' // From: https://community.fortra.com/forums/intermapper/miscellaneous-topics/5acc4fcf-fa83-e511-80cf-0050568460e4 const ip6RegExp = new RegExp( - 
/((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))/ + /((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))/, ) /** diff --git a/packages/client/src/util/rpc.ts b/packages/client/src/util/rpc.ts index 9eb48cfc89..77d2d52baa 100644 --- a/packages/client/src/util/rpc.ts +++ b/packages/client/src/util/rpc.ts @@ -86,7 +86,7 @@ export function inspectParams(params: any, shorten?: number) { export function createRPCServer( manager: RPCManager, - opts: CreateRPCServerOpts + opts: CreateRPCServerOpts, ): CreateRPCServerReturn { const { methodConfig, rpcDebug, rpcDebugVerbose, logger } = opts const onRequest = (request: any) => { @@ -102,7 +102,7 @@ export function createRPCServer( logger?.info(`${request.method}${batchAddOn} responded with:\n${inspectParams(response)}`) } else if (checkFilter(request.method, rpcDebug)) { logger?.info( - `${request.method}${batchAddOn} responded with:\n${inspectParams(response, 125)}` + `${request.method}${batchAddOn} responded with:\n${inspectParams(response, 125)}`, ) } } diff --git a/packages/client/test/blockchain/chain.spec.ts b/packages/client/test/blockchain/chain.spec.ts index 2f6f945ca4..f684122bad 100644 --- a/packages/client/test/blockchain/chain.spec.ts +++ b/packages/client/test/blockchain/chain.spec.ts @@ -39,11 +39,11 @@ describe('[Chain]', () => { assert.equal( bytesToHex(chain.genesis.hash()), '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', - 'get chain.genesis' + 'get chain.genesis', ) assert.ok( equalsBytes(chain.genesis.hash(), 
chain.blocks.latest!.hash()), - 'get chain.block.latest' + 'get chain.block.latest', ) await chain.close() }) diff --git a/packages/client/test/cli/cli.spec.ts b/packages/client/test/cli/cli.spec.ts index cabf9ee261..18180c598b 100644 --- a/packages/client/test/cli/cli.spec.ts +++ b/packages/client/test/cli/cli.spec.ts @@ -11,7 +11,7 @@ import type { ChildProcessWithoutNullStreams } from 'child_process' export function clientRunHelper( cliArgs: string[], onData: (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => void, - shouldError = false + shouldError = false, ) { const file = require.resolve('../../bin/cli.ts') const child = spawn('tsx', [file, ...cliArgs]) @@ -36,7 +36,7 @@ describe('[CLI]', () => { if (message.includes('Initializing Ethereumjs client')) { assert.ok( message.includes('network=sepolia chainId=11155111'), - 'client is using custom inputs for network and network ID' + 'client is using custom inputs for network and network ID', ) child.kill(9) resolve(undefined) @@ -59,7 +59,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { // if http endpoint startup message detected, call http endpoint with RPC method @@ -71,7 +71,7 @@ describe('[CLI]', () => { const res = await client.request('eth_coinbase', [], 2.0) assert.ok( res.result === '0x7e5f4552091a69125d5dfcb7b8c2659029395bdf', - 'correct coinbase address set' + 'correct coinbase address set', ) count -= 1 } @@ -91,12 +91,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Invalid values')) { assert.ok( true, - 'client correctly throws error when "dev" option is passed in without a value' + 'client correctly throws error when "dev" option is passed in without a value', ) } child.kill(15) @@ -109,7 +109,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('cannot reuse')) { assert.ok(true, 'cannot reuse ports between HTTP and WS RPCs') @@ -125,7 +125,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { // if http endpoint startup message detected, call http endpoint with RPC method @@ -137,7 +137,7 @@ describe('[CLI]', () => { } catch (e: any) { assert( e.message.includes('Unauthorized: Error: Missing auth header'), - 'authentication failure shows that auth is defaulting to active' + 'authentication failure shows that auth is defaulting to active', ) } child.kill(15) @@ -157,13 +157,13 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { assert.ok(message.includes('engine'), 'engine rpc started') assert.ok( message.includes('rpcEngineAuth=false'), - 'auth is disabled according to client logs' + 'auth is disabled according to client logs', ) await wait(600) const client = Client.http({ port: 8553 }) @@ -187,14 +187,14 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { 
assert.ok(message.includes('engine'), 'engine rpc started') assert.ok(message.includes(customPort), 'custom port is being used') assert.ok( message.includes('rpcEngineAuth=false'), - 'auth is disabled according to client logs' + 'auth is disabled according to client logs', ) await wait(600) const client = Client.http({ port: Number(customPort) }) @@ -219,14 +219,14 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { assert.ok(message.includes('engine'), 'engine rpc started') assert.ok(message.includes('0.0.0.0'), 'custom address is being used') assert.ok( message.includes('rpcEngineAuth=false'), - 'auth is disabled according to client logs' + 'auth is disabled according to client logs', ) await wait(600) const client = Client.http({ hostname: '0.0.0.0', port: Number(customPort) }) @@ -252,12 +252,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('ws://') && message.includes('engine')) { assert.ok( message.includes('0.0.0.0:' + customPort), - 'client logs show correct custom address and port being used' + 'client logs show correct custom address and port being used', ) assert.ok(message.includes('engine'), 'engine ws started') await wait(600) @@ -286,7 +286,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('ws://')) { // if ws endpoint startup message detected, call ws endpoint with RPC method @@ -315,7 +315,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { // if http endpoint startup message detected, call http endpoint with RPC method @@ -346,7 +346,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('address=http://')) { child.kill(15) @@ -369,7 +369,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('JSON-RPC: Supported Methods')) { assert.ok(message, 'logged out supported RPC methods') @@ -397,7 +397,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('DEBUG')) { assert.ok(message, 'debug logging is enabled') @@ -413,7 +413,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('account cache')) { assert.ok(message.includes('2000'), 'account cache option works') @@ -428,7 +428,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('storage cache')) { assert.ok(message.includes('2000'), 'storage cache option works') @@ -444,7 +444,7 @@ describe('[CLI]', () => { message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('code cache')) { assert.ok(message.includes('2000'), 
'code cache option works') @@ -459,7 +459,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('trie cache')) { assert.ok(message.includes('2000'), 'trie cache option works') @@ -474,7 +474,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Reading bootnodes')) { assert.ok(message.includes('num=2'), 'passing bootnode.txt URL for bootnodes option works') @@ -490,12 +490,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Clients started with experimental feature options' + 'Clients started with experimental feature options', ) child.kill(15) resolve(undefined) @@ -520,12 +520,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Clients starts with client execution limits' + 'Clients starts with client execution limits', ) child.kill(15) resolve(undefined) @@ -547,7 +547,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Server listener up transport=rlpx')) { const [ip, port] = message @@ -584,12 +584,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Clients starts with custom network options' + 'Clients starts with custom network options', ) await wait(600) const client = Client.http({ port: 8593 }) @@ -616,12 +616,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Serving light peer requests')) { assert.ok( message.includes('Serving light peer requests'), - 'client respects custom light-mode option' + 'client respects custom light-mode option', ) } if (message.includes('Starting FullEthereumService')) { @@ -630,7 +630,7 @@ describe('[CLI]', () => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Client starts with custom sync options' + 'Client starts with custom sync options', ) await wait(600) const client = Client.http({ port: 8548 }) @@ -735,12 +735,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Reading custom genesis state')) { assert.ok( message.includes('Reading custom genesis state'), - 'client respects custom genesis state file option' + 'client respects custom genesis state file option', ) } if (message.includes('Data directory')) { @@ -749,7 +749,7 @@ describe('[CLI]', () => { if (message.includes('Initializing Ethereumjs client')) { assert.ok( message.includes('network=customChain'), - 'Client respects custom 
chain parameters json file option' + 'Client respects custom chain parameters json file option', ) } if (message.includes('Client started successfully')) { @@ -770,7 +770,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Unknown argument: datadir')) { assert.ok(true, 'correctly errors on unknown arguments') @@ -785,7 +785,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Arguments chainId and gethGenesis are mutually exclusive')) { assert.ok(true, 'correctly errors on conflicting arguments') diff --git a/packages/client/test/execution/vmexecution.spec.ts b/packages/client/test/execution/vmexecution.spec.ts index 48622702a4..882dd3e1b6 100644 --- a/packages/client/test/execution/vmexecution.spec.ts +++ b/packages/client/test/execution/vmexecution.spec.ts @@ -210,7 +210,7 @@ describe('[VMExecution]', () => { assert.equal( bytesToHex(block.hash()), bytesToHex(newHead.hash()), - 'vmHead should be on the latest block' + 'vmHead should be on the latest block', ) // reset head and run again @@ -219,7 +219,7 @@ describe('[VMExecution]', () => { assert.equal( bytesToHex(oldHead.hash()), bytesToHex(newHead.hash()), - 'vmHead should be on the latest block' + 'vmHead should be on the latest block', ) await execution.run() @@ -227,7 +227,7 @@ describe('[VMExecution]', () => { assert.equal( bytesToHex(block.hash()), bytesToHex(newHead.hash()), - 'vmHead should be on the latest block' + 'vmHead should be on the latest block', ) closeRPC(server) diff --git a/packages/client/test/ext/jwt-simple.spec.ts b/packages/client/test/ext/jwt-simple.spec.ts index 3fa9197a2f..c0af86a255 100644 --- a/packages/client/test/ext/jwt-simple.spec.ts +++ b/packages/client/test/ext/jwt-simple.spec.ts @@ -94,7 +94,7 @@ describe('decode', function () { const obj2 = jwt.decode(token, key, false, 'HS512') expect(obj2).to.eql(obj) expect(jwt.decode.bind(null, token, key, false, 'HS256')).toThrowError( - /Signature verification failed/ + /Signature verification failed/, ) }) diff --git a/packages/client/test/integration/fullethereumservice.spec.ts b/packages/client/test/integration/fullethereumservice.spec.ts index 15d6427517..c1ff4edc12 100644 --- a/packages/client/test/integration/fullethereumservice.spec.ts +++ b/packages/client/test/integration/fullethereumservice.spec.ts @@ -88,7 +88,7 @@ describe( difficulty: 1, }, }, - { common: config.chainCommon } + { common: config.chainCommon }, ) peer.eth!.send('NewBlock', [block, BigInt(1)]) @@ -97,12 +97,12 @@ describe( const tx = create1559FeeMarketTxFromRLP(toBytes(txData)) await service.execution.vm.stateManager.putAccount( tx.getSenderAddress(), - new Account(BigInt(0), BigInt('40000000000100000')) + new Account(BigInt(0), BigInt('40000000000100000')), ) await service.txPool.add(tx) service.config.chainCommon.getHardforkBy = td.func() td.when(service.config.chainCommon.getHardforkBy(td.matchers.anything())).thenReturn( - Hardfork.London + Hardfork.London, ) const [_, txs] = await peer.eth!.getPooledTransactions({ hashes: [tx.hash()] }) it('should handle GetPooledTransactions', async () => { @@ -111,7 +111,7 @@ describe( peer.eth!.send('Transactions', [tx]) }, - { timeout: 30000 } + { timeout: 30000 }, ) describe('should handle LES requests', async () => { @@ -122,7 +122,7 @@ describe('should handle LES requests', async () => 
{ assert.equal( bytesToHex(headers[1].hash()), '0xa321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069', - 'handled GetBlockHeaders' + 'handled GetBlockHeaders', ) }) await destroy(server, service) diff --git a/packages/client/test/integration/lightsync.spec.backup.ts b/packages/client/test/integration/lightsync.spec.backup.ts index 4ed1149672..1c1f3cb862 100644 --- a/packages/client/test/integration/lightsync.spec.backup.ts +++ b/packages/client/test/integration/lightsync.spec.backup.ts @@ -29,7 +29,7 @@ describe( }) await localService.synchronizer!.start() }, - { timeout: 30000 } + { timeout: 30000 }, ) describe( @@ -56,7 +56,7 @@ describe( assert.ok('did not sync') }) }, - { timeout: 30000 } + { timeout: 30000 }, ) describe( @@ -90,5 +90,5 @@ describe( }) await localService.synchronizer!.start() }, - { timeout: 30000 } + { timeout: 30000 }, ) diff --git a/packages/client/test/integration/merge.spec.ts b/packages/client/test/integration/merge.spec.ts index f9702dcaec..79215397bc 100644 --- a/packages/client/test/integration/merge.spec.ts +++ b/packages/client/test/integration/merge.spec.ts @@ -43,7 +43,7 @@ const commonPoA = createCustomCommon( }, ], }, - { baseChain: ChainCommon.Goerli, hardfork: Hardfork.London } + { baseChain: ChainCommon.Goerli, hardfork: Hardfork.London }, ) const commonPoW = createCustomCommon( { @@ -64,7 +64,7 @@ const commonPoW = createCustomCommon( }, ], }, - { baseChain: ChainCommon.Mainnet, hardfork: Hardfork.London } + { baseChain: ChainCommon.Mainnet, hardfork: Hardfork.London }, ) const accounts: [Address, Uint8Array][] = [ [ @@ -129,7 +129,7 @@ describe('should mine and stop at the merge (PoA)', async () => { assert.equal( remoteService.chain.headers.td, targetTTD, - 'synced blocks to the merge successfully' + 'synced blocks to the merge successfully', ) // Make sure the miner has stopped assert.notOk(service.miner!.running, 'miner should not be running') @@ -169,7 +169,7 @@ describe('should mine and stop at the merge (PoW)', async () => { assert.equal( remoteService.chain.headers.height, terminalHeight, - 'synced blocks to the merge successfully' + 'synced blocks to the merge successfully', ) // Make sure the miner has stopped assert.notOk(service.miner!.running, 'miner should not be running') diff --git a/packages/client/test/integration/miner.spec.ts b/packages/client/test/integration/miner.spec.ts index a92685eedf..bb75b08435 100644 --- a/packages/client/test/integration/miner.spec.ts +++ b/packages/client/test/integration/miner.spec.ts @@ -26,7 +26,7 @@ const hardforks = new Common({ chain: ChainCommon.Goerli }) .map((h) => h.name === Hardfork.London ? 
{ ...h, block: 0, timestamp: undefined } - : { ...h, timestamp: undefined } + : { ...h, timestamp: undefined }, ) const common = createCustomCommon( { @@ -40,7 +40,7 @@ const common = createCustomCommon( }, }, }, - { baseChain: ChainCommon.Goerli, hardfork: Hardfork.London } + { baseChain: ChainCommon.Goerli, hardfork: Hardfork.London }, ) const accounts: [Address, Uint8Array][] = [ [ @@ -103,7 +103,7 @@ describe( assert.equal( remoteService.chain.blocks.height, targetHeight, - 'synced blocks successfully' + 'synced blocks successfully', ) }) await destroy(server, service) @@ -115,5 +115,5 @@ describe( }) }) }, - { timeout: 25000 } + { timeout: 25000 }, ) diff --git a/packages/client/test/integration/mocks/mockchain.ts b/packages/client/test/integration/mocks/mockchain.ts index d0c888d4d1..7a1c12bea8 100644 --- a/packages/client/test/integration/mocks/mockchain.ts +++ b/packages/client/test/integration/mocks/mockchain.ts @@ -38,7 +38,7 @@ export class MockChain extends Chain { parentHash: number ? blocks[number - 1].hash() : this.genesis.hash(), }, }, - { common } + { common }, ) blocks.push(block) } diff --git a/packages/client/test/integration/mocks/mockpeer.ts b/packages/client/test/integration/mocks/mockpeer.ts index 75d3461fd5..fbaf0f5d04 100644 --- a/packages/client/test/integration/mocks/mockpeer.ts +++ b/packages/client/test/integration/mocks/mockpeer.ts @@ -79,7 +79,7 @@ export class MockPeer extends Peer { if (!(stream.protocols as string[]).includes(`${p.name}/${p.versions[0]}`)) return await p.open() await this.addProtocol(new MockSender(p.name, pushableFn, receiver), p) - }) + }), ) this.connected = true } diff --git a/packages/client/test/integration/mocks/network.ts b/packages/client/test/integration/mocks/network.ts index db67d2477d..1bc30a5583 100644 --- a/packages/client/test/integration/mocks/network.ts +++ b/packages/client/test/integration/mocks/network.ts @@ -64,7 +64,7 @@ export function createStream(id: string, location: string, protocols: string[]) servers[location].streams[id] = stream setTimeout( () => servers[location].server.emit('connection', { id, stream: stream.local(id) }), - 10 + 10, ) return stream.remote(location) } diff --git a/packages/client/test/integration/peerpool.spec.ts b/packages/client/test/integration/peerpool.spec.ts index 583e9f425b..a875386595 100644 --- a/packages/client/test/integration/peerpool.spec.ts +++ b/packages/client/test/integration/peerpool.spec.ts @@ -81,7 +81,7 @@ describe('should handle peer messages', async () => { config.events.on(Event.POOL_PEER_ADDED, (peer: any) => it('should add peer', () => { assert.equal(peer.id, 'peer0', 'added peer') - }) + }), ) config.events.on(Event.PROTOCOL_MESSAGE, (msg: any, proto: any, peer: any) => { it('should get message', () => { diff --git a/packages/client/test/integration/util.ts b/packages/client/test/integration/util.ts index 7d467505b4..34abf6da7c 100644 --- a/packages/client/test/integration/util.ts +++ b/packages/client/test/integration/util.ts @@ -22,7 +22,7 @@ interface SetupOptions { } export async function setup( - options: SetupOptions = {} + options: SetupOptions = {}, ): Promise<[MockServer, FullEthereumService | LightEthereumService]> { const { location, height, interval, syncmode } = options const minPeers = options.minPeers ?? 
1 @@ -86,7 +86,7 @@ export async function setup( export async function destroy( server: MockServer, - service: FullEthereumService | LightEthereumService + service: FullEthereumService | LightEthereumService, ): Promise<void> { service.config.events.emit(Event.CLIENT_SHUTDOWN) await server.stop() diff --git a/packages/client/test/logging.spec.ts b/packages/client/test/logging.spec.ts index 5f48ea5b5e..b5542e9e56 100644 --- a/packages/client/test/logging.spec.ts +++ b/packages/client/test/logging.spec.ts @@ -9,11 +9,11 @@ describe('[Logging]', () => { it('should have correct transports', () => { assert.ok( logger.transports.find((t: any) => t.name === 'console') !== undefined, - 'should have stdout transport' + 'should have stdout transport', ) assert.ok( logger.transports.find((t: any) => t.name === 'file') !== undefined, - 'should have file transport' + 'should have file transport', ) }) @@ -24,11 +24,11 @@ describe('[Logging]', () => { e.level = 'error' assert.ok( /an error\n {4}at/.test((format.transform(e) as any).message), - 'log message should contain stack trace (1)' + 'log message should contain stack trace (1)', ) assert.ok( /an error\n {4}at/.test((format.transform({ level: 'error', message: e }) as any).message), - 'log message should contain stack trace (2)' + 'log message should contain stack trace (2)', ) } }) @@ -45,7 +45,7 @@ describe('[Logging]', () => { assert.equal( message, 'test \x1B[38;2;0;128;0mkey\x1B[39m=value ', - 'key=value pairs should be colorized' + 'key=value pairs should be colorized', ) }) }) diff --git a/packages/client/test/miner/miner.spec.ts b/packages/client/test/miner/miner.spec.ts index e3868c7cb7..0fa1c2e97b 100644 --- a/packages/client/test/miner/miner.spec.ts +++ b/packages/client/test/miner/miner.spec.ts @@ -168,7 +168,7 @@ const createTx = ( value = 1, gasPrice = 1000000000, gasLimit = 100000, - common = customCommon + common = customCommon, ) => { const txData = { nonce, @@ -191,7 +191,7 @@ const txA011 = createTx( 1, 1000000000, 100000, - goerliCommon + goerliCommon, ) // A -> B, nonce: 0, value: 1, normal gasPrice const txA02 = createTx(A, B, 1, 1, 2000000000) // A -> B, nonce: 1, value: 1, 2x gasPrice @@ -287,7 +287,7 @@ describe('assembleBlocks() -> with a hardfork mismatching tx', async () => { assert.equal( blocks[0].transactions.length, 0, - 'new block should not include tx due to hardfork mismatch' + 'new block should not include tx due to hardfork mismatch', ) assert.equal(txPool.txsInPool, 1, 'transaction should remain in pool') }) @@ -455,7 +455,7 @@ describe('assembleBlocks() -> should not include tx under the baseFee', async () // the default block baseFee will be 7 // add tx with maxFeePerGas of 6 const tx = create1559FeeMarketTx({ to: B.address, maxFeePerGas: 6 }, { common }).sign( - A.privateKey + A.privateKey, ) try { await txPool.add(tx, true) @@ -480,7 +480,7 @@ describe("assembleBlocks() -> should stop assembling a block after it's full", a const gasLimit = 100000 const block = createBlockFromBlockData( { header: { gasLimit } }, - { common: customCommon, setHardfork: true } + { common: customCommon, setHardfork: true }, ) Object.defineProperty(chain, 'headers', { get() { @@ -509,11 +509,11 @@ describe("assembleBlocks() -> should stop assembling a block after it's full", a const data = '0xfe' // INVALID opcode, consumes all gas const tx1FillsBlockGasLimit = createLegacyTx( { gasLimit: gasLimit - 1, data, gasPrice: BigInt('1000000000') }, - { common: customCommon } + { common: customCommon }, ).sign(A.privateKey) const
tx2ExceedsBlockGasLimit = createLegacyTx( { gasLimit: 21000, to: B.address, nonce: 1, gasPrice: BigInt('1000000000') }, - { common: customCommon } + { common: customCommon }, ).sign(A.privateKey) await txPool.add(tx1FillsBlockGasLimit) await txPool.add(tx2ExceedsBlockGasLimit) diff --git a/packages/client/test/miner/pendingBlock.spec.ts b/packages/client/test/miner/pendingBlock.spec.ts index fd4f87ca1b..331c899804 100644 --- a/packages/client/test/miner/pendingBlock.spec.ts +++ b/packages/client/test/miner/pendingBlock.spec.ts @@ -107,7 +107,7 @@ describe('[PendingBlock]', async () => { nonce = 0, value = 1, gasPrice = 1000000000, - gasLimit = 100000 + gasLimit = 100000, ) => { const txData = { nonce, @@ -150,7 +150,7 @@ describe('[PendingBlock]', async () => { assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -171,10 +171,10 @@ describe('[PendingBlock]', async () => { const payload = pendingBlock.pendingPayloads.get(bytesToHex(payloadId)) assert.equal( (payload as any).transactions.filter( - (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txA011.hash()) + (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txA011.hash()), ).length, 1, - 'txA011 should be in block' + 'txA011 should be in block', ) txB011.common.setHardfork(Hardfork.Paris) @@ -187,16 +187,16 @@ describe('[PendingBlock]', async () => { assert.equal(block?.transactions.length, 2, 'should include txs from pool') assert.equal( (payload as any).transactions.filter( - (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txB011.hash()) + (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txB011.hash()), ).length, 1, - 'txB011 should be in block' + 'txB011 should be in block', ) pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -213,7 +213,7 @@ describe('[PendingBlock]', async () => { assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after stopping' + 'should reset the pending payload after stopping', ) }) @@ -242,7 +242,7 @@ describe('[PendingBlock]', async () => { gasPrice: 1000000000, nonce: 2, }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txA03) const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) @@ -259,14 +259,14 @@ describe('[PendingBlock]', async () => { assert.equal( block?.transactions.length, 2, - 'should include txs from pool that fit in the block' + 'should include txs from pool that fit in the block', ) assert.equal(receipts.length, 2, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) // reset gas Limit @@ -286,7 +286,7 @@ describe('[PendingBlock]', async () => { gasPrice: 1000000000, nonce: 2, }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txA03) const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) @@ -301,14 +301,14 @@ describe('[PendingBlock]', async () => { assert.equal( block?.transactions.length, 2, - 'should include txs from pool that fit in the block' + 'should include txs from pool that fit in the block', ) assert.equal(receipts.length, 2, 'receipts should match number of transactions') 
pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -327,14 +327,14 @@ describe('[PendingBlock]', async () => { assert.equal( block.transactions.length, 0, - 'should not include tx with sender that has insufficient funds' + 'should not include tx with sender that has insufficient funds', ) assert.equal(receipts.length, 0, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -350,7 +350,7 @@ describe('[PendingBlock]', async () => { } catch (err: any) { assert.equal( err.message, - 'cannot get iterator head: blockchain has no getTotalDifficulty function' + 'cannot get iterator head: blockchain has no getTotalDifficulty function', ) } }) @@ -391,7 +391,7 @@ describe('[PendingBlock]', async () => { to: randomBytes(20), nonce: BigInt(x), }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txA01) } @@ -405,7 +405,7 @@ describe('[PendingBlock]', async () => { to: randomBytes(20), nonce: BigInt(3), }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txNorm) @@ -464,7 +464,7 @@ describe('[PendingBlock]', async () => { to: randomBytes(20), nonce: BigInt(0), }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(missingBlobTx) diff --git a/packages/client/test/net/peer/peer.spec.ts b/packages/client/test/net/peer/peer.spec.ts index 54ff58a5b6..c703effd5b 100644 --- a/packages/client/test/net/peer/peer.spec.ts +++ b/packages/client/test/net/peer/peer.spec.ts @@ -28,13 +28,13 @@ describe('[Peer]', () => { assert.equal( peer.toString(true), 'id=0123456789abcdef address=address0 transport=transport0 inbound=true', - 'correct full id string' + 'correct full id string', ) peer.inbound = false assert.equal( peer.toString(), 'id=01234567 address=address0 transport=transport0 inbound=false', - 'correct short id string' + 'correct short id string', ) }) }) diff --git a/packages/client/test/net/peer/rlpxpeer.spec.ts b/packages/client/test/net/peer/rlpxpeer.spec.ts index e664039871..65525ffe18 100644 --- a/packages/client/test/net/peer/rlpxpeer.spec.ts +++ b/packages/client/test/net/peer/rlpxpeer.spec.ts @@ -52,7 +52,7 @@ describe('[RlpxPeer]', async () => { { name: 'les', version: 4, length: 23 }, { name: 'snap', version: 1, length: 8 }, ], - 'correct capabilities' + 'correct capabilities', ) }) @@ -95,10 +95,10 @@ describe('[RlpxPeer]', async () => { }) peer.config.events.on(Event.PEER_CONNECTED, (peer) => - assert.equal(peer.id, 'abcdef0123', 'got connected') + assert.equal(peer.id, 'abcdef0123', 'got connected'), ) peer.config.events.on(Event.PEER_DISCONNECTED, (rlpxPeer) => - assert.equal(rlpxPeer.pooled, false, 'got disconnected') + assert.equal(rlpxPeer.pooled, false, 'got disconnected'), ) peer.rlpx!.events.emit('peer:error', rlpxPeer, new Error('err0')) peer.rlpx!.events.emit('peer:added', rlpxPeer) diff --git a/packages/client/test/net/peerpool.spec.ts b/packages/client/test/net/peerpool.spec.ts index 70339910ec..a066c3531d 100644 --- a/packages/client/test/net/peerpool.spec.ts +++ b/packages/client/test/net/peerpool.spec.ts @@ -82,7 +82,7 @@ describe('should get idle peers', () => { assert.equal( pool.idle((p: any) => p.id > 1), peers[1], - 'correct idle peer with filter' + 'correct idle peer with filter', ) }) }) diff --git 
a/packages/client/test/net/protocol/ethprotocol.spec.ts b/packages/client/test/net/protocol/ethprotocol.spec.ts index 01dd00fe1e..fb6ef7ce84 100644 --- a/packages/client/test/net/protocol/ethprotocol.spec.ts +++ b/packages/client/test/net/protocol/ethprotocol.spec.ts @@ -59,7 +59,7 @@ describe('[EthProtocol]', () => { genesisHash: '0xbb', latestBlock: hexToBytes('0x0a'), }, - 'encode status' + 'encode status', ) const status = p.decodeStatus({ chainId: [0x01], @@ -72,7 +72,7 @@ describe('[EthProtocol]', () => { status.td === BigInt(100) && status.bestHash === '0xaa' && status.genesisHash === '0xbb', - 'decode status' + 'decode status', ) }) @@ -129,7 +129,7 @@ describe('[EthProtocol]', () => { gasLimit: 100, value: 6, }, - { common: config.chainCommon } + { common: config.chainCommon }, ) const res = p.encode(p.messages.filter((message) => message.name === 'PooledTransactions')[0], { reqId: BigInt(1), @@ -184,10 +184,10 @@ describe('[EthProtocol]', () => { assert.equal(bytesToBigInt(res[0]), BigInt(1), 'correctly encoded reqId') const expectedSerializedReceipts = [ hexToBytes( - '0x02f9016d0164b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f866f864940000000000000000000000000000000000000000f842a00000000000000000000000000000000000000000000000000000000000000000a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000' + '0x02f9016d0164b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f866f864940000000000000000000000000000000000000000f842a00000000000000000000000000000000000000000000000000000000000000000a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000', ), hexToBytes( - '0xf9016f808203e8b9010001010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101f866f864940101010101010101010101010101010101010101f842a00101010101010101010101010101010101010101010101010101010101010101a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000' + 
'0xf9016f808203e8b9010001010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101f866f864940101010101010101010101010101010101010101f842a00101010101010101010101010101010101010101010101010101010101010101a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000', ), ] assert.deepEqual(res[1], expectedSerializedReceipts, 'correctly encoded receipts') @@ -225,7 +225,7 @@ describe('[EthProtocol]', () => { const eip1559Tx = createTxFromTxData({ type: 2 }, { common: config.chainCommon }) const blobTx = createTxFromTxData( { type: 3, to: Address.zero(), blobVersionedHashes: [hexToBytes(`0x01${'00'.repeat(31)}`)] }, - { common: config.chainCommon } + { common: config.chainCommon }, ) const res = p.encode(p.messages.filter((message) => message.name === 'Transactions')[0], [ legacyTx, @@ -239,7 +239,7 @@ describe('[EthProtocol]', () => { const decoded = p.decode( p.messages.filter((message) => message.name === 'Transactions')[0], - res + res, ) assert.deepEqual(decoded[0].type, legacyTx.type, 'decoded legacy tx correctly') assert.deepEqual(decoded[1].type, eip2929Tx.type, 'decoded eip2929 tx correctly') @@ -259,23 +259,23 @@ describe('[EthProtocol]', () => { const fakeHash = fakeTx.hash() const encoded = p.encode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - [fakeHash] + [fakeHash], ) const encodedEth68 = p.encode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - [[fakeTx.type], [fakeTx.serialize().byteLength], [fakeHash]] + [[fakeTx.type], [fakeTx.serialize().byteLength], [fakeHash]], ) assert.deepEqual(encoded[0], fakeHash, 'encoded hash correctly with pre-eth/68 format') assert.deepEqual(encodedEth68[2][0], fakeHash, 'encoded hash correctly with eth/68 format') const decoded = p.decode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - encoded + encoded, ) const decodedEth68 = p.decode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - encodedEth68 + encodedEth68, ) assert.deepEqual(decoded[0], fakeHash, 'decoded hash correctly with pre-eth/68 format') assert.deepEqual(decodedEth68[2][0], fakeHash, 'decoded hash correctly with eth/68 format') diff --git a/packages/client/test/net/protocol/lesprotocol.spec.ts b/packages/client/test/net/protocol/lesprotocol.spec.ts index e147f4b5b7..e142c9cdf3 100644 --- a/packages/client/test/net/protocol/lesprotocol.spec.ts +++ b/packages/client/test/net/protocol/lesprotocol.spec.ts @@ -82,7 +82,7 @@ describe('[LesProtocol]', () => { bytesToHex(status['flowControl/MRC'][0][0]) === '0x02' && bytesToHex(status['flowControl/MRC'][0][1]) === '0x0a' && bytesToHex(status['flowControl/MRC'][0][2]) === '0x0a', - 'encode status' + 'encode status', ) status = { ...status, chainId: [0x01] } status = p.decodeStatus(status) @@ -105,7 +105,7 @@ describe('[LesProtocol]', () => { status.mrc['2'].req === 10 && status.mrc.GetBlockHeaders.base === 10 && status.mrc.GetBlockHeaders.req === 10, - 'decode status' + 'decode status', ) }) }) diff --git 
a/packages/client/test/net/protocol/snapprotocol.spec.ts b/packages/client/test/net/protocol/snapprotocol.spec.ts index 3f6a7d6bf2..43b30fda1f 100644 --- a/packages/client/test/net/protocol/snapprotocol.spec.ts +++ b/packages/client/test/net/protocol/snapprotocol.spec.ts @@ -57,34 +57,34 @@ describe('[SnapProtocol]', () => { origin, limit, bytes, - } + }, ) assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok( JSON.stringify(payload[1]) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly encoded root' + 'correctly encoded root', ) assert.ok(JSON.stringify(payload[2]) === JSON.stringify(origin), 'correctly encoded origin') assert.ok(JSON.stringify(payload[3]) === JSON.stringify(limit), 'correctly encoded limit') assert.ok( JSON.stringify(payload[4]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetAccountRange')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') assert.ok( JSON.stringify(res.root) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly decoded root' + 'correctly decoded root', ) assert.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') assert.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') @@ -100,7 +100,7 @@ describe('[SnapProtocol]', () => { const data = RLP.decode(hexToBytes(contractAccountRangeRLP)) as unknown const { reqId, accounts, proof } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - data + data, ) assert.ok(reqId === BigInt(1), 'reqId should be 1') assert.ok(accounts.length === 2, 'accounts should be 2') @@ -114,23 +114,23 @@ describe('[SnapProtocol]', () => { assert.ok( bytesToHex(secondAccount[2]) === '0x3dc6d3cfdc6210b8591ea852961d880821298c7891dea399e02d87550af9d40e', - 'storageHash of the second account' + 'storageHash of the second account', ) assert.ok( bytesToHex(secondAccount[3]) === '0xe68fe0bb7c4a483affd0f19cc2b989105242bd6b256c6de3afd738f8acd80c66', - 'codeHash of the second account' + 'codeHash of the second account', ) const payload = RLP.encode( p.encode(p.messages.filter((message) => message.name === 'AccountRange')[0], { reqId, accounts, proof, - }) + }), ) assert.ok( contractAccountRangeRLP === bytesToHex(payload), - 'Re-encoded payload should match with original' + 'Re-encoded payload should match with original', ) }) @@ -144,7 +144,7 @@ describe('[SnapProtocol]', () => { const fullData = pFull.decode( pFull.messages.filter((message) => message.name === 'AccountRange')[0], - resData + resData, ) const { accounts: accountsFull } = fullData assert.ok(accountsFull.length === 3, '3 accounts should be decoded in accountsFull') @@ -156,11 +156,11 @@ describe('[SnapProtocol]', () => { // we shpuld get slim format const slimPayload = pFull.encode( pFull.messages.filter((message) => message.name === 'AccountRange')[0], - fullData + fullData, ) const { accounts: accountsSlim } = pSlim.decode( pSlim.messages.filter((message) => message.name === 'AccountRange')[0], - slimPayload + slimPayload, ) // 3 accounts are there in accountRangeRLP @@ -179,13 +179,13 @@ describe('[SnapProtocol]', () => { const reqData = RLP.decode(hexToBytes(getAccountRangeRLP)) const { root: stateRoot } = p.decode( p.messages.filter((message) => message.name === 
'GetAccountRange')[0], - reqData + reqData, ) // accountRangeRLP is the corresponding response to getAccountRangeRLP const resData = RLP.decode(hexToBytes(accountRangeRLP)) const { accounts, proof } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - resData + resData, ) try { @@ -199,7 +199,7 @@ describe('[SnapProtocol]', () => { } assert.ok( equalsBytes(keccak256(proof[0]), stateRoot), - 'Proof should link to the requested stateRoot' + 'Proof should link to the requested stateRoot', ) }) @@ -226,38 +226,38 @@ describe('[SnapProtocol]', () => { origin, limit, bytes, - } + }, ) assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok( JSON.stringify(payload[1]) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly encoded root' + 'correctly encoded root', ) assert.ok(JSON.stringify(payload[2]) === JSON.stringify(accounts), 'correctly encoded accounts') assert.ok(JSON.stringify(payload[3]) === JSON.stringify(origin), 'correctly encoded origin') assert.ok(JSON.stringify(payload[4]) === JSON.stringify(limit), 'correctly encoded limit') assert.ok( JSON.stringify(payload[5]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetStorageRanges')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') assert.ok( JSON.stringify(res.root) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly decoded root' + 'correctly decoded root', ) assert.ok( JSON.stringify(res.accounts) === JSON.stringify(accounts), - 'correctly decoded accounts' + 'correctly decoded accounts', ) assert.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') assert.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') @@ -274,14 +274,14 @@ describe('[SnapProtocol]', () => { const data = RLP.decode(hexToBytes(storageRangesRLP)) as unknown const { reqId, slots, proof } = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - data + data, ) assert.ok(reqId === BigInt(1), 'correctly decoded reqId') assert.ok(slots.length === 1 && slots[0].length === 3, 'correctly decoded slots') const { hash, body } = slots[0][2] assert.ok( bytesToHex(hash) === '0x60264186ee63f748d340388f07b244d96d007fff5cbc397bbd69f8747c421f79', - 'Slot 3 key' + 'Slot 3 key', ) assert.ok(bytesToHex(body) === '0x8462b66ae7', 'Slot 3 value') @@ -290,11 +290,11 @@ describe('[SnapProtocol]', () => { reqId, slots, proof, - }) + }), ) assert.ok( storageRangesRLP === bytesToHex(payload), - 'Re-encoded payload should match with original' + 'Re-encoded payload should match with original', ) }) @@ -307,7 +307,7 @@ describe('[SnapProtocol]', () => { const accountsData = RLP.decode(hexToBytes(contractAccountRangeRLP)) const { accounts } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - accountsData + accountsData, ) const lastAccount = accounts[accounts.length - 1] @@ -315,7 +315,7 @@ describe('[SnapProtocol]', () => { const data = RLP.decode(hexToBytes(storageRangesRLP)) const { proof, slots } = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - data + data, ) // storageRangesRLP response is to the lastAccount's slots so slots[0] are the slots of // lastAccount @@ -333,14 +333,14 @@ 
describe('[SnapProtocol]', () => { proof, { useKeyHashingFunction: keccak256, - } + }, ) } catch (e) { assert.fail(`StorageRange proof verification failed with message=${(e as Error).message}`) } assert.ok( equalsBytes(keccak256(proof[0]), lastAccountStorageRoot), - 'Proof should link to the accounts storageRoot' + 'Proof should link to the accounts storageRoot', ) }) @@ -363,18 +363,18 @@ describe('[SnapProtocol]', () => { assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok(JSON.stringify(payload[1]) === JSON.stringify(hashes), 'correctly encoded hashes') assert.ok( JSON.stringify(payload[2]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetByteCodes')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') @@ -391,7 +391,7 @@ describe('[SnapProtocol]', () => { const codesRes = RLP.decode(hexToBytes(byteCodesRLP)) const { reqId, codes } = p.decode( p.messages.filter((message) => message.name === 'ByteCodes')[0], - codesRes + codesRes, ) assert.ok(reqId === BigInt(1), 'reqId should be 1') @@ -401,7 +401,7 @@ describe('[SnapProtocol]', () => { p.encode(p.messages.filter((message) => message.name === 'ByteCodes')[0], { reqId, codes, - }) + }), ) assert.ok(byteCodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') }) @@ -415,13 +415,13 @@ describe('[SnapProtocol]', () => { const codesReq = RLP.decode(hexToBytes(getByteCodesRLP)) const { hashes } = p.decode( p.messages.filter((message) => message.name === 'GetByteCodes')[0], - codesReq + codesReq, ) const codeHash = hashes[0] const codesRes = RLP.decode(hexToBytes(byteCodesRLP)) const { codes } = p.decode( p.messages.filter((message) => message.name === 'ByteCodes')[0], - codesRes + codesRes, ) const code = codes[0] assert.ok(equalsBytes(keccak256(code), codeHash), 'Code should match the requested codeHash') @@ -446,19 +446,19 @@ describe('[SnapProtocol]', () => { assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(reqId)), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok(JSON.stringify(payload[1]) === JSON.stringify(root), 'correctly encoded root') assert.ok(JSON.stringify(payload[2]) === JSON.stringify(paths), 'correctly encoded paths') assert.ok( JSON.stringify(payload[3]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetTrieNodes')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') @@ -476,7 +476,7 @@ describe('[SnapProtocol]', () => { const nodesRes = RLP.decode(hexToBytes(trieNodesRLP)) as unknown const { reqId, nodes } = p.decode( p.messages.filter((message) => message.name === 'TrieNodes')[0], - nodesRes + nodesRes, ) assert.ok(reqId === BigInt(1), 'reqId should be 1') @@ -494,7 +494,7 @@ describe('[SnapProtocol]', () => { p.encode(p.messages.filter((message) => message.name === 'TrieNodes')[0], { reqId, nodes, - }) + }), ) assert.ok(trieNodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') }) diff --git a/packages/client/test/net/server/rlpxserver.spec.ts b/packages/client/test/net/server/rlpxserver.spec.ts index 
f00b0cd07a..4db0ab7e59 100644 --- a/packages/client/test/net/server/rlpxserver.spec.ts +++ b/packages/client/test/net/server/rlpxserver.spec.ts @@ -46,7 +46,7 @@ vi.doMock('@ethereumjs/devp2p', () => { } }) -const { RlpxServer } = await import('../../../src/net/server/rlpxserver') +const { RlpxServer } = await import('../../../src/net/server/rlpxserver.js') describe('[RlpxServer]', async () => { it('should initialize correctly', async () => { const config = new Config({ accountCache: 10000, storageCache: 1000 }) @@ -60,7 +60,7 @@ describe('[RlpxServer]', async () => { assert.deepEqual( server.bootnodes, [multiaddr('/ip4/10.0.0.1/tcp/1234'), multiaddr('/ip4/10.0.0.2/tcp/1234')], - 'bootnodes split' + 'bootnodes split', ) }) @@ -89,7 +89,7 @@ describe('[RlpxServer]', async () => { } server.rlpx = { destroy: vi.fn() } server.config.events.on(Event.PEER_ERROR, (err: any) => - assert.equal(err.message, 'err0', 'got error') + assert.equal(err.message, 'err0', 'got error'), ) await server.start() expect((server as any).initDpt).toHaveBeenCalled() @@ -172,7 +172,7 @@ describe('should return rlpx server info with ip4 as default', async () => { listenAddr: '0.0.0.0:30303', ports: { discovery: 30303, listener: 30303 }, }, - 'get nodeInfo' + 'get nodeInfo', ) }) await server.stop() @@ -227,7 +227,7 @@ describe('should return rlpx server info with ip6', async () => { listenAddr: '[::]:30303', ports: { discovery: 30303, listener: 30303 }, }, - 'get nodeInfo' + 'get nodeInfo', ) }) await server.stop() @@ -278,7 +278,7 @@ describe('should init dpt', async () => { config.events.on(Event.SERVER_ERROR, (err) => it('should throw', async () => { assert.equal(err.message, 'err0', 'got error') - }) + }), ) server['dpt']?.events.emit('error', new Error('err0')) }) @@ -323,22 +323,22 @@ describe('should init rlpx', async () => { config.events.on(Event.PEER_CONNECTED, (peer) => it('should connect', async () => { assert.ok(peer instanceof RlpxPeer, 'connected') - }) + }), ) config.events.on(Event.PEER_DISCONNECTED, (peer) => it('should disconnect', async () => { assert.equal(peer.id, '01', 'disconnected') - }) + }), ) config.events.on(Event.SERVER_ERROR, (err) => it('should throw error', async () => { assert.equal(err.message, 'err0', 'got error') - }) + }), ) config.events.on(Event.SERVER_LISTENING, (info) => it('should listen', async () => { assert.deepEqual(info, { transport: 'rlpx', url: 'enode://ff@0.0.0.0:30303' }, 'listening') - }) + }), ) server.rlpx!.events.emit('peer:added', rlpxPeer) ;(server as any).peers.set('01', { id: '01' } as any) diff --git a/packages/client/test/rpc/debug/getRawBlock.spec.ts b/packages/client/test/rpc/debug/getRawBlock.spec.ts index 86102288a2..4e560c185b 100644 --- a/packages/client/test/rpc/debug/getRawBlock.spec.ts +++ b/packages/client/test/rpc/debug/getRawBlock.spec.ts @@ -17,7 +17,7 @@ const mockedTx1 = createLegacyTx({}).sign(dummy.privKey) const mockedTx2 = createLegacyTx({ nonce: 1 }).sign(dummy.privKey) const mockedBlobTx3 = create4844BlobTx( { nonce: 2, blobsData: ['0x1234'], to: Address.zero() }, - { common } + { common }, ).sign(dummy.privKey) const blockHash = hexToBytes('0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5') const transactions = [mockedTx1] @@ -41,7 +41,7 @@ const block = { } const genesisBlockHash = hexToBytes( - '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5' + '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5', ) const genesisBlock = { hash: () => genesisBlockHash, @@ -83,7 +83,7 @@ 
describe(method, async () => { assert.equal( res.result, bytesToHex(genesisBlock.serialize()), - 'should return the genesis block as earliest' + 'should return the genesis block as earliest', ) }) @@ -118,8 +118,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"', + ), ) }) }) @@ -131,7 +131,7 @@ describe('call with block with blob txs', () => { header: { number: 1, parentHash: genesisBlock.header.hash() }, transactions: [mockedBlobTx3], }, - { common } + { common }, ) const manager = createManager(await createClient({ chain: createChain(block1 as any) })) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -140,7 +140,7 @@ describe('call with block with blob txs', () => { assert.equal( res.result, bytesToHex(block1.serialize()), - 'block body contains a transaction with the blobVersionedHashes field' + 'block body contains a transaction with the blobVersionedHashes field', ) }) }) diff --git a/packages/client/test/rpc/debug/getRawHeader.spec.ts b/packages/client/test/rpc/debug/getRawHeader.spec.ts index 27a2a7e67e..bd86b30f3c 100644 --- a/packages/client/test/rpc/debug/getRawHeader.spec.ts +++ b/packages/client/test/rpc/debug/getRawHeader.spec.ts @@ -17,7 +17,7 @@ const mockedTx1 = createLegacyTx({}).sign(dummy.privKey) const mockedTx2 = createLegacyTx({ nonce: 1 }).sign(dummy.privKey) const mockedBlobTx3 = create4844BlobTx( { nonce: 2, blobsData: ['0x1234'], to: Address.zero() }, - { common } + { common }, ).sign(dummy.privKey) const blockHash = hexToBytes('0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5') const transactions = [mockedTx1] @@ -41,7 +41,7 @@ const block = { } const genesisBlockHash = hexToBytes( - '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5' + '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5', ) const genesisBlock = { hash: () => genesisBlockHash, @@ -76,7 +76,7 @@ describe(method, async () => { assert.equal( res.result, bytesToHex(genesisBlock.header.serialize()), - 'should return a valid block' + 'should return a valid block', ) }) @@ -88,7 +88,7 @@ describe(method, async () => { assert.equal( res.result, bytesToHex(genesisBlock.header.serialize()), - 'should return the genesis block as earliest' + 'should return the genesis block as earliest', ) }) @@ -123,8 +123,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"', + ), ) }) }) @@ -136,7 +136,7 @@ describe('call with block with blob txs', () => { header: { number: 1, parentHash: genesisBlock.header.hash() }, transactions: [mockedBlobTx3], }, - { common } + { common }, ) const manager = createManager(await createClient({ chain: createChain(block1 as any) })) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -145,7 +145,7 @@ describe('call with block with blob txs', () => { assert.equal( res.result, bytesToHex(block1.header.serialize()), - 'block body contains a transaction with the 
blobVersionedHashes field' + 'block body contains a transaction with the blobVersionedHashes field', ) }) }) diff --git a/packages/client/test/rpc/debug/getRawReceipts.spec.ts b/packages/client/test/rpc/debug/getRawReceipts.spec.ts index 7d23ce7775..ef90f66552 100644 --- a/packages/client/test/rpc/debug/getRawReceipts.spec.ts +++ b/packages/client/test/rpc/debug/getRawReceipts.spec.ts @@ -38,7 +38,7 @@ describe(method, () => { gasPrice: 100, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx]) const res0 = await rpc.request(method, [bytesToHex(tx.hash())]) @@ -56,7 +56,7 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx @@ -67,7 +67,7 @@ describe(method, () => { maxPriorityFeePerGas: 10, to: '0x1230000000000000000000000000000000000321', }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx]) @@ -134,7 +134,7 @@ describe(method, () => { to: randomBytes(20), nonce: 0n, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/debug/getRawTransaction.spec.ts b/packages/client/test/rpc/debug/getRawTransaction.spec.ts index 19b65e6aac..155ea6479d 100644 --- a/packages/client/test/rpc/debug/getRawTransaction.spec.ts +++ b/packages/client/test/rpc/debug/getRawTransaction.spec.ts @@ -20,7 +20,7 @@ describe(method, () => { // construct tx const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -38,7 +38,7 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx @@ -49,7 +49,7 @@ describe(method, () => { maxPriorityFeePerGas: 10, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) diff --git a/packages/client/test/rpc/debug/storageRangeAt.spec.ts b/packages/client/test/rpc/debug/storageRangeAt.spec.ts index 8f55a24f20..17eb9e18b0 100644 --- a/packages/client/test/rpc/debug/storageRangeAt.spec.ts +++ b/packages/client/test/rpc/debug/storageRangeAt.spec.ts @@ -98,7 +98,7 @@ describe(method, () => { value: 0, data: storageBytecode, }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) const vmCopy = await execution.vm.shallowCopy() @@ -127,7 +127,7 @@ describe(method, () => { nonce: 1, data: updateBytecode, }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) await blockBuilder.addTransaction(secondTx, { skipHardForkValidation: true }) @@ -142,7 +142,7 @@ describe(method, () => { nonce: 2, data: noStorageBytecode, }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) const thirdResult = await blockBuilder.addTransaction(thirdTx, { skipHardForkValidation: true }) @@ -175,27 +175,27 @@ describe(method, () => { assert.equal( storageRange.storage[bytesToHex(firstVariableHash)].value, '0x43', - 'First variable correctly included.' 
+ 'First variable correctly included.', ) const secondVariableHash = keccak256(setLengthLeft(hexToBytes('0x01'), 32)) assert.equal( storageRange.storage[bytesToHex(secondVariableHash)].value, '0x01', - 'Second variable correctly included.' + 'Second variable correctly included.', ) const thirdVariableHash = keccak256(setLengthLeft(hexToBytes('0x02'), 32)) assert.equal( storageRange.storage[bytesToHex(thirdVariableHash)].value, '0x02', - 'Third variable correctly included.' + 'Third variable correctly included.', ) assert.equal( Object.keys(storageRange.storage).length, 3, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) }) @@ -219,7 +219,7 @@ describe(method, () => { assert.equal( storageRange.storage[bytesToHex(hashedKey)].value, '0x42', - 'Old value was correctly reported.' + 'Old value was correctly reported.', ) }) @@ -241,7 +241,7 @@ describe(method, () => { assert.equal( Object.keys(storageRange.storage).length, 2, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) }) @@ -263,7 +263,7 @@ describe(method, () => { assert.equal( Object.keys(storageRange.storage).length, 0, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) assert.isNull(storageRange.nextKey, 'nextKey was correctly set to null.') @@ -291,12 +291,12 @@ describe(method, () => { assert.equal( Object.keys(storageRange.storage).length, 2, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) assert.isUndefined( storageRange.storage[bytesToHex(smallestHashedKey)], - 'Smallest hashed key was correctly excluded from result.' + 'Smallest hashed key was correctly excluded from result.', ) }) @@ -399,8 +399,8 @@ describe(method, () => { assert.equal(res.error.code, INTERNAL_ERROR) assert.ok( res.error.message.includes( - 'txIndex cannot be larger than the number of transactions in the block.' 
- ) + 'txIndex cannot be larger than the number of transactions in the block.', + ), ) }) diff --git a/packages/client/test/rpc/debug/traceCall.spec.ts b/packages/client/test/rpc/debug/traceCall.spec.ts index 23348f588b..9476c8e943 100644 --- a/packages/client/test/rpc/debug/traceCall.spec.ts +++ b/packages/client/test/rpc/debug/traceCall.spec.ts @@ -60,7 +60,7 @@ describe('trace a call', async () => { value: 10000, data: '0x60AA', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr @@ -103,7 +103,7 @@ describe('trace a call', async () => { }, ], }, - 'produced a correct trace' + 'produced a correct trace', ) }) }) diff --git a/packages/client/test/rpc/debug/traceTransaction.spec.ts b/packages/client/test/rpc/debug/traceTransaction.spec.ts index 024a20a276..60f0719406 100644 --- a/packages/client/test/rpc/debug/traceTransaction.spec.ts +++ b/packages/client/test/rpc/debug/traceTransaction.spec.ts @@ -32,7 +32,7 @@ describe(method, () => { res = await rpc.request(method, ['0xabcd', { tracerConfig: { some: 'value' } }]) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('custom tracers and tracer configurations are not implemented') + res.error.message.includes('custom tracers and tracer configurations are not implemented'), ) res = await rpc.request(method, ['0xabcd', { tracer: 'someTracer' }]) @@ -59,7 +59,7 @@ describe(method, () => { value: 10000, data: '0x60AA', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr @@ -88,7 +88,7 @@ describe(method, () => { value: 10000, data: '0x560FAA', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr @@ -117,7 +117,7 @@ describe(method, () => { value: 10000, data: '0x604260005260206000F3', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr @@ -131,7 +131,7 @@ describe(method, () => { assert.equal( res.result.structLogs[5].memory[0], '0x0000000000000000000000000000000000000000000000000000000000000042', - 'produced a trace with correct memory value returned' + 'produced a trace with correct memory value returned', ) }) @@ -150,7 +150,7 @@ describe(method, () => { value: 10000, data: '0x600F6000', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr diff --git a/packages/client/test/rpc/engine/CLConnectionManager.spec.ts b/packages/client/test/rpc/engine/CLConnectionManager.spec.ts index 49e0d67ff2..6051ed54de 100644 --- a/packages/client/test/rpc/engine/CLConnectionManager.spec.ts +++ b/packages/client/test/rpc/engine/CLConnectionManager.spec.ts @@ -149,7 +149,7 @@ describe('updates status correctly', async () => { assert.equal( manager['connectionStatus'], ConnectionStatus.Connected, - 'connection status updated correctly' + 'connection status updated correctly', ) }) }) @@ -165,7 +165,7 @@ describe('updates connection status correctly', async () => { assert.equal( manager['connectionStatus'], ConnectionStatus.Disconnected, - 'should disconnect from CL' + 'should disconnect from CL', ) }) it('should change status to uncertain', () => { @@ -175,7 +175,7 @@ describe('updates connection status correctly', async () => { assert.equal( manager['connectionStatus'], ConnectionStatus.Uncertain, - 'should update status to uncertain' + 'should 
update status to uncertain', ) }) diff --git a/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts b/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts index 358a1f0c60..e1a2db4656 100644 --- a/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts +++ b/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts @@ -14,7 +14,7 @@ describe(method, () => { assert.equal( res.result.findIndex((el: string) => el === 'engine_exchangeCapabilities'), -1, - 'should not include engine_exchangeCapabilities in response' + 'should not include engine_exchangeCapabilities in response', ) }) }) diff --git a/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts b/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts index 3b3a14f777..421e78fbe8 100644 --- a/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts +++ b/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts @@ -42,7 +42,7 @@ function createBlock(parentBlock: Block) { gasLimit: parentBlock.header.gasLimit, }, }, - { common } + { common }, ) return block } @@ -60,8 +60,8 @@ describe(method, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'headBlockHash': hex string without 0x prefix" - ) + "invalid argument 0 for key 'headBlockHash': hex string without 0x prefix", + ), ) }) @@ -76,8 +76,8 @@ describe(method, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'finalizedBlockHash': invalid block hash" - ) + "invalid argument 0 for key 'finalizedBlockHash': invalid block hash", + ), ) }) @@ -103,7 +103,7 @@ describe(method, () => { assert.equal(res.result.payloadStatus.status, 'VALID') assert.equal( res.result.payloadStatus.latestValidHash, - '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a' + '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a', ) assert.equal(res.result.payloadStatus.validationError, null) assert.notEqual(res.result.payloadId, null) @@ -118,7 +118,7 @@ describe(method, () => { const res = await rpc.request(method, invalidTimestampPayload) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('invalid timestamp in payloadAttributes, got 0, need at least 1') + res.error.message.includes('invalid timestamp in payloadAttributes, got 0, need at least 1'), ) }) @@ -175,7 +175,7 @@ describe(method, () => { extraData: new Uint8Array(97), }, } as BlockData, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([newBlock]) @@ -317,7 +317,7 @@ describe(method, () => { } const canonicalPayload = canonical.map( - (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload + (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload, ) const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) @@ -357,7 +357,7 @@ describe(method, () => { } const canonicalPayload = canonical.map( - (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload + (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload, ) const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) diff --git a/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts b/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts index 182e2dca70..c43e167931 100644 --- a/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts +++ 
b/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts @@ -53,7 +53,7 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) const tx2 = createTxFromTxData( { @@ -64,27 +64,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) const block = createBlockFromBlockData( { transactions: [tx], header: BlockHeader.fromHeaderData( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const block2 = createBlockFromBlockData( { transactions: [tx2], header: BlockHeader.fromHeaderData( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) @@ -96,7 +96,7 @@ describe(method, () => { assert.equal( res.result[0].transactions[0], bytesToHex(tx.serialize()), - 'got expected transaction from first payload' + 'got expected transaction from first payload', ) assert.equal(res.result[1], null, 'got null for block not found in chain') assert.equal(res.result.length, 3, 'length of response matches number of block hashes sent') @@ -120,7 +120,7 @@ describe(method, () => { { engine: true, hardfork: Hardfork.London, - } + }, ) const rpc = getRpcClient(server) common.setHardfork(Hardfork.London) @@ -139,7 +139,7 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) const tx2 = createTxFromTxData( { @@ -150,27 +150,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) const block = createBlockFromBlockData( { transactions: [tx], header: BlockHeader.fromHeaderData( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const block2 = createBlockFromBlockData( { transactions: [tx2], header: BlockHeader.fromHeaderData( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) @@ -182,7 +182,7 @@ describe(method, () => { assert.equal( res.result[0].withdrawals, null, - 'got null for withdrawals field on pre-Shanghai block' + 'got null for withdrawals field on pre-Shanghai block', ) // Restore setStateRoot diff --git a/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts b/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts index 02384549d2..45c4226068 100644 --- a/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts @@ -55,7 +55,7 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) const tx2 = createTxFromTxData( { @@ -66,27 +66,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { 
common }, ).sign(pkey) const block = createBlockFromBlockData( { transactions: [tx], header: BlockHeader.fromHeaderData( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const block2 = createBlockFromBlockData( { transactions: [tx2], header: BlockHeader.fromHeaderData( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) @@ -95,19 +95,19 @@ describe(method, () => { assert.equal( res.result[0].transactions[0], bytesToHex(tx.serialize()), - 'got expected transaction from first payload' + 'got expected transaction from first payload', ) assert.equal( res.result.length, 2, - 'length of response matches start of range up to highest known block' + 'length of response matches start of range up to highest known block', ) const res2 = await rpc.request(method, ['0x3', '0x2']) assert.equal( res2.result.length, 0, - 'got empty array when start of requested range is beyond current chain head' + 'got empty array when start of requested range is beyond current chain head', ) }) @@ -137,7 +137,7 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) const tx2 = createTxFromTxData( { @@ -148,27 +148,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) const block = createBlockFromBlockData( { transactions: [tx], header: BlockHeader.fromHeaderData( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const block2 = createBlockFromBlockData( { transactions: [tx2], header: BlockHeader.fromHeaderData( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) diff --git a/packages/client/test/rpc/engine/getPayloadV3.spec.ts b/packages/client/test/rpc/engine/getPayloadV3.spec.ts index 0edae358fa..def3e58408 100644 --- a/packages/client/test/rpc/engine/getPayloadV3.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadV3.spec.ts @@ -106,7 +106,7 @@ describe(method, () => { gasLimit: 30000000n, to: Address.zero(), }, - { common } + { common }, ).sign(pkey) await service.txPool.add(tx, true) @@ -116,14 +116,14 @@ describe(method, () => { assert.equal( executionPayload.blockHash, '0x8c71ad199a3dda94de6a1c31cc50a26b1f03a8a4924e9ea3fd7420c6411cac42', - 'built expected block' + 'built expected block', ) assert.equal(executionPayload.excessBlobGas, '0x0', 'correct execess blob gas') assert.equal(executionPayload.blobGasUsed, '0x20000', 'correct blob gas used') const { commitments, proofs, blobs } = blobsBundle assert.ok( commitments.length === proofs.length && commitments.length === blobs.length, - 'equal commitments, proofs and blobs' + 'equal commitments, proofs and blobs', ) assert.equal(blobs.length, 1, '1 
blob should be returned') assert.equal(proofs[0], bytesToHex(txProofs[0]), 'proof should match') diff --git a/packages/client/test/rpc/engine/kaustinen6.spec.ts b/packages/client/test/rpc/engine/kaustinen6.spec.ts index 4ad0f3b2b2..4b7dab2c55 100644 --- a/packages/client/test/rpc/engine/kaustinen6.spec.ts +++ b/packages/client/test/rpc/engine/kaustinen6.spec.ts @@ -39,7 +39,7 @@ const originalValidate = (BlockHeader as any).prototype._consensusFormatValidati async function fetchExecutionPayload( peerBeaconUrl: string, - slot: number | string + slot: number | string, ): Promise { let beaconPayload: BeaconPayloadJson | undefined = undefined try { @@ -55,7 +55,7 @@ async function runBlock( { chain, rpc, common }: { chain: Chain; rpc: HttpClient; common: Common }, { execute, parent }: { execute: any; parent: any }, isBeaconData: boolean, - context: any + context: any, ) { const blockCache = chain.blockCache diff --git a/packages/client/test/rpc/engine/newPayloadV1.spec.ts b/packages/client/test/rpc/engine/newPayloadV1.spec.ts index d2b6754909..df1155e379 100644 --- a/packages/client/test/rpc/engine/newPayloadV1.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV1.spec.ts @@ -27,8 +27,8 @@ describe(method, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'parentHash': hex string without 0x prefix" - ) + "invalid argument 0 for key 'parentHash': hex string without 0x prefix", + ), ) }) @@ -39,7 +39,7 @@ describe(method, () => { const res = await rpc.request(method, blockDataWithInvalidBlockHash) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash") + res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash"), ) }) @@ -145,7 +145,7 @@ describe(method, () => { const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' assert.ok( res.result.validationError.includes(expectedError), - `should error with - ${expectedError}` + `should error with - ${expectedError}`, ) }) @@ -162,7 +162,7 @@ describe(method, () => { value: 1, to: Address.fromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), }, - { common } + { common }, ) const transactions = [bytesToHex(tx.serialize())] @@ -180,7 +180,7 @@ describe(method, () => { it('call with valid data & valid transaction', async () => { const accountPk = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const accountAddress = Address.fromPrivateKey(accountPk) const newGenesisJSON = { @@ -201,7 +201,7 @@ describe(method, () => { value: 6, gasLimit: 53_000, }, - { common } + { common }, ).sign(accountPk) const transactions = [bytesToHex(tx.serialize())] const blockDataWithValidTransaction = { @@ -220,7 +220,7 @@ describe(method, () => { it('call with too many transactions', async () => { const accountPk = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const accountAddress = Address.fromPrivateKey(accountPk) const newGenesisJSON = { @@ -245,7 +245,7 @@ describe(method, () => { value: 6, gasLimit: 53_000, }, - { common } + { common }, ).sign(accountPk) return bytesToHex(tx.serialize()) diff --git a/packages/client/test/rpc/engine/newPayloadV2.spec.ts b/packages/client/test/rpc/engine/newPayloadV2.spec.ts index 
16ccb21d08..f11aaad90f 100644 --- a/packages/client/test/rpc/engine/newPayloadV2.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV2.spec.ts @@ -27,8 +27,8 @@ describe(`${method}: call with executionPayloadV1`, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'parentHash': hex string without 0x prefix" - ) + "invalid argument 0 for key 'parentHash': hex string without 0x prefix", + ), ) }) @@ -39,7 +39,7 @@ describe(`${method}: call with executionPayloadV1`, () => { const res = await rpc.request(method, blockDataWithInvalidBlockHash) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash") + res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash"), ) }) @@ -143,7 +143,7 @@ describe(`${method}: call with executionPayloadV1`, () => { const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' assert.ok( res.result.validationError.includes(expectedError), - `should error with - ${expectedError}` + `should error with - ${expectedError}`, ) }) @@ -160,7 +160,7 @@ describe(`${method}: call with executionPayloadV1`, () => { value: 1, to: Address.fromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), }, - { common } + { common }, ) const transactions = [bytesToHex(tx.serialize())] @@ -177,7 +177,7 @@ describe(`${method}: call with executionPayloadV1`, () => { it('call with valid data & valid transaction', async () => { const accountPk = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const accountAddress = Address.fromPrivateKey(accountPk) const newGenesisJSON = { @@ -198,7 +198,7 @@ describe(`${method}: call with executionPayloadV1`, () => { value: 6, gasLimit: 53_000, }, - { common } + { common }, ).sign(accountPk) const transactions = [bytesToHex(tx.serialize())] const blockDataWithValidTransaction = { diff --git a/packages/client/test/rpc/engine/newPayloadV3.spec.ts b/packages/client/test/rpc/engine/newPayloadV3.spec.ts index 33c98e9bbc..75303d7629 100644 --- a/packages/client/test/rpc/engine/newPayloadV3.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV3.spec.ts @@ -28,7 +28,7 @@ describe(`${method}: call with executionPayloadV3`, () => { const res = await rpc.request(method, [validBlock, [], parentBeaconBlockRoot]) assert.equal(res.error.code, UNSUPPORTED_FORK) assert.ok( - res.error.message.includes('NewPayloadV{1|2} MUST be used before Cancun is activated') + res.error.message.includes('NewPayloadV{1|2} MUST be used before Cancun is activated'), ) }) diff --git a/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts b/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts index 39451b9c32..4f4880a4d1 100644 --- a/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts @@ -42,7 +42,7 @@ describe(`${method}: Cancun validations`, () => { assert.equal(res.result.status, 'INVALID') assert.equal( res.result.validationError, - 'Error verifying blobVersionedHashes: expected=0 received=2' + 'Error verifying blobVersionedHashes: expected=0 received=2', ) const txString = @@ -83,7 +83,7 @@ describe(`${method}: Cancun validations`, () => { res = await rpc.request(method, blockDataMissingParentBeaconRoot) assert.equal(res.error.code, 
INVALID_PARAMS) assert.ok( - res.error.message.includes('missing value for required argument parentBeaconBlockRoot') + res.error.message.includes('missing value for required argument parentBeaconBlockRoot'), ) const blockDataExtraMissingHashes1 = [ @@ -105,7 +105,7 @@ describe(`${method}: Cancun validations`, () => { assert.equal(res.result.status, 'INVALID') assert.equal( res.result.validationError, - 'Error verifying blobVersionedHashes: expected=2 received=1' + 'Error verifying blobVersionedHashes: expected=2 received=1', ) const blockDataExtraMisMatchingHashes1 = [ @@ -127,7 +127,7 @@ describe(`${method}: Cancun validations`, () => { assert.equal(res.result.status, 'INVALID') assert.equal( res.result.validationError, - 'Error verifying blobVersionedHashes: mismatch at index=1 expected=0x0131…52c5 received=0x3456…' + 'Error verifying blobVersionedHashes: mismatch at index=1 expected=0x0131…52c5 received=0x3456…', ) const blockDataMatchingVersionedHashes = [ diff --git a/packages/client/test/rpc/engine/newPayloadV4.spec.ts b/packages/client/test/rpc/engine/newPayloadV4.spec.ts index 598dec3ca5..a46038ebd4 100644 --- a/packages/client/test/rpc/engine/newPayloadV4.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV4.spec.ts @@ -107,15 +107,15 @@ describe(`${method}: call with executionPayloadV4`, () => { const { executionPayload } = res.result assert.ok( executionPayload.depositRequests?.length === 1, - 'depositRequests should have 1 deposit request' + 'depositRequests should have 1 deposit request', ) assert.ok( executionPayload.withdrawalRequests !== undefined, - 'depositRequests field should be received' + 'depositRequests field should be received', ) assert.ok( executionPayload.consolidationRequests !== undefined, - 'consolidationRequests field should be received' + 'consolidationRequests field should be received', ) res = await rpc.request(method, [executionPayload, [], parentBeaconBlockRoot]) diff --git a/packages/client/test/rpc/engine/preimages.spec.ts b/packages/client/test/rpc/engine/preimages.spec.ts index 9081ccd97e..9e06517a2e 100644 --- a/packages/client/test/rpc/engine/preimages.spec.ts +++ b/packages/client/test/rpc/engine/preimages.spec.ts @@ -50,7 +50,7 @@ async function genBlockWithdrawals(blockNumber: number) { } }) const withdrawalsRoot = bytesToHex( - await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData)) + await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData)), ) return { withdrawals, withdrawalsRoot } @@ -66,7 +66,7 @@ async function runBlock( receiptTrie: PrefixedHexString gasUsed: PrefixedHexString coinbase: PrefixedHexString - } + }, ) { const { transactions, parentHash, blockNumber, stateRoot, receiptTrie, gasUsed, coinbase } = runData @@ -116,7 +116,7 @@ describe(`valid verkle network setup`, async () => { { engine: true, savePreimages: true, - } + }, ) ;(chain.blockchain as any).validateHeader = () => {} @@ -252,12 +252,12 @@ describe(`valid verkle network setup`, async () => { for (const preimage of preimages) { const preimageBytes = hexToBytes(preimage) const savedPreimage = await execution.preimagesManager!.getPreimage( - keccak256(preimageBytes) + keccak256(preimageBytes), ) assert.isNotNull(savedPreimage, `Missing preimage for ${preimage}`) assert.ok( savedPreimage !== null && equalsBytes(savedPreimage, preimageBytes), - `Incorrect preimage for ${preimage}` + `Incorrect preimage for ${preimage}`, ) } parentHash = blockHash diff --git a/packages/client/test/rpc/engine/withdrawals.spec.ts 
b/packages/client/test/rpc/engine/withdrawals.spec.ts index c2616a4437..e46f000c89 100644 --- a/packages/client/test/rpc/engine/withdrawals.spec.ts +++ b/packages/client/test/rpc/engine/withdrawals.spec.ts @@ -105,12 +105,12 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { it(name, async () => { // check withdrawals root computation const computedWithdrawalsRoot = bytesToHex( - await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData), new Trie()) + await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData), new Trie()), ) assert.equal( withdrawalsRoot, computedWithdrawalsRoot, - 'withdrawalsRoot compuation should match' + 'withdrawalsRoot compuation should match', ) const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) @@ -120,7 +120,7 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { ]) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('PayloadAttributesV2 MUST be used after Shanghai is activated') + res.error.message.includes('PayloadAttributesV2 MUST be used after Shanghai is activated'), ) res = await rpc.request('engine_forkchoiceUpdatedV2', [ @@ -139,7 +139,7 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { assert.equal( executionPayload!.withdrawals!.length, withdrawals.length, - 'withdrawals should match' + 'withdrawals should match', ) assert.equal(blockValue, '0x0', 'No value should be returned') payload = executionPayload @@ -149,7 +149,7 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { assert.equal( payload!.stateRoot, '0x23eadd91fca55c0e14034e4d63b2b3ed43f2e807b6bf4d276b784ac245e7fa3f', - 'stateRoot should match' + 'stateRoot should match', ) } diff --git a/packages/client/test/rpc/eth/blobBaseFee.spec.ts b/packages/client/test/rpc/eth/blobBaseFee.spec.ts index b4c9bbe01e..5a2dc594fc 100644 --- a/packages/client/test/rpc/eth/blobBaseFee.spec.ts +++ b/packages/client/test/rpc/eth/blobBaseFee.spec.ts @@ -25,7 +25,7 @@ const accountAddress = Address.fromPrivateKey(privateKey) const produceBlockWith4844Tx = async ( execution: VMExecution, chain: Chain, - blobsCount: number[] + blobsCount: number[], ) => { const kzg = await loadKZG() // 4844 sample blob @@ -75,8 +75,8 @@ const produceBlockWith4844Tx = async ( kzgCommitments, maxFeePerBlobGas: BigInt(1000), }, - { common: vmCopy.common } - ).sign(privateKey) + { common: vmCopy.common }, + ).sign(privateKey), ) nonce++ } diff --git a/packages/client/test/rpc/eth/call.spec.ts b/packages/client/test/rpc/eth/call.spec.ts index e516875ad1..a2d7613d0e 100644 --- a/packages/client/test/rpc/eth/call.spec.ts +++ b/packages/client/test/rpc/eth/call.spec.ts @@ -64,7 +64,7 @@ describe(method, () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx @@ -104,21 +104,21 @@ describe(method, () => { assert.equal( res.result, bytesToHex(execResult.returnValue), - 'should return the correct return value' + 'should return the correct return value', ) res = await rpc.request(method, [{ ...estimateTxData }, 'latest']) assert.equal( res.result, bytesToHex(execResult.returnValue), - 'should return the correct return value with no gas limit provided' + 'should return the correct return value with no gas limit provided', ) res = await rpc.request(method, [{ gasLimit, data }, 'latest']) assert.equal( 
res.result, bytesToHex(result.results[0].execResult.returnValue), - `should let run call without 'to' for contract creation` + `should let run call without 'to' for contract creation`, ) }) diff --git a/packages/client/test/rpc/eth/chainId.spec.ts b/packages/client/test/rpc/eth/chainId.spec.ts index a827c875a2..3b106d425e 100644 --- a/packages/client/test/rpc/eth/chainId.spec.ts +++ b/packages/client/test/rpc/eth/chainId.spec.ts @@ -23,7 +23,7 @@ describe(method, () => { it('returns 3 for Goerli', async () => { const manager = createManager( - await createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) + await createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }), ) const rpc = getRpcClient(startRPC(manager.getMethods())) diff --git a/packages/client/test/rpc/eth/estimateGas.spec.ts b/packages/client/test/rpc/eth/estimateGas.spec.ts index a584855504..a31fc8f10c 100644 --- a/packages/client/test/rpc/eth/estimateGas.spec.ts +++ b/packages/client/test/rpc/eth/estimateGas.spec.ts @@ -73,7 +73,7 @@ describe( gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx @@ -114,7 +114,7 @@ describe( assert.equal( res.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) // Test without blockopt as its optional and should default to latest @@ -122,7 +122,7 @@ describe( assert.equal( res2.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) // Setup chain to run an EIP1559 tx const service = client.services[0] as FullEthereumService @@ -141,10 +141,10 @@ describe( common: service.chain.config.chainCommon, skipConsensusFormatValidation: true, calcDifficultyFromHeader: headBlock.header, - } + }, ), }, - { common: service.chain.config.chainCommon } + { common: service.chain.config.chainCommon }, ) vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) @@ -158,7 +158,7 @@ describe( assert.equal( EIP1559res.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate for EIP1559 tx' + 'should return the correct gas estimate for EIP1559 tx', ) // Test EIP1559 tx with no maxFeePerGas @@ -174,7 +174,7 @@ describe( assert.equal( EIP1559reqNoGas.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) // Test legacy tx with London head block @@ -184,7 +184,7 @@ describe( assert.equal( legacyTxNoGas.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) }) @@ -214,5 +214,5 @@ describe( assert.ok(res.error.message.includes('"pending" is not yet supported')) }) }, - 20000 + 20000, ) diff --git a/packages/client/test/rpc/eth/gasPrice.spec.ts b/packages/client/test/rpc/eth/gasPrice.spec.ts index 4516d91d79..d2e0ea922a 100644 --- a/packages/client/test/rpc/eth/gasPrice.spec.ts +++ b/packages/client/test/rpc/eth/gasPrice.spec.ts @@ -23,7 +23,7 @@ describe(method, () => { // construct tx const tx = createLegacyTx( { gasLimit: 21000, gasPrice: GAS_PRICE, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -32,7 +32,7 @@ describe(method, () => { assert.equal( res.result, intToHex(GAS_PRICE), - 'should return the correct suggested gas price with 1 
legacy transaction' + 'should return the correct suggested gas price with 1 legacy transaction', ) }) @@ -46,7 +46,7 @@ describe(method, () => { averageGasPrice += BigInt(gasPrice) const tx = createLegacyTx( { nonce: i, gasLimit: 21000, gasPrice, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) } @@ -56,7 +56,7 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(averageGasPrice), - 'should return the correct gas price with multiple legacy transactions' + 'should return the correct gas price with multiple legacy transactions', ) }) @@ -68,11 +68,11 @@ describe(method, () => { const tx1 = createLegacyTx( { gasLimit: 21000, gasPrice: G1, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) const tx2 = createLegacyTx( { nonce: 1, gasLimit: 21000, gasPrice: G2, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx1, tx2]) @@ -82,14 +82,14 @@ describe(method, () => { assert.equal( res.result, intToHex(Math.trunc(averageGasPrice)), - 'should return the correct gas price with multiple legacy transactions in a block' + 'should return the correct gas price with multiple legacy transactions in a block', ) }) it('call with 1559 transaction data', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) const tx = create1559FeeMarketTx( @@ -99,7 +99,7 @@ describe(method, () => { maxFeePerGas: 975000000, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -110,14 +110,14 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(gasPrice), - 'should return the correct gas price with 1 1559 transaction' + 'should return the correct gas price with 1 1559 transaction', ) }) it('call with multiple 1559 transactions', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) const maxPriority1 = 10 @@ -129,7 +129,7 @@ describe(method, () => { maxFeePerGas: 975000000, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) const tx2 = create1559FeeMarketTx( { @@ -139,7 +139,7 @@ describe(method, () => { maxFeePerGas: 975000000, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx1, tx2]) @@ -151,7 +151,7 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(gasPrice), - 'should return the correct gas price with 1 1559 transaction' + 'should return the correct gas price with 1 1559 transaction', ) }) @@ -171,7 +171,7 @@ describe(method, () => { gasPrice: firstBlockGasPrice, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) } else { tx = createLegacyTx( @@ -181,7 +181,7 @@ describe(method, () => { gasPrice, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) } await runBlockWithTxs(chain, execution, [tx!]) @@ -197,7 +197,7 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(gasPrice), - 'should return the correct gas price for 21 blocks' + 'should return the correct gas price 
for 21 blocks', ) }) }) diff --git a/packages/client/test/rpc/eth/getBalance.spec.ts b/packages/client/test/rpc/eth/getBalance.spec.ts index 9afbc079c8..2598d61a2f 100644 --- a/packages/client/test/rpc/eth/getBalance.spec.ts +++ b/packages/client/test/rpc/eth/getBalance.spec.ts @@ -44,7 +44,7 @@ describe( assert.equal( res.result, bigIntToHex(genesisBalance), - 'should return the correct genesis balance' + 'should return the correct genesis balance', ) // construct block with tx @@ -64,7 +64,7 @@ describe( assert.equal( res.result, bigIntToHex(expectedNewBalance), - 'should return the correct balance after a tx' + 'should return the correct balance after a tx', ) // verify we can query with "earliest" @@ -72,7 +72,7 @@ describe( assert.equal( res.result, bigIntToHex(genesisBalance), - "should return the correct balance with 'earliest'" + "should return the correct balance with 'earliest'", ) // verify we can query with a past block number @@ -80,7 +80,7 @@ describe( assert.equal( res.result, bigIntToHex(genesisBalance), - 'should return the correct balance with a past block number' + 'should return the correct balance with a past block number', ) // call with height that exceeds chain height @@ -108,5 +108,5 @@ describe( assert.ok(res.error.message.includes('"pending" is not yet supported')) }) }, - 40000 + 40000, ) diff --git a/packages/client/test/rpc/eth/getBlockByHash.spec.ts b/packages/client/test/rpc/eth/getBlockByHash.spec.ts index 5416ada56d..f737b4f428 100644 --- a/packages/client/test/rpc/eth/getBlockByHash.spec.ts +++ b/packages/client/test/rpc/eth/getBlockByHash.spec.ts @@ -33,7 +33,7 @@ describe(method, () => { assert.equal( typeof res.result.transactions[0], 'string', - 'should return only the hashes of the transactions' + 'should return only the hashes of the transactions', ) }) diff --git a/packages/client/test/rpc/eth/getBlockByNumber.spec.ts b/packages/client/test/rpc/eth/getBlockByNumber.spec.ts index 8b631dd738..2e21890b93 100644 --- a/packages/client/test/rpc/eth/getBlockByNumber.spec.ts +++ b/packages/client/test/rpc/eth/getBlockByNumber.spec.ts @@ -17,7 +17,7 @@ const mockedTx1 = createLegacyTx({}).sign(dummy.privKey) const mockedTx2 = createLegacyTx({ nonce: 1 }).sign(dummy.privKey) const mockedBlobTx3 = create4844BlobTx( { nonce: 2, blobsData: ['0x1234'], to: Address.zero() }, - { common } + { common }, ).sign(dummy.privKey) const blockHash = hexToBytes('0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5') const transactions = [mockedTx1] @@ -41,7 +41,7 @@ const block = { function createChain(headBlock = block) { const genesisBlockHash = hexToBytes( - '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5' + '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5', ) const genesisBlock = { hash: () => genesisBlockHash, @@ -86,7 +86,7 @@ describe(method, async () => { assert.equal( typeof res.result.transactions[0], 'string', - 'should return only the hashes of the transactions' + 'should return only the hashes of the transactions', ) }) @@ -134,8 +134,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"', + ), ) }) @@ -172,7 +172,7 @@ describe(method, async () => { header: { number: 1, parentHash: 
genesisBlock.header.hash() }, transactions: [mockedBlobTx3], }, - { common } + { common }, ) const manager = createManager(await createClient({ chain: createChain(block1 as any) })) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -181,7 +181,7 @@ describe(method, async () => { assert.equal( res.result.transactions[0].blobVersionedHashes.length, 1, - 'block body contains a transaction with the blobVersionedHashes field' + 'block body contains a transaction with the blobVersionedHashes field', ) }) }) diff --git a/packages/client/test/rpc/eth/getBlockReceipts.spec.ts b/packages/client/test/rpc/eth/getBlockReceipts.spec.ts index 7234049067..f43a975597 100644 --- a/packages/client/test/rpc/eth/getBlockReceipts.spec.ts +++ b/packages/client/test/rpc/eth/getBlockReceipts.spec.ts @@ -34,7 +34,7 @@ describe(method, () => { gasPrice: 100, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) const tx2 = createLegacyTx( { @@ -43,7 +43,7 @@ describe(method, () => { to: '0x0000000000000000000000000000000000000000', nonce: 1, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx, tx2]) const res0 = await rpc.request(method, [bytesToHex(tx.hash())]) @@ -55,7 +55,7 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx @@ -66,7 +66,7 @@ describe(method, () => { maxPriorityFeePerGas: 10, to: '0x1230000000000000000000000000000000000321', }, - { common } + { common }, ).sign(dummy.privKey) const tx1 = create1559FeeMarketTx( { @@ -76,7 +76,7 @@ describe(method, () => { to: '0x1230000000000000000000000000000000000321', nonce: 1, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx, tx1]) @@ -137,7 +137,7 @@ describe(method, () => { to: randomBytes(20), nonce: 0n, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts b/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts index 1e06d6157f..ae29e1c366 100644 --- a/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts +++ b/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts @@ -51,7 +51,7 @@ describe(method, () => { gasLimit: 2000000, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx @@ -107,7 +107,7 @@ describe(method, () => { gasLimit: 2000000, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx block.transactions[1] = tx2 diff --git a/packages/client/test/rpc/eth/getCode.spec.ts b/packages/client/test/rpc/eth/getCode.spec.ts index cc6e678865..e214c0c882 100644 --- a/packages/client/test/rpc/eth/getCode.spec.ts +++ b/packages/client/test/rpc/eth/getCode.spec.ts @@ -77,7 +77,7 @@ describe(method, () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx @@ -91,7 +91,7 @@ describe(method, () => { const expectedContractAddress = Address.generate(address, BigInt(0)) assert.ok( createdAddress!.equals(expectedContractAddress), - 'should match the expected contract address' + 
'should match the expected contract address', ) // verify contract has code diff --git a/packages/client/test/rpc/eth/getFeeHistory.spec.ts b/packages/client/test/rpc/eth/getFeeHistory.spec.ts index 211140087b..3ead5e1ff6 100644 --- a/packages/client/test/rpc/eth/getFeeHistory.spec.ts +++ b/packages/client/test/rpc/eth/getFeeHistory.spec.ts @@ -28,7 +28,7 @@ const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3 const pKeyAddress = Address.fromPrivateKey(privateKey) const privateKey4844 = hexToBytes( - '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8' + '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8', ) const p4844Address = Address.fromPrivateKey(privateKey4844) @@ -68,7 +68,7 @@ const produceBlockWithTx = async ( execution: VMExecution, chain: Chain, maxPriorityFeesPerGas: bigint[] = [BigInt(0xff)], - gasLimits: bigint[] = [BigInt(0xfffff)] + gasLimits: bigint[] = [BigInt(0xfffff)], ) => { const { vm } = execution const account = await vm.stateManager.getAccount(pKeyAddress) @@ -99,8 +99,8 @@ const produceBlockWithTx = async ( nonce, data: '0xFE', }, - { common: vmCopy.common } - ).sign(privateKey) + { common: vmCopy.common }, + ).sign(privateKey), ) nonce++ } @@ -119,7 +119,7 @@ const produceBlockWithTx = async ( const produceBlockWith4844Tx = async ( execution: VMExecution, chain: Chain, - blobsCount: number[] + blobsCount: number[], ) => { const kzg = await loadKZG() // 4844 sample blob @@ -169,8 +169,8 @@ const produceBlockWith4844Tx = async ( kzgCommitments, maxFeePerBlobGas: BigInt(1000), }, - { common: vmCopy.common } - ).sign(privateKey4844) + { common: vmCopy.common }, + ).sign(privateKey4844), ) nonce++ } @@ -198,13 +198,13 @@ describe(method, () => { const [firstBaseFee, previousBaseFee, nextBaseFee] = res.result.baseFeePerGas as [ string, string, - string + string, ] const increase = Number( (1000n * (bytesToBigInt(hexToBytes(nextBaseFee)) - bytesToBigInt(hexToBytes(previousBaseFee)))) / - bytesToBigInt(hexToBytes(previousBaseFee)) + bytesToBigInt(hexToBytes(previousBaseFee)), ) / 1000 // Note: this also ensures that block 2,3 are returned, since gas of block 0 -> 1 and 1 -> 2 does not change @@ -239,7 +239,7 @@ describe(method, () => { Number( (1000n * (bytesToBigInt(hexToBytes(nextBaseFee)) - bytesToBigInt(hexToBytes(previousBaseFee)))) / - bytesToBigInt(hexToBytes(previousBaseFee)) + bytesToBigInt(hexToBytes(previousBaseFee)), ) / 1000 assert.equal(decrease, -0.125) @@ -323,12 +323,12 @@ describe(method, () => { assert.equal( parseInt(res.result.reward[0][0]), 0, - 'Should return 0 for empty block reward percentiles' + 'Should return 0 for empty block reward percentiles', ) assert.equal( res.result.reward[0][1], '0x0', - 'Should return 0 for empty block reward percentiles' + 'Should return 0 for empty block reward percentiles', ) }) it(`${method}: should generate reward percentiles`, async () => { @@ -385,7 +385,7 @@ describe(method, () => { const res = await rpc.request(method, ['0x1', 'latest', [10, 20, 60, 100]]) const expected = [priorityFees[0], priorityFees[0], priorityFees[1], priorityFees[1]].map( - bigIntToHex + bigIntToHex, ) assert.deepEqual(res.result.reward[0], expected) @@ -442,6 +442,6 @@ describe(method, () => { }, { timeout: 60000, - } + }, ) }) diff --git a/packages/client/test/rpc/eth/getLogs.spec.ts b/packages/client/test/rpc/eth/getLogs.spec.ts index 2cdd054f59..c433cddea8 100644 --- a/packages/client/test/rpc/eth/getLogs.spec.ts +++ b/packages/client/test/rpc/eth/getLogs.spec.ts @@ -23,7 
+23,7 @@ const method = 'eth_getLogs' ``` */ const logExampleBytecode = hexToBytes( - '0x608060405234801561001057600080fd5b50610257806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063aefb4f0a1461004d575b600080fd5b610067600480360381019061006291906100de565b610069565b005b60005b858110156100c1578284867fbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb856040516100a69190610168565b60405180910390a480806100b99061018d565b91505061006c565b505050505050565b6000813590506100d88161020a565b92915050565b600080600080600060a086880312156100fa576100f9610205565b5b6000610108888289016100c9565b9550506020610119888289016100c9565b945050604061012a888289016100c9565b935050606061013b888289016100c9565b925050608061014c888289016100c9565b9150509295509295909350565b61016281610183565b82525050565b600060208201905061017d6000830184610159565b92915050565b6000819050919050565b600061019882610183565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101cb576101ca6101d6565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600080fd5b61021381610183565b811461021e57600080fd5b5056fea2646970667358221220b98f45f4d4112e71fd287ab0ce7cc1872e53b463eb0abf1182b892192d3d8a1d64736f6c63430008070033' + '0x608060405234801561001057600080fd5b50610257806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063aefb4f0a1461004d575b600080fd5b610067600480360381019061006291906100de565b610069565b005b60005b858110156100c1578284867fbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb856040516100a69190610168565b60405180910390a480806100b99061018d565b91505061006c565b505050505050565b6000813590506100d88161020a565b92915050565b600080600080600060a086880312156100fa576100f9610205565b5b6000610108888289016100c9565b9550506020610119888289016100c9565b945050604061012a888289016100c9565b935050606061013b888289016100c9565b925050608061014c888289016100c9565b9150509295509295909350565b61016281610183565b82525050565b600060208201905061017d6000830184610159565b92915050565b6000819050919050565b600061019882610183565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101cb576101ca6101d6565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600080fd5b61021381610183565b811461021e57600080fd5b5056fea2646970667358221220b98f45f4d4112e71fd287ab0ce7cc1872e53b463eb0abf1182b892192d3d8a1d64736f6c63430008070033', ) describe(method, async () => { @@ -38,7 +38,7 @@ describe(method, async () => { data: logExampleBytecode, nonce: 0, }, - { common } + { common }, ).sign(dummy.privKey) const tx2 = createLegacyTx( { @@ -46,7 +46,7 @@ describe(method, async () => { data: logExampleBytecode, nonce: 1, }, - { common } + { common }, ).sign(dummy.privKey) const contractAddr1 = Address.generate(dummy.addr, BigInt(0)) @@ -54,7 +54,7 @@ describe(method, async () => { // construct txs to emit the logs // data calls log(logCount: 10, num1: 1, num2: 2, num3: 3, num4: 4) const data = hexToBytes( - '0xaefb4f0a000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004' + 
'0xaefb4f0a000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004', ) const tx3 = createLegacyTx( { @@ -63,7 +63,7 @@ describe(method, async () => { to: contractAddr1, nonce: 2, }, - { common } + { common }, ).sign(dummy.privKey) const tx4 = createLegacyTx( { @@ -72,7 +72,7 @@ describe(method, async () => { to: contractAddr2, nonce: 3, }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx1, tx2, tx3, tx4]) @@ -94,7 +94,7 @@ describe(method, async () => { ) { assert.ok( true, - `should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')` + `should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')`, ) } else { assert.fail(`should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')`) @@ -105,7 +105,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (fromBlock/toBlock as block numbers)' + 'should return the correct logs (fromBlock/toBlock as block numbers)', ) // test filtering by single address @@ -137,7 +137,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - empty means anything)' + 'should return the correct logs (filter by topic - empty means anything)', ) // test filtering by topics (exact match) @@ -147,7 +147,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - exact match)' + 'should return the correct logs (filter by topic - exact match)', ) // test filtering by topics (exact match for second topic) @@ -157,7 +157,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - exact match for second topic)' + 'should return the correct logs (filter by topic - exact match for second topic)', ) // test filtering by topics (A or B in first position) @@ -177,7 +177,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - A or B in first position)' + 'should return the correct logs (filter by topic - A or B in first position)', ) // test filtering by topics (null means anything) @@ -190,7 +190,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - null means anything)' + 'should return the correct logs (filter by topic - null means anything)', ) // test filtering by blockHash @@ -234,8 +234,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'Can only specify a blockHash if fromBlock or toBlock are not provided' - ) + 'Can only specify a blockHash if fromBlock or toBlock are not provided', + ), ) res = await rpc.request(method, [ @@ -247,8 +247,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'Can only specify a blockHash if fromBlock or toBlock are not provided' - ) + 'Can only specify a blockHash if fromBlock or toBlock are not provided', + ), ) // unknown address diff --git a/packages/client/test/rpc/eth/getProof.spec.ts b/packages/client/test/rpc/eth/getProof.spec.ts index 
b9e2f41d1d..84f52a7db8 100644 --- a/packages/client/test/rpc/eth/getProof.spec.ts +++ b/packages/client/test/rpc/eth/getProof.spec.ts @@ -140,7 +140,7 @@ describe(method, async () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx @@ -172,7 +172,7 @@ describe(method, async () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: block.header } + { common, calcDifficultyFromHeader: block.header }, ) block2.transactions[0] = storeTx diff --git a/packages/client/test/rpc/eth/getStorageAt.spec.ts b/packages/client/test/rpc/eth/getStorageAt.spec.ts index fe30e8ba1a..e5d3784ead 100644 --- a/packages/client/test/rpc/eth/getStorageAt.spec.ts +++ b/packages/client/test/rpc/eth/getStorageAt.spec.ts @@ -41,7 +41,7 @@ describe(method, async () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = signedTx @@ -65,7 +65,7 @@ describe(method, async () => { assert.equal( res.result, emptySlotStr, - 'should not have new slot value for block that is addressed by "earliest" tag and is older than latest' + 'should not have new slot value for block that is addressed by "earliest" tag and is older than latest', ) // call with integer for block number to see if getStorageAt allows addressing blocks by number index @@ -73,7 +73,7 @@ describe(method, async () => { assert.equal( res.result, expectedSlotValue, - 'should return the correct slot value when addressing the latest block by integer index' + 'should return the correct slot value when addressing the latest block by integer index', ) // call with unsupported block argument diff --git a/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts b/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts index 5a97a2950e..7feabc0fd3 100644 --- a/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts @@ -18,11 +18,11 @@ async function setUp() { nonce: 0, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey), createLegacyTx( { gasLimit: 21000, gasPrice: 50, nonce: 1, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey), ] diff --git a/packages/client/test/rpc/eth/getTransactionByHash.spec.ts b/packages/client/test/rpc/eth/getTransactionByHash.spec.ts index 9a5fa3981a..4004a50748 100644 --- a/packages/client/test/rpc/eth/getTransactionByHash.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionByHash.spec.ts @@ -20,7 +20,7 @@ describe(method, () => { // construct tx const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -39,7 +39,7 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), 'powLondon', - { txLookupLimit: 0 } + { txLookupLimit: 0 }, ) const rpc = getRpcClient(server) // construct tx @@ -50,7 +50,7 @@ describe(method, () => { maxPriorityFeePerGas: 10, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -67,7 +67,7 @@ describe(method, () => { assert.equal( res.result.hash, bytesToHex(tx.hash()), - 'should return the correct tx when 
txLookupLimit=0' + 'should return the correct tx when txLookupLimit=0', ) }) diff --git a/packages/client/test/rpc/eth/getTransactionCount.spec.ts b/packages/client/test/rpc/eth/getTransactionCount.spec.ts index b58383b877..fbb77b7608 100644 --- a/packages/client/test/rpc/eth/getTransactionCount.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionCount.spec.ts @@ -57,7 +57,7 @@ describe(method, () => { gasLimit: 2000000, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx diff --git a/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts b/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts index 8f4ff58727..07a80b5aa3 100644 --- a/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts @@ -32,7 +32,7 @@ describe(method, () => { gasPrice: 100, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -45,7 +45,7 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx @@ -56,7 +56,7 @@ describe(method, () => { maxPriorityFeePerGas: 10, to: '0x1230000000000000000000000000000000000321', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -117,7 +117,7 @@ describe(method, () => { to: randomBytes(20), nonce: 0n, }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/eth/sendRawTransaction.spec.ts b/packages/client/test/rpc/eth/sendRawTransaction.spec.ts index 0d2de1b69e..c4254395c5 100644 --- a/packages/client/test/rpc/eth/sendRawTransaction.spec.ts +++ b/packages/client/test/rpc/eth/sendRawTransaction.spec.ts @@ -56,7 +56,7 @@ describe(method, () => { assert.equal( res.result, '0xd7217a7d3251880051783f305a3536e368c604aa1f1602e6cd107eb7b87129da', - 'should return the correct tx hash' + 'should return the correct tx hash', ) // Restore setStateRoot @@ -84,7 +84,7 @@ describe(method, () => { assert.equal( res.result, '0xf6798d5ed936a464ef4f49dd5a3abe1ad6947364912bd47c5e56781125d44ac3', - 'local tx with lower gasprice than minimum gasprice added to pool' + 'local tx with lower gasprice than minimum gasprice added to pool', ) // Restore setStateRoot @@ -122,8 +122,8 @@ describe(method, () => { assert.equal(res.error.code, INTERNAL_ERROR) assert.ok( res.error.message.includes( - 'client is not aware of the current chain height yet (give sync some more time)' - ) + 'client is not aware of the current chain height yet (give sync some more time)', + ), ) }) @@ -245,7 +245,7 @@ describe(method, () => { maxPriorityFeePerGas: 1000000n, to: randomBytes(20), }, - { common } + { common }, ).sign(pk) const replacementTx = create4844BlobTx( @@ -260,7 +260,7 @@ describe(method, () => { maxPriorityFeePerGas: 10000000n, to: randomBytes(20), }, - { common } + { common }, ).sign(pk) const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm await vm.stateManager.putAccount(tx.getSenderAddress(), new Account()) diff --git a/packages/client/test/rpc/eth/syncing.spec.ts b/packages/client/test/rpc/eth/syncing.spec.ts index 1945cf933e..89f55855fb 100644 --- a/packages/client/test/rpc/eth/syncing.spec.ts +++ 
b/packages/client/test/rpc/eth/syncing.spec.ts @@ -43,7 +43,7 @@ describe(method, () => { const rpcServer = startRPC(manager.getMethods()) const rpc = getRpcClient(rpcServer) const sync = client.services[0].synchronizer! - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return @@ -65,7 +65,7 @@ describe(method, () => { const rpcServer = startRPC(manager.getMethods()) const rpc = getRpcClient(rpcServer) const sync = client.services[0].synchronizer as FullSynchronizer - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return { diff --git a/packages/client/test/rpc/helpers.ts b/packages/client/test/rpc/helpers.ts index 1fa17f829c..551daab306 100644 --- a/packages/client/test/rpc/helpers.ts +++ b/packages/client/test/rpc/helpers.ts @@ -60,7 +60,7 @@ type createClientArgs = { export function startRPC( methods: any, opts: StartRPCOpts = { port: 0 }, - withEngineMiddleware?: WithEngineMiddleware + withEngineMiddleware?: WithEngineMiddleware, ) { const { port, wsServer } = opts const server = new RPCServer(methods) @@ -123,7 +123,7 @@ export async function createClient(clientOpts: Partial = {}) { if ((chain as any)._headers !== undefined) { ;(chain as any)._headers.latest = BlockHeader.fromHeaderData( { withdrawalsRoot: common.isActivatedEIP(4895) ? KECCAK256_RLP : undefined }, - { common } + { common }, ) } @@ -280,7 +280,7 @@ export async function runBlockWithTxs( chain: Chain, execution: VMExecution, txs: TypedTransaction[], - fromEngine = false + fromEngine = false, ) { const { vm } = execution // build block with tx diff --git a/packages/client/test/rpc/net/version.spec.ts b/packages/client/test/rpc/net/version.spec.ts index c57f6d8069..8e07bf368d 100644 --- a/packages/client/test/rpc/net/version.spec.ts +++ b/packages/client/test/rpc/net/version.spec.ts @@ -12,7 +12,7 @@ function compareResult(result: any, chainId: any) { assert.equal( result, chainId, - `should be the correct chain ID (expected: ${chainId}, received: ${result})` + `should be the correct chain ID (expected: ${chainId}, received: ${result})`, ) } @@ -28,7 +28,7 @@ describe(method, () => { it('call on holesky', async () => { const manager = createManager( - await createClient({ opened: true, commonChain: new Common({ chain: Chain.Holesky }) }) + await createClient({ opened: true, commonChain: new Common({ chain: Chain.Holesky }) }), ) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -42,7 +42,7 @@ describe(method, () => { it('call on goerli', async () => { const manager = createManager( - await createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) + await createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }), ) const rpc = getRpcClient(startRPC(manager.getMethods())) diff --git a/packages/client/test/rpc/txpool/content.spec.ts b/packages/client/test/rpc/txpool/content.spec.ts index d96485f403..bf95e680fa 100644 --- a/packages/client/test/rpc/txpool/content.spec.ts +++ b/packages/client/test/rpc/txpool/content.spec.ts @@ -40,7 +40,7 @@ describe(method, () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) let ranBlock: Block | undefined = undefined @@ -63,10 +63,10 @@ describe(method, () => { common: service.chain.config.chainCommon, skipConsensusFormatValidation: true, calcDifficultyFromHeader: headBlock.header, - } + }, ), }, - { common: 
service.chain.config.chainCommon } + { common: service.chain.config.chainCommon }, ) vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) @@ -79,7 +79,7 @@ describe(method, () => { assert.equal( Object.keys(res.result.pending).length, 1, - 'received one pending transaction back from response' + 'received one pending transaction back from response', ) }) }) diff --git a/packages/client/test/rpc/validation.spec.ts b/packages/client/test/rpc/validation.spec.ts index b252433a34..932f37bf3b 100644 --- a/packages/client/test/rpc/validation.spec.ts +++ b/packages/client/test/rpc/validation.spec.ts @@ -52,15 +52,15 @@ describe(prefix, () => { // valid // zero address assert.ok( - validatorResult(validators.address(['0x0000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x0000000000000000000000000000000000000000'], 0)), ) // lowercase address assert.ok( - validatorResult(validators.address(['0xa7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270'], 0)) + validatorResult(validators.address(['0xa7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270'], 0)), ) // checksummed address assert.ok( - validatorResult(validators.address(['0xa7d8d9ef8D8Ce8992Df33D8b8CF4Aebabd5bD270'], 0)) + validatorResult(validators.address(['0xa7d8d9ef8D8Ce8992Df33D8b8CF4Aebabd5bD270'], 0)), ) // invalid @@ -70,23 +70,23 @@ describe(prefix, () => { assert.notOk(validatorResult(validators.address(['0x1'], 0))) // invalid length: 38 chars assert.notOk( - validatorResult(validators.address(['0x00000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x00000000000000000000000000000000000000'], 0)), ) // invalidlength: 39 chars assert.notOk( - validatorResult(validators.address(['0x000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x000000000000000000000000000000000000000'], 0)), ) // invalidlength: 41 chars assert.notOk( - validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)), ) // invalid length: 42 chars assert.notOk( - validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)), ) // invalid character assert.notOk( - validatorResult(validators.address(['0x62223651d6a33d58be70eb9876c3caf7096169ez'], 0)) + validatorResult(validators.address(['0x62223651d6a33d58be70eb9876c3caf7096169ez'], 0)), ) assert.ok(validatorResult(validators.bytes8([bytesToHex(randomBytes(8))], 0))) assert.ok(validatorResult(validators.bytes8([bytes(8)], 0))) @@ -228,59 +228,59 @@ describe(prefix, () => { validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) - ) + 0, + ), + ), ) assert.ok( validatorResult( validators.blockHash( ['0xf79d019c58d58a4efcfdf100c9596dd38014dcec6cf6f52000d4fae4e139b703'], - 0 - ) - ) + 0, + ), + ), ) // invalid length assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2'], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24'], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2499'], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.blockHash( 
['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24999'], - 0 - ) - ) + 0, + ), + ), ) // invalid character assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66z249'], - 0 - ) - ) + 0, + ), + ), ) }) @@ -293,9 +293,9 @@ describe(prefix, () => { validatorResult( validators.blockOption( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) - ) + 0, + ), + ), ) assert.ok(validatorResult(validators.blockOption(['0x1'], 0))) assert.ok(validatorResult(validators.blockOption(['0x01'], 0))) @@ -312,9 +312,9 @@ describe(prefix, () => { validatorResult( validators.blockOption( ['573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) - ) + 0, + ), + ), ) }) @@ -463,14 +463,14 @@ describe(prefix, () => { gas: '0xcf08', }, ], - 0 - ) - ) + 0, + ), + ), ) assert.ok( validatorResult( - validators.transaction(['to'])([{ to: '0x0000000000000000000000000000000000000000' }], 0) - ) + validators.transaction(['to'])([{ to: '0x0000000000000000000000000000000000000000' }], 0), + ), ) // invalid @@ -489,17 +489,17 @@ describe(prefix, () => { from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249', }, ], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.transaction(['to'])( [{ from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249' }], - 0 - ) - ) + 0, + ), + ), ) assert.notOk(validatorResult(validators.transaction([])([{ gas: '12' }], 0))) assert.notOk(validatorResult(validators.transaction([])([{ gasPrice: '12' }], 0))) @@ -525,22 +525,22 @@ describe(prefix, () => { hex: '0x1', }, ], - 0 - ) - ) + 0, + ), + ), ) // invalid assert.notOk( - validatorResult(validators.object({ address: validators.address })([{ address: '0x0' }], 0)) + validatorResult(validators.object({ address: validators.address })([{ address: '0x0' }], 0)), ) assert.notOk( validatorResult( - validators.object({ blockHash: validators.blockHash })([{ blockHash: '0x0' }], 0) - ) + validators.object({ blockHash: validators.blockHash })([{ blockHash: '0x0' }], 0), + ), ) assert.notOk( - validatorResult(validators.object({ bool: validators.bool })([{ bool: '0x0' }], 0)) + validatorResult(validators.object({ bool: validators.bool })([{ bool: '0x0' }], 0)), ) assert.notOk(validatorResult(validators.object({ hex: validators.hex })([{ hex: '1' }], 0))) }) @@ -557,37 +557,37 @@ describe(prefix, () => { '0xda4a22ad0d0e9aff0846ca54225637ada5bf7a14', ], ], - 0 - ) - ) + 0, + ), + ), ) assert.ok( validatorResult( validators.array(validators.blockHash)( [['0xb6dbbc1c702583de187e1284a00a23f9d322bf96f70fd4968b6339d0ace066b3']], - 0 - ) - ) + 0, + ), + ), ) assert.ok(validatorResult(validators.array(validators.bool)([[true, false]], 0))) // invalid assert.notOk( - validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2', 'true']], 0)) + validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2', 'true']], 0)), ) assert.notOk( validatorResult( validators.array(validators.address)( [['0xb7e390864a90b7b923c9f9310c6f98aafe43f707', '0x0']], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( - validatorResult(validators.array(validators.blockHash)([['0xb6dbbc1cd0ace066b3']], 0)) + validatorResult(validators.array(validators.blockHash)([['0xb6dbbc1cd0ace066b3']], 0)), ) assert.notOk( - validatorResult(validators.array(validators.bool)([['0x123', '0x456', '0x789']], 0)) + validatorResult(validators.array(validators.bool)([['0x123', '0x456', '0x789']], 0)), ) 
assert.notOk(validatorResult(validators.array(validators.bool)([[true, 'true']], 0))) }) @@ -667,15 +667,15 @@ describe(prefix, () => { validatorResult( validators.optional(validators.blockHash)( ['0x0000000000000000000000000000000000000000000000000000000000000000'], - 0 - ) - ) + 0, + ), + ), ) assert.ok( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['INVALID'], 0)) + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['INVALID'], 0)), ) assert.ok( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([''], 0)) + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([''], 0)), ) assert.ok(validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([], 0))) @@ -683,7 +683,7 @@ describe(prefix, () => { assert.notOk(validatorResult(validators.optional(validators.bool)(['hey'], 0))) assert.notOk(validatorResult(validators.optional(validators.blockHash)(['0x0'], 0))) assert.notOk( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['ANOTHER'], 0)) + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['ANOTHER'], 0)), ) }) @@ -696,35 +696,35 @@ describe(prefix, () => { validators.either( validators.bool, validators.hex, - validators.array(validators.hex) - )([['0xaaa']], 0) - ) + validators.array(validators.hex), + )([['0xaaa']], 0), + ), ) assert.ok( validatorResult( validators.either(validators.bool, validators.blockHash)( ['0x0000000000000000000000000000000000000000000000000000000000000000'], - 0 - ) - ) + 0, + ), + ), ) // invalid assert.notOk( - validatorResult(validators.either(validators.bool, validators.blockHash)(['0xabc'], 0)) + validatorResult(validators.either(validators.bool, validators.blockHash)(['0xabc'], 0)), ) assert.notOk(validatorResult(validators.either(validators.bool, validators.hex)(['abc'], 0))) assert.notOk( - validatorResult(validators.either(validators.hex, validators.blockHash)([true], 0)) + validatorResult(validators.either(validators.hex, validators.blockHash)([true], 0)), ) assert.notOk( validatorResult( validators.either( validators.hex, validators.blockHash, - validators.array(validators.hex) - )([[false]], 0) - ) + validators.array(validators.hex), + )([[false]], 0), + ), ) }) }) diff --git a/packages/client/test/rpc/web3/sha3.spec.ts b/packages/client/test/rpc/web3/sha3.spec.ts index 31706682fa..03896efd0c 100644 --- a/packages/client/test/rpc/web3/sha3.spec.ts +++ b/packages/client/test/rpc/web3/sha3.spec.ts @@ -8,7 +8,7 @@ function compareErrorCode(error: any, errorCode: any) { assert.equal( error.code, errorCode, - `should return the correct error code (expected: ${errorCode}, received: ${error.code})` + `should return the correct error code (expected: ${errorCode}, received: ${error.code})`, ) } @@ -26,7 +26,7 @@ describe(method, () => { assert.equal( result, '0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad', - 'should return the correct hash value' + 'should return the correct hash value', ) }) diff --git a/packages/client/test/service/fullethereumservice.spec.ts b/packages/client/test/service/fullethereumservice.spec.ts index 03336d3e68..0915fdb095 100644 --- a/packages/client/test/service/fullethereumservice.spec.ts +++ b/packages/client/test/service/fullethereumservice.spec.ts @@ -136,7 +136,7 @@ describe('should start/stop', async () => { describe('should correctly handle GetBlockHeaders', async () => { const config = new Config({ accountCache: 10000, 
storageCache: 1000 }) vi.unmock('../../src/blockchain') - await import('../../src/blockchain') + await import('../../src/blockchain/index.js') const chain = await Chain.create({ config }) chain.getHeaders = () => [{ number: 1n }] as any const service = new FullEthereumService({ config, chain }) @@ -152,12 +152,12 @@ describe('should correctly handle GetBlockHeaders', async () => { it('should send empty headers', () => { assert.ok( title === 'BlockHeaders' && msg.headers.length === 0, - 'sent empty headers when block height is too high' + 'sent empty headers when block height is too high', ) }) }, } as any, - } as any + } as any, ) ;(service.chain as any)._headers = { height: 5n, @@ -177,12 +177,12 @@ describe('should correctly handle GetBlockHeaders', async () => { it('should send 1 header', () => { assert.ok( title === 'BlockHeaders' && msg.headers.length === 1, - 'sent 1 header when requested' + 'sent 1 header when requested', ) }) }, } as any, - } as any + } as any, ) }) @@ -204,7 +204,7 @@ describe('should call handleNewBlock on NewBlock and handleNewBlockHashes on New await service.switchToBeaconSync() assert.ok( (service.synchronizer as BeaconSynchronizer).type === 'beacon', - 'switched to BeaconSynchronizer' + 'switched to BeaconSynchronizer', ) assert.ok(service.beaconSync, 'can access BeaconSynchronizer') }) @@ -282,7 +282,7 @@ describe('should handle Transactions', async () => { data: [createTxFromTxData({ type: 2 })], }, 'eth', - undefined as any + undefined as any, ) }) @@ -306,7 +306,7 @@ describe('should handle NewPooledTransactionHashes', async () => { eth: { versions: [66], }, - } as any + } as any, ) }) @@ -330,7 +330,7 @@ describe('should handle GetPooledTransactions', async () => { }) }, } as any, - } as any + } as any, ) }) @@ -344,7 +344,7 @@ describe('should handle decoding NewPooledTransactionHashes with eth/68 message ;(service.txPool as any).handleAnnouncedTxHashes = ( hashes: Uint8Array[], _peer: any, - _pool: any + _pool: any, ) => { it('should get correct tx hash from eth68 message', () => { assert.deepEqual(hashes[0], txHash) @@ -358,7 +358,7 @@ describe('should handle decoding NewPooledTransactionHashes with eth/68 message eth: { versions: [67, 68], }, - } as any + } as any, ) }) @@ -381,7 +381,7 @@ describe.skip('should handle structuring NewPooledTransactionHashes with eth/68 }, }, } as any, - ] + ], ) }) diff --git a/packages/client/test/service/lightethereumservice.spec.ts b/packages/client/test/service/lightethereumservice.spec.ts index cec1e520bc..ede731051b 100644 --- a/packages/client/test/service/lightethereumservice.spec.ts +++ b/packages/client/test/service/lightethereumservice.spec.ts @@ -1,6 +1,6 @@ import { assert, describe, expect, it, vi } from 'vitest' -import { Chain } from '../../src/blockchain/chain' +import { Chain } from '../../src/blockchain/chain.js' import { Config } from '../../src/config.js' import { LesProtocol } from '../../src/net/protocol/index.js' import { RlpxServer } from '../../src/net/server/index.js' diff --git a/packages/client/test/sim/4844-blobpost.spec.ts b/packages/client/test/sim/4844-blobpost.spec.ts index 237098b5aa..68c41c4ec5 100644 --- a/packages/client/test/sim/4844-blobpost.spec.ts +++ b/packages/client/test/sim/4844-blobpost.spec.ts @@ -17,7 +17,7 @@ import type { PrefixedHexString } from '@ethereumjs/util' const pkey = hexToBytes( (process.env.PRIVATE_KEY as PrefixedHexString) ?? 
- '0xae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e' + '0xae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e', ) const sender = bytesToHex(privateToAddress(pkey)) const rpcUrl = @@ -68,7 +68,7 @@ describe(`running txes on ${rpcUrl}`, async () => { const nonceFetch = await client.request( 'eth_getTransactionCount', [sender.toString(), 'latest'], - 2.0 + 2.0, ) const nonce = Number(nonceFetch.result) assert.ok(true, `fetched ${sender}'s nonce=${nonce} for blob txs`) @@ -86,7 +86,7 @@ describe(`running txes on ${rpcUrl}`, async () => { gasLimit: BigInt(process.env.GAS_LIMIT ?? 0xffffffn), blobSize: Number(process.env.BLOB_SIZE ?? 4096), }, - { common } + { common }, ) const txHashes = [] for (const txn of txns) { @@ -101,7 +101,7 @@ describe(`running txes on ${rpcUrl}`, async () => { } assert.ok(true, `posted txs=${txHashes.length}`) }, - 10 * 60_000 + 10 * 60_000, ) it('cleanup', async () => { diff --git a/packages/client/test/sim/4844-devnet.spec.ts b/packages/client/test/sim/4844-devnet.spec.ts index c0c4cf225d..b482324020 100644 --- a/packages/client/test/sim/4844-devnet.spec.ts +++ b/packages/client/test/sim/4844-devnet.spec.ts @@ -63,7 +63,7 @@ describe('sharding/eip4844 hardfork tests', async () => { pkey, '0x3dA33B9A0894b908DdBb00d96399e506515A1009', undefined, - { common } + { common }, ) const eth2res = await (await fetch('http://127.0.0.1:9596/eth/v1/beacon/headers')).json() @@ -98,7 +98,7 @@ describe('sharding/eip4844 hardfork tests', async () => { assert.equal( eth2kzgs[0], bytesToHex(txResult.tx.kzgCommitments![0]), - 'found expected blob commitments on CL' + 'found expected blob commitments on CL', ) }, 60_000) @@ -119,7 +119,7 @@ describe('sharding/eip4844 hardfork tests', async () => { gasLimit: BigInt(1000000) as any, blobSize: 4096, }, - { common } + { common }, ) const txHashes = [] for (const txn of txns) { @@ -138,7 +138,7 @@ describe('sharding/eip4844 hardfork tests', async () => { const block1 = await client.request( 'eth_getBlockByHash', [txReceipt.result.blockHash, false], - 2.0 + 2.0, ) // next block will have the excessBlobGas done = false @@ -153,14 +153,14 @@ describe('sharding/eip4844 hardfork tests', async () => { } assert.ok(BigInt(block2.result.excessBlobGas) > 0n, 'block1 has excess blob gas > 0') }, - 10 * 60_000 + 10 * 60_000, ) it('point precompile contract test', async () => { const nonce = await client.request( 'eth_getTransactionCount', [sender.toString(), 'latest'], - 2.0 + 2.0, ) /* Data is contract deployment code for the below contract borrowed from the 4844-interop repo @@ -179,7 +179,7 @@ describe('sharding/eip4844 hardfork tests', async () => { const txData = { data: hexToBytes( - 
'0xf9031103830186a0830f42408080b902c0608060405234801561001057600080fd5b50604051610260380380610260833981810160405281019061003291906101ca565b60008060c0835160145afa61004657600080fd5b50610213565b6000604051905090565b600080fd5b600080fd5b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6100b38261006a565b810181811067ffffffffffffffff821117156100d2576100d161007b565b5b80604052505050565b60006100e561004c565b90506100f182826100aa565b919050565b600067ffffffffffffffff8211156101115761011061007b565b5b61011a8261006a565b9050602081019050919050565b60005b8381101561014557808201518184015260208101905061012a565b83811115610154576000848401525b50505050565b600061016d610168846100f6565b6100db565b90508281526020810184848401111561018957610188610065565b5b610194848285610127565b509392505050565b600082601f8301126101b1576101b0610060565b5b81516101c184826020860161015a565b91505092915050565b6000602082840312156101e0576101df610056565b5b600082015167ffffffffffffffff8111156101fe576101fd61005b565b5b61020a8482850161019c565b91505092915050565b603f806102216000396000f3fe6080604052600080fdfea2646970667358221220cbb964afe0f584a89b887bf992e18697c0ebd77a40a102c121f54213f23d4d9464736f6c634300080f00330000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000212340000000000000000000000000000000000000000000000000000000000001ba002e89a44a4e4da739fed1ed658079a75dbcb59eebbd8ea0cb11f88a41d611dfaa025fe1645a1d3c9828be471fac5cd3e4be59c90ea304c94d774ff88c84349d8db' + '0xf9031103830186a0830f42408080b902c0608060405234801561001057600080fd5b50604051610260380380610260833981810160405281019061003291906101ca565b60008060c0835160145afa61004657600080fd5b50610213565b6000604051905090565b600080fd5b600080fd5b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6100b38261006a565b810181811067ffffffffffffffff821117156100d2576100d161007b565b5b80604052505050565b60006100e561004c565b90506100f182826100aa565b919050565b600067ffffffffffffffff8211156101115761011061007b565b5b61011a8261006a565b9050602081019050919050565b60005b8381101561014557808201518184015260208101905061012a565b83811115610154576000848401525b50505050565b600061016d610168846100f6565b6100db565b90508281526020810184848401111561018957610188610065565b5b610194848285610127565b509392505050565b600082601f8301126101b1576101b0610060565b5b81516101c184826020860161015a565b91505092915050565b6000602082840312156101e0576101df610056565b5b600082015167ffffffffffffffff8111156101fe576101fd61005b565b5b61020a8482850161019c565b91505092915050565b603f806102216000396000f3fe6080604052600080fdfea2646970667358221220cbb964afe0f584a89b887bf992e18697c0ebd77a40a102c121f54213f23d4d9464736f6c634300080f00330000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000212340000000000000000000000000000000000000000000000000000000000001ba002e89a44a4e4da739fed1ed658079a75dbcb59eebbd8ea0cb11f88a41d611dfaa025fe1645a1d3c9828be471fac5cd3e4be59c90ea304c94d774ff88c84349d8db', ), nonce: BigInt(nonce.result), gasLimit: 0xffffff, @@ -192,7 +192,7 @@ describe('sharding/eip4844 hardfork tests', async () => { const txResult = await client.request( 'eth_sendRawTransaction', [bytesToHex(tx.serialize())], - 2.0 + 2.0, ) let receipt = await client.request('eth_getTransactionReceipt', [txResult.result], 2.0) while (receipt.result === null) { @@ -201,7 +201,7 @@ 
describe('sharding/eip4844 hardfork tests', async () => { } assert.ok( receipt.result.contractAddress !== undefined, - 'successfully deployed contract that calls precompile' + 'successfully deployed contract that calls precompile', ) }, 60_000) /* diff --git a/packages/client/test/sim/beaconsync.spec.ts b/packages/client/test/sim/beaconsync.spec.ts index 7611183123..01c4b8d5f1 100644 --- a/packages/client/test/sim/beaconsync.spec.ts +++ b/packages/client/test/sim/beaconsync.spec.ts @@ -89,7 +89,7 @@ describe('simple mainnet test run', async () => { assert.equal( EOATransferToBalance, BigInt(balance.result), - `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}` + `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}`, ) balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) @@ -107,10 +107,10 @@ describe('simple mainnet test run', async () => { balance = await client.request('eth_getBalance', [sender, 'latest']) assert.ok( balance.result !== undefined, - 'remaining sender balance after transfers and gas fee' + 'remaining sender balance after transfers and gas fee', ) }, - 2 * 60_000 + 2 * 60_000, ) it.skipIf(process.env.BEACON_SYNC === undefined)( @@ -127,7 +127,7 @@ describe('simple mainnet test run', async () => { common, customGenesisState, [nodeInfo.enode], - peerBeaconUrl + peerBeaconUrl, ).catch((e) => { console.log(e) return null @@ -152,7 +152,7 @@ describe('simple mainnet test run', async () => { assert.fail('could not connect to geth peer in 10 seconds') } }, - 60_000 + 60_000, ) it.skipIf(process.env.BEACON_SYNC === undefined)( @@ -170,7 +170,7 @@ describe('simple mainnet test run', async () => { assert.equal( ['SYNCED', 'VALID'].includes(syncResponse.syncState), true, - 'beaconSyncRelayer should have synced client' + 'beaconSyncRelayer should have synced client', ) await ejsClient.stop() assert.ok(true, 'completed beacon sync') @@ -182,7 +182,7 @@ describe('simple mainnet test run', async () => { assert.fail('ethereumjs client not setup properly for beacon sync') } }, - 10 * 60_000 + 10 * 60_000, ) it('network cleanup', async () => { @@ -201,7 +201,7 @@ async function createBeaconSyncClient( customGenesisState?: any, bootnodes?: any, peerBeaconUrl?: any, - datadir?: any + datadir?: any, ) { // Turn on `debug` logs, defaults to all client logging debug.enable(process.env.DEBUG_SYNC ?? 
'') diff --git a/packages/client/test/sim/eof.spec.ts b/packages/client/test/sim/eof.spec.ts index 33401e9e2b..b44ad8700a 100644 --- a/packages/client/test/sim/eof.spec.ts +++ b/packages/client/test/sim/eof.spec.ts @@ -87,7 +87,7 @@ describe('EOF ephemeral hardfork tests', async () => { assert.equal( code.result, '0XEF00010100010200010000AA'.toLowerCase(), - 'deposited valid EOF1 code' + 'deposited valid EOF1 code', ) }) // ------------EIP 3860 tests------------------------------- @@ -105,7 +105,7 @@ describe('EOF ephemeral hardfork tests', async () => { const push0res = await runTx('0x5F') assert.ok( BigInt(push1res.gasUsed) > BigInt(push0res.gasUsed), - 'PUSH1 transaction costs higher gas than PUSH0' + 'PUSH1 transaction costs higher gas than PUSH0', ) }) // ------------EIP 3651 tests------------------------------- @@ -129,18 +129,18 @@ describe('EOF ephemeral hardfork tests', async () => { */ const contractAddress = ( await runTx( - '0x608060405234801561001057600080fd5b5061021d806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c80635caba0a41461003b578063e178495614610057575b600080fd5b6100556004803603810190610050919061011b565b610061565b005b61005f6100b4565b005b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f818273ffffffffffffffffffffffffffffffffffffffff16316040516100a9929190610166565b60405180910390a150565b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f414173ffffffffffffffffffffffffffffffffffffffff16316040516100fc929190610166565b60405180910390a1565b600081359050610115816101d0565b92915050565b600060208284031215610131576101306101cb565b5b600061013f84828501610106565b91505092915050565b6101518161018f565b82525050565b610160816101c1565b82525050565b600060408201905061017b6000830185610148565b6101886020830184610157565b9392505050565b600061019a826101a1565b9050919050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000819050919050565b600080fd5b6101d98161018f565b81146101e457600080fd5b5056fea2646970667358221220d00dedb6dcbb511fab3ae484199f836b4c36119fb6faec1baee5e29db1ead12864736f6c63430008070033' + '0x608060405234801561001057600080fd5b5061021d806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c80635caba0a41461003b578063e178495614610057575b600080fd5b6100556004803603810190610050919061011b565b610061565b005b61005f6100b4565b005b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f818273ffffffffffffffffffffffffffffffffffffffff16316040516100a9929190610166565b60405180910390a150565b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f414173ffffffffffffffffffffffffffffffffffffffff16316040516100fc929190610166565b60405180910390a1565b600081359050610115816101d0565b92915050565b600060208284031215610131576101306101cb565b5b600061013f84828501610106565b91505092915050565b6101518161018f565b82525050565b610160816101c1565b82525050565b600060408201905061017b6000830185610148565b6101886020830184610157565b9392505050565b600061019a826101a1565b9050919050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000819050919050565b600080fd5b6101d98161018f565b81146101e457600080fd5b5056fea2646970667358221220d00dedb6dcbb511fab3ae484199f836b4c36119fb6faec1baee5e29db1ead12864736f6c63430008070033', ) ).contractAddress const readWarmCoinbase = await runTx('0xe1784956', contractAddress) const readCold = await runTx( '0x5caba0a40000000000000000000000004242424242424242424242424242424242424242', - contractAddress + contractAddress, ) assert.ok( BigInt(readCold.gasUsed) > 
BigInt(readWarmCoinbase.gasUsed), - 'read cold storage tx should have higher cumulative gas than than read coinbase tx' + 'read cold storage tx should have higher cumulative gas than than read coinbase tx', ) }) diff --git a/packages/client/test/sim/mainnet.spec.ts b/packages/client/test/sim/mainnet.spec.ts index 459a5c0614..f21262f476 100644 --- a/packages/client/test/sim/mainnet.spec.ts +++ b/packages/client/test/sim/mainnet.spec.ts @@ -75,7 +75,7 @@ describe('simple mainnet test run', async () => { const latestBlock = await client.request('eth_getBlockByNumber', ['latest', false]) blockHashes.push(latestBlock.result.hash) }, - 2 * 60_000 + 2 * 60_000, ) it('Validate execution hashes present in beacon headers', async () => { @@ -84,7 +84,7 @@ describe('simple mainnet test run', async () => { 'http://127.0.0.1:9596', 1, parseInt(eth2res.data[0].header.message.slot), - blockHashes + blockHashes, ) }, 60_000) diff --git a/packages/client/test/sim/simutils.ts b/packages/client/test/sim/simutils.ts index 252ea2decc..2ddba64f10 100644 --- a/packages/client/test/sim/simutils.ts +++ b/packages/client/test/sim/simutils.ts @@ -19,11 +19,11 @@ import { execSync, spawn } from 'node:child_process' import * as net from 'node:net' import qs from 'qs' -import { EthereumClient } from '../../src/client' +import { EthereumClient } from '../../src/client.js' import { Config } from '../../src/config.js' -import { LevelDB } from '../../src/execution/level' -import { RPCManager } from '../../src/rpc' -import { Event } from '../../src/types' +import { LevelDB } from '../../src/execution/level.js' +import { RPCManager } from '../../src/rpc/index.js' +import { Event } from '../../src/types.js' import type { Common } from '@ethereumjs/common' import type { TransactionType, TxData, TxOptions } from '@ethereumjs/tx' @@ -117,7 +117,7 @@ export async function validateBlockHashesInclusionInBeacon( beaconUrl: string, from: number, to: number, - blockHashes: string[] + blockHashes: string[], ) { const executionHashes: string[] = [] for (let i = from; i <= to; i++) { @@ -147,7 +147,7 @@ type RunOpts = { export function runNetwork( network: string, client: Client, - { filterKeywords, filterOutWords, withPeer }: RunOpts + { filterKeywords, filterOutWords, withPeer }: RunOpts, ): () => Promise { const runProc = spawn('test/sim/single-run.sh', [], { env: { @@ -255,7 +255,7 @@ export function runNetwork( export async function startNetwork( network: string, client: Client, - opts: RunOpts + opts: RunOpts, ): Promise<{ teardownCallBack: () => Promise; result: string }> { let teardownCallBack if (opts.externalRun === undefined) { @@ -271,7 +271,7 @@ export async function runTxHelper( opts: { client: Client; common: Common; sender: string; pkey: Uint8Array }, data: PrefixedHexString | '', to?: PrefixedHexString, - value?: bigint + value?: bigint, ) { const { client, common, sender, pkey } = opts const nonce = BigInt((await client.request('eth_getTransactionCount', [sender, 'latest'])).result) @@ -289,7 +289,7 @@ export async function runTxHelper( to, value, }, - { common } + { common }, ).sign(pkey) const res = await client.request('eth_sendRawTransaction', [bytesToHex(tx.serialize())], 2.0) @@ -316,7 +316,7 @@ export const runBlobTx = async ( pkey: Uint8Array, to?: PrefixedHexString, value?: bigint, - opts?: TxOptions + opts?: TxOptions, ) => { const blobs = getBlobs(bytesToHex(randomBytes(blobSize))) const commitments = blobsToCommitments(kzg, blobs) @@ -383,7 +383,7 @@ export const createBlobTxs = async ( gasLimit: bigint 
blobSize: number }, - opts?: TxOptions + opts?: TxOptions, ) => { const txHashes: string[] = [] const blobSize = txMeta.blobSize ?? 2 ** 17 - 1 @@ -432,17 +432,17 @@ export async function createInlineClient( config: any, common: any, customGenesisState: any, - datadir: any = Config.DATADIR_DEFAULT + datadir: any = Config.DATADIR_DEFAULT, ) { config.events.setMaxListeners(50) const chainDB = new Level( - `${datadir}/${common.chainName()}/chainDB` + `${datadir}/${common.chainName()}/chainDB`, ) const stateDB = new Level( - `${datadir}/${common.chainName()}/stateDB` + `${datadir}/${common.chainName()}/stateDB`, ) const metaDB = new Level( - `${datadir}/${common.chainName()}/metaDB` + `${datadir}/${common.chainName()}/metaDB`, ) const blockchain = await createBlockchain({ @@ -506,7 +506,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU !['SYNCING', 'VALID', 'ACCEPTED'].includes(newPayloadRes.status) ) { throw Error( - `newPayload error: status${newPayloadRes.status} validationError=${newPayloadRes.validationError} error=${newPayloadRes.error}` + `newPayload error: status${newPayloadRes.status} validationError=${newPayloadRes.validationError} error=${newPayloadRes.error}`, ) } @@ -545,7 +545,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU const beaconHead = await (await fetch(`${peerBeaconUrl}/eth/v2/beacon/blocks/head`)).json() const payload = executionPayloadFromBeaconPayload( - beaconHead.data.message.body.execution_payload + beaconHead.data.message.body.execution_payload, ) const finalizedBlockHash = beaconFinalized.data.finalized_header.execution.block_hash diff --git a/packages/client/test/sim/snapsync.spec.ts b/packages/client/test/sim/snapsync.spec.ts index e8aded7d90..89ae5e2422 100644 --- a/packages/client/test/sim/snapsync.spec.ts +++ b/packages/client/test/sim/snapsync.spec.ts @@ -22,9 +22,9 @@ import { setupEngineUpdateRelay, startNetwork, waitForELStart, -} from './simutils' +} from './simutils.js' -import type { EthereumClient } from '../../src/client' +import type { EthereumClient } from '../../src/client.js' import type { DefaultStateManager } from '@ethereumjs/statemanager' import type { PrefixedHexString } from '@ethereumjs/util' @@ -92,7 +92,7 @@ describe('simple mainnet test run', async () => { assert.equal( EOATransferToBalance, BigInt(balance.result), - `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}` + `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}`, ) balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) @@ -110,11 +110,11 @@ describe('simple mainnet test run', async () => { balance = await client.request('eth_getBalance', [sender, 'latest']) assert.ok( balance.result !== undefined, - 'remaining sender balance after transfers and gas fee' + 'remaining sender balance after transfers and gas fee', ) senderBalance = BigInt(balance.result) }, - 2 * 60_000 + 2 * 60_000, ) it.skipIf(process.env.SNAP_SYNC === undefined)( @@ -135,7 +135,7 @@ describe('simple mainnet test run', async () => { customGenesisState, [nodeInfo.enode], peerBeaconUrl, - '' + '', ).catch((e) => { console.log(e) return null @@ -162,7 +162,7 @@ describe('simple mainnet test run', async () => { assert.fail('could not connect to geth peer in 10 seconds') } }, - 60_000 + 60_000, ) it.skipIf(process.env.SNAP_SYNC === undefined)( @@ -201,7 +201,7 @@ describe('simple mainnet test run', async () => { assert.fail('ethereumjs client not setup properly for snap sync') } }, - 10 * 
60_000 + 10 * 60_000, ) it.skipIf(stateManager !== undefined)('should match entire state', async () => { @@ -222,7 +222,7 @@ describe('simple mainnet test run', async () => { assert.equal( account?.balance, BigInt(customGenesisState[addressString][0]), - `${addressString} balance should match` + `${addressString} balance should match`, ) } }) @@ -244,7 +244,7 @@ async function createSnapClient( customGenesisState: any, bootnodes: any, peerBeaconUrl: any, - datadir: any + datadir: any, ) { // Turn on `debug` logs, defaults to all client logging debug.enable(process.env.DEBUG_SNAP ?? '') @@ -272,7 +272,7 @@ async function createSnapClient( config.events.once( Event.SYNC_SNAPSYNC_COMPLETE, (stateRoot: Uint8Array, stateManager: DefaultStateManager) => - resolve([stateRoot, stateManager]) + resolve([stateRoot, stateManager]), ) }) diff --git a/packages/client/test/sync/beaconsync.spec.ts b/packages/client/test/sync/beaconsync.spec.ts index 81173704eb..722f2f6104 100644 --- a/packages/client/test/sync/beaconsync.spec.ts +++ b/packages/client/test/sync/beaconsync.spec.ts @@ -30,7 +30,7 @@ describe('[BeaconSynchronizer]', async () => { ReverseBlockFetcher.prototype.destroy = td.func() vi.doMock('../../src/sync/fetcher/reverseblockfetcher.js', () => - td.constructor(ReverseBlockFetcher) + td.constructor(ReverseBlockFetcher), ) const { BeaconSynchronizer } = await import('../../src/sync/beaconsync.js') @@ -132,11 +132,11 @@ describe('[BeaconSynchronizer]', async () => { const pool = new PeerPool() as any const chain = await Chain.create({ config }) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - skeleton['getSyncStatus'] = td.func() + skeleton['getSyncStatus'] = td.func<(typeof skeleton)['getSyncStatus']>() await skeleton.open() const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return { @@ -186,10 +186,10 @@ describe('[BeaconSynchronizer]', async () => { const pool = new PeerPool() as any const chain = await Chain.create({ config }) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - skeleton['getSyncStatus'] = td.func() + skeleton['getSyncStatus'] = td.func<(typeof skeleton)['getSyncStatus']>() await skeleton.open() const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return { @@ -238,7 +238,7 @@ describe('[BeaconSynchronizer]', async () => { assert.notOk(await sync.extendChain(gapBlock), 'should not extend chain with gapped block') assert.ok( await sync.setHead(gapBlock), - 'should be able to set and update head with gapped block' + 'should be able to set and update head with gapped block', ) assert.equal(skeleton.bounds().head, BigInt(18), 'head should update with gapped block') await sync.stop() @@ -256,7 +256,7 @@ describe('[BeaconSynchronizer]', async () => { assert.equal( await sync.syncWithPeer({} as any), false, - `syncWithPeer should return false as nothing to sync` + `syncWithPeer should return false as nothing to sync`, ) await sync.stop() await sync.close() diff --git a/packages/client/test/sync/fetcher/accountfetcher.spec.ts b/packages/client/test/sync/fetcher/accountfetcher.spec.ts index 949262c9ff..86a9da8c1b 100644 --- a/packages/client/test/sync/fetcher/accountfetcher.spec.ts +++ 
b/packages/client/test/sync/fetcher/accountfetcher.spec.ts @@ -19,10 +19,10 @@ export const _accountRangeRLP = '0xf90b7c01f88aeda0000001907a67cf7ece54c42262997b2f19041a4d99466b94b8c12f225827e239cb80872386f26fc100008080eda00000107c642e29a6b613205c923ac3a4cf0cf1704ae9a8bef2784caba060f4b7cb07870e22e1219054118080eda000001d26422787b6d40c0c0c2df85757c5ad4a3e367831e932fa24f34da43d57cb80872386f26fc100008080f90aecb90214f90211a0b3f22b069c398ded55d4ce421b06f6b4d5e13cb53ad1c6220276b2b3a078937ba08a54e492e7b9ef911b4a299487a12390ccd81a087398af7106e00b81a791868da0a323a93f5791d4c39e1496e4856f9233e5e86070c722efde613219aca834bde3a0d8c11a8fc2eba0b47de9d5b207b702a8bd62609e9c2504aaa444fd2e98e31deaa0dbfc625e370fa89cb7b123550ef6fd637687b9e9a7c8556bd41bcd4226226095a094fe5f6ac37c805917beefa220d7c6b3bd50848322f6342e940cc047c9b6a8ffa074af7e57b9c59e06a2e478610d56ab39004cda3109cfd953dc8b1d168c453cbca0d58f31d0ecce773d610aa5d12f7cc2f4ca992db4ce2e154c13a12cb4bb567816a0b26a7d9776165bb52e793df6a77d4032164d788bf9954c9cac289ea0786da2fda043804bd146f583b183dc267b36bbe55f63daa36fd6cbdafce48ce451a444b4eca0fc724e8bb65724450eb3966d8672330c8e49a94c6ceaed06174a2322aafee105a02ccb0445b0a4028f167e425b57cb9462cc6caceda0c3cfb5363f08614314a77ca0c64db3edb50609b6de331f00ba1f455113d1388e9eb5f50f5420983012d62b7da0168c680c03ef3fbcc36a6c1ddd9bf7d46b5fd5ee34dd7048320223c8bbe412f9a05747d2eb930bffce317c253e3889a7db57c87dcc55f1f1f77b3d02fc82bc6bcfa0997073e1664f9cbbcfd968277856596c325a6b83887f4ad007c3b93e1133c65280b90214f90211a0b3e6ec5fa09062b280599994d38261cae87ab198ed1b3a7d7003a277ffc735dfa01bac91007228f4fa15ac9c2a4822b7d4103eafae61dd3db30eb830e31de9cddfa0809973bebc62f48fb834336800b1ce8e1b2128ee5824645464b6c09ddd381578a0f8d54e19e888fc01cd5069bfcddb7ee78a4afdec24aa03822d9fd5356a3c109fa08a61ea95c616906799398778b28f0e8a19f6569f885e4b4f1192f3e9f690cefea09aa53cd259b1df9650222dc285236399da685b7350312a3ac0a07a86bef64d5ea01596637937233489a70e114c23818e3512b3c2abf621d142c14a9b9a3afb09d1a0e8a8bcda78ae77bee956389dff38a10c8c1565bc1a85064da6cd8ba606b9aa35a04ae4b4bfbfb97f5b4e178f8c30a6d93ffd6614c8b4d0b44df31b653a3a1e4f0fa0a4e3413e6ee6c5886ed346827ee0cce05a8e4f799b005aacf002a17e6d93e5aaa09a3e6d344bbd2496bf8fa84abc96a3d5f363ba03103edff2164244bb020c52a2a0998f39835105197f860930b46adad4527f5a9ef31c4744476718b910ffc5e586a01cec4592958b5aefe25bea6a49a11089e798d96aebc2be7fce0f1772146d18aea0d7c178ed5bcf822d22f9ed3ca8c95e5144ee0a9fbae901b21da002e2c3c0415ea0a9d5c5c67326f4154449575827ab68ea47c7c8931490160a7a299f829a670476a074814ffe69da7e253de29fc7d5eb57291a67bd6f16cb52175106b7cbd3b19c8f80b90214f90211a0947eec1b645849d129fb8c65cd06bd52526fb2399d1660ee5108fc4698e809aaa02735f6cbb0e10514b1515826ae1c539850543dbe162badaf2efa51b1a353ca1ca0fde2642bcc8db8d6d6e42731eeae2045fc30b84c6efdc420ce8cee5d537b648fa071e7887ca31ae375838ceeed57165f5592a9e6cae9beb070e92a4f5d5aec5014a0f81f4b4d5e2c52373b8884b398838941df0b16177aa4ea8494b183176cf7d526a0dc6ecec073532c8f9581ece75cb4eea83a40ba0210cc10ef0fd8b27a102a028fa0426f18f1de1bc9b665e9efb45d6547e88e35a267d7ec9197ae97052d1be59ab9a0d6aad68bece934d578e18eb3acd147490bc6cc01e646f1d8618a747526eae4f5a04ffee6f8660794981b15fda1ceafef98db853bfc31c029db7cb515bb34bb5572a0da2497fed45626b94c1eb910c9eedc9c26a4ff5b56b709b96d5a567991ebe2aca021b3bfcd8aa97eb8d9a3ce258389603564f01d6f485899a9f6e0a00d85dc00dfa0339e45f0407ad527a899a2e06e17330c2cfe25b81689dcffd20c166ef256fbc6a0dafd25416aaf44a8bfa1a6bf2b0cc563f9be84b9b3b8bf307983252d7cd63c51a0191504034adb55fe0926c7c4066654739af3e1c9c4173f4d90fa2e1df62a99cca0504e2144c1a889e48cd5a6baa17e39b6a176dbf41147dd171f2673c5c9d849dba0485
0f33ad929cb1a07136f162e33a5df0f65c48f359637774e7c8ebabe90eb7080b90214f90211a05d16e93a6e58a13a7c7dde40d0c543b9d63d029ec0da5efb4be34cd4ce672181a089cbb0e940fb7bb395091e3b665755be6b51292fba7a7bc39904568c63a907e1a050314b93f73fed553cd9dee63dc1fe9b789f9b9e111a659ff4e4c91c8167a63ca04444bd2a1bb78a83b66a36a09076b2b49eade4e2e8c8ef91538117525893841aa0abde6220817f3608bdfec46ebed292c464ee1d2c58d0b43286b8617bb4cb49d9a07257eff6aebb380db4c75752a84c6b2d0bb86bb190cef2a58829497997262b6aa0a0d4ab9d93be97287f29637a9b16fb8a6c8cd3bc29786b64343113b95a4153ffa0f0d479377ce4c0f31185c45319f915532cea13e97d5abfc939b75b642b5b47bba0eb96a911347f5321e03f1602a041ce82ec29bb4b322faa9f999cf02bb0c7a932a047b6c76ffeb29b4e3c3c09749289213395c8b0126dbd8acee45c6d32d2a0ab5fa0ca462e8ff237f9e56698ca416fac835ed37bc90683d363effe7ec9dacb4963fba0d385f828becce3665e070b645df25dec507a7c6c3813591e3436147be0becc75a0537a7451522228feca0ceb55374615e8396229e1c7a6b0ae16fb49cd8e6ed7a9a0b96561ab484f67b604d2dc46ac170750b321334aabcfb6b212a906e1cb5b3532a09f64f7c76e201d48b4bc1fb02f7e052a5a1bf05b2c59f3c969c8d2d6b373b3dca0398a988af30676952fcf1a968ac530b30dbe32922efe8c27acb9025adcaf1a5180b90134f90131a0b2151043be015f98b1b249180bfac505781022ede708f533f373b2d612837df7a0031e6ffe32d313f0cd57b4bebbf6fcacf83c366157846040108d198129d99a5aa0bfca4f79ac9eb24bcbdbd94fc49c0ca30a6399a2071e4ab3024e1aae0159a31180808080a0f1a2c911436f5bf1aa936e140b17399f7c092ad64a8ab839057a67fc6923a318a0e648ced926c977b0dcc17452361ac43e53f839b8e485a288e93fb667573ae088a0808107d197eb28741f8cec92b6fa76957fa6928b00f4b7301d464809519258098080a02c7ac441b072bbe33030110dccfdda0de6705c4bdb2c94594e10c2fb8687c41080a0162e8104a86bd043ca2fac0c5d56181127c7b24f6c10fefb90c27064b4edeff8a0376bcbdd3b7503a144b9016159b7e2cd074c9566b843cb834123057c61adbd2e80b870f86e9e31907a67cf7ece54c42262997b2f19041a4d99466b94b8c12f225827e239b84df84b80872386f26fc10000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470b873f871a0a75a6fa397f39292a3bb4fdb84463908c473bad9a0206bd00964adabd7a4b589808080808080808080808080a0ea5b9774dfc3fd50b359b86fa49a57fce0186593cf89d865e279413b63947bed80a0a0747bb1023533b4f9cdaa7c845609975d413348fc5f185a120037dccdf3584c80b870f86e9e2026422787b6d40c0c0c2df85757c5ad4a3e367831e932fa24f34da43d57b84df84b80872386f26fc10000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' export const _zeroElementProofRoot = hexToBytes( - '0xe794e45a596856bcd5412788f46752a559a4aa89fe556ab26a8c2cf0fc24cb5e' + '0xe794e45a596856bcd5412788f46752a559a4aa89fe556ab26a8c2cf0fc24cb5e', ) export const _zeroElementProofOrigin = bytesToBigInt( - hexToBytes('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa') + hexToBytes('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa'), ) export const _zeroElementProof = [ 
'0xf90211a07d363fdc4ad4413321005a1981d415a872aed14651c159bea575d713fb1d1fd8a0d51e3a39747ab080d602e8dff07ed7fdf18fd5dd480b85ec8d5ebd86475481fba0382fbb965c19798b116e1b32ad64d99bdf09f8f4ed4c83e1b388ffad0ee8bc62a02ff7448b0092b7926a01bbb4f72e6f38366fdf109f3e9f8ac0794af3dc0e3de4a05db544523b1c10f8aead4252bff05665b8c7d21f02a102b51ac79acb6b3d2854a0cb0c46c37d6b44be6ff2204c4f4cea393099fefeae88cf5aa88195da74cca13fa0b459b6b3672dab2bb058e561761a0838e349d1dd1292dda31245e8404ec844eaa082cbce67bd082cb430296662fb1f32aabe866dee947970877abaf4233eb0fb48a0828820316cc02bfefd899aba41340659fd06df1e0a0796287ec2a4110239f6d2a0be88e4724326382a8b56e2328eeef0ad51f18d5bae0e84296afe14c4028c4af9a0c14e9060c6b3784e35b9e6ae2ad2984142a75910ccc89eb89dc1e2f44b6c58c2a091467954490d127631d2a2f39a6edabd702153de817fe8da2ab9a30513e5c6dda01c00f6abbb9bcb3ae9b12c887bc3ea3b13dba33a5dbad455c24778fa7d3ab01ea0899f71abb18c6c956118bf567fac629b75f7e9526873e429d3d8abb6dbb58021a00fd717235298742623c0b3cafb3e4bd86c0b5ab1f71097b4dd19f3d6925d758da011e10e11fa54a847669b26adaf1b5cbe7736eafde6c552b9b6158fe12307e60680', @@ -38,7 +38,7 @@ describe('[AccountFetcher]', async () => { PeerPool.prototype.idle = td.func() PeerPool.prototype.ban = td.func() - const { AccountFetcher } = await import('../../../src/sync/fetcher/accountfetcher') + const { AccountFetcher } = await import('../../../src/sync/fetcher/accountfetcher.js') it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5 }) @@ -89,7 +89,7 @@ describe('[AccountFetcher]', async () => { assert.deepEqual( fetcher.highestKnownHash, highestReceivedHash, - 'highest known hash correctly updated' + 'highest known hash correctly updated', ) }) @@ -204,7 +204,7 @@ describe('[AccountFetcher]', async () => { const result = (await fetcher.request(job as any)) as any assert.ok( JSON.stringify(result[0]) === JSON.stringify({ skipped: true }), - 'skipped fetching task with limit lower than highest known key hash' + 'skipped fetching task with limit lower than highest known key hash', ) }) @@ -280,7 +280,7 @@ describe('[AccountFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === true, - 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right' + 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right', ) }) @@ -319,7 +319,7 @@ describe('[AccountFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === undefined, - 'proof verification should fail if elements still remain to the right of the proof' + 'proof verification should fail if elements still remain to the right of the proof', ) }) @@ -333,10 +333,10 @@ describe('[AccountFetcher]', async () => { pool, root: hexToBytes('0x39ed8daab7679c0b1b7cf3667c50108185d4d9d1431c24a1c35f696a58277f8f'), first: bytesToBigInt( - hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001') + hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001'), ), count: bytesToBigInt( - hexToBytes('0x000010c6f7a0b5ed8d36b4c7f34938583621fafc8b0079a2834d26fa3fcc9ea9') + hexToBytes('0x000010c6f7a0b5ed8d36b4c7f34938583621fafc8b0079a2834d26fa3fcc9ea9'), ), }) assert.ok(fetcher.storageFetcher !== undefined, 'storageFetcher should be created') @@ -345,7 +345,7 @@ describe('[AccountFetcher]', async () => { const resData = RLP.decode(hexToBytes(_accountRangeRLP)) const { accounts, proof } = p.decode( 
p.messages.filter((message) => message.name === 'AccountRange')[0], - resData + resData, ) const mockedGetAccountRange = vi.fn(() => { return { @@ -365,7 +365,7 @@ describe('[AccountFetcher]', async () => { assert.ok(results !== undefined, 'Proof verification is completed without errors') assert.ok( fetcher.process(job as any, results!) !== undefined, - 'Response should be processed properly' + 'Response should be processed properly', ) // mock storageFetches's enqueue so to not having a hanging storage fetcher diff --git a/packages/client/test/sync/fetcher/blockfetcher.spec.ts b/packages/client/test/sync/fetcher/blockfetcher.spec.ts index f7c7e0f0b9..e21c0adeca 100644 --- a/packages/client/test/sync/fetcher/blockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/blockfetcher.spec.ts @@ -71,7 +71,7 @@ describe('[BlockFetcher]', async () => { assert.equal( fetcher.first + fetcher.count - BigInt(1) === BigInt(15), true, - 'height should now be 15' + 'height should now be 15', ) // Clear fetcher queue for next test of gap when following head @@ -83,7 +83,7 @@ describe('[BlockFetcher]', async () => { assert.equal( (fetcher as any).in.length, 11, - '10 new tasks to catch up to head (1-49, 5 per request), 1 new task for subsequent block numbers (50-51)' + '10 new tasks to catch up to head (1-49, 5 per request), 1 new task for subsequent block numbers (50-51)', ) fetcher.destroy() @@ -104,7 +104,7 @@ describe('[BlockFetcher]', async () => { assert.deepEqual(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') assert.notOk( fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), - 'bad results' + 'bad results', ) }) @@ -213,7 +213,7 @@ describe('[BlockFetcher]', async () => { const shanghaiHeader = BlockHeader.fromHeaderData( { number: 1, withdrawalsRoot: KECCAK256_RLP }, - { common: config.chainCommon, setHardfork: true } + { common: config.chainCommon, setHardfork: true }, ) const task = { count: 1, first: BigInt(1) } @@ -267,7 +267,7 @@ describe('store()', async () => { config.events.on(Event.SYNC_FETCHED_BLOCKS, () => it('should emit fetched blocks event', () => { assert.ok(true, 'store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') - }) + }), ) await fetcher.store([]) }) diff --git a/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts b/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts index ba7e443e72..45cbba8132 100644 --- a/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts @@ -73,7 +73,7 @@ describe('[ByteCodeFetcher]', async () => { assert.deepEqual( (fetcher.process(job, ByteCodeResponse) as any)[0], fullResult[0], - 'got results' + 'got results', ) assert.notOk(fetcher.process({} as any, { ByteCodeResponse: [] } as any), 'bad results') }) diff --git a/packages/client/test/sync/fetcher/fetcher.spec.ts b/packages/client/test/sync/fetcher/fetcher.spec.ts index 8d22c2b351..868a669e4d 100644 --- a/packages/client/test/sync/fetcher/fetcher.spec.ts +++ b/packages/client/test/sync/fetcher/fetcher.spec.ts @@ -44,7 +44,7 @@ it('should handle failure', () => { ;(fetcher as any).running = true fetcher.next = td.func() config.events.on(Event.SYNC_FETCHER_ERROR, (err) => - assert.equal(err.message, 'err0', 'got error') + assert.equal(err.message, 'err0', 'got error'), ) ;(fetcher as any).failure(job as Job, new Error('err0')) assert.equal((fetcher as any).in.length, 1, 'enqueued job') @@ -131,7 +131,7 @@ describe('should re-enqueue on a non-fatal error', () => { 
;(fetcher as any).running = true fetcher.store = td.func() td.when(fetcher.store(td.matchers.anything())).thenReject( - new Error('could not find parent header') + new Error('could not find parent header'), ) td.when(fetcher.processStoreError(td.matchers.anything(), td.matchers.anything())).thenReturn({ destroyFetcher: false, @@ -142,7 +142,7 @@ describe('should re-enqueue on a non-fatal error', () => { it('should step back', () => { assert.ok( (fetcher as any).in.peek().task.first === BigInt(1), - 'should step back for safeReorgDistance' + 'should step back for safeReorgDistance', ) }) }) diff --git a/packages/client/test/sync/fetcher/headerfetcher.spec.ts b/packages/client/test/sync/fetcher/headerfetcher.spec.ts index ed70d029a3..83eb52baca 100644 --- a/packages/client/test/sync/fetcher/headerfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/headerfetcher.spec.ts @@ -25,14 +25,14 @@ describe('[HeaderFetcher]', async () => { assert.deepEqual( fetcher.process( { task: { count: 2 }, peer: 'peer0' } as any, - { headers, bv: BigInt(1) } as any + { headers, bv: BigInt(1) } as any, ), headers as any, - 'got results' + 'got results', ) assert.notOk( fetcher.process({ task: { count: 2 } } as any, { headers: [], bv: BigInt(1) } as any), - 'bad results' + 'bad results', ) expect((fetcher as any).flow.handleReply).toHaveBeenCalledWith('peer0', 1) }) @@ -126,7 +126,7 @@ describe('store()', async () => { config.events.on(Event.SYNC_FETCHED_HEADERS, () => it('should emit event on put headers', () => { assert.ok(true, 'store() emitted SYNC_FETCHED_HEADERS event on putting headers') - }) + }), ) await fetcher.store([1 as any]) }) diff --git a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts index ca63d3c87a..71fde3f121 100644 --- a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts @@ -89,7 +89,7 @@ describe('[ReverseBlockFetcher]', async () => { assert.deepEqual(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') assert.notOk( fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), - 'bad results' + 'bad results', ) }) @@ -197,31 +197,31 @@ describe('[ReverseBlockFetcher]', async () => { }) const block47 = createBlockFromBlockData( { header: { number: BigInt(47), difficulty: BigInt(1) } }, - { setHardfork: true } + { setHardfork: true }, ) const block48 = createBlockFromBlockData( { header: { number: BigInt(48), parentHash: block47.hash(), difficulty: BigInt(1) }, }, - { setHardfork: true } + { setHardfork: true }, ) const block49 = createBlockFromBlockData( { header: { number: BigInt(49), parentHash: block48.hash(), difficulty: BigInt(1) }, }, - { setHardfork: true } + { setHardfork: true }, ) const block4 = createBlockFromBlockData( { header: { number: BigInt(4), difficulty: BigInt(1) }, }, - { setHardfork: true } + { setHardfork: true }, ) const block5 = createBlockFromBlockData( { header: { number: BigInt(5), difficulty: BigInt(1), parentHash: block4.hash() }, }, - { setHardfork: true } + { setHardfork: true }, ) ;(skeleton as any).status.progress.subchains = [ { head: BigInt(100), tail: BigInt(50), next: block49.hash() }, @@ -232,12 +232,12 @@ describe('[ReverseBlockFetcher]', async () => { await fetcher.store([block49, block48]) assert.ok( (skeleton as any).status.progress.subchains.length === 1, - 'subchains should be merged' + 'subchains should be merged', ) assert.equal( (skeleton as 
any).status.progress.subchains[0].tail, BigInt(5), - 'subchain tail should be next segment' + 'subchain tail should be next segment', ) assert.notOk((fetcher as any).running, 'fetcher should stop') assert.equal((fetcher as any).in.length, 0, 'fetcher in should be cleared') @@ -269,7 +269,7 @@ describe('store()', async () => { assert.equal( err.message, `Blocks don't extend canonical subchain`, - 'store() threw on invalid block' + 'store() threw on invalid block', ) const { destroyFetcher, banPeer } = fetcher.processStoreError(err, { first: BigInt(10), @@ -283,7 +283,7 @@ describe('store()', async () => { config.events.on(Event.SYNC_FETCHED_BLOCKS, () => it('should emit event on put blocks', async () => { assert.ok(true, 'store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') - }) + }), ) await fetcher.store([]) }) diff --git a/packages/client/test/sync/fetcher/storagefetcher.spec.ts b/packages/client/test/sync/fetcher/storagefetcher.spec.ts index ef4a920095..a1cb73eac4 100644 --- a/packages/client/test/sync/fetcher/storagefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/storagefetcher.spec.ts @@ -45,10 +45,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0x352a47fc6863b89a6b51890ef3c1550d560886c027141d2058ba1e2d4c66d99a' + '0x352a47fc6863b89a6b51890ef3c1550d560886c027141d2058ba1e2d4c66d99a', ), storageRoot: hexToBytes( - '0x556a482068355939c95a3412bdb21213a301483edb1b64402fb66ac9f3583599' + '0x556a482068355939c95a3412bdb21213a301483edb1b64402fb66ac9f3583599', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -62,10 +62,10 @@ describe('[StorageFetcher]', async () => { fetcher.enqueueByStorageRequestList([ { accountHash: hexToBytes( - '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1' + '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1', ), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -111,10 +111,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1' + '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1', ), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -127,7 +127,7 @@ describe('[StorageFetcher]', async () => { assert.deepEqual( (fetcher.process(job, StorageDataResponse) as any)[0], fullResult[0], - 'got results' + 'got results', ) assert.throws(() => fetcher.process({} as any, { StorageDataResponse: [] } as any)) }) @@ -161,7 +161,7 @@ describe('[StorageFetcher]', async () => { { accountHash: hexToBytes(accountHashString), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(10), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -177,14 +177,14 @@ describe('[StorageFetcher]', async () => { assert.equal( JSON.stringify(fetcher.accountToHighestKnownHash.get(accountHashString)), JSON.stringify(utf8ToBytes(highestReceivedhash)), - 'should set new highest known hash' + 'should set new highest known hash', ) ;(job.task.storageRequests[0] as any).first 
= BigInt(3) ;(job.task.storageRequests[0] as any).count = BigInt(4) const result = (await fetcher.request(job as any)) as any assert.ok( JSON.stringify(result[0]) === JSON.stringify({ skipped: true }), - 'should skip fetching task with limit lower than highest known key hash' + 'should skip fetching task with limit lower than highest known key hash', ) StorageDataResponse.completed = true @@ -192,7 +192,7 @@ describe('[StorageFetcher]', async () => { assert.equal( fetcher.accountToHighestKnownHash.get(accountHashString), undefined, - 'should delete highest known hash for completed job' + 'should delete highest known hash for completed job', ) }) @@ -215,10 +215,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1' + '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1', ), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -265,10 +265,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276' + '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276', ), storageRoot: hexToBytes( - '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121' + '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -278,7 +278,7 @@ describe('[StorageFetcher]', async () => { const resData = RLP.decode(hexToBytes(_storageRangesRLP)) as unknown const res = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - resData + resData, ) const { reqId, slots, proof } = res const mockedGetStorageRanges = vi.fn((input) => { @@ -353,7 +353,7 @@ describe('[StorageFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === true, - 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right' + 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right', ) }) @@ -397,7 +397,7 @@ describe('[StorageFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === undefined, - 'proof verification should fail if elements still remain to the right of the proof' + 'proof verification should fail if elements still remain to the right of the proof', ) }) @@ -422,10 +422,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276' + '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276', ), storageRoot: hexToBytes( - '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121' + '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -435,7 +435,7 @@ describe('[StorageFetcher]', async () => { const resData = RLP.decode(hexToBytes(_storageRangesRLP)) as unknown const res = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - resData + resData, ) const { reqId, slots, proof } = res const mockedGetStorageRanges = vi.fn().mockReturnValueOnce({ @@ -473,10 +473,10 @@ 
describe('[StorageFetcher]', async () => { const accResData = RLP.decode(hexToBytes(_accountRangeRLP)) as unknown const { proof: proofInvalid } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - accResData + accResData, ) const dummyStorageRoot = hexToBytes( - '0x39ed8daab7679c0b1b7cf3667c50108185d4d9d1431c24a1c35f696a58277f8f' + '0x39ed8daab7679c0b1b7cf3667c50108185d4d9d1431c24a1c35f696a58277f8f', ) const dummyOrigin = new Uint8Array(32) try { @@ -488,7 +488,7 @@ describe('[StorageFetcher]', async () => { } catch (e) { assert.ok( true, - `verifyRangeProof correctly failed on invalid proof, Error: ${(e as Error).message}` + `verifyRangeProof correctly failed on invalid proof, Error: ${(e as Error).message}`, ) } @@ -497,7 +497,7 @@ describe('[StorageFetcher]', async () => { await fetcher.store([Object.create(null)] as any) assert.ok( fetcher['destroyWhenDone'] === false, - 'should still be open to enqueue and process new requests' + 'should still be open to enqueue and process new requests', ) fetcher.setDestroyWhenDone() assert.ok(fetcher['destroyWhenDone'] === true, 'should mark to close on finished') diff --git a/packages/client/test/sync/fetcher/trienodefetcher.spec.ts b/packages/client/test/sync/fetcher/trienodefetcher.spec.ts index 3f54b44b5e..64303e1d69 100644 --- a/packages/client/test/sync/fetcher/trienodefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/trienodefetcher.spec.ts @@ -44,7 +44,7 @@ describe('[TrieNodeFetcher]', async () => { assert.equal( (fetcher as any).pathToNodeRequestData.length, 1, - 'one node request has been added' + 'one node request has been added', ) void fetcher.fetch() @@ -77,7 +77,7 @@ describe('[TrieNodeFetcher]', async () => { assert.deepEqual( (fetcher.process(job, NodeDataResponse) as any)[0], fullResult[0], - 'got results' + 'got results', ) assert.notOk(fetcher.process({} as any, { NodeDataResponse: [] } as any), 'bad results') }) @@ -135,7 +135,7 @@ describe('[TrieNodeFetcher]', async () => { fetcher.requestedNodeToPath = new Map() fetcher.requestedNodeToPath.set( '9100b295173da75cf0f160214e47b480abc2c9d2fe11330fe8befa69aac69656', - '' + '', ) const resData = RLP.decode(hexToBytes(_trieNodesRLP)) as unknown @@ -160,7 +160,7 @@ describe('[TrieNodeFetcher]', async () => { assert.equal( requestResult[0][0], res.nodes[0], - 'Request phase should cross-validate received nodes with requested nodes' + 'Request phase should cross-validate received nodes with requested nodes', ) await fetcher.store(requestResult) @@ -170,7 +170,7 @@ describe('[TrieNodeFetcher]', async () => { assert.equal( children.length, fetcher.pathToNodeRequestData.length, - 'Should generate requests for all child nodes' + 'Should generate requests for all child nodes', ) }) it('should not throw if undefined', async () => { diff --git a/packages/client/test/sync/fullsync.spec.ts b/packages/client/test/sync/fullsync.spec.ts index 5dd4c104cf..f6acbb3da3 100644 --- a/packages/client/test/sync/fullsync.spec.ts +++ b/packages/client/test/sync/fullsync.spec.ts @@ -135,7 +135,7 @@ describe('[FullSynchronizer]', async () => { txPool, execution, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -247,7 +247,7 @@ describe('[FullSynchronizer]', async () => { chain.putBlocks = vi.fn((input) => { assert.ok( JSON.stringify(input) === JSON.stringify([newBlock]), - 'putBlocks is called as expected' + 'putBlocks is called as expected', ) }) as any 
// NewBlock message from Peer 3 diff --git a/packages/client/test/sync/lightsync.spec.ts b/packages/client/test/sync/lightsync.spec.ts index 67cfc6c4de..b75f4eaff6 100644 --- a/packages/client/test/sync/lightsync.spec.ts +++ b/packages/client/test/sync/lightsync.spec.ts @@ -75,7 +75,7 @@ describe('[LightSynchronizer]', async () => { pool, chain, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -118,7 +118,7 @@ describe('sync errors', async () => { pool, chain, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -130,7 +130,7 @@ describe('sync errors', async () => { } as any) td.when(HeaderFetcher.prototype.fetch()).thenResolve(true) td.when(HeaderFetcher.prototype.fetch()).thenDo(() => - config.events.emit(Event.SYNC_FETCHED_HEADERS, [] as BlockHeader[]) + config.events.emit(Event.SYNC_FETCHED_HEADERS, [] as BlockHeader[]), ) config.logger.on('data', async (data) => { if ((data.message as string).includes('No headers fetched are applicable for import')) { @@ -168,7 +168,7 @@ describe('import headers', () => { pool, chain, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -180,7 +180,7 @@ describe('import headers', () => { } as any) td.when(HeaderFetcher.prototype.fetch()).thenResolve(true) td.when(HeaderFetcher.prototype.fetch()).thenDo(() => - config.events.emit(Event.SYNC_FETCHED_HEADERS, [BlockHeader.fromHeaderData({})]) + config.events.emit(Event.SYNC_FETCHED_HEADERS, [BlockHeader.fromHeaderData({})]), ) config.logger.on('data', async (data) => { if ((data.message as string).includes('Imported headers count=1')) { diff --git a/packages/client/test/sync/skeleton.spec.ts b/packages/client/test/sync/skeleton.spec.ts index 8220a98316..3903edbbbe 100644 --- a/packages/client/test/sync/skeleton.spec.ts +++ b/packages/client/test/sync/skeleton.spec.ts @@ -22,19 +22,19 @@ const common = new Common({ chain: 1 }) const block49 = createBlockFromBlockData({ header: { number: 49 } }, { common }) const block49B = createBlockFromBlockData( { header: { number: 49, extraData: utf8ToBytes('B') } }, - { common } + { common }, ) const block50 = createBlockFromBlockData( { header: { number: 50, parentHash: block49.hash() } }, - { common } + { common }, ) const block50B = createBlockFromBlockData( { header: { number: 50, parentHash: block49.hash(), gasLimit: 999 } }, - { common } + { common }, ) const block51 = createBlockFromBlockData( { header: { number: 51, parentHash: block50.hash() } }, - { common } + { common }, ) describe('[Skeleton]/ startup scenarios ', () => { @@ -252,17 +252,17 @@ describe('[Skeleton] / initSync', async () => { const { progress } = skeleton['status'] if (progress.subchains.length !== testCase.newState.length) { assert.fail( - `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}` + `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}`, ) } for (const [i, subchain] of progress.subchains.entries()) { if (subchain.head !== testCase.newState[i].head) { assert.fail( - `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}` + `test ${testCaseIndex}: subchain head 
mismatch: have ${subchain.head}, want ${testCase.newState[i].head}`, ) } else if (subchain.tail !== testCase.newState[i].tail) { assert.fail( - `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}` + `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}`, ) } else { assert.ok(true, `test ${testCaseIndex}: subchain[${i}] matched`) @@ -376,7 +376,7 @@ describe('[Skeleton] / setHead', async () => { assert.ok(true, `test ${testCaseIndex}: passed with correct error`) } else { assert.fail( - `test ${testCaseIndex}: received wrong error expected=${testCase.err?.message} actual=${error.message}` + `test ${testCaseIndex}: received wrong error expected=${testCase.err?.message} actual=${error.message}`, ) } } @@ -384,17 +384,17 @@ describe('[Skeleton] / setHead', async () => { const { progress } = skeleton['status'] if (progress.subchains.length !== testCase.newState.length) { assert.fail( - `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}` + `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}`, ) } for (const [i, subchain] of progress.subchains.entries()) { if (subchain.head !== testCase.newState[i].head) { assert.fail( - `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}` + `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}`, ) } else if (subchain.tail !== testCase.newState[i].tail) { assert.fail( - `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}` + `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}`, ) } else { assert.ok(true, `test ${testCaseIndex}: subchain[${i}] matched`) @@ -437,15 +437,15 @@ describe('[Skeleton] / setHead', async () => { const genesis = await chain.getBlock(BigInt(0)) const block1 = createBlockFromBlockData( { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block2 = createBlockFromBlockData( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block3 = createBlockFromBlockData( { header: { number: 3, difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.open() @@ -463,12 +463,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains.length, 1, - 'trivial subchain0 should have been created' + 'trivial subchain0 should have been created', ) assert.equal( skeleton['status'].progress.subchains[0]!.head, BigInt(0), - 'trivial subchain0 should have been created' + 'trivial subchain0 should have been created', ) try { @@ -484,12 +484,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains.length, 1, - 'trivial subchain should have been created' + 'trivial subchain should have been created', ) assert.equal( skeleton['status'].progress.subchains[0]!.head, BigInt(0), - 'trivial subchain0 should have been created' + 'trivial subchain0 should have been created', ) reorg = await skeleton.setHead(block1, true) @@ -497,12 +497,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( 
skeleton['status'].progress.subchains.length, 1, - 'subchain should have been created' + 'subchain should have been created', ) assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(1), - 'head should be set to first block' + 'head should be set to first block', ) assert.equal(skeleton.isLinked(), true, 'subchain status should be linked') @@ -512,7 +512,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(2), - 'head should be set to first block' + 'head should be set to first block', ) assert.equal(skeleton.isLinked(), true, 'subchain status should stay linked') @@ -523,7 +523,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(2), - 'head should be set to second block' + 'head should be set to second block', ) assert.equal(skeleton.isLinked(), true, 'subchain status should stay linked') @@ -534,7 +534,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(3), - 'head should be set to third block' + 'head should be set to third block', ) assert.equal(skeleton.isLinked(), false, 'subchain status should not be linked anymore') }) @@ -549,23 +549,23 @@ describe('[Skeleton] / setHead', async () => { const genesis = await chain.getBlock(BigInt(0)) const block1 = createBlockFromBlockData( { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block2 = createBlockFromBlockData( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block3 = createBlockFromBlockData( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block4 = createBlockFromBlockData( { header: { number: 4, parentHash: block3.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block5 = createBlockFromBlockData( { header: { number: 5, parentHash: block4.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.open() @@ -578,14 +578,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should update after being linked' + 'canonical height should update after being linked', ) await skeleton.setHead(block5, false) await wait(200) assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should not change when setHead is set with force=false' + 'canonical height should not change when setHead is set with force=false', ) await skeleton.setHead(block5, true) await skeleton.blockingFillWithCutoff(10) @@ -594,7 +594,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(5), - 'canonical height should change when setHead is set with force=true' + 'canonical height should change when setHead is set with force=true', ) // unlink the skeleton for the below check to check all blocks cleared @@ -603,14 +603,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( (await skeleton.getBlock(block.header.number, true))?.hash(), undefined, - `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` + `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain`, ) 
assert.equal( (await skeleton.getBlockByHash(block.hash(), true))?.hash(), undefined, `skeleton block hash=${short( - block.hash() - )} should be cleaned up after filling canonical chain` + block.hash(), + )} should be cleaned up after filling canonical chain`, ) } }) @@ -628,23 +628,23 @@ describe('[Skeleton] / setHead', async () => { const block1 = createBlockFromBlockData( { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block2 = createBlockFromBlockData( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block3 = createBlockFromBlockData( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block4 = createBlockFromBlockData( { header: { number: 4, parentHash: block3.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block5 = createBlockFromBlockData( { header: { number: 5, parentHash: block4.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await chain.putBlocks([block1, block2]) @@ -655,14 +655,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should update after being linked' + 'canonical height should update after being linked', ) await skeleton.setHead(block5, false) await wait(200) assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should not change when setHead with force=false' + 'canonical height should not change when setHead with force=false', ) // test sethead and blockingFillWithCutoff true via forkchoice update @@ -672,7 +672,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(5), - 'canonical height should change when setHead with force=true' + 'canonical height should change when setHead with force=true', ) // unlink the skeleton for the below check to check all blocks cleared @@ -682,14 +682,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( (await skeleton.getBlock(block.header.number, true))?.hash(), undefined, - `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` + `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain`, ) assert.equal( (await skeleton.getBlockByHash(block.hash(), true))?.hash(), undefined, `skeleton block hash=${short( - block.hash() - )} should be cleaned up after filling canonical chain` + block.hash(), + )} should be cleaned up after filling canonical chain`, ) } // restore linkedStatus @@ -697,15 +697,15 @@ describe('[Skeleton] / setHead', async () => { const block41 = createBlockFromBlockData( { header: { number: 4, parentHash: block3.hash(), difficulty: 101 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block51 = createBlockFromBlockData( { header: { number: 5, parentHash: block41.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block61 = createBlockFromBlockData( { header: { number: 6, parentHash: block51.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.setHead(block41, false) @@ -716,27 +716,27 @@ describe('[Skeleton] / setHead', async () => { assert.equal( 
skeleton['status'].progress.subchains[0]?.head, BigInt(6), - 'head should be correct' + 'head should be correct', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(4), - 'tail should be backfilled' + 'tail should be backfilled', ) assert.equal(skeleton['status'].linked, true, 'should be linked') assert.equal(chain.blocks.height, BigInt(6), 'all blocks should be in chain') const block71 = createBlockFromBlockData( { header: { number: 7, parentHash: block61.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block81 = createBlockFromBlockData( { header: { number: 8, parentHash: block71.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block91 = createBlockFromBlockData( { header: { number: 9, parentHash: block81.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) // lets jump ahead and add the block 81 and 71 with annoucements and trigger tryTailBackfill @@ -745,12 +745,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(9), - 'head should be correct' + 'head should be correct', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(9), - 'new subchain should be created' + 'new subchain should be created', ) await skeleton.setHead(block81, false) await skeleton.setHead(block71, false) @@ -759,12 +759,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(9), - 'head should be correct' + 'head should be correct', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(7), - 'tail should be backfilled' + 'tail should be backfilled', ) assert.equal(skeleton['status'].linked, true, 'should be linked') // async wait needed here so the async fillCanonicalChain can fill the chain @@ -773,34 +773,34 @@ describe('[Skeleton] / setHead', async () => { assert.equal( equalsBytes(chain.blocks.latest!.hash(), block91.hash()), true, - 'correct head hash' + 'correct head hash', ) // do a very common reorg that happens in a network: reorged head block const block92 = createBlockFromBlockData( { header: { number: 9, parentHash: block81.hash(), difficulty: 101 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block102 = createBlockFromBlockData( { header: { number: 10, parentHash: block92.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.forkchoiceUpdate(block92) assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(9), - 'head number should be same' + 'head number should be same', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(9), - 'tail should be truncated to head' + 'tail should be truncated to head', ) assert.equal( equalsBytes(chain.blocks.latest!.hash(), block92.hash()), true, - 'correct reorged head hash' + 'correct reorged head hash', ) // should be able to build on top of the next block @@ -808,7 +808,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( equalsBytes(chain.blocks.latest!.hash(), block102.hash()), true, - 'continue reorged chain' + 'continue reorged chain', ) }) @@ -838,31 +838,31 @@ describe('[Skeleton] / setHead', async () => { const block1 = createBlockFromBlockData( { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } + { common }, ) const block2 = createBlockFromBlockData( { 
header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } + { common }, ) const block3PoW = createBlockFromBlockData( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common } + { common }, ) const block3PoS = createBlockFromBlockData( { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: BigInt(200) }, ) const block4InvalidPoS = createBlockFromBlockData( { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: BigInt(200) }, ) const block4PoS = createBlockFromBlockData( { header: { number: 4, parentHash: block3PoS.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: BigInt(200) }, ) const block5 = createBlockFromBlockData( { header: { number: 5, parentHash: block4PoS.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: BigInt(200) }, ) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) @@ -876,7 +876,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(2), - 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' + 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)', ) try { await skeleton.setHead(block5, false) @@ -889,7 +889,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(2), - 'canonical height should not change when setHead is set with force=false' + 'canonical height should not change when setHead is set with force=false', ) // Put correct chain await skeleton.initSync(block4PoS) @@ -904,12 +904,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should now be at head with correct chain' + 'canonical height should now be at head with correct chain', ) const latestHash = chain.headers.latest?.hash() assert.ok( latestHash !== undefined && equalsBytes(latestHash, block4PoS.hash()), - 'canonical height should now be at head with correct chain' + 'canonical height should now be at head with correct chain', ) await skeleton.setHead(block5, true) await wait(200) @@ -943,19 +943,19 @@ describe('[Skeleton] / setHead', async () => { const block1 = createBlockFromBlockData( { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } + { common }, ) const block2 = createBlockFromBlockData( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } + { common }, ) const block3PoW = createBlockFromBlockData( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common } + { common }, ) const block4InvalidPoS = createBlockFromBlockData( { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, - { common, setHardfork: 200 } + { common, setHardfork: 200 }, ) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) @@ -969,12 +969,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(2), - 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' + 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)', ) assert.equal( skeleton['status'].progress.subchains[0].tail, BigInt(1), - `Subchain should have been backstepped 
to 1` + `Subchain should have been backstepped to 1`, ) }) @@ -1005,7 +1005,7 @@ describe('[Skeleton] / setHead', async () => { throw Error( `Invalid header difficulty=${ block.header.difficulty - } for consensus=${block.header.common.consensusType()}` + } for consensus=${block.header.common.consensusType()}`, ) } } @@ -1018,19 +1018,19 @@ describe('[Skeleton] / setHead', async () => { const block1 = createBlockFromBlockData( { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } + { common }, ) const block2 = createBlockFromBlockData( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } + { common }, ) const block2PoS = createBlockFromBlockData( { header: { number: 2, parentHash: block1.hash(), difficulty: 0 } }, - { common } + { common }, ) const block3 = createBlockFromBlockData( { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, - { common } + { common }, ) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) @@ -1043,7 +1043,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(1), - 'canonical height should stop at block 1 (valid PoW block), since block 2 is invalid (invalid PoS, not past ttd)' + 'canonical height should stop at block 1 (valid PoW block), since block 2 is invalid (invalid PoS, not past ttd)', ) // Put correct chain await skeleton.initSync(block3) @@ -1058,12 +1058,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(3), - 'canonical height should now be at head with correct chain' + 'canonical height should now be at head with correct chain', ) const latestHash = chain.headers.latest?.hash() assert.ok( latestHash !== undefined && equalsBytes(latestHash, block3.hash()), - 'canonical height should now be at head with correct chain' + 'canonical height should now be at head with correct chain', ) BlockHeader.prototype['_consensusFormatValidation'] = originalValidate diff --git a/packages/client/test/sync/txpool.spec.ts b/packages/client/test/sync/txpool.spec.ts index 21240417e9..a8cabaa964 100644 --- a/packages/client/test/sync/txpool.spec.ts +++ b/packages/client/test/sync/txpool.spec.ts @@ -107,7 +107,7 @@ const handleTxs = async ( txs: any[], failMessage: string, stateManager?: DefaultStateManager, - pool?: TxPool + pool?: TxPool, ) => { if (pool === undefined) { pool = setup().pool @@ -134,7 +134,7 @@ const handleTxs = async ( await pool.handleAnnouncedTxHashes( validTxs.map((e) => e.hash()), peer, - peerPool + peerPool, ) await pool.add(txs[txs.length - 1]) @@ -262,13 +262,13 @@ describe('[TxPool]', async () => { assert.equal( (pool as any).knownByPeer.size, 2, - 'known tx hashes size 2 (entries for both peers)' + 'known tx hashes size 2 (entries for both peers)', ) assert.equal((pool as any).knownByPeer.get(peer.id).length, 1, 'one tx added for peer 1') assert.equal( (pool as any).knownByPeer.get(peer.id)[0].hash, bytesToUnprefixedHex(txA01.hash()), - 'new known tx hashes entry for announcing peer' + 'new known tx hashes entry for announcing peer', ) const txs = pool.getByHash([txA01.hash()]) @@ -276,7 +276,7 @@ describe('[TxPool]', async () => { assert.equal( bytesToHex(txs[0].serialize()), bytesToHex(txA01.serialize()), - 'should get correct tx by hash' + 'should get correct tx by hash', ) // check if transaction added in metrics @@ -294,7 +294,7 @@ describe('[TxPool]', async () => { assert.equal( feeMarketEip1559TransactionCountInPool, pool.pool.size, - 'pool should contain 
single eip 1559 transaction' + 'pool should contain single eip 1559 transaction', ) pool.pool.clear() @@ -303,12 +303,12 @@ describe('[TxPool]', async () => { assert.equal( (pool as any).knownByPeer.get(peer.id).length, 1, - 'should add tx only once to known tx hashes' + 'should add tx only once to known tx hashes', ) assert.equal( (pool as any).knownByPeer.size, 2, - 'known tx hashes size 2 (entries for both peers)' + 'known tx hashes size 2 (entries for both peers)', ) pool.stop() @@ -329,7 +329,7 @@ describe('[TxPool]', async () => { assert.equal( res['hashes'].length, TX_RETRIEVAL_LIMIT, - 'should limit to TX_RETRIEVAL_LIMIT' + 'should limit to TX_RETRIEVAL_LIMIT', ) return [null, []] }, @@ -431,7 +431,7 @@ describe('[TxPool]', async () => { } catch (e: any) { assert.ok( e.message.includes('replacement gas too low'), - 'successfully failed adding underpriced txn' + 'successfully failed adding underpriced txn', ) const poolObject = pool['handled'].get(bytesToUnprefixedHex(txA02_Underpriced.hash())) assert.equal(poolObject?.error, e, 'should have an errored poolObject') @@ -444,7 +444,7 @@ describe('[TxPool]', async () => { assert.equal( (pool as any).knownByPeer.get(peer2.id)[0]?.error?.message, 'NewPooledTransactionHashes', - 'should have errored sendObject for NewPooledTransactionHashes broadcast' + 'should have errored sendObject for NewPooledTransactionHashes broadcast', ) const address = bytesToUnprefixedHex(A.address) const poolContent = pool.pool.get(address)! @@ -492,7 +492,7 @@ describe('[TxPool]', async () => { for (let account = 0; account < 51; account++) { const pkey = concatBytes( hexToBytes(`0x${'aa'.repeat(31)}`), - hexToBytes(`0x${account.toString(16).padStart(2, '0')}`) + hexToBytes(`0x${account.toString(16).padStart(2, '0')}`), ) const from = { address: privateToAddress(pkey), @@ -523,7 +523,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'already have max amount of txs for this account'), - 'successfully rejected too many txs from same account' + 'successfully rejected too many txs from same account', ) }) @@ -534,12 +534,12 @@ describe('[TxPool]', async () => { create1559FeeMarketTx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, - }) + }), ) assert.notOk( await handleTxs(txs, 'Cannot call hash method if transaction is not signed'), - 'successfully rejected unsigned tx' + 'successfully rejected unsigned tx', ) }) @@ -551,14 +551,14 @@ describe('[TxPool]', async () => { maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'tx nonce too low', { getAccount: () => new Account(BigInt(1), BigInt('50000000000000000000')), } as any), - 'successfully rejected tx with invalid nonce' + 'successfully rejected tx with invalid nonce', ) }) @@ -574,15 +574,15 @@ describe('[TxPool]', async () => { nonce: 0, data: `0x${'00'.repeat(128 * 1024 + 1)}`, }, - { common } - ).sign(A.privateKey) + { common }, + ).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'exceeds the max data size', { getAccount: () => new Account(BigInt(0), BigInt('50000000000000000000000')), } as any), - 'successfully rejected tx with too much data' + 'successfully rejected tx with too much data', ) }) @@ -595,14 +595,14 @@ describe('[TxPool]', async () => { maxPriorityFeePerGas: 1000000000, gasLimit: 21000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'insufficient balance', { getAccount: () => new 
Account(BigInt(0), BigInt('0')), } as any), - 'successfully rejected account with too low balance' + 'successfully rejected account with too low balance', ) }) @@ -614,7 +614,7 @@ describe('[TxPool]', async () => { maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) const { pool } = setup() @@ -625,7 +625,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'not within 50% range of current basefee', undefined, pool), - 'successfully rejected tx with too low gas price' + 'successfully rejected tx with too low gas price', ) }) @@ -638,7 +638,7 @@ describe('[TxPool]', async () => { maxPriorityFeePerGas: 1000000000, nonce: 0, gasLimit: 21000, - }).sign(A.privateKey) + }).sign(A.privateKey), ) const { pool } = setup() @@ -649,7 +649,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'exceeds last block gas limit', undefined, pool), - 'successfully rejected tx which has gas limit higher than block gas limit' + 'successfully rejected tx which has gas limit higher than block gas limit', ) }) @@ -660,7 +660,7 @@ describe('[TxPool]', async () => { create1559FeeMarketTx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, - }).sign(A.privateKey) + }).sign(A.privateKey), ) txs.push(txs[0]) @@ -669,7 +669,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'this transaction is already in the TxPool', undefined, pool), - 'successfully rejected tx which is already in pool' + 'successfully rejected tx which is already in pool', ) }) @@ -681,12 +681,12 @@ describe('[TxPool]', async () => { maxFeePerGas: 10000000, maxPriorityFeePerGas: 10000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'does not pay the minimum gas price of'), - 'successfully rejected tx with too low gas price' + 'successfully rejected tx with too low gas price', ) }) @@ -697,12 +697,12 @@ describe('[TxPool]', async () => { create2930AccessListTx({ gasPrice: 10000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'does not pay the minimum gas price of'), - 'successfully rejected tx with too low gas price' + 'successfully rejected tx with too low gas price', ) }) @@ -716,7 +716,7 @@ describe('[TxPool]', async () => { }, { freeze: false, - } + }, ).sign(A.privateKey) Object.defineProperty(tx, 'type', { get: () => 5 }) @@ -836,17 +836,17 @@ describe('[TxPool]', async () => { assert.equal( pool.pool.size, 2, - 'should not remove txs from pool (POOLED_STORAGE_TIME_LIMIT within range)' + 'should not remove txs from pool (POOLED_STORAGE_TIME_LIMIT within range)', ) assert.equal( (pool as any).knownByPeer.size, 1, - 'should not remove txs from known by peer map (POOLED_STORAGE_TIME_LIMIT within range)' + 'should not remove txs from known by peer map (POOLED_STORAGE_TIME_LIMIT within range)', ) assert.equal( (pool as any).handled.size, 2, - 'should not remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT within range)' + 'should not remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT within range)', ) const address = txB01.getSenderAddress().toString().slice(2) @@ -868,17 +868,17 @@ describe('[TxPool]', async () => { assert.equal( pool.pool.size, 1, - 'should remove txs from pool (POOLED_STORAGE_TIME_LIMIT before range)' + 'should remove txs from pool (POOLED_STORAGE_TIME_LIMIT before range)', ) assert.equal( (pool as any).knownByPeer.get(peer.id).length, 1, - 'should remove one tx from known by peer 
map (POOLED_STORAGE_TIME_LIMIT before range)' + 'should remove one tx from known by peer map (POOLED_STORAGE_TIME_LIMIT before range)', ) assert.equal( (pool as any).handled.size, 1, - 'should remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT before range)' + 'should remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT before range)', ) pool.stop() diff --git a/packages/client/test/util/parse.spec.ts b/packages/client/test/util/parse.spec.ts index 717b60a01b..5069c7dbc6 100644 --- a/packages/client/test/util/parse.spec.ts +++ b/packages/client/test/util/parse.spec.ts @@ -9,37 +9,37 @@ describe('[Util/Parse]', () => { assert.deepEqual( parseMultiaddrs('10.0.0.1:1234'), [multiaddr('/ip4/10.0.0.1/tcp/1234')], - 'parse ip:port' + 'parse ip:port', ) assert.deepEqual( parseMultiaddrs('enode://abc@10.0.0.1:1234'), [multiaddr('/ip4/10.0.0.1/tcp/1234')], - 'parse url' + 'parse url', ) assert.deepEqual( parseMultiaddrs('/ip4/1.1.1.1/tcp/50507/ws'), [multiaddr('/ip4/1.1.1.1/tcp/50507/ws')], - 'parse multiaddr' + 'parse multiaddr', ) assert.deepEqual( parseMultiaddrs( - '/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa' + '/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa', ), [multiaddr('/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa')], - 'parse multiaddr with peer id' + 'parse multiaddr with peer id', ) assert.deepEqual( parseMultiaddrs( - '10.0.0.1:1234,enode://343149e4feefa15d882d9fe4ac7d88f885bd05ebb735e547f12e12080a9fa07c8014ca6fd7f373123488102fe5e34111f8509cf0b7de3f5b44339c9f25e87cb8@127.0.0.1:2345' + '10.0.0.1:1234,enode://343149e4feefa15d882d9fe4ac7d88f885bd05ebb735e547f12e12080a9fa07c8014ca6fd7f373123488102fe5e34111f8509cf0b7de3f5b44339c9f25e87cb8@127.0.0.1:2345', ), [multiaddr('/ip4/10.0.0.1/tcp/1234'), multiaddr('/ip4/127.0.0.1/tcp/2345')], - 'parse multiple' + 'parse multiple', ) assert.throws(() => parseMultiaddrs(10 as any), /not a function/, 'throws error') assert.deepEqual( parseMultiaddrs('[2607:f8b0:4003:c00::6a]:5678'), [multiaddr('/ip6/2607:f8b0:4003:c00::6a/tcp/5678')], - 'parse ipv6 multiaddr' + 'parse ipv6 multiaddr', ) }) }) diff --git a/packages/client/test/util/rpc.spec.ts b/packages/client/test/util/rpc.spec.ts index ac36366d7c..6dc89c6f5a 100644 --- a/packages/client/test/util/rpc.spec.ts +++ b/packages/client/test/util/rpc.spec.ts @@ -54,7 +54,7 @@ describe('[Util/RPC]', () => { assert.ok( httpServer !== undefined && wsServer !== undefined, - 'should return http and ws servers' + 'should return http and ws servers', ) } } @@ -81,7 +81,7 @@ describe('[Util/RPC]', () => { }) assert.ok( httpServer !== undefined && wsServer !== undefined, - 'should return http and ws servers' + 'should return http and ws servers', ) }) }) diff --git a/packages/client/test/util/wasmCrypto.spec.ts b/packages/client/test/util/wasmCrypto.spec.ts index 59b0502a2a..3ffb775eaa 100644 --- a/packages/client/test/util/wasmCrypto.spec.ts +++ b/packages/client/test/util/wasmCrypto.spec.ts @@ -29,14 +29,14 @@ describe('WASM crypto tests', () => { v: bigint, r: Uint8Array, s: Uint8Array, - chainID?: bigint + chainID?: bigint, ) => secp256k1Expand( secp256k1Recover( msgHash, concatBytes(setLengthLeft(r, 32), setLengthLeft(s, 32)), - Number(calculateSigRecovery(v, chainID)) - ) + Number(calculateSigRecovery(v, chainID)), + ), ).slice(1) await waitReady() @@ -90,7 +90,7 @@ describe('WASM crypto tests', () => { assert.deepEqual(wasmSig, jsSig, 'wasm signatures produce same result as js signatures') assert.throws( () => 
wasmSign(randomBytes(31), randomBytes(32)),
-      'message length must be 32 bytes or greater'
+      'message length must be 32 bytes or greater',
     )
   })
   it('should have the same signature and verification', async () => {
diff --git a/packages/client/tsconfig.eslint.json b/packages/client/tsconfig.eslint.json
deleted file mode 100644
index e2b5df7d39..0000000000
--- a/packages/client/tsconfig.eslint.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "include": ["webpack.config.js"]
-}
diff --git a/packages/client/tsconfig.lint.json b/packages/client/tsconfig.lint.json
new file mode 100644
index 0000000000..3698f4f0be
--- /dev/null
+++ b/packages/client/tsconfig.lint.json
@@ -0,0 +1,3 @@
+{
+  "extends": "../../config/tsconfig.lint.json"
+}
diff --git a/packages/common/.eslintrc.cjs b/packages/common/.eslintrc.cjs
index 80869b21ea..ed6ce7f539 100644
--- a/packages/common/.eslintrc.cjs
+++ b/packages/common/.eslintrc.cjs
@@ -1 +1,15 @@
-module.exports = require('../../config/eslint.cjs')
+module.exports = {
+  extends: '../../config/eslint.cjs',
+  parserOptions: {
+    project: ['./tsconfig.lint.json'],
+  },
+  overrides: [
+    {
+      files: ['examples/**/*'],
+      rules: {
+        'no-console': 'off',
+        '@typescript-eslint/no-unused-vars': 'off',
+      },
+    },
+  ],
+  }
\ No newline at end of file
diff --git a/packages/common/examples/common.ts b/packages/common/examples/common.ts
index 4e2ac82506..272f3005a0 100644
--- a/packages/common/examples/common.ts
+++ b/packages/common/examples/common.ts
@@ -1,4 +1,4 @@
-import { Chain, Common, createCustomCommon, Hardfork } from '@ethereumjs/common'
+import { Chain, Common, Hardfork, createCustomCommon } from '@ethereumjs/common'
 
 // With enums:
 const commonWithEnums = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London })
diff --git a/packages/common/examples/customChain.ts b/packages/common/examples/customChain.ts
index 44a89a547b..9344638359 100644
--- a/packages/common/examples/customChain.ts
+++ b/packages/common/examples/customChain.ts
@@ -1,4 +1,5 @@
 import { Common } from '@ethereumjs/common'
+
 import myCustomChain1 from './genesisData/testnet.json'
 
 // Add custom chain config
diff --git a/packages/common/examples/customChains.ts b/packages/common/examples/customChains.ts
index f051611dd8..4d492f2ab9 100644
--- a/packages/common/examples/customChains.ts
+++ b/packages/common/examples/customChains.ts
@@ -1,4 +1,5 @@
 import { Common } from '@ethereumjs/common'
+
 import myCustomChain1 from './genesisData/testnet.json'
 import myCustomChain2 from './genesisData/testnet2.json'
 // Add two custom chains, initial mainnet activation
diff --git a/packages/common/examples/customCrypto.ts b/packages/common/examples/customCrypto.ts
index 60668a71d2..3b1c601963 100644
--- a/packages/common/examples/customCrypto.ts
+++ b/packages/common/examples/customCrypto.ts
@@ -1,6 +1,6 @@
-import { keccak256, waitReady } from '@polkadot/wasm-crypto'
+import { createBlockFromBlockData } from '@ethereumjs/block'
 import { Chain, Common } from '@ethereumjs/common'
-import { Block, createBlockFromBlockData } from '@ethereumjs/block'
+import { keccak256, waitReady } from '@polkadot/wasm-crypto'
 
 const main = async () => {
   // @polkadot/wasm-crypto specific initialization
@@ -14,4 +14,4 @@
   console.log(block.hash())
 }
 
-main()
+void main()
diff --git a/packages/common/examples/fromGeth.ts b/packages/common/examples/fromGeth.ts
index 2164390404..af3828e4e4 100644
--- a/packages/common/examples/fromGeth.ts
+++ b/packages/common/examples/fromGeth.ts
@@ -1,4 +1,4 @@
-import { Common, createCommonFromGethGenesis } from '@ethereumjs/common'
+import { createCommonFromGethGenesis } from '@ethereumjs/common'
 import { hexToBytes } from '@ethereumjs/util'
 
 import genesisJson from './genesisData/post-merge.json'
diff --git a/packages/common/examples/initKzg.ts b/packages/common/examples/initKzg.ts
index 4d6ecdcdf7..e3c72a889a 100644
--- a/packages/common/examples/initKzg.ts
+++ b/packages/common/examples/initKzg.ts
@@ -1,5 +1,5 @@
+import { Chain, Common, Hardfork } from '@ethereumjs/common'
 import { loadKZG } from 'kzg-wasm'
-import { Common, Chain, Hardfork } from '@ethereumjs/common'
 
 const main = async () => {
   const kzg = await loadKZG()
@@ -11,4 +11,4 @@
   console.log(common.customCrypto.kzg) // Should print the initialized KZG interface
 }
 
-main()
+void main()
diff --git a/packages/common/src/common.ts b/packages/common/src/common.ts
index 2e4aadba31..1d25c0db9b 100644
--- a/packages/common/src/common.ts
+++ b/packages/common/src/common.ts
@@ -101,7 +101,7 @@ export class Common {
     } else if (typeof chain === 'object') {
       if (this._customChains.length > 0) {
         throw new Error(
-          'Chain must be a string, number, or bigint when initialized with customChains passed in'
+          'Chain must be a string, number, or bigint when initialized with customChains passed in',
         )
       }
       const required = ['chainId', 'genesis', 'hardforks', 'bootstrapNodes']
@@ -163,7 +163,9 @@ export class Common {
     // Filter out hardforks with no block number, no ttd or no timestamp (i.e. unapplied hardforks)
     const hfs = this.hardforks().filter(
       (hf) =>
-        hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined) || hf.timestamp !== undefined
+        hf.block !== null ||
+        (hf.ttd !== null && hf.ttd !== undefined) ||
+        hf.timestamp !== undefined,
     )
     const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined)
     const doubleTTDHF = hfs
@@ -180,7 +182,7 @@ export class Common {
     let hfIndex = hfs.findIndex(
       (hf) =>
         (blockNumber !== undefined && hf.block !== null && BigInt(hf.block) > blockNumber) ||
-        (timestamp !== undefined && hf.timestamp !== undefined && BigInt(hf.timestamp) > timestamp)
+        (timestamp !== undefined && hf.timestamp !== undefined && BigInt(hf.timestamp) > timestamp),
     )
 
     if (hfIndex === -1) {
@@ -239,7 +241,7 @@ export class Common {
       .slice(0, hfStartIndex)
       .reduce(
        (acc: number, hf: HardforkTransitionConfig) => Math.max(Number(hf.timestamp ?? '0'), acc),
-        0
+        0,
       )
     if (minTimeStamp > timestamp) {
       throw Error(`Maximum HF determined by timestamp is lower than the block number/ttd HF`)
     }
@@ -250,7 +252,7 @@ export class Common {
       .reduce(
        (acc: number, hf: HardforkTransitionConfig) => Math.min(Number(hf.timestamp ?? timestamp), acc),
-        Number(timestamp)
+        Number(timestamp),
       )
     if (maxTimeStamp < timestamp) {
       throw Error(`Maximum HF determined by block number/ttd is lower than timestamp HF`)
     }
@@ -302,7 +304,7 @@ export class Common {
       const minHF = this.gteHardfork(eipsDict[eip]['minimumHardfork'])
       if (!minHF) {
         throw new Error(
-          `${eip} cannot be activated on hardfork ${this.hardfork()}, minimumHardfork: ${minHF}`
+          `${eip} cannot be activated on hardfork ${this.hardfork()}, minimumHardfork: ${minHF}`,
         )
       }
     }
@@ -464,7 +466,7 @@ export class Common {
     name: string,
     blockNumber: BigIntLike,
     td?: BigIntLike,
-    timestamp?: BigIntLike
+    timestamp?: BigIntLike,
   ): bigint {
     const hardfork = this.getHardforkBy({ blockNumber, td, timestamp })
     return this.paramByHardfork(name, hardfork)
diff --git a/packages/common/src/constructors.ts b/packages/common/src/constructors.ts
index 5ee817a7b5..080d268bbc 100644
--- a/packages/common/src/constructors.ts
+++ b/packages/common/src/constructors.ts
@@ -28,7 +28,7 @@ import type { ChainConfig, CustomCommonOpts, GethConfigOpts } from './index.js'
  */
 export function createCustomCommon(
   chainParamsOrName: Partial<ChainConfig> | CustomChain,
-  opts: CustomCommonOpts = {}
+  opts: CustomCommonOpts = {},
 ): Common {
   const baseChain = opts.baseChain ?? 'mainnet'
   const standardChainParams = { ..._getChainParams(baseChain) }
@@ -49,7 +49,7 @@ export function createCustomCommon(
         name: CustomChain.PolygonMainnet,
         chainId: 137,
       },
-      opts
+      opts,
     )
   }
   if (chainParamsOrName === CustomChain.PolygonMumbai) {
@@ -58,7 +58,7 @@ export function createCustomCommon(
         name: CustomChain.PolygonMumbai,
         chainId: 80001,
       },
-      opts
+      opts,
     )
   }
   if (chainParamsOrName === CustomChain.ArbitrumOne) {
@@ -67,7 +67,7 @@ export function createCustomCommon(
         name: CustomChain.ArbitrumOne,
         chainId: 42161,
       },
-      opts
+      opts,
     )
   }
   if (chainParamsOrName === CustomChain.xDaiChain) {
@@ -76,7 +76,7 @@ export function createCustomCommon(
         name: CustomChain.xDaiChain,
         chainId: 100,
       },
-      opts
+      opts,
     )
   }
 
@@ -86,7 +86,7 @@ export function createCustomCommon(
         name: CustomChain.OptimisticKovan,
         chainId: 69,
       },
-      opts
+      opts,
     )
   }
 
@@ -97,7 +97,7 @@ export function createCustomCommon(
         chainId: 10,
       },
       // Optimism has not implemented the London hardfork yet (targeting Q1.22)
-      { hardfork: Hardfork.Berlin, ...opts }
+      { hardfork: Hardfork.Berlin, ...opts },
     )
   }
   throw new Error(`Custom chain ${chainParamsOrName} not supported`)
@@ -112,7 +112,7 @@ export function createCustomCommon(
  */
 export function createCommonFromGethGenesis(
   genesisJson: any,
-  { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge, customCrypto }: GethConfigOpts
+  { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge, customCrypto }: GethConfigOpts,
 ): Common {
   const genesisParams = parseGethGenesis(genesisJson, chain, mergeForkIdPostMerge)
   const common = new Common({
diff --git a/packages/common/src/interfaces.ts b/packages/common/src/interfaces.ts
index 9721b31945..113b5b47e9 100644
--- a/packages/common/src/interfaces.ts
+++ b/packages/common/src/interfaces.ts
@@ -84,7 +84,7 @@ export type AuthorizationListBytesItem = [
   Uint8Array[],
   Uint8Array,
   Uint8Array,
-  Uint8Array
+  Uint8Array,
 ]
 export type AuthorizationListBytes = AuthorizationListBytesItem[]
 export type AuthorizationList = AuthorizationListItem[]
@@ -120,24 +120,24 @@ export interface AccessWitnessInterface {
   touchAddressOnWriteAndComputeGas(
     address: Address,
     treeIndex: number | bigint,
-    subIndex: number | Uint8Array
+    subIndex: number | Uint8Array,
   ): bigint
   touchAddressOnReadAndComputeGas(
     address: Address,
     treeIndex: number | bigint,
-    subIndex: number | Uint8Array
+    subIndex: number | Uint8Array,
   ): bigint
   touchAddressAndChargeGas(
     address: Address,
     treeIndex: number | bigint,
     subIndex: number | Uint8Array,
-    { isWrite }: { isWrite?: boolean }
+    { isWrite }: { isWrite?: boolean },
   ): bigint
   touchAddress(
     address: Address,
     treeIndex: number | bigint,
     subIndex: number | Uint8Array,
-    { isWrite }: { isWrite?: boolean }
+    { isWrite }: { isWrite?: boolean },
   ): AccessEventFlags
   shallowCopy(): AccessWitnessInterface
   merge(accessWitness: AccessWitnessInterface): void
diff --git a/packages/common/src/types.ts b/packages/common/src/types.ts
index edd6eb1b7f..746400f364 100644
--- a/packages/common/src/types.ts
+++ b/packages/common/src/types.ts
@@ -78,7 +78,7 @@ export interface CustomCrypto {
     v: bigint,
     r: Uint8Array,
     s: Uint8Array,
-    chainId?: bigint
+    chainId?: bigint,
   ) => Uint8Array
   sha256?: (msg: Uint8Array) => Uint8Array
   ecsign?: (msg: Uint8Array, pk: Uint8Array, chainId?: bigint) => ECDSASignature
diff --git a/packages/common/src/utils.ts b/packages/common/src/utils.ts
index caa1274edf..0bc8ae3dac 100644
--- a/packages/common/src/utils.ts
+++ b/packages/common/src/utils.ts
@@ -81,7 +81,7 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) {
   // but have different configuration parameters in geth genesis parameters
   if (config.eip155Block !== config.eip158Block) {
     throw new Error(
-      'EIP155 block number must equal EIP 158 block number since both are part of SpuriousDragon hardfork and the client only supports activating the full hardfork'
+      'EIP155 block number must equal EIP 158 block number since both are part of SpuriousDragon hardfork and the client only supports activating the full hardfork',
     )
   }
 
@@ -143,12 +143,15 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) {
   }
 
   // forkMapRev is the map from config field name to Hardfork
-  const forkMapRev = Object.keys(forkMap).reduce((acc, elem) => {
-    acc[forkMap[elem].name] = elem
-    return acc
-  }, {} as { [key: string]: string })
+  const forkMapRev = Object.keys(forkMap).reduce(
+    (acc, elem) => {
+      acc[forkMap[elem].name] = elem
+      return acc
+    },
+    {} as { [key: string]: string },
+  )
   const configHardforkNames = Object.keys(config).filter(
-    (key) => forkMapRev[key] !== undefined && config[key] !== undefined && config[key] !== null
+    (key) => forkMapRev[key] !== undefined && config[key] !== undefined && config[key] !== null,
   )
 
   params.hardforks = configHardforkNames
@@ -196,7 +199,7 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) {
 
   // Merge hardfork has to be placed before first hardfork that is dependent on merge
   const postMergeIndex = params.hardforks.findIndex(
-    (hf: any) => forkMap[hf.name]?.postMerge === true
+    (hf: any) => forkMap[hf.name]?.postMerge === true,
   )
   if (postMergeIndex !== -1) {
     params.hardforks.splice(postMergeIndex, 0, mergeConfig as unknown as ConfigHardfork)
@@ -267,7 +270,7 @@ export function isSupportedChainId(chainId: bigint): boolean {
 
 export function _getChainParams(
   chain: string | number | Chain | bigint,
-  customChains?: ChainConfig[]
+  customChains?: ChainConfig[],
 ): ChainConfig {
   const initializedChains = getInitializedChains(customChains)
   if (typeof chain === 'number' || typeof chain === 'bigint') {
diff --git a/packages/common/test/chains.spec.ts b/packages/common/test/chains.spec.ts
index e0d25554b7..77e907421a 100644
--- a/packages/common/test/chains.spec.ts
+++ b/packages/common/test/chains.spec.ts
@@ -18,7 +18,7 @@
describe('[Common/Chains]: Initialization / Chain params', () => { assert.equal( c.hardfork(), c.DEFAULT_HARDFORK, - 'should set hardfork to hardfork set as DEFAULT_HARDFORK' + 'should set hardfork to hardfork set as DEFAULT_HARDFORK', ) c = new Common({ chain: 1 }) @@ -33,7 +33,7 @@ describe('[Common/Chains]: Initialization / Chain params', () => { assert.equal( c.hardfork(), c.DEFAULT_HARDFORK, - 'should set hardfork to hardfork set as DEFAULT_HARDFORK' + 'should set hardfork to hardfork set as DEFAULT_HARDFORK', ) }) @@ -68,12 +68,12 @@ describe('[Common/Chains]: Initialization / Chain params', () => { assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - 'should return correct consensus type' + 'should return correct consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Ethash, - 'should return correct consensus algorithm' + 'should return correct consensus algorithm', ) assert.deepEqual(c.consensusConfig(), {}, 'should return empty dictionary for consensus config') @@ -83,17 +83,17 @@ describe('[Common/Chains]: Initialization / Chain params', () => { assert.equal( c.consensusType(), ConsensusType.ProofOfAuthority, - 'should return correct consensus type' + 'should return correct consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Clique, - 'should return correct consensus algorithm' + 'should return correct consensus algorithm', ) assert.equal( c.consensusConfig().epoch, 30000, - 'should return correct consensus config parameters' + 'should return correct consensus config parameters', ) }) @@ -108,12 +108,12 @@ describe('[Common/Chains]: Initialization / Chain params', () => { assert.equal( typeof bootnode.location, 'string', - 'returns the location as string (empty string if unavailable)' + 'returns the location as string (empty string if unavailable)', ) assert.equal( typeof bootnode.comment, 'string', - 'returns a comment as string (empty string if unavailable)' + 'returns a comment as string (empty string if unavailable)', ) } }) @@ -149,12 +149,12 @@ describe('[Common]: copy() listener tests', () => { assert.equal( common.events.listenerCount('hardforkChanged'), 2, - 'original common instance should have two listeners' + 'original common instance should have two listeners', ) assert.equal( commonCopy.events.listenerCount('hardforkChanged'), 0, - 'copied common instance should have zero listeners' + 'copied common instance should have zero listeners', ) }) }) diff --git a/packages/common/test/customChains.spec.ts b/packages/common/test/customChains.spec.ts index 14eb3a4688..288bd8d209 100644 --- a/packages/common/test/customChains.spec.ts +++ b/packages/common/test/customChains.spec.ts @@ -33,7 +33,7 @@ describe('[Common]: Custom chains', () => { }, /Missing required/, undefined, - 'should throw an exception on missing parameter' + 'should throw an exception on missing parameter', ) }) @@ -67,14 +67,14 @@ describe('[Common]: Custom chains', () => { assert.deepEqual( common.chainId(), BigInt(80001), - 'supported chain -> should initialize with correct chain ID' + 'supported chain -> should initialize with correct chain ID', ) for (const customChain of Object.values(CustomChain)) { common = createCustomCommon(customChain) assert.equal( common.chainName(), customChain, - `supported chain -> should initialize with enum name (${customChain})` + `supported chain -> should initialize with enum name (${customChain})`, ) } @@ -82,14 +82,14 @@ describe('[Common]: Custom chains', () => { assert.equal( common.hardfork(), 
common.DEFAULT_HARDFORK, - 'uses default hardfork when no options are present' + 'uses default hardfork when no options are present', ) common = createCustomCommon(CustomChain.OptimisticEthereum, { hardfork: Hardfork.Byzantium }) assert.equal( common.hardfork(), Hardfork.Byzantium, - 'should correctly set an option (default options present)' + 'should correctly set an option (default options present)', ) try { @@ -99,7 +99,7 @@ describe('[Common]: Custom chains', () => { } catch (e: any) { assert.ok( e.message.includes('not supported'), - 'supported chain -> should throw if chain name is not supported' + 'supported chain -> should throw if chain name is not supported', ) } }) @@ -111,9 +111,9 @@ describe('[Common]: Custom chains', () => { } catch (e: any) { assert.ok( e.message.includes( - 'Chain must be a string, number, or bigint when initialized with customChains passed in' + 'Chain must be a string, number, or bigint when initialized with customChains passed in', ), - 'should throw an exception on wrong initialization' + 'should throw an exception on wrong initialization', ) } }) @@ -149,7 +149,7 @@ describe('[Common]: Custom chains', () => { assert.equal( c.hardforkBlock()!, BigInt(10), - 'customChains, chain initialized with custom chain' + 'customChains, chain initialized with custom chain', ) const customChainParams: Partial = { @@ -163,14 +163,14 @@ describe('[Common]: Custom chains', () => { assert.equal( customChainCommon['_chainParams'].depositContractAddress, - customChainParams.depositContractAddress + customChainParams.depositContractAddress, ) c.setChain('testnet') assert.equal(c.chainName(), 'testnet', 'customChains, should allow to switch custom chain') assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - 'customChains, should allow to switch custom chain' + 'customChains, should allow to switch custom chain', ) }) @@ -276,7 +276,7 @@ describe('custom chain setup with hardforks with undefined/null block numbers', () => createCustomCommon({ hardforks: undefinedHardforks as HardforkTransitionConfig[] }), undefined, undefined, - 'throws when a hardfork with an undefined block number is passed' + 'throws when a hardfork with an undefined block number is passed', ) const nullHardforks = [ diff --git a/packages/common/test/customCrypto.spec.ts b/packages/common/test/customCrypto.spec.ts index 7e5ae55f40..908198cc6f 100644 --- a/packages/common/test/customCrypto.spec.ts +++ b/packages/common/test/customCrypto.spec.ts @@ -14,7 +14,7 @@ describe('[Common]: Custom Crypto', () => { v: bigint, r: Uint8Array, s: Uint8Array, - _chainID?: bigint + _chainID?: bigint, ) => { return concatBytes(msgHash, Uint8Array.from([Number(v)]), r, s) } @@ -58,8 +58,8 @@ describe('[Common]: Custom Crypto', () => { Uint8Array.from([1]), BigInt(2), Uint8Array.from([3]), - Uint8Array.from([4]) - ) + Uint8Array.from([4]), + ), ) }) diff --git a/packages/common/test/hardforks.spec.ts b/packages/common/test/hardforks.spec.ts index 9375d3a2b2..d52e806aa4 100644 --- a/packages/common/test/hardforks.spec.ts +++ b/packages/common/test/hardforks.spec.ts @@ -260,7 +260,7 @@ describe('[Common]: Hardfork logic', () => { msg = 'should provide correct forkHash for HF provided' assert.equal(c.forkHash(Hardfork.SpuriousDragon), '0x3edd5b10', msg) const genesisHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) assert.equal(c.forkHash(Hardfork.SpuriousDragon, genesisHash), '0x3edd5b10', msg) @@ -357,34 
+357,34 @@ describe('[Common]: Hardfork logic', () => { assert.equal( c.consensusType(), ConsensusType.ProofOfAuthority, - 'should provide the correct initial chain consensus type' + 'should provide the correct initial chain consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Clique, - 'should provide the correct initial chain consensus algorithm' + 'should provide the correct initial chain consensus algorithm', ) assert.equal( c.consensusConfig()['period'], 15, - 'should provide the correct initial chain consensus configuration' + 'should provide the correct initial chain consensus configuration', ) c = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Paris }) assert.equal( c.consensusType(), ConsensusType.ProofOfStake, - 'should provide the correct updated chain consensus type' + 'should provide the correct updated chain consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Casper, - 'should provide the correct updated chain consensus algorithm' + 'should provide the correct updated chain consensus algorithm', ) assert.deepEqual( c.consensusConfig(), {}, - 'should provide the correct updated chain consensus configuration' + 'should provide the correct updated chain consensus configuration', ) }) @@ -394,31 +394,31 @@ describe('[Common]: Hardfork logic', () => { assert.equal( c['HARDFORK_CHANGES'][11][0], Hardfork.Paris, - 'should correctly apply hardfork changes' + 'should correctly apply hardfork changes', ) assert.equal( c['HARDFORK_CHANGES'][12][0], Hardfork.MergeForkIdTransition, - 'should correctly apply hardfork changes' + 'should correctly apply hardfork changes', ) // Should give correct ConsensusType pre and post merge assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - 'should provide the correct initial chain consensus type' + 'should provide the correct initial chain consensus type', ) c.setHardfork(Hardfork.Paris) assert.equal( c.consensusType(), ConsensusType.ProofOfStake, - `should switch to ProofOfStake consensus on merge` + `should switch to ProofOfStake consensus on merge`, ) c.setHardfork(Hardfork.MergeForkIdTransition) assert.equal( c.consensusType(), ConsensusType.ProofOfStake, - `should stay on ProofOfStake consensus post merge` + `should stay on ProofOfStake consensus post merge`, ) // For kiln MergeForkIdTransition happens BEFORE Merge @@ -431,12 +431,12 @@ describe('[Common]: Hardfork logic', () => { assert.equal( c['HARDFORK_CHANGES'][10][0], Hardfork.MergeForkIdTransition, - 'should correctly apply hardfork changes' + 'should correctly apply hardfork changes', ) assert.equal( c['HARDFORK_CHANGES'][11][0], Hardfork.Paris, - 'should correctly apply hardfork changes' + 'should correctly apply hardfork changes', ) // Should give correct ConsensusType pre and post merge @@ -444,19 +444,19 @@ describe('[Common]: Hardfork logic', () => { assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - 'should provide the correct initial chain consensus type' + 'should provide the correct initial chain consensus type', ) c.setHardfork(Hardfork.Paris) assert.equal( c.consensusType(), ConsensusType.ProofOfStake, - `should switch to ProofOfStake consensus on merge` + `should switch to ProofOfStake consensus on merge`, ) c.setHardfork(Hardfork.MergeForkIdTransition) assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - `should give pow consensus as MergeForkIdTransition is pre-merge` + `should give pow consensus as MergeForkIdTransition is pre-merge`, ) }) }) diff --git a/packages/common/test/mergePOS.spec.ts 
b/packages/common/test/mergePOS.spec.ts index f5dbe3a826..3d350704ec 100644 --- a/packages/common/test/mergePOS.spec.ts +++ b/packages/common/test/mergePOS.spec.ts @@ -16,7 +16,7 @@ describe('[Common]: Merge/POS specific logic', () => { assert.equal( c.hardforkTTD('thisHardforkDoesNotExist'), null, - 'should return null if HF does not exist on chain' + 'should return null if HF does not exist on chain', ) }) @@ -159,7 +159,7 @@ describe('[Common]: Merge/POS specific logic', () => { assert.equal( c.hardforkTTD(Hardfork.Chainstart), BigInt(0), - 'should get the HF total difficulty' + 'should get the HF total difficulty', ) const msg = 'block number > last HF block number set, TD set (0) and equal' @@ -197,7 +197,7 @@ describe('[Common]: Merge/POS specific logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1450409n, td: 17000000000000000n }), Hardfork.Paris, - msg + msg, ) // should select MergeForkIdTransition even without td specified as the block is set for this hardfork assert.equal(c.getHardforkBy({ blockNumber: 1735371n }), Hardfork.MergeForkIdTransition, msg) @@ -205,24 +205,24 @@ describe('[Common]: Merge/POS specific logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1735371n, td: 17000000000000000n }), Hardfork.MergeForkIdTransition, - msg + msg, ) // Check nextHardforkBlockOrTimestamp should be MergeForkIdTransition's block on london and merge both assert.equal( c.nextHardforkBlockOrTimestamp(Hardfork.Berlin), 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` + `should get nextHardforkBlockOrTimestamp correctly`, ) assert.equal( c.nextHardforkBlockOrTimestamp(Hardfork.London), 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` + `should get nextHardforkBlockOrTimestamp correctly`, ) assert.equal( c.nextHardforkBlockOrTimestamp(Hardfork.Paris), 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` + `should get nextHardforkBlockOrTimestamp correctly`, ) let f = () => { @@ -232,7 +232,7 @@ describe('[Common]: Merge/POS specific logic', () => { f, undefined, undefined, - 'throws error as specified td < merge ttd for a post merge hardfork' + 'throws error as specified td < merge ttd for a post merge hardfork', ) msg = 'should set HF correctly' @@ -242,13 +242,13 @@ describe('[Common]: Merge/POS specific logic', () => { assert.equal( c.setHardforkBy({ blockNumber: 1450409n, td: 17000000000000000n }), Hardfork.Paris, - msg + msg, ) assert.equal(c.setHardforkBy({ blockNumber: 1735371n }), Hardfork.MergeForkIdTransition, msg) assert.equal( c.setHardforkBy({ blockNumber: 1735371n, td: 17000000000000000n }), Hardfork.MergeForkIdTransition, - msg + msg, ) f = () => { c.setHardforkBy({ blockNumber: 1735371n, td: 15000000000000000n }) @@ -257,7 +257,7 @@ describe('[Common]: Merge/POS specific logic', () => { f, undefined, undefined, - 'throws error as specified td < merge ttd for a post merge hardfork' + 'throws error as specified td < merge ttd for a post merge hardfork', ) // restore value @@ -278,25 +278,25 @@ describe('[Common]: Merge/POS specific logic', () => { assert.equal( c.setHardforkBy({ blockNumber: 1450409n, td: 17000000000000000n }), Hardfork.Paris, - msg + msg, ) assert.equal(c.setHardforkBy({ blockNumber: 1735371n }), Hardfork.MergeForkIdTransition, msg) assert.equal( c.setHardforkBy({ blockNumber: 1735371n, td: 17000000000000000n }), Hardfork.MergeForkIdTransition, - msg + msg, ) // Check nextHardforkBlockOrTimestamp should be MergeForkIdTransition's block on london and merge both assert.equal( 
c.nextHardforkBlockOrTimestamp(Hardfork.London), 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` + `should get nextHardforkBlockOrTimestamp correctly`, ) assert.equal( c.nextHardforkBlockOrTimestamp(Hardfork.Paris), 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` + `should get nextHardforkBlockOrTimestamp correctly`, ) // restore value diff --git a/packages/common/test/params.spec.ts b/packages/common/test/params.spec.ts index 4969aaf6c2..a71702fbc4 100644 --- a/packages/common/test/params.spec.ts +++ b/packages/common/test/params.spec.ts @@ -37,7 +37,7 @@ describe('[Common]: Parameter access for param(), paramByHardfork()', () => { assert.equal( c.param('ecAddGas'), BigInt(500), - 'Should return correct value for HF set in class' + 'Should return correct value for HF set in class', ) }) diff --git a/packages/common/test/timestamp.spec.ts b/packages/common/test/timestamp.spec.ts index 7bd4008208..1dfcd9e34e 100644 --- a/packages/common/test/timestamp.spec.ts +++ b/packages/common/test/timestamp.spec.ts @@ -19,17 +19,17 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 0n }), Hardfork.MergeForkIdTransition, - 'should match the HF' + 'should match the HF', ) assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699476n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699576n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) }) @@ -44,12 +44,12 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 0n }), Hardfork.MergeForkIdTransition, - 'should match the HF' + 'should match the HF', ) assert.equal( c.nextHardforkBlockOrTimestamp(Hardfork.Shanghai), null, - 'should give null on next Hardfork block' + 'should give null on next Hardfork block', ) }) @@ -64,18 +64,18 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 0n }), Hardfork.MergeForkIdTransition, - 'should match the HF' + 'should match the HF', ) // Should give the shanghai as sharding is schedule a bit post shanghai assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699476n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699576n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) }) @@ -101,7 +101,7 @@ describe('[Common]: Timestamp Hardfork logic', () => { const c = createCustomCommon({ hardforks }, { baseChain: Chain.Mainnet }) const mainnetGenesisHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) for (const hf of c.hardforks()) { if (typeof hf.forkHash === 'string') { @@ -118,7 +118,7 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.hardforkForForkHash('0xc1fdf181')?.name, Hardfork.Shanghai, - 'Should be able to get Shanghai from forkHash' + 'Should be able to get Shanghai from forkHash', ) }) @@ -142,7 +142,7 @@ describe('[Common]: Timestamp Hardfork logic', () => { const c = createCustomCommon({ hardforks }, { baseChain: Chain.Mainnet }) const mainnetGenesisHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) let 
noForkHashes = c.hardforks().reduce((acc, hf) => { diff --git a/packages/common/test/utils.spec.ts b/packages/common/test/utils.spec.ts index c478553469..5653c817c2 100644 --- a/packages/common/test/utils.spec.ts +++ b/packages/common/test/utils.spec.ts @@ -32,7 +32,7 @@ describe('[Utils/Parse]', () => { assert.deepEqual( params.consensus, { type: 'poa', algorithm: 'clique', clique: { period: 15, epoch: 30000 } }, - 'consensus config matches' + 'consensus config matches', ) const poaJSONCopy = Object.assign({}, poaJSON) poaJSONCopy.nonce = '00' @@ -40,7 +40,7 @@ describe('[Utils/Parse]', () => { assert.equal( params.genesis.nonce, '0x0000000000000000', - 'non-hex prefixed nonce is formatted correctly' + 'non-hex prefixed nonce is formatted correctly', ) assert.equal(params.hardfork, Hardfork.London, 'should correctly infer current hardfork') }) @@ -78,7 +78,7 @@ describe('[Utils/Parse]', () => { 'mergeForkIdTransition', 'paris', ], - 'hardfork parse order should be correct' + 'hardfork parse order should be correct', ) for (const hf of common.hardforks()) { /* eslint-disable @typescript-eslint/no-use-before-define */ @@ -113,7 +113,7 @@ describe('[Utils/Parse]', () => { 'mergeForkIdTransition', 'shanghai', ], - 'hardfork parse order should be correct' + 'hardfork parse order should be correct', ) assert.equal(common1.hardfork(), Hardfork.Shanghai, 'should correctly infer current hardfork') @@ -140,36 +140,36 @@ describe('[Utils/Parse]', () => { 'paris', 'shanghai', ], - 'hardfork parse order should be correct' + 'hardfork parse order should be correct', ) assert.equal(common.getHardforkBy({ blockNumber: 0n }), Hardfork.London, 'london at genesis') assert.equal( common.getHardforkBy({ blockNumber: 1n, td: 2n }), Hardfork.Paris, - 'merge at block 1' + 'merge at block 1', ) // shanghai is at timestamp 8 assert.equal( common.getHardforkBy({ blockNumber: 8n }), Hardfork.London, - 'without timestamp still london' + 'without timestamp still london', ) assert.equal( common.getHardforkBy({ blockNumber: 8n, td: 2n }), Hardfork.Paris, - 'without timestamp at merge' + 'without timestamp at merge', ) assert.equal( common.getHardforkBy({ blockNumber: 8n, timestamp: 8n }), Hardfork.Shanghai, - 'with timestamp at shanghai' + 'with timestamp at shanghai', ) // should be post merge at shanghai assert.equal( common.getHardforkBy({ blockNumber: 8n, td: 2n, timestamp: 8n }), Hardfork.Shanghai, - 'post merge shanghai' + 'post merge shanghai', ) assert.equal(common.hardfork(), Hardfork.Shanghai, 'should correctly infer common hardfork') }) @@ -185,7 +185,7 @@ describe('[Utils/Parse]', () => { assert.equal( depositContractAddress, getInitializedChains().mainnet.depositContractAddress, - 'should assign mainnet deposit contract' + 'should assign mainnet deposit contract', ) }) @@ -206,7 +206,7 @@ describe('[Utils/Parse]', () => { assert.equal( depositContractAddress, '0x4242424242424242424242424242424242424242', - 'should parse correct address' + 'should parse correct address', ) }) }) diff --git a/packages/common/tsconfig.lint.json b/packages/common/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/common/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/devp2p/.eslintrc.cjs b/packages/devp2p/.eslintrc.cjs index 940a7fc316..9c3e67209e 100644 --- a/packages/devp2p/.eslintrc.cjs +++ b/packages/devp2p/.eslintrc.cjs @@ -5,4 +5,13 @@ module.exports = { 'no-redeclare': 'off', 'no-undef': 'off', // temporary until 
fixed: 'NodeJS' is not defined }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], } diff --git a/packages/devp2p/examples/dpt.ts b/packages/devp2p/examples/dpt.ts index c054953328..af85e5a13f 100644 --- a/packages/devp2p/examples/dpt.ts +++ b/packages/devp2p/examples/dpt.ts @@ -1,5 +1,5 @@ import { DPT } from '@ethereumjs/devp2p' -import { bytesToHex, hexToBytes, randomBytes } from '@ethereumjs/util' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' const PRIVATE_KEY = hexToBytes('0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220') const main = async () => { @@ -12,7 +12,7 @@ const main = async () => { }) console.log(`DPT is active and has id - ${bytesToHex(dpt.id!)}`) // Should log the DPT's hex ID - 0xcd80bb7a768432302d267729c15da61d172373ea036... - await dpt.destroy() + dpt.destroy() } -main() +void main() diff --git a/packages/devp2p/examples/peer-communication-les.ts b/packages/devp2p/examples/peer-communication-les.ts index f85d1ad84e..f173cb44db 100644 --- a/packages/devp2p/examples/peer-communication-les.ts +++ b/packages/devp2p/examples/peer-communication-les.ts @@ -1,17 +1,18 @@ -import { bytesToInt, intToBytes, randomBytes, bytesToHex, hexToBytes } from '@ethereumjs/util' -import { Block, BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' +import { BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' import { Chain, Common, Hardfork } from '@ethereumjs/common' +import * as devp2p from '@ethereumjs/devp2p' +import { bytesToHex, bytesToInt, hexToBytes, intToBytes, randomBytes } from '@ethereumjs/util' import chalk from 'chalk' import ms from 'ms' -import * as devp2p from '@ethereumjs/devp2p' -import { ETH, Peer } from '@ethereumjs/devp2p' +import type { Block } from '@ethereumjs/block' +import type { Peer } from '@ethereumjs/devp2p' const PRIVATE_KEY = randomBytes(32) const GENESIS_TD = 1 const GENESIS_HASH = hexToBytes( - '0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177' + '0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177', ) const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) @@ -72,8 +73,8 @@ rlpx.events.on('peer:added', (peer) => { const clientId = peer.getHelloMessage().clientId console.log( chalk.green( - `Add peer: ${addr} ${clientId} (les${les.getVersion()}) (total: ${rlpx.getPeers().length})` - ) + `Add peer: ${addr} ${clientId} (les${les.getVersion()}) (total: ${rlpx.getPeers().length})`, + ), ) les.sendStatus({ @@ -104,7 +105,7 @@ rlpx.events.on('peer:added', (peer) => { case devp2p.LES.MESSAGE_CODES.BLOCK_HEADERS: { if (payload[2].length > 1) { console.log( - `${addr} not more than one block header expected (received: ${payload[2].length})` + `${addr} not more than one block header expected (received: ${payload[2].length})`, ) break } @@ -123,7 +124,7 @@ rlpx.events.on('peer:added', (peer) => { case devp2p.LES.MESSAGE_CODES.BLOCK_BODIES: { if (payload[2].length !== 1) { console.log( - `${addr} not more than one block body expected (received: ${payload[2].length})` + `${addr} not more than one block body expected (received: ${payload[2].length})`, ) break } @@ -155,9 +156,9 @@ rlpx.events.on('peer:removed', (peer, reasonCode, disconnectWe) => { console.log( chalk.yellow( `Remove peer: ${getPeerAddr(peer)} - ${who}, reason: ${peer.getDisconnectPrefix( - reasonCode - )} (${String(reasonCode)}) (total: ${total})` - ) + reasonCode, + )} (${String(reasonCode)}) 
(total: ${total})`, + ), ) }) @@ -203,11 +204,11 @@ function onNewBlock(block: Block, peer: Peer) { const blockNumber = block.header.number console.log( - `----------------------------------------------------------------------------------------------------------` + `----------------------------------------------------------------------------------------------------------`, ) console.log(`block ${blockNumber} received: ${blockHashHex} (from ${getPeerAddr(peer)})`) console.log( - `----------------------------------------------------------------------------------------------------------` + `----------------------------------------------------------------------------------------------------------`, ) } @@ -229,7 +230,7 @@ setInterval(() => { console.log( chalk.yellow( - `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}` - ) + `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}`, + ), ) }, ms('30s')) diff --git a/packages/devp2p/examples/peer-communication.ts b/packages/devp2p/examples/peer-communication.ts index a6da6f7ce1..e408062864 100644 --- a/packages/devp2p/examples/peer-communication.ts +++ b/packages/devp2p/examples/peer-communication.ts @@ -1,22 +1,23 @@ +import { BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import * as devp2p from '@ethereumjs/devp2p' +import { RLP } from '@ethereumjs/rlp' +import { createTxFromBlockBodyData } from '@ethereumjs/tx' import { bytesToInt, - intToBytes, - randomBytes, bytesToUnprefixedHex, equalsBytes, hexToBytes, + intToBytes, + randomBytes, } from '@ethereumjs/util' -import { Block, BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { RLP } from '@ethereumjs/rlp' -import { createTxFromBlockBodyData, TypedTransaction } from '@ethereumjs/tx' import chalk from 'chalk' import { LRUCache } from 'lru-cache' - import ms from 'ms' -import * as devp2p from '@ethereumjs/devp2p' -import { ETH, Peer } from '@ethereumjs/devp2p' +import type { Block } from '@ethereumjs/block' +import type { ETH, Peer } from '@ethereumjs/devp2p' +import type { TypedTransaction } from '@ethereumjs/tx' const PRIVATE_KEY = randomBytes(32) @@ -45,7 +46,7 @@ const CHECK_BLOCK_TITLE = 'Berlin Fork' // Only for debugging/console output const CHECK_BLOCK_NR = 12244000 const CHECK_BLOCK = '1638380ab737e0e916bd1c7f23bd2bab2a532e44b90047f045f262ee21c42b21' const CHECK_BLOCK_HEADER = RLP.decode( - 
'0xf90219a0d44a4d33e28d7ea9edd12b69bd32b394587eee498b0e2543ce2bad1877ffbeaca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347941ad91ee08f21be3de0ba2ba6918e714da6b45836a0fdec060ee45e55da9e36060fc95dddd0bdc47e447224666a895d9f0dc9adaa0ca0092d9fcc02ca9b372daec726704ce720d3aa366739868f4820ecaabadb9ac309a0974fee017515a46303f467b6fd50872994db1b0ea64d3455bad93ff9678aced9b90100356050004c5c89691add79838a01d4c302419252a4d3c96e9273908b7ee84660886c070607b4928c416a1800746a0d1dbb442d0baf06eea321422263726748600cc200e82aec08336863514d12d665718016989189c116bc0947046cc6718110586c11464a189000a11a41cc96991970153d88840768170244197e164c6204249b9091a0052ac85088c8108a4418dd2903690a036722623888ea14e90458a390a305a2342cb02766094f68c4100036330719848b48411614686717ab6068a46318204232429dc42020608802ceecd66c3c33a3a1fc6e82522049470328a4a81ba07c6604228ba94f008476005087a6804463696b41002650c0fdf548448a90408717ca31b6d618e883bad42083be153b83bdfbb1846078104798307834383639373636353666366532303530366636663663a0ae1de0acd35a98e211c7e276ad7524bb84a5e1b8d33dd7d1c052b095b564e8b888cca66773148b6e12' + '0xf90219a0d44a4d33e28d7ea9edd12b69bd32b394587eee498b0e2543ce2bad1877ffbeaca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347941ad91ee08f21be3de0ba2ba6918e714da6b45836a0fdec060ee45e55da9e36060fc95dddd0bdc47e447224666a895d9f0dc9adaa0ca0092d9fcc02ca9b372daec726704ce720d3aa366739868f4820ecaabadb9ac309a0974fee017515a46303f467b6fd50872994db1b0ea64d3455bad93ff9678aced9b90100356050004c5c89691add79838a01d4c302419252a4d3c96e9273908b7ee84660886c070607b4928c416a1800746a0d1dbb442d0baf06eea321422263726748600cc200e82aec08336863514d12d665718016989189c116bc0947046cc6718110586c11464a189000a11a41cc96991970153d88840768170244197e164c6204249b9091a0052ac85088c8108a4418dd2903690a036722623888ea14e90458a390a305a2342cb02766094f68c4100036330719848b48411614686717ab6068a46318204232429dc42020608802ceecd66c3c33a3a1fc6e82522049470328a4a81ba07c6604228ba94f008476005087a6804463696b41002650c0fdf548448a90408717ca31b6d618e883bad42083be153b83bdfbb1846078104798307834383639373636353666366532303530366636663663a0ae1de0acd35a98e211c7e276ad7524bb84a5e1b8d33dd7d1c052b095b564e8b888cca66773148b6e12', ) const getPeerAddr = (peer: Peer) => `${peer['_socket'].remoteAddress}:${peer['_socket'].remotePort}` @@ -88,8 +89,8 @@ rlpx.events.on('peer:added', (peer) => { const clientId = peer.getHelloMessage().clientId console.log( chalk.green( - `Add peer: ${addr} ${clientId} (eth${eth.getVersion()}) (total: ${rlpx.getPeers().length})` - ) + `Add peer: ${addr} ${clientId} (eth${eth.getVersion()}) (total: ${rlpx.getPeers().length})`, + ), ) eth.sendStatus({ @@ -166,7 +167,7 @@ rlpx.events.on('peer:added', (peer) => { if (!forkVerified) { if (payload[1].length !== 1) { console.log( - `${addr} expected one header for ${CHECK_BLOCK_TITLE} verify (received: ${payload[1].length})` + `${addr} expected one header for ${CHECK_BLOCK_TITLE} verify (received: ${payload[1].length})`, ) peer.disconnect(devp2p.DISCONNECT_REASON.USELESS_PEER) break @@ -182,7 +183,7 @@ rlpx.events.on('peer:added', (peer) => { } else { if (payload[1].length > 1) { console.log( - `${addr} not more than one block header expected (received: ${payload[1].length})` + `${addr} not more than one block header expected (received: ${payload[1].length})`, ) break } @@ -206,7 +207,7 @@ rlpx.events.on('peer:added', (peer) => { if (!isValidPayload) { console.log( - `${addr} received wrong block header ${bytesToUnprefixedHex(header.hash())}` + `${addr} received wrong block header 
${bytesToUnprefixedHex(header.hash())}`, ) } } @@ -227,7 +228,7 @@ rlpx.events.on('peer:added', (peer) => { if (payload[1].length !== 1) { console.log( - `${addr} not more than one block body expected (received: ${payload[1].length})` + `${addr} not more than one block body expected (received: ${payload[1].length})`, ) break } @@ -294,9 +295,9 @@ rlpx.events.on('peer:removed', (peer, reasonCode, disconnectWe) => { console.log( chalk.yellow( `Remove peer: ${getPeerAddr(peer)} - ${who}, reason: ${peer.getDisconnectPrefix( - reasonCode - )} (${String(reasonCode)}) (total: ${total})` - ) + reasonCode, + )} (${String(reasonCode)}) (total: ${total})`, + ), ) }) @@ -378,7 +379,7 @@ setInterval(() => { console.log( chalk.yellow( - `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}` - ) + `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}`, + ), ) }, ms('30s')) diff --git a/packages/devp2p/examples/rlpx.ts b/packages/devp2p/examples/rlpx.ts index b2293763d3..c9d0b46727 100644 --- a/packages/devp2p/examples/rlpx.ts +++ b/packages/devp2p/examples/rlpx.ts @@ -1,11 +1,11 @@ import { Chain, Common } from '@ethereumjs/common' -import { RLPx, ETH } from '@ethereumjs/devp2p' +import { ETH, RLPx } from '@ethereumjs/devp2p' import { hexToBytes } from '@ethereumjs/util' const main = async () => { const common = new Common({ chain: Chain.Mainnet }) const PRIVATE_KEY = hexToBytes( - '0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220' + '0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220', ) const rlpx = new RLPx(PRIVATE_KEY, { maxPeers: 25, @@ -13,7 +13,7 @@ const main = async () => { common, }) console.log(`RLPx is active - ${rlpx._isAlive()}`) - await rlpx.destroy() + rlpx.destroy() } -main() +void main() diff --git a/packages/devp2p/examples/simple.ts b/packages/devp2p/examples/simple.ts index 130fbac857..c25b51eddd 100644 --- a/packages/devp2p/examples/simple.ts +++ b/packages/devp2p/examples/simple.ts @@ -1,8 +1,7 @@ import { Chain, Common } from '@ethereumjs/common' -import chalk from 'chalk' -import { bytesToHex, hexToBytes } from '@ethereumjs/util' - import { DPT } from '@ethereumjs/devp2p' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' +import chalk from 'chalk' const TIMEOUT = 5000 // 5 second timeout const PRIVATE_KEY = '0xd772e3d6a001a38064dd23964dd2836239fa0e6cec8b28972a87460a17210fe9' diff --git a/packages/devp2p/src/dns/dns.ts b/packages/devp2p/src/dns/dns.ts index d2d056994e..ac526b30c1 100644 --- a/packages/devp2p/src/dns/dns.ts +++ b/packages/devp2p/src/dns/dns.ts @@ -32,7 +32,7 @@ export class DNS { this._common = options.common this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } /** diff --git a/packages/devp2p/src/dns/enr.ts b/packages/devp2p/src/dns/enr.ts index 996ed5938c..43ad937c35 100644 --- a/packages/devp2p/src/dns/enr.ts +++ b/packages/devp2p/src/dns/enr.ts @@ -74,7 +74,7 @@ export class ENR { const isVerified = ecdsaVerify( signature as Uint8Array, (common?.customCrypto.keccak256 ?? 
keccak256)(RLP.encode([seq, ...kvs])), - obj.secp256k1 + obj.secp256k1, ) if (!isVerified) throw new Error('Unable to verify ENR signature') @@ -106,7 +106,7 @@ export class ENR { 'eRoot', 'lRoot', 'seq', - 'signature' + 'signature', ) as ENRRootValues if (!rootVals.eRoot) throw new Error("Could not parse 'e' value from ENR root entry") @@ -122,7 +122,7 @@ export class ENR { const signedComponent = root.split(' sig')[0] const signedComponentBytes = utf8ToBytes(signedComponent) const signatureBytes = Uint8Array.from( - [...base64url.decode(rootVals.signature + '=').values()].slice(0, 64) + [...base64url.decode(rootVals.signature + '=').values()].slice(0, 64), ) const keyBytes = Uint8Array.from(decodedPublicKey) @@ -130,7 +130,7 @@ export class ENR { const isVerified = ecdsaVerify( signatureBytes, (common?.customCrypto.keccak256 ?? keccak256)(signedComponentBytes), - keyBytes + keyBytes, ) if (!isVerified) throw new Error('Unable to verify ENR root signature') @@ -154,7 +154,7 @@ export class ENR { tree, `${this.TREE_PREFIX}//%s@%s`, 'publicKey', - 'domain' + 'domain', ) as ENRTreeValues if (!treeVals.publicKey) throw new Error('Could not parse public key from ENR tree entry') diff --git a/packages/devp2p/src/dpt/ban-list.ts b/packages/devp2p/src/dpt/ban-list.ts index 639a700e96..cae50bd8af 100644 --- a/packages/devp2p/src/dpt/ban-list.ts +++ b/packages/devp2p/src/dpt/ban-list.ts @@ -16,7 +16,7 @@ export class BanList { constructor() { this._lru = new LRUCache({ max: 10000 }) this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } add(obj: string | Uint8Array | PeerInfo, maxAge?: number) { diff --git a/packages/devp2p/src/dpt/dpt.ts b/packages/devp2p/src/dpt/dpt.ts index 60c6f61f1e..8a6b2341c7 100644 --- a/packages/devp2p/src/dpt/dpt.ts +++ b/packages/devp2p/src/dpt/dpt.ts @@ -86,7 +86,7 @@ export class DPT { this._refreshIntervalId = setInterval(() => this.refresh(), refreshIntervalSubdivided) this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } bind(...args: any[]): void { @@ -113,7 +113,8 @@ export class DPT { }) .then(() => { if (++count < oldPeers.length) return - if (err === null) this._banlist.add(newPeer, 300000) // 5 min * 60 * 1000 + if (err === null) + this._banlist.add(newPeer, 300000) // 5 min * 60 * 1000 else this._kbucket.add(newPeer) }) } @@ -200,7 +201,7 @@ export class DPT { let peers = this._kbucket.closest(id) if (this._onlyConfirmed && this._confirmedPeers.size > 0) { peers = peers.filter((peer) => - this._confirmedPeers.has(bytesToUnprefixedHex(peer.id as Uint8Array)) ? true : false + this._confirmedPeers.has(bytesToUnprefixedHex(peer.id as Uint8Array)) ? 
true : false, ) } return peers @@ -231,7 +232,7 @@ export class DPT { const peers = this.getPeers() if (this.DEBUG) { this._debug( - `call .refresh() (selector ${this._refreshIntervalSelectionCounter}) (${peers.length} peers in table)` + `call .refresh() (selector ${this._refreshIntervalSelectionCounter}) (${peers.length} peers in table)`, ) } @@ -259,7 +260,7 @@ export class DPT { this._debug( `.refresh() Adding ${dnsPeers.length} from DNS tree, (${ this.getPeers().length - } current peers in table)` + } current peers in table)`, ) } diff --git a/packages/devp2p/src/dpt/message.ts b/packages/devp2p/src/dpt/message.ts index f0d09ccfcd..8e861c6519 100644 --- a/packages/devp2p/src/dpt/message.ts +++ b/packages/devp2p/src/dpt/message.ts @@ -197,7 +197,7 @@ export function decode(bytes: Uint8Array, common?: Common) { signature, recoverId, sighash, - false + false, ) return { typename, data, publicKey } } diff --git a/packages/devp2p/src/dpt/server.ts b/packages/devp2p/src/dpt/server.ts index dfb982e3f0..73b3c16076 100644 --- a/packages/devp2p/src/dpt/server.ts +++ b/packages/devp2p/src/dpt/server.ts @@ -63,7 +63,7 @@ export class Server { this._common = options.common this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } bind(...args: any[]) { @@ -111,7 +111,7 @@ export class Server { this._debug( `ping timeout: ${peer.address}:${peer.udpPort} ${ peer.id ? formatLogId(bytesToHex(peer.id), verbose) : '-' - }` + }`, ) } this._requests.delete(rkey) @@ -140,7 +140,7 @@ export class Server { typename, `send ${typename} to ${peer.address}:${peer.udpPort} (peerId: ${ peer.id ? formatLogId(bytesToHex(peer.id), verbose) : '-' - })` + })`, ) } @@ -159,8 +159,8 @@ export class Server { info.typename.toString(), `received ${info.typename} from ${rinfo.address}:${rinfo.port} (peerId: ${formatLogId( bytesToHex(peerId), - verbose - )})` + verbose, + )})`, ) } @@ -217,7 +217,7 @@ export class Server { case 'neighbours': { this.events.emit( 'peers', - info.data.peers.map((peer: any) => peer.endpoint) + info.data.peers.map((peer: any) => peer.endpoint), ) break } diff --git a/packages/devp2p/src/protocol/eth.ts b/packages/devp2p/src/protocol/eth.ts index c2dd7e0c92..fc89e9cbb7 100644 --- a/packages/devp2p/src/protocol/eth.ts +++ b/packages/devp2p/src/protocol/eth.ts @@ -78,7 +78,7 @@ export class ETH extends Protocol { null, 'Uncontrolled status message', this.debug.bind(this), - 'STATUS' + 'STATUS', ) this._peerStatus = payload as ETH.StatusMsg const peerStatusMsg = `${ @@ -185,21 +185,21 @@ export class ETH extends Protocol { this._peerStatus[0], 'Protocol version mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status[1], this._peerStatus[1], 'NetworkId mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status[4], this._peerStatus[4], 'Genesis block mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) const status: { @@ -222,7 +222,7 @@ export class ETH extends Protocol { 2, 'Incorrect forkId msg format', this.debug.bind(this), - 'STATUS' + 'STATUS', ) this._validateForkId(this._peerStatus[5] as Uint8Array[]) status.forkId = this._peerStatus[5] @@ -248,11 +248,11 @@ export class ETH extends Protocol { _getStatusString(status: ETH.StatusMsg) { let sStr = `[V:${bytesToInt(status[0] as Uint8Array)}, NID:${bytesToInt( - status[1] as Uint8Array + status[1] as Uint8Array, )}, TD:${status[2].length === 0 ? 
0 : bytesToBigInt(status[2] as Uint8Array).toString()}` sStr += `, BestH:${formatLogId( bytesToHex(status[3] as Uint8Array), - this._verbose + this._verbose, )}, GenH:${formatLogId(bytesToHex(status[4] as Uint8Array), this._verbose)}` if (this._version >= 64) { sStr += `, ForkHash: ${ @@ -280,13 +280,13 @@ export class ETH extends Protocol { const latestBlock = bytesToBigInt(status.latestBlock) if (latestBlock < this._latestBlock) { throw new Error( - 'latest block provided is not matching the HF setting of the Common instance (Rlpx)' + 'latest block provided is not matching the HF setting of the Common instance (Rlpx)', ) } this._latestBlock = latestBlock } const forkHashB = hexToBytes( - isHexString(this._forkHash) ? this._forkHash : `0x${this._forkHash}` + isHexString(this._forkHash) ? this._forkHash : `0x${this._forkHash}`, ) const nextForkB = @@ -301,7 +301,7 @@ export class ETH extends Protocol { `Send STATUS message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - } (eth${this._version}): ${this._getStatusString(this._status)}` + } (eth${this._version}): ${this._getStatusString(this._status)}`, ) } diff --git a/packages/devp2p/src/protocol/les.ts b/packages/devp2p/src/protocol/les.ts index a0950a7f17..6c23c389c2 100644 --- a/packages/devp2p/src/protocol/les.ts +++ b/packages/devp2p/src/protocol/les.ts @@ -34,7 +34,7 @@ export class LES extends Protocol { }, 5000) // 5 sec * 1000 this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } static les2 = { name: 'les', version: 2, length: 21, constructor: LES } @@ -50,7 +50,7 @@ export class LES extends Protocol { this.getMsgPrefix(code), `${`Received ${this.getMsgPrefix(code)} message from ${ this._peer['_socket'].remoteAddress - }:${this._peer['_socket'].remotePort}`}: ${logData}` + }:${this._peer['_socket'].remotePort}`}: ${logData}`, ) } } @@ -61,7 +61,7 @@ export class LES extends Protocol { null, 'Uncontrolled status message', this.debug.bind(this), - 'STATUS' + 'STATUS', ) const status: LES.Status = Object.assign({}) for (const value of payload as NestedUint8Array) { @@ -73,7 +73,7 @@ export class LES extends Protocol { this.getMsgPrefix(code), `${`Received ${this.getMsgPrefix(code)} message from ${ this._peer['_socket'].remoteAddress - }:${this._peer['_socket'].remotePort}`}: ${this._getStatusString(this._peerStatus)}` + }:${this._peer['_socket'].remotePort}`}: ${this._getStatusString(this._peerStatus)}`, ) } this._handleStatus() @@ -124,21 +124,21 @@ export class LES extends Protocol { this._peerStatus['protocolVersion'], 'Protocol version mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status['chainId'], this._peerStatus['chainId'], 'NetworkId mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status['genesisHash'], this._peerStatus['genesisHash'], 'Genesis block mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) this.events.emit('status', this._peerStatus) @@ -154,7 +154,7 @@ export class LES extends Protocol { _getStatusString(status: LES.Status) { let sStr = `[V:${bytesToInt(status['protocolVersion'])}, ` sStr += `NID:${bytesToInt(status['chainId'] as Uint8Array)}, HTD:${bytesToInt( - status['headTd'] + status['headTd'], )}, ` sStr += `HeadH:${bytesToHex(status['headHash'])}, HeadN:${bytesToInt(status['headNum'])}, ` sStr += `GenH:${bytesToHex(status['genesisHash'])}` @@ -169,7 +169,7 @@ export class 
LES extends Protocol { if (status['flowControl/MRC)'] !== undefined) sStr += `, flowControl/MRC set` if (status['forkID'] !== undefined) sStr += `, forkID: [crc32: ${bytesToHex(status['forkID'][0])}, nextFork: ${bytesToInt( - status['forkID'][1] + status['forkID'][1], )}]` if (status['recentTxLookup'] !== undefined) sStr += `, recentTxLookup: ${bytesToInt(status['recentTxLookup'])}` @@ -198,7 +198,7 @@ export class LES extends Protocol { 'STATUS', `Send STATUS message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - } (les${this._version}): ${this._getStatusString(this._status)}` + } (les${this._version}): ${this._getStatusString(this._status)}`, ) } @@ -224,7 +224,7 @@ export class LES extends Protocol { this.getMsgPrefix(code), `Send ${this.getMsgPrefix(code)} message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - }: ${formatLogData(bytesToHex(RLP.encode(payload)), this._verbose)}` + }: ${formatLogData(bytesToHex(RLP.encode(payload)), this._verbose)}`, ) } diff --git a/packages/devp2p/src/protocol/protocol.ts b/packages/devp2p/src/protocol/protocol.ts index 01ccd3e22e..6d54e2be4f 100644 --- a/packages/devp2p/src/protocol/protocol.ts +++ b/packages/devp2p/src/protocol/protocol.ts @@ -34,7 +34,7 @@ export abstract class Protocol { send: SendMethod, protocol: ProtocolType, version: number, - messageCodes: MessageCodes + messageCodes: MessageCodes, ) { this.events = new EventEmitter() this._peer = peer @@ -55,7 +55,7 @@ export abstract class Protocol { private initMsgDebuggers(protocol: ProtocolType) { const MESSAGE_NAMES = Object.values(this._messageCodes).filter( - (value) => typeof value === 'string' + (value) => typeof value === 'string', ) as string[] for (const name of MESSAGE_NAMES) { this.msgDebuggers[name] = devp2pDebug.extend(protocol).extend(name) diff --git a/packages/devp2p/src/protocol/snap.ts b/packages/devp2p/src/protocol/snap.ts index 084c93aa72..3fa8da2d44 100644 --- a/packages/devp2p/src/protocol/snap.ts +++ b/packages/devp2p/src/protocol/snap.ts @@ -16,7 +16,7 @@ export class SNAP extends Protocol { constructor(version: number, peer: Peer, send: SendMethod) { super(peer, send, ProtocolType.SNAP, version, SNAP.MESSAGE_CODES) this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false } static snap = { name: 'snap', version: 1, length: 8, constructor: SNAP } @@ -30,7 +30,7 @@ export class SNAP extends Protocol { this.getMsgPrefix(code), `Received ${this.getMsgPrefix(code)} message from ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - }: ${formatLogData(bytesToHex(data), this._verbose)}` + }: ${formatLogData(bytesToHex(data), this._verbose)}`, ) } @@ -66,7 +66,7 @@ export class SNAP extends Protocol { this.getMsgPrefix(code), `Send ${this.getMsgPrefix(code)} message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - }: ${formatLogData(utils.bytesToHex(RLP.encode(payload)), this._verbose)}` + }: ${formatLogData(utils.bytesToHex(RLP.encode(payload)), this._verbose)}`, ) } diff --git a/packages/devp2p/src/rlpx/ecies.ts b/packages/devp2p/src/rlpx/ecies.ts index d13fa10886..4eaa7455d5 100644 --- a/packages/devp2p/src/rlpx/ecies.ts +++ b/packages/devp2p/src/rlpx/ecies.ts @@ -43,7 +43,7 @@ function concatKDF(keyMaterial: Uint8Array, keyLength: number) { counter += 1 new DataView(tmp.buffer).setUint32(0, counter) bytes.push( - Uint8Array.from(crypto.createHash('sha256').update(tmp).update(keyMaterial).digest()) + Uint8Array.from(crypto.createHash('sha256').update(tmp).update(keyMaterial).digest()), ) } @@ -73,7 +73,7 @@ export class ECIES { protected _keccakFunction: (msg: Uint8Array) => Uint8Array protected _ecdsaSign: ( msg: Uint8Array, - pk: Uint8Array + pk: Uint8Array, ) => { signature: Uint8Array recid: number @@ -82,7 +82,7 @@ export class ECIES { sig: Uint8Array, recId: number, hash: Uint8Array, - compressed?: boolean + compressed?: boolean, ) => Uint8Array constructor(privateKey: Uint8Array, id: Uint8Array, remoteId: Uint8Array, common?: Common) { @@ -101,7 +101,7 @@ export class ECIES { _encryptMessage( data: Uint8Array, - sharedMacData: Uint8Array | null = null + sharedMacData: Uint8Array | null = null, ): Uint8Array | undefined { const privateKey = genPrivateKey() if (!this._remotePublicKey) return @@ -121,7 +121,7 @@ export class ECIES { sharedMacData = Uint8Array.from([]) } const tag = Uint8Array.from( - crypto.createHmac('sha256', mkey).update(concatBytes(dataIV, sharedMacData)).digest() + crypto.createHmac('sha256', mkey).update(concatBytes(dataIV, sharedMacData)).digest(), ) const publicKey = secp256k1.getPublicKey(privateKey, false) @@ -133,7 +133,7 @@ export class ECIES { data.subarray(0, 1), hexToBytes('0x04'), 'wrong ecies header (possible cause: EIP8 upgrade)', - debug + debug, ) const publicKey = data.subarray(0, 65) @@ -220,7 +220,7 @@ export class ECIES { this._keccakFunction(pk2id(this._ephemeralPublicKey)), pk2id(this._publicKey), this._nonce, - Uint8Array.from([0x00]) + Uint8Array.from([0x00]), ) this._initMsg = this._encryptMessage(data) @@ -229,7 +229,7 @@ export class ECIES { parseAuthPlain( data: Uint8Array, - sharedMacData: Uint8Array | null = null + sharedMacData: Uint8Array | null = null, ): Uint8Array | undefined { const prefix = sharedMacData !== null ? 
sharedMacData : new Uint8Array() this._remoteInitMsg = concatBytes(prefix, data) @@ -272,7 +272,7 @@ export class ECIES { signature, recoveryId, xor(x, this._remoteNonce), - false + false, ) if (this._remoteEphemeralPublicKey === null) return @@ -282,7 +282,7 @@ export class ECIES { this._keccakFunction(pk2id(this._remoteEphemeralPublicKey)), heid, 'the hash of the ephemeral key should match', - debug + debug, ) } } diff --git a/packages/devp2p/src/rlpx/peer.ts b/packages/devp2p/src/rlpx/peer.ts index e980c89688..228cc13bd3 100644 --- a/packages/devp2p/src/rlpx/peer.ts +++ b/packages/devp2p/src/rlpx/peer.ts @@ -143,7 +143,7 @@ export class Peer { this._sendAuth() } this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } /** @@ -152,7 +152,7 @@ export class Peer { _sendAuth() { if (this._closed) return this._logger( - `Send auth (EIP8: ${this._EIP8}) to ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Send auth (EIP8: ${this._EIP8}) to ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) if (this._EIP8 === true) { const authEIP8 = this._eciesSession.createAuthEIP8() @@ -173,7 +173,7 @@ export class Peer { _sendAck() { if (this._closed) return this._logger( - `Send ack (EIP8: ${this._eciesSession['_gotEIP8Auth']}) to ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Send ack (EIP8: ${this._eciesSession['_gotEIP8Auth']}) to ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) if (this._eciesSession['_gotEIP8Auth']) { @@ -227,7 +227,7 @@ export class Peer { // TODO: Remove when we can also serve snap requests from other peers .filter((c) => c.name !== 'snap') .map((c) => `${c.name}${c.version}`) - .join(',')} clientId=${bytesToUtf8(this.clientId)}` + .join(',')} clientId=${bytesToUtf8(this.clientId)}`, ) } const payload: HelloMsg = [ @@ -260,7 +260,7 @@ export class Peer { this.debug( 'DISCONNECT', `Send DISCONNECT to ${this._socket.remoteAddress}:${this._socket.remotePort} (reason: ${reasonName})`, - reasonName + reasonName, ) } const data = RLP.encode(reason) @@ -340,7 +340,7 @@ export class Peer { if (parseData.subarray(0, 1) === hexToBytes('0x04')) { this._eciesSession.parseAckPlain(parseData) this._logger( - `Received ack (old format) from ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Received ack (old format) from ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) } else { this._eciesSession['_gotEIP8Ack'] = true @@ -350,7 +350,7 @@ export class Peer { } else { this._eciesSession.parseAckEIP8(parseData) this._logger( - `Received ack (EIP8) from ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Received ack (EIP8) from ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) } this._state = 'Header' @@ -380,7 +380,7 @@ export class Peer { this._hello.protocolVersion } capabilities=${(this._hello.capabilities ?? 
[]) .map((c) => `${c.name}${c.version}`) - .join(',')} clientId=${this._hello.clientId}` + .join(',')} clientId=${this._hello.clientId}`, ) } @@ -458,7 +458,7 @@ export class Peer { `DISCONNECT reason: ${DISCONNECT_REASON[this._disconnectReason as number]} ${ this._socket.remoteAddress }:${this._socket.remotePort}`, - DISCONNECT_REASON[this._disconnectReason as number] + DISCONNECT_REASON[this._disconnectReason as number], ) } this._disconnectWe = false @@ -534,8 +534,8 @@ export class Peer { this._logger( `Received body ${this._socket.remoteAddress}:${this._socket.remotePort} ${formatLogData( bytesToHex(body), - verbose - )}` + verbose, + )}`, ) this._state = 'Header' this._nextPacketSize = 32 diff --git a/packages/devp2p/src/rlpx/rlpx.ts b/packages/devp2p/src/rlpx/rlpx.ts index 45232dcf4c..7e5f49a158 100644 --- a/packages/devp2p/src/rlpx/rlpx.ts +++ b/packages/devp2p/src/rlpx/rlpx.ts @@ -94,7 +94,7 @@ export class RLPx { this._dpt.events.on('peer:removed', (peer: PeerInfo) => { // remove from queue this._peersQueue = this._peersQueue.filter( - (item) => !equalsBytes(item.peer.id! as Uint8Array, peer.id as Uint8Array) + (item) => !equalsBytes(item.peer.id! as Uint8Array, peer.id as Uint8Array), ) }) } @@ -119,7 +119,7 @@ export class RLPx { this._keccakFunction = options.common?.customCrypto.keccak256 ?? keccak256 this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } listen(...args: any[]) { @@ -157,7 +157,7 @@ export class RLPx { if (this.DEBUG) { this._debug( - `connect to ${peer.address}:${peer.tcpPort} (id: ${formatLogId(peerKey, verbose)})` + `connect to ${peer.address}:${peer.tcpPort} (id: ${formatLogId(peerKey, verbose)})`, ) } const deferred = createDeferred() @@ -272,7 +272,7 @@ export class RLPx { if (this.DEBUG) { this._debug( `disconnect from ${socket.remoteAddress}:${socket.remotePort}, reason: ${DISCONNECT_REASON[reason]}`, - `disconnect` + `disconnect`, ) } } @@ -309,7 +309,7 @@ export class RLPx { this._refillIntervalSelectionCounter } peers: ${this._peers.size}, queue size: ${ this._peersQueue.length - }, open slots: ${this._getOpenSlots()}` + }, open slots: ${this._getOpenSlots()}`, ) } } diff --git a/packages/devp2p/src/util.ts b/packages/devp2p/src/util.ts index 746cece35e..e1de5db726 100644 --- a/packages/devp2p/src/util.ts +++ b/packages/devp2p/src/util.ts @@ -46,7 +46,7 @@ export function assertEq( actual: assertInput, msg: string, debug: Function, - messageName?: string + messageName?: string, ): void { let fullMsg diff --git a/packages/devp2p/test/dns.spec.ts b/packages/devp2p/test/dns.spec.ts index bee07a82a7..1b6f434ed0 100644 --- a/packages/devp2p/test/dns.spec.ts +++ b/packages/devp2p/test/dns.spec.ts @@ -174,6 +174,6 @@ describe('DNS: (integration)', () => { seen.push(peer!.address as string) } }, - { timeout: 10000 } + { timeout: 10000 }, ) }) diff --git a/packages/devp2p/test/dpt-message.spec.ts b/packages/devp2p/test/dpt-message.spec.ts index a5ed1ca801..b5c4cdf38e 100644 --- a/packages/devp2p/test/dpt-message.spec.ts +++ b/packages/devp2p/test/dpt-message.spec.ts @@ -9,7 +9,7 @@ const publicKey = publicKeyCreate(privateKey, false) describe('DPT message tests', () => { it('ping packet with version 4, additional list elements', () => { const bytes = hexToBytes( - 
'0xe9614ccfd9fc3e74360018522d30e1419a143407ffcce748de3e22116b7e8dc92ff74788c0b6663aaa3d67d641936511c8f8d6ad8698b820a7cf9e1be7155e9a241f556658c55428ec0563514365799a4be2be5a685a80971ddcfa80cb422cdd0101ec04cb847f000001820cfa8215a8d790000000000000000000000000000000018208ae820d058443b9a3550102' + '0xe9614ccfd9fc3e74360018522d30e1419a143407ffcce748de3e22116b7e8dc92ff74788c0b6663aaa3d67d641936511c8f8d6ad8698b820a7cf9e1be7155e9a241f556658c55428ec0563514365799a4be2be5a685a80971ddcfa80cb422cdd0101ec04cb847f000001820cfa8215a8d790000000000000000000000000000000018208ae820d058443b9a3550102', ) const msg = message.decode(bytes) @@ -20,7 +20,7 @@ describe('DPT message tests', () => { it('ping packet with version 555, additional list elements and additional random data:', () => { const bytes = hexToBytes( - '0x577be4349c4dd26768081f58de4c6f375a7a22f3f7adda654d1428637412c3d7fe917cadc56d4e5e7ffae1dbe3efffb9849feb71b262de37977e7c7a44e677295680e9e38ab26bee2fcbae207fba3ff3d74069a50b902a82c9903ed37cc993c50001f83e82022bd79020010db83c4d001500000000abcdef12820cfa8215a8d79020010db885a308d313198a2e037073488208ae82823a8443b9a355c5010203040531b9019afde696e582a78fa8d95ea13ce3297d4afb8ba6433e4154caa5ac6431af1b80ba76023fa4090c408f6b4bc3701562c031041d4702971d102c9ab7fa5eed4cd6bab8f7af956f7d565ee1917084a95398b6a21eac920fe3dd1345ec0a7ef39367ee69ddf092cbfe5b93e5e568ebc491983c09c76d922dc3' + '0x577be4349c4dd26768081f58de4c6f375a7a22f3f7adda654d1428637412c3d7fe917cadc56d4e5e7ffae1dbe3efffb9849feb71b262de37977e7c7a44e677295680e9e38ab26bee2fcbae207fba3ff3d74069a50b902a82c9903ed37cc993c50001f83e82022bd79020010db83c4d001500000000abcdef12820cfa8215a8d79020010db885a308d313198a2e037073488208ae82823a8443b9a355c5010203040531b9019afde696e582a78fa8d95ea13ce3297d4afb8ba6433e4154caa5ac6431af1b80ba76023fa4090c408f6b4bc3701562c031041d4702971d102c9ab7fa5eed4cd6bab8f7af956f7d565ee1917084a95398b6a21eac920fe3dd1345ec0a7ef39367ee69ddf092cbfe5b93e5e568ebc491983c09c76d922dc3', ) const msg = message.decode(bytes) @@ -31,7 +31,7 @@ describe('DPT message tests', () => { it('pong packet with additional list elements and additional random data', () => { const bytes = hexToBytes( - '0x09b2428d83348d27cdf7064ad9024f526cebc19e4958f0fdad87c15eb598dd61d08423e0bf66b2069869e1724125f820d851c136684082774f870e614d95a2855d000f05d1648b2d5945470bc187c2d2216fbe870f43ed0909009882e176a46b0102f846d79020010db885a308d313198a2e037073488208ae82823aa0fbc914b16819237dcd8801d7e53f69e9719adecb3cc0e790c57e91ca4461c9548443b9a355c6010203c2040506a0c969a58f6f9095004c0177a6b47f451530cab38966a25cca5cb58f055542124e' + '0x09b2428d83348d27cdf7064ad9024f526cebc19e4958f0fdad87c15eb598dd61d08423e0bf66b2069869e1724125f820d851c136684082774f870e614d95a2855d000f05d1648b2d5945470bc187c2d2216fbe870f43ed0909009882e176a46b0102f846d79020010db885a308d313198a2e037073488208ae82823aa0fbc914b16819237dcd8801d7e53f69e9719adecb3cc0e790c57e91ca4461c9548443b9a355c6010203c2040506a0c969a58f6f9095004c0177a6b47f451530cab38966a25cca5cb58f055542124e', ) const msg = message.decode(bytes) @@ -41,7 +41,7 @@ describe('DPT message tests', () => { it('findnode packet with additional list elements and additional random data', () => { const bytes = hexToBytes( - 
'0xc7c44041b9f7c7e41934417ebac9a8e1a4c6298f74553f2fcfdcae6ed6fe53163eb3d2b52e39fe91831b8a927bf4fc222c3902202027e5e9eb812195f95d20061ef5cd31d502e47ecb61183f74a504fe04c51e73df81f25c4d506b26db4517490103f84eb840ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31387574077f301b421bc84df7266c44e9e6d569fc56be00812904767bf5ccd1fc7f8443b9a35582999983999999280dc62cc8255c73471e0a61da0c89acdc0e035e260add7fc0c04ad9ebf3919644c91cb247affc82b69bd2ca235c71eab8e49737c937a2c396' + '0xc7c44041b9f7c7e41934417ebac9a8e1a4c6298f74553f2fcfdcae6ed6fe53163eb3d2b52e39fe91831b8a927bf4fc222c3902202027e5e9eb812195f95d20061ef5cd31d502e47ecb61183f74a504fe04c51e73df81f25c4d506b26db4517490103f84eb840ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31387574077f301b421bc84df7266c44e9e6d569fc56be00812904767bf5ccd1fc7f8443b9a35582999983999999280dc62cc8255c73471e0a61da0c89acdc0e035e260add7fc0c04ad9ebf3919644c91cb247affc82b69bd2ca235c71eab8e49737c937a2c396', ) const msg = message.decode(bytes) @@ -51,7 +51,7 @@ describe('DPT message tests', () => { it('neighbours packet with additional list elements and additional random data', () => { const bytes = hexToBytes( - '0xc679fc8fe0b8b12f06577f2e802d34f6fa257e6137a995f6f4cbfc9ee50ed3710faf6e66f932c4c8d81d64343f429651328758b47d3dbc02c4042f0fff6946a50f4a49037a72bb550f3a7872363a83e1b9ee6469856c24eb4ef80b7535bcf99c0004f9015bf90150f84d846321163782115c82115db8403155e1427f85f10a5c9a7755877748041af1bcd8d474ec065eb33df57a97babf54bfd2103575fa829115d224c523596b401065a97f74010610fce76382c0bf32f84984010203040101b840312c55512422cf9b8a4097e9a6ad79402e87a15ae909a4bfefa22398f03d20951933beea1e4dfa6f968212385e829f04c2d314fc2d4e255e0d3bc08792b069dbf8599020010db83c4d001500000000abcdef12820d05820d05b84038643200b172dcfef857492156971f0e6aa2c538d8b74010f8e140811d53b98c765dd2d96126051913f44582e8c199ad7c6d6819e9a56483f637feaac9448aacf8599020010db885a308d313198a2e037073488203e78203e8b8408dcab8618c3253b558d459da53bd8fa68935a719aff8b811197101a4b2b47dd2d47295286fc00cc081bb542d760717d1bdd6bec2c37cd72eca367d6dd3b9df738443b9a355010203b525a138aa34383fec3d2719a0' + '0xc679fc8fe0b8b12f06577f2e802d34f6fa257e6137a995f6f4cbfc9ee50ed3710faf6e66f932c4c8d81d64343f429651328758b47d3dbc02c4042f0fff6946a50f4a49037a72bb550f3a7872363a83e1b9ee6469856c24eb4ef80b7535bcf99c0004f9015bf90150f84d846321163782115c82115db8403155e1427f85f10a5c9a7755877748041af1bcd8d474ec065eb33df57a97babf54bfd2103575fa829115d224c523596b401065a97f74010610fce76382c0bf32f84984010203040101b840312c55512422cf9b8a4097e9a6ad79402e87a15ae909a4bfefa22398f03d20951933beea1e4dfa6f968212385e829f04c2d314fc2d4e255e0d3bc08792b069dbf8599020010db83c4d001500000000abcdef12820d05820d05b84038643200b172dcfef857492156971f0e6aa2c538d8b74010f8e140811d53b98c765dd2d96126051913f44582e8c199ad7c6d6819e9a56483f637feaac9448aacf8599020010db885a308d313198a2e037073488203e78203e8b8408dcab8618c3253b558d459da53bd8fa68935a719aff8b811197101a4b2b47dd2d47295286fc00cc081bb542d760717d1bdd6bec2c37cd72eca367d6dd3b9df738443b9a355010203b525a138aa34383fec3d2719a0', ) const msg = message.decode(bytes) diff --git a/packages/devp2p/test/dpt.spec.ts b/packages/devp2p/test/dpt.spec.ts index 4f37791f5e..0e776e1483 100644 --- a/packages/devp2p/test/dpt.spec.ts +++ b/packages/devp2p/test/dpt.spec.ts @@ -11,7 +11,7 @@ describe('DPT', () => { }) const privateKey1 = hexToBytes( - '0x012e930448c53e0b73edbbbc433e8a741e978cda79be2be039905f538d6247c2' + '0x012e930448c53e0b73edbbbc433e8a741e978cda79be2be039905f538d6247c2', ) const peers: PeerInfo[] = [] @@ -56,7 +56,7 @@ describe('DPT', () => { 
assert.equal( dpt.getClosestPeers(peers[0].id!).length, 2, - 'should return all peers on getClosestPeers()' + 'should return all peers on getClosestPeers()', ) dpt.destroy() @@ -73,7 +73,7 @@ describe('DPT', () => { await dpt.refresh() expect( spy, - 'call findneighbours on unconfirmed if no confirmed peers yet' + 'call findneighbours on unconfirmed if no confirmed peers yet', ).toHaveBeenCalledTimes(1) dpt['_refreshIntervalSelectionCounter'] = 0 @@ -86,21 +86,21 @@ describe('DPT', () => { assert.equal( dpt.getClosestPeers(peers[0].id!).length, 1, - 'should not return unconfirmed on getClosestPeers()' + 'should not return unconfirmed on getClosestPeers()', ) dpt.confirmPeer('02') assert.equal( dpt.getClosestPeers(peers[0].id!).length, 2, - 'should return confirmed on getClosestPeers()' + 'should return confirmed on getClosestPeers()', ) dpt.removePeer(peers[1]) assert.equal( dpt.getClosestPeers(peers[0].id!).length, 1, - 'should work after peers being removed' + 'should work after peers being removed', ) dpt.destroy() diff --git a/packages/devp2p/test/enr.spec.ts b/packages/devp2p/test/enr.spec.ts index 01df40ac6d..60b1858740 100644 --- a/packages/devp2p/test/enr.spec.ts +++ b/packages/devp2p/test/enr.spec.ts @@ -19,7 +19,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("ENR root entry must start with 'enrtree-root:'"), - 'has correct error message' + 'has correct error message', ) } }) @@ -30,7 +30,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes('Unable to verify ENR root signature'), - 'has correct error message' + 'has correct error message', ) } }) @@ -41,7 +41,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("Could not parse 'l' value from ENR root entry"), - 'has correct error message' + 'has correct error message', ) } }) @@ -60,7 +60,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("ENR tree entry must start with 'enrtree:'"), - 'has correct error message' + 'has correct error message', ) } }) @@ -71,7 +71,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes('Could not parse domain from ENR tree entry'), - 'has correct error message' + 'has correct error message', ) } }) @@ -94,7 +94,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("ENR branch entry must start with 'enrtree-branch:'"), - 'has correct error message' + 'has correct error message', ) } }) @@ -121,7 +121,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("String encoded ENR must start with 'enr:'"), - 'has correct error message' + 'has correct error message', ) } }) diff --git a/packages/devp2p/test/integration/dpt-simulator.spec.ts b/packages/devp2p/test/integration/dpt-simulator.spec.ts index 0b2682effa..6bea867128 100644 --- a/packages/devp2p/test/integration/dpt-simulator.spec.ts +++ b/packages/devp2p/test/integration/dpt-simulator.spec.ts @@ -37,7 +37,7 @@ describe('DPT simulator tests', () => { assert.equal( dpts[0].getPeers().length, 0, - 'should have removed peer from k-bucket on peer:removed' + 'should have removed peer from k-bucket on peer:removed', ) await util.delay(500) util.destroyDPTs(dpts) @@ -60,7 +60,7 @@ describe('DPT simulator tests', () => { assert.equal( dpts[0].getPeers().length, 0, - 'should have removed peer from k-bucket on peer:removed' + 'should have removed peer from k-bucket on peer:removed', ) await util.delay(500) 
util.destroyDPTs(dpts) diff --git a/packages/devp2p/test/integration/eth-simulator.spec.ts b/packages/devp2p/test/integration/eth-simulator.spec.ts index 3a8d1847c5..9f49b08ff8 100644 --- a/packages/devp2p/test/integration/eth-simulator.spec.ts +++ b/packages/devp2p/test/integration/eth-simulator.spec.ts @@ -11,7 +11,7 @@ import type { Capabilities } from '../../src/index.js' const GENESIS_TD = 17179869184 const GENESIS_HASH = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) const capabilities = [devp2p.ETH.eth63, devp2p.ETH.eth62] @@ -98,7 +98,7 @@ describe('ETH simulator tests', () => { t: typeof it, version: number, cap: Capabilities[], - expectedCode: ETH.MESSAGE_CODES + expectedCode: ETH.MESSAGE_CODES, ) { await new Promise((resolve) => { const opts: any = {} @@ -142,7 +142,7 @@ describe('ETH simulator tests', () => { resolve(undefined) }) }) - } + }, ) it('ETH: send not-allowed eth67', async () => { diff --git a/packages/devp2p/test/integration/les-simulator.spec.ts b/packages/devp2p/test/integration/les-simulator.spec.ts index 93091ae10d..0b472ce954 100644 --- a/packages/devp2p/test/integration/les-simulator.spec.ts +++ b/packages/devp2p/test/integration/les-simulator.spec.ts @@ -8,7 +8,7 @@ import * as util from './util.js' const GENESIS_TD = 17179869184 const GENESIS_HASH = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) const capabilities = [devp2p.LES.les4] diff --git a/packages/devp2p/test/integration/rlpx-simulator.spec.ts b/packages/devp2p/test/integration/rlpx-simulator.spec.ts index e958b39221..b35bccdc2e 100644 --- a/packages/devp2p/test/integration/rlpx-simulator.spec.ts +++ b/packages/devp2p/test/integration/rlpx-simulator.spec.ts @@ -14,7 +14,7 @@ describe('RLPx simulator tests', () => { assert.equal( peer._port, basePort + 1, - 'should have added peer on peer:added after successful handshake' + 'should have added peer on peer:added after successful handshake', ) assert.equal(rlpxs[0].getPeers().length, 1, 'peer list length should be 1') assert.equal(rlpxs[0]._getOpenSlots(), 9, 'should have maxPeers - 1 open slots left') @@ -35,12 +35,12 @@ describe('RLPx simulator tests', () => { } assert.notOk( rlpxs[0]['_dpt']!['_banlist'].has(peer), - 'should not be in ban list before bad peer discovered' + 'should not be in ban list before bad peer discovered', ) rlpxs[0]['_dpt']!.events.emit('peer:new', peer) assert.ok( rlpxs[0]['_dpt']!['_banlist'].has(peer), - 'should be in ban list after bad peer discovered' + 'should be in ban list after bad peer discovered', ) await util.delay(500) util.destroyRLPXs(rlpxs) @@ -58,7 +58,7 @@ describe('RLPx simulator tests', () => { assert.equal( reason, DISCONNECT_REASON.CLIENT_QUITTING, - 'should close with CLIENT_QUITTING disconnect reason' + 'should close with CLIENT_QUITTING disconnect reason', ) assert.equal(rlpxs[0]._getOpenSlots(), 10, 'should have maxPeers open slots left') await util.delay(500) diff --git a/packages/devp2p/test/integration/util.ts b/packages/devp2p/test/integration/util.ts index 6ebef037a6..ac35ccd51a 100644 --- a/packages/devp2p/test/integration/util.ts +++ b/packages/devp2p/test/integration/util.ts @@ -72,7 +72,7 @@ export function getTestRLPXs( maxPeers: number = 10, basePort: number, capabilities?: Capabilities[], - common?: Object | Common + common?: Object | Common, ) { const rlpxs = [] if (typeof 
capabilities === 'undefined') { @@ -101,7 +101,7 @@ export function initTwoPeerRLPXSetup( maxPeers?: any, capabilities?: any, common?: Object | Common, - basePort = 30306 + basePort = 30306, ): RLPx[] { const rlpxs = getTestRLPXs(2, maxPeers, basePort, capabilities, common) const peer = { address: localhost, udpPort: basePort + 1, tcpPort: basePort + 1 } @@ -128,7 +128,7 @@ export function twoPeerMsgExchange( opts: any, capabilities?: Capabilities[], common?: Object | Common, - basePort = 30306 + basePort = 30306, ) { const rlpxs = initTwoPeerRLPXSetup(null, capabilities, common, basePort) rlpxs[0].events.on('peer:added', function (peer: any) { @@ -190,7 +190,7 @@ export async function twoPeerMsgExchange2( opts: any, capabilities?: any, common?: Object | Common, - basePort = 30306 + basePort = 30306, ) { const rlpxs = initTwoPeerRLPXSetup(null, capabilities, common, basePort) rlpxs[0].events.on('peer:added', function (peer: any) { @@ -223,7 +223,7 @@ export async function twoPeerMsgExchange2( assert.equal( err.message, 'Invalid Snappy bitstream', - 'unable to process snappy compressed message' + 'unable to process snappy compressed message', ) destroyRLPXs(rlpxs) opts.promise(undefined) @@ -245,7 +245,7 @@ export function twoPeerMsgExchange3( opts: any, capabilities?: any, common?: Object | Common, - basePort = 30306 + basePort = 30306, ) { const rlpxs = initTwoPeerRLPXSetup(null, capabilities, common, basePort) rlpxs[0].events.on('peer:added', function (peer: any) { diff --git a/packages/devp2p/test/rlpx-ecies.spec.ts b/packages/devp2p/test/rlpx-ecies.spec.ts index fc67c63fbf..4e857a477f 100644 --- a/packages/devp2p/test/rlpx-ecies.spec.ts +++ b/packages/devp2p/test/rlpx-ecies.spec.ts @@ -64,7 +64,7 @@ it( const encrypted = t.context.a._encryptMessage(message) const decrypted = t.context.b._decryptMessage(encrypted as Uint8Array) assert.deepEqual(message, decrypted, 'encryptMessage -> decryptMessage should lead to same') - }) + }), ) it( @@ -89,7 +89,7 @@ it( const parsedBody = t.context.b.parseBody(t.context.a.createBody(body) as Uint8Array) assert.deepEqual(parsedBody, body, 'createBody -> parseBody should lead to same') - }) + }), ) it( @@ -106,7 +106,7 @@ it( t.context.a['_gotEIP8Ack'] = true t.context.a.parseAckEIP8(ack as Uint8Array) }, 'should not throw on ack creation/parsing') - }) + }), ) it( @@ -122,7 +122,7 @@ it( t.context.a['_gotEIP8Ack'] = false t.context.a.parseAckPlain(t.context.h0?.ack as Uint8Array) }, 'should not throw on ack parsing') - }) + }), ) it( @@ -137,5 +137,5 @@ it( t.context.a['_gotEIP8Ack'] = true t.context.a.parseAckEIP8(t.context.h1?.ack as Uint8Array) }, 'should not throw on ack parsing') - }) + }), ) diff --git a/packages/devp2p/test/rlpx.spec.ts b/packages/devp2p/test/rlpx.spec.ts index 457bab846e..0978ee8154 100644 --- a/packages/devp2p/test/rlpx.spec.ts +++ b/packages/devp2p/test/rlpx.spec.ts @@ -165,12 +165,12 @@ describe('RLPx', () => { assert.equal( rlpx['_getOpenSlots'](), 10, - 'returns default number of open slots (i.e. `max_peers`) on startup' + 'returns default number of open slots (i.e. 
`max_peers`) on startup', ) assert.equal( rlpx['_getOpenQueueSlots'](), 20, - 'returns default number of open queue slots on startup' + 'returns default number of open queue slots on startup', ) }) }) diff --git a/packages/devp2p/tsconfig.lint.json b/packages/devp2p/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/devp2p/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/ethash/.eslintrc.cjs b/packages/ethash/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/ethash/.eslintrc.cjs +++ b/packages/ethash/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/ethash/examples/example.ts b/packages/ethash/examples/example.ts index 28461ba38f..983a4007bf 100644 --- a/packages/ethash/examples/example.ts +++ b/packages/ethash/examples/example.ts @@ -1,4 +1,5 @@ import { bytesToHex, hexToBytes } from '@ethereumjs/util' + import { Ethash } from '../dist/cjs/index.js' const ethash = new Ethash() diff --git a/packages/ethash/examples/miner.ts b/packages/ethash/examples/miner.ts index ab783d6d60..c4f294b8c0 100644 --- a/packages/ethash/examples/miner.ts +++ b/packages/ethash/examples/miner.ts @@ -1,6 +1,8 @@ import { createBlockFromBlockData } from '@ethereumjs/block' import { Ethash } from '@ethereumjs/ethash' -import { DBObject, MapDB, bytesToHex } from '@ethereumjs/util' +import { MapDB, bytesToHex } from '@ethereumjs/util' + +import type { DBObject } from '@ethereumjs/util' const block = createBlockFromBlockData( { @@ -9,7 +11,7 @@ const block = createBlockFromBlockData( number: BigInt(1), }, }, - { setHardfork: true, skipConsensusFormatValidation: true } + { setHardfork: true, skipConsensusFormatValidation: true }, ) const cacheDB = new MapDB() diff --git a/packages/ethash/examples/powBlock.ts b/packages/ethash/examples/powBlock.ts index fcd54323c0..f9bbce1a4e 100644 --- a/packages/ethash/examples/powBlock.ts +++ b/packages/ethash/examples/powBlock.ts @@ -1,6 +1,8 @@ -import { Ethash } from '@ethereumjs/ethash' import { createBlockFromRLPSerializedBlock } from '@ethereumjs/block' -import { DBObject, hexToBytes, MapDB } from '@ethereumjs/util' +import { Ethash } from '@ethereumjs/ethash' +import { MapDB, hexToBytes } from '@ethereumjs/util' + +import type { DBObject } from '@ethereumjs/util' const cacheDB = new MapDB() diff --git a/packages/ethash/examples/rawExample.ts b/packages/ethash/examples/rawExample.ts index 3046119e2c..67f0f44727 100644 --- a/packages/ethash/examples/rawExample.ts +++ b/packages/ethash/examples/rawExample.ts @@ -1,5 +1,8 @@ +import { MapDB, bytesToHex, hexToBytes } from '@ethereumjs/util' + import { Ethash } from '../dist/cjs/index.js' -import { DBObject, MapDB, bytesToHex, hexToBytes } from '@ethereumjs/util' + +import type { DBObject } from '@ethereumjs/util' const ethash = new Ethash(new MapDB()) @@ -7,7 +10,7 @@ const verifySubmit = async ( ethash: Ethash, number: number, headerHash: Uint8Array, - nonce: Uint8Array + nonce: Uint8Array, ): Promise => { console.log('Verifying number: ', number) await ethash.loadEpoc(BigInt(number)) @@ -20,6 +23,6 @@ const verifySubmit = async ( const headerHash = 
hexToBytes('0x0e2887aa1a0668bf8254d1a6ae518927de99e3e5d7f30fd1f16096e2608fe05e') const nonce = hexToBytes('0xe360b6170c229d15') -verifySubmit(ethash, 35414, headerHash, nonce).then((result) => { +void verifySubmit(ethash, 35414, headerHash, nonce).then((result) => { console.log('Result: ', bytesToHex(result)) }) diff --git a/packages/ethash/src/index.ts b/packages/ethash/src/index.ts index 31b8157c8d..0f2f274e48 100644 --- a/packages/ethash/src/index.ts +++ b/packages/ethash/src/index.ts @@ -227,7 +227,7 @@ export class Ethash { const p = (fnv( i ^ new DataView(s.buffer).getUint32(0, true), - new DataView(mix.buffer).getUint32((i % w) * 4, true) + new DataView(mix.buffer).getUint32((i % w) * 4, true), ) % Math.floor(n / mixhashes)) * mixhashes @@ -344,7 +344,7 @@ export class Ethash { { keyEncoding: KeyEncoding.Number, valueEncoding: ValueEncoding.JSON, - } + }, ) } else { this.cache = data.cache.map((a: Uint8Array) => { diff --git a/packages/ethash/src/util.ts b/packages/ethash/src/util.ts index eae06d2a0b..43c261c450 100644 --- a/packages/ethash/src/util.ts +++ b/packages/ethash/src/util.ts @@ -66,7 +66,7 @@ export function fnvBytes(a: Uint8Array, b: Uint8Array) { rView.setUint32( i, fnv(new DataView(a.buffer).getUint32(i, true), new DataView(b.buffer).getUint32(i, true)), - true + true, ) } return r diff --git a/packages/ethash/test/miner.spec.ts b/packages/ethash/test/miner.spec.ts index 493245da5b..3f69b95a33 100644 --- a/packages/ethash/test/miner.spec.ts +++ b/packages/ethash/test/miner.spec.ts @@ -22,7 +22,7 @@ describe('Miner', () => { number: BigInt(1), }, }, - { common } + { common }, ) const invalidBlockResult = await e.verifyPOW(block) @@ -46,7 +46,7 @@ describe('Miner', () => { mixHash: solution?.mixHash, }, }, - { common } + { common }, ) const validBlockResult = await e.verifyPOW(validBlock) @@ -64,14 +64,14 @@ describe('Miner', () => { number: BigInt(1), }, }, - { common } + { common }, ) const miner = e.getMiner(block.header) const solution = await miner.mine(-1) assert.ok( e.verifyPOW(createBlockFromBlockData({ header: solution.toJSON() }, { common })), - 'successfully mined block' + 'successfully mined block', ) const blockMiner = e.getMiner(block) @@ -90,7 +90,7 @@ describe('Miner', () => { number: BigInt(1), }, }, - { common } + { common }, ) const miner = e.getMiner(block.header) setTimeout(function () { @@ -111,7 +111,7 @@ describe('Miner', () => { }, undefined, undefined, - 'miner constructor successfully throws if no BlockHeader or Block object is passed' + 'miner constructor successfully throws if no BlockHeader or Block object is passed', ) }) @@ -127,7 +127,7 @@ describe('Miner', () => { }, { common, - } + }, ) const miner = e.getMiner(block.header) diff --git a/packages/ethash/tsconfig.lint.json b/packages/ethash/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/ethash/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/evm/.eslintrc.cjs b/packages/evm/.eslintrc.cjs index 965b6e94e6..10790297fc 100644 --- a/packages/evm/.eslintrc.cjs +++ b/packages/evm/.eslintrc.cjs @@ -7,9 +7,10 @@ module.exports = { }, overrides: [ { - files: ['test/util.ts', 'test/tester/**/*.ts'], + files: ['test/util.ts', 'test/tester/**/*.ts', 'examples/**/*.ts'], rules: { 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', }, }, ], diff --git a/packages/evm/examples/4844.ts b/packages/evm/examples/4844.ts index 180bf1d2a5..5c4944fae7 100644 --- 
a/packages/evm/examples/4844.ts +++ b/packages/evm/examples/4844.ts @@ -1,3 +1,3 @@ -import { Common, Chain, Hardfork } from '@ethereumjs/common' +import { Chain, Common, Hardfork } from '@ethereumjs/common' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai, eips: [4844] }) diff --git a/packages/evm/examples/decode-opcodes.ts b/packages/evm/examples/decode-opcodes.ts index ff551679e3..b138bd529d 100644 --- a/packages/evm/examples/decode-opcodes.ts +++ b/packages/evm/examples/decode-opcodes.ts @@ -4,6 +4,7 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' import { bytesToHex, hexToBytes } from '@ethereumjs/util' + import { getOpcodesForHF } from '../dist/cjs/opcodes/index.js' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) @@ -33,7 +34,7 @@ function nameOpCodes(raw: Uint8Array) { ' ' + curOpCode + ' ' + - (pushData?.length > 0 ? bytesToHex(pushData as Uint8Array) : '') + (pushData?.length > 0 ? bytesToHex(pushData as Uint8Array) : ''), ) pushData = new Uint8Array() diff --git a/packages/evm/examples/eips.ts b/packages/evm/examples/eips.ts index 09b0986e1c..878373118c 100644 --- a/packages/evm/examples/eips.ts +++ b/packages/evm/examples/eips.ts @@ -1,5 +1,5 @@ import { Chain, Common } from '@ethereumjs/common' -import { createEVM, EVM } from '@ethereumjs/evm' +import { createEVM } from '@ethereumjs/evm' const main = async () => { const common = new Common({ chain: Chain.Mainnet, eips: [3074] }) @@ -7,4 +7,4 @@ const main = async () => { console.log(`EIP 3074 is active - ${evm.common.isActivatedEIP(3074)}`) } -main() +void main() diff --git a/packages/evm/examples/runCode.ts b/packages/evm/examples/runCode.ts index 901cf97c80..2c9643f580 100644 --- a/packages/evm/examples/runCode.ts +++ b/packages/evm/examples/runCode.ts @@ -1,8 +1,10 @@ import { createBlockchain } from '@ethereumjs/blockchain' import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { createEVM, EVM } from '@ethereumjs/evm' +import { createEVM } from '@ethereumjs/evm' import { bytesToHex, hexToBytes } from '@ethereumjs/util' +import type { PrefixedHexString } from '@ethereumjs/util' + const main = async () => { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) const blockchain = await createBlockchain() @@ -26,7 +28,7 @@ const main = async () => { evm .runCode({ - code: hexToBytes('0x' + code.join('')), + code: hexToBytes(('0x' + code.join('')) as PrefixedHexString), gasLimit: BigInt(0xffff), }) .then((results) => { diff --git a/packages/evm/examples/simple.ts b/packages/evm/examples/simple.ts index 9600f3b5e7..66cf71b510 100644 --- a/packages/evm/examples/simple.ts +++ b/packages/evm/examples/simple.ts @@ -1,5 +1,5 @@ +import { createEVM } from '@ethereumjs/evm' import { hexToBytes } from '@ethereumjs/util' -import { createEVM, EVM } from '@ethereumjs/evm' const main = async () => { const evm = await createEVM() @@ -7,4 +7,4 @@ const main = async () => { console.log(res.executionGasUsed) // 3n } -main() +void main() diff --git a/packages/evm/examples/withBlockchain.ts b/packages/evm/examples/withBlockchain.ts index 6b046cd466..b7370658c9 100644 --- a/packages/evm/examples/withBlockchain.ts +++ b/packages/evm/examples/withBlockchain.ts @@ -1,8 +1,10 @@ import { createBlockchain } from '@ethereumjs/blockchain' import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { createEVM, EVM } from '@ethereumjs/evm' +import { createEVM } from '@ethereumjs/evm' import { DefaultStateManager } from 
'@ethereumjs/statemanager' -import { bytesToHex } from '@ethereumjs/util' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' + +import type { PrefixedHexString } from '@ethereumjs/util' const main = async () => { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) @@ -28,7 +30,7 @@ const main = async () => { }) const results = await evm.runCode({ - code: Buffer.from(code.join(''), 'hex'), + code: hexToBytes(('0x' + code.join('')) as PrefixedHexString), gasLimit: BigInt(0xffff), }) diff --git a/packages/evm/src/eof/container.ts b/packages/evm/src/eof/container.ts index 7a60e1f141..a2be68d86c 100644 --- a/packages/evm/src/eof/container.ts +++ b/packages/evm/src/eof/container.ts @@ -290,7 +290,7 @@ class EOFBody { buf: Uint8Array, // The buffer of the body. This should be the entire body. It is not valid to pass an entire EOF container in here header: EOFHeader, // The EOFHeader corresponding to this body eofMode: EOFContainerMode = EOFContainerMode.Default, // The container mode of EOF - dataSectionAllowedSmaller = false // Only for validation: Deployment containers are allowed to have smaller data section size + dataSectionAllowedSmaller = false, // Only for validation: Deployment containers are allowed to have smaller data section size ) { const stream = new StreamReader(buf) const typeSections: TypeSection[] = [] @@ -415,7 +415,7 @@ export class EOFContainer { constructor( buf: Uint8Array, eofMode: EOFContainerMode = EOFContainerMode.Default, - dataSectionAllowedSmaller = false + dataSectionAllowedSmaller = false, ) { this.eofMode = eofMode this.header = new EOFHeader(buf) @@ -423,7 +423,7 @@ export class EOFContainer { buf.slice(this.header.buffer.length), this.header, eofMode, - dataSectionAllowedSmaller + dataSectionAllowedSmaller, ) this.buffer = buf } @@ -443,12 +443,12 @@ export function validateEOF( input: Uint8Array, evm: EVM, containerMode: ContainerSectionType = ContainerSectionType.RuntimeCode, - eofMode: EOFContainerMode = EOFContainerMode.Default + eofMode: EOFContainerMode = EOFContainerMode.Default, ) { const container = new EOFContainer( input, eofMode, - containerMode === ContainerSectionType.DeploymentCode + containerMode === ContainerSectionType.DeploymentCode, ) const containerMap = verifyCode(container, evm, containerMode) // Recursively validate the containerSections diff --git a/packages/evm/src/eof/verify.ts b/packages/evm/src/eof/verify.ts index 00537f1eb0..988f1adc04 100644 --- a/packages/evm/src/eof/verify.ts +++ b/packages/evm/src/eof/verify.ts @@ -41,7 +41,7 @@ export enum ContainerSectionType { export function verifyCode( container: EOFContainer, evm: EVM, - mode: ContainerSectionType = ContainerSectionType.RuntimeCode + mode: ContainerSectionType = ContainerSectionType.RuntimeCode, ) { return validateOpcodes(container, evm, mode) } @@ -58,7 +58,7 @@ function readUint16(code: Uint8Array, start: number) { function validateOpcodes( container: EOFContainer, evm: EVM, - mode: ContainerSectionType = ContainerSectionType.RuntimeCode + mode: ContainerSectionType = ContainerSectionType.RuntimeCode, ) { // Track the intermediate bytes const intermediateBytes = new Set() diff --git a/packages/evm/src/evm.ts b/packages/evm/src/evm.ts index 1d1ac85bac..ff85812b00 100644 --- a/packages/evm/src/evm.ts +++ b/packages/evm/src/evm.ts @@ -179,7 +179,7 @@ export class EVM implements EVMInterface { if (!EVM.supportedHardforks.includes(this.common.hardfork() as Hardfork)) { throw new Error( - `Hardfork ${this.common.hardfork()} not set as 
supported in supportedHardforks` + `Hardfork ${this.common.hardfork()} not set as supported in supportedHardforks`, ) } @@ -214,7 +214,7 @@ export class EVM implements EVMInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } /** @@ -282,13 +282,13 @@ export class EVM implements EVMInterface { if (!toAccount) { if (this.common.isActivatedEIP(6800)) { const absenceProofAccessGas = message.accessWitness!.touchAndChargeProofOfAbsence( - message.to + message.to, ) gasLimit -= absenceProofAccessGas if (gasLimit < BIGINT_0) { if (this.DEBUG) { debugGas( - `Proof of absense access charged(${absenceProofAccessGas}) caused OOG (-> ${gasLimit})` + `Proof of absense access charged(${absenceProofAccessGas}) caused OOG (-> ${gasLimit})`, ) } return { execResult: OOGResult(message.gasLimit) } @@ -429,13 +429,13 @@ export class EVM implements EVMInterface { if (this.common.isActivatedEIP(6800)) { const contractCreateAccessGas = message.accessWitness!.touchAndChargeContractCreateInit( - message.to + message.to, ) gasLimit -= contractCreateAccessGas if (gasLimit < BIGINT_0) { if (this.DEBUG) { debugGas( - `ContractCreateInit charge(${contractCreateAccessGas}) caused OOG (-> ${gasLimit})` + `ContractCreateInit charge(${contractCreateAccessGas}) caused OOG (-> ${gasLimit})`, ) } return { execResult: OOGResult(message.gasLimit) } @@ -518,13 +518,13 @@ export class EVM implements EVMInterface { if (gasLimit < BIGINT_0) { if (this.DEBUG) { debug( - `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})` + `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})`, ) } return { execResult: OOGResult(message.gasLimit) } } else { debug( - `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})` + `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})`, ) } } @@ -627,19 +627,19 @@ export class EVM implements EVMInterface { gasLimit = message.gasLimit - result.executionGasUsed if (!result.exceptionError && this.common.isActivatedEIP(6800)) { const createCompleteAccessGas = message.accessWitness!.touchAndChargeContractCreateCompleted( - message.to + message.to, ) gasLimit -= createCompleteAccessGas if (gasLimit < BIGINT_0) { if (this.DEBUG) { debug( - `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})` + `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})`, ) } result = { ...result, ...OOGResult(message.gasLimit) } } else { debug( - `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})` + `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})`, ) result.executionGasUsed += createCompleteAccessGas } @@ -657,13 +657,13 @@ export class EVM implements EVMInterface { message.accessWitness!.touchCodeChunksRangeOnWriteAndChargeGas( message.to, 0, - result.returnValue.length - 1 + result.returnValue.length - 1, ) gasLimit -= byteCodeWriteAccessfee if (gasLimit < BIGINT_0) { if (this.DEBUG) { debug( - `byteCodeWrite access gas (${byteCodeWriteAccessfee}) caused OOG (-> ${gasLimit})` + `byteCodeWrite access gas 
(${byteCodeWriteAccessfee}) caused OOG (-> ${gasLimit})`, ) } result = { ...result, ...OOGResult(message.gasLimit) } @@ -700,7 +700,7 @@ export class EVM implements EVMInterface { */ protected async runInterpreter( message: Message, - opts: InterpreterOpts = {} + opts: InterpreterOpts = {}, ): Promise { let contract = await this.stateManager.getAccount(message.to ?? Address.zero()) if (!contract) { @@ -736,7 +736,7 @@ export class EVM implements EVMInterface { message.gasLimit, this.journal, this.performanceLogger, - this._optsCached.profiler + this._optsCached.profiler, ) if (message.selfdestruct) { interpreter._result.selfdestruct = message.selfdestruct @@ -870,7 +870,7 @@ export class EVM implements EVMInterface { debug( `New message caller=${caller} gasLimit=${gasLimit} to=${ to?.toString() ?? 'none' - } value=${value} delegatecall=${delegatecall ? 'yes' : 'no'}` + } value=${value} delegatecall=${delegatecall ? 'yes' : 'no'}`, ) } if (message.to) { @@ -889,7 +889,7 @@ export class EVM implements EVMInterface { debug( `Received message execResult: [ gasUsed=${executionGasUsed} exceptionError=${ exceptionError ? `'${exceptionError.error}'` : 'none' - } returnValue=${short(returnValue)} gasRefund=${result.execResult.gasRefund ?? 0} ]` + } returnValue=${short(returnValue)} gasRefund=${result.execResult.gasRefund ?? 0} ]`, ) } const err = result.execResult.exceptionError @@ -971,7 +971,7 @@ export class EVM implements EVMInterface { protected runPrecompile( code: PrecompileFunc, data: Uint8Array, - gasLimit: bigint + gasLimit: bigint, ): Promise | ExecResult { if (typeof code !== 'function') { throw new Error('Invalid precompile') diff --git a/packages/evm/src/interpreter.ts b/packages/evm/src/interpreter.ts index 2c9715f9f5..8e2516b124 100644 --- a/packages/evm/src/interpreter.ts +++ b/packages/evm/src/interpreter.ts @@ -153,7 +153,7 @@ export class Interpreter { gasLeft: bigint, journal: Journal, performanceLogs: EVMPerformanceLogger, - profilerOpts?: EVMProfilerOpts + profilerOpts?: EVMProfilerOpts, ) { this._evm = evm this._stateManager = stateManager @@ -228,7 +228,7 @@ export class Interpreter { this._runState.code, this._evm, ContainerSectionType.InitCode, - EOFContainerMode.TxInitmode + EOFContainerMode.TxInitmode, ) } catch (e) { // Trying to deploy an invalid EOF container @@ -360,7 +360,7 @@ export class Interpreter { this._runState.env.accessWitness!.touchCodeChunksRangeOnReadAndChargeGas( contract, this._runState.programCounter, - this._runState.programCounter + this._runState.programCounter, ) gas += statelessGas debugGas(`codechunk accessed statelessGas=${statelessGas} (-> ${gas})`) @@ -398,7 +398,7 @@ export class Interpreter { Number(gas), 'opcodes', opInfo.fee, - Number(gas) - opInfo.fee + Number(gas) - opInfo.fee, ) } } @@ -543,7 +543,7 @@ export class Interpreter { debugGas( `${typeof context === 'string' ? context + ': ' : ''}refund ${amount} gas (-> ${ this._runState.gasRefund - })` + })`, ) } this._runState.gasRefund += amount @@ -559,7 +559,7 @@ export class Interpreter { debugGas( `${typeof context === 'string' ? 
context + ': ' : ''}sub gas refund ${amount} (-> ${ this._runState.gasRefund - })` + })`, ) } this._runState.gasRefund -= amount @@ -874,7 +874,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._runState.auth, @@ -899,7 +899,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._env.address, @@ -926,7 +926,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._env.address, @@ -951,7 +951,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._env.caller, @@ -1042,7 +1042,7 @@ export class Interpreter { value: bigint, codeToRun: Uint8Array, salt?: Uint8Array, - eofCallData?: Uint8Array + eofCallData?: Uint8Array, ): Promise { const selfdestruct = new Set(this._result.selfdestruct) const caller = this._env.address @@ -1150,7 +1150,7 @@ export class Interpreter { gasLimit: bigint, value: bigint, data: Uint8Array, - salt: Uint8Array + salt: Uint8Array, ): Promise { return this.create(gasLimit, value, data, salt) } @@ -1164,7 +1164,7 @@ export class Interpreter { value: bigint, containerData: Uint8Array, salt: Uint8Array, - callData: Uint8Array + callData: Uint8Array, ): Promise { return this.create(gasLimit, value, containerData, salt, callData) } diff --git a/packages/evm/src/journal.ts b/packages/evm/src/journal.ts index 60dad80bd3..9c17a3e861 100644 --- a/packages/evm/src/journal.ts +++ b/packages/evm/src/journal.ts @@ -51,7 +51,7 @@ export class Journal { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false this._debug = debugDefault('statemanager:statemanager') @@ -100,7 +100,7 @@ export class Journal { const bytesAddress = unprefixedHexToBytes(address) if (this.stateManager.getAppliedKey === undefined) { throw new Error( - 'touchAccount: stateManager.getAppliedKey can not be undefined if preimage storing is enabled' + 'touchAccount: stateManager.getAppliedKey can not be undefined if preimage storing is enabled', ) } const hashedKey = this.stateManager.getAppliedKey(bytesAddress) diff --git a/packages/evm/src/logger.ts b/packages/evm/src/logger.ts index cd81bba043..13f383e009 100644 --- a/packages/evm/src/logger.ts +++ b/packages/evm/src/logger.ts @@ -146,7 +146,7 @@ export class EVMPerformanceLogger { gasUsed: number, targetTimer: 'precompiles' | 'opcodes' = 'opcodes', staticGas?: number, - dynamicGas?: number + dynamicGas?: number, ) { if (this.currentTimer === undefined || this.currentTimer !== timer) { throw new Error('Cannot stop timer: another timer is already running') diff --git a/packages/evm/src/opcodes/EIP1283.ts b/packages/evm/src/opcodes/EIP1283.ts index 49a67a48c8..be1412d3f0 100644 --- a/packages/evm/src/opcodes/EIP1283.ts +++ b/packages/evm/src/opcodes/EIP1283.ts @@ -17,7 +17,7 @@ export function updateSstoreGasEIP1283( currentStorage: Uint8Array, originalStorage: Uint8Array, value: Uint8Array, - common: Common + common: Common, ) { if (equalsBytes(currentStorage, value)) { // If current value equals new value (this is a no-op), 200 gas is deducted. @@ -34,7 +34,7 @@ export function updateSstoreGasEIP1283( // If new value is 0, add 15000 gas to refund counter. runState.interpreter.refundGas( common.param('netSstoreClearRefundGas'), - 'EIP-1283 -> netSstoreClearRefund' + 'EIP-1283 -> netSstoreClearRefund', ) } // Otherwise, 5000 gas is deducted. @@ -47,13 +47,13 @@ export function updateSstoreGasEIP1283( // If current value is 0 (also means that new value is not 0), remove 15000 gas from refund counter. We can prove that refund counter will never go below 0. runState.interpreter.subRefund( common.param('netSstoreClearRefundGas'), - 'EIP-1283 -> netSstoreClearRefund' + 'EIP-1283 -> netSstoreClearRefund', ) } else if (value.length === 0) { // If new value is 0 (also means that current value is not 0), add 15000 gas to refund counter. runState.interpreter.refundGas( common.param('netSstoreClearRefundGas'), - 'EIP-1283 -> netSstoreClearRefund' + 'EIP-1283 -> netSstoreClearRefund', ) } } @@ -63,13 +63,13 @@ export function updateSstoreGasEIP1283( // If original value is 0, add 19800 gas to refund counter. runState.interpreter.refundGas( common.param('netSstoreResetClearRefundGas'), - 'EIP-1283 -> netSstoreResetClearRefund' + 'EIP-1283 -> netSstoreResetClearRefund', ) } else { // Otherwise, add 4800 gas to refund counter. 
runState.interpreter.refundGas( common.param('netSstoreResetRefundGas'), - 'EIP-1283 -> netSstoreResetRefund' + 'EIP-1283 -> netSstoreResetRefund', ) } } diff --git a/packages/evm/src/opcodes/EIP2200.ts b/packages/evm/src/opcodes/EIP2200.ts index 50ee69ddd1..5f08305d09 100644 --- a/packages/evm/src/opcodes/EIP2200.ts +++ b/packages/evm/src/opcodes/EIP2200.ts @@ -23,7 +23,7 @@ export function updateSstoreGasEIP2200( originalStorage: Uint8Array, value: Uint8Array, key: Uint8Array, - common: Common + common: Common, ) { // Fail if not enough gas is left if (runState.interpreter.getGasLeft() <= common.param('sstoreSentryEIP2200Gas')) { @@ -44,7 +44,7 @@ export function updateSstoreGasEIP2200( if (value.length === 0) { runState.interpreter.refundGas( common.param('sstoreClearRefundEIP2200Gas'), - 'EIP-2200 -> sstoreClearRefundEIP2200' + 'EIP-2200 -> sstoreClearRefundEIP2200', ) } // Write existing slot @@ -55,13 +55,13 @@ export function updateSstoreGasEIP2200( // Recreate slot runState.interpreter.subRefund( common.param('sstoreClearRefundEIP2200Gas'), - 'EIP-2200 -> sstoreClearRefundEIP2200' + 'EIP-2200 -> sstoreClearRefundEIP2200', ) } else if (value.length === 0) { // Delete slot runState.interpreter.refundGas( common.param('sstoreClearRefundEIP2200Gas'), - 'EIP-2200 -> sstoreClearRefundEIP2200' + 'EIP-2200 -> sstoreClearRefundEIP2200', ) } } @@ -71,14 +71,14 @@ export function updateSstoreGasEIP2200( const sstoreInitRefund = common.param('sstoreInitRefundEIP2200Gas') runState.interpreter.refundGas( adjustSstoreGasEIP2929(runState, key, sstoreInitRefund, 'initRefund', common), - 'EIP-2200 -> initRefund' + 'EIP-2200 -> initRefund', ) } else { // Reset to original existing slot const sstoreCleanRefund = common.param('sstoreCleanRefundEIP2200Gas') runState.interpreter.refundGas( BigInt(adjustSstoreGasEIP2929(runState, key, sstoreCleanRefund, 'cleanRefund', common)), - 'EIP-2200 -> cleanRefund' + 'EIP-2200 -> cleanRefund', ) } } diff --git a/packages/evm/src/opcodes/EIP2929.ts b/packages/evm/src/opcodes/EIP2929.ts index 9812f45467..a82c17e700 100644 --- a/packages/evm/src/opcodes/EIP2929.ts +++ b/packages/evm/src/opcodes/EIP2929.ts @@ -18,7 +18,7 @@ export function accessAddressEIP2929( address: Uint8Array, common: Common, chargeGas = true, - isSelfdestructOrAuthcall = false + isSelfdestructOrAuthcall = false, ): bigint { if (!common.isActivatedEIP(2929)) return BIGINT_0 @@ -52,7 +52,7 @@ export function accessStorageEIP2929( key: Uint8Array, isSstore: boolean, common: Common, - chargeGas = true + chargeGas = true, ): bigint { if (!common.isActivatedEIP(2929)) return BIGINT_0 @@ -86,7 +86,7 @@ export function adjustSstoreGasEIP2929( key: Uint8Array, defaultCost: bigint, costName: string, - common: Common + common: Common, ): bigint { if (!common.isActivatedEIP(2929)) return defaultCost diff --git a/packages/evm/src/opcodes/codes.ts b/packages/evm/src/opcodes/codes.ts index dd3e2250d2..d7f51241b0 100644 --- a/packages/evm/src/opcodes/codes.ts +++ b/packages/evm/src/opcodes/codes.ts @@ -384,7 +384,7 @@ function createOpcodes(opcodes: OpcodeEntryFee): OpcodeList { code, fullName: getFullname(code, value.name), ...value, - }) + }), ) } return result @@ -448,7 +448,7 @@ export function getOpcodesForHF(common: Common, customOpcodes?: CustomOpcode[]): // Sanity checks if (code.opcodeName === undefined || code.baseFee === undefined) { throw new Error( - `Custom opcode ${code.opcode} does not have the required values: opcodeName and baseFee are required` + `Custom opcode ${code.opcode} does not have the 
required values: opcodeName and baseFee are required`, ) } const entry = { diff --git a/packages/evm/src/opcodes/functions.ts b/packages/evm/src/opcodes/functions.ts index 7011f92273..5976f9381e 100644 --- a/packages/evm/src/opcodes/functions.ts +++ b/packages/evm/src/opcodes/functions.ts @@ -535,13 +535,13 @@ export const handlers: Map = new Map([ if (typeof runState.stateManager.getContractCodeSize === 'function') { size = BigInt( await runState.stateManager.getContractCodeSize( - new Address(addresstoBytes(addressBigInt)) - ) + new Address(addresstoBytes(addressBigInt)), + ), ) } else { size = BigInt( (await runState.stateManager.getContractCode(new Address(addresstoBytes(addressBigInt)))) - .length + .length, ) } @@ -556,7 +556,7 @@ export const handlers: Map = new Map([ if (dataLength !== BIGINT_0) { let code = await runState.stateManager.getContractCode( - new Address(addresstoBytes(addressBigInt)) + new Address(addresstoBytes(addressBigInt)), ) if (isEOF(code)) { @@ -613,7 +613,7 @@ export const handlers: Map = new Map([ const data = getDataSlice( runState.interpreter.getReturnData(), returnDataOffset, - dataLength + dataLength, ) const memOffsetNum = Number(memOffset) const lengthNum = Number(dataLength) @@ -650,7 +650,7 @@ export const handlers: Map = new Map([ } const historyAddress = new Address( - bigIntToAddressBytes(common.param('historyStorageAddress')) + bigIntToAddressBytes(common.param('historyStorageAddress')), ) const historyServeWindow = common.param('historyServeWindow') const key = setLengthLeft(bigIntToBytes(number % historyServeWindow), 32) @@ -661,7 +661,7 @@ export const handlers: Map = new Map([ const statelessGas = runState.env.accessWitness!.touchAddressOnReadAndComputeGas( historyAddress, treeIndex, - subIndex + subIndex, ) runState.interpreter.useGas(statelessGas, `BLOCKHASH`) } @@ -957,7 +957,7 @@ export const handlers: Map = new Map([ const statelessGas = runState.env.accessWitness!.touchCodeChunksRangeOnReadAndChargeGas( contract, startOffset, - endOffset + endOffset, ) runState.interpreter.useGas(statelessGas, `PUSH`) } @@ -967,7 +967,7 @@ export const handlers: Map = new Map([ runState.programCounter += numToPush } else { const loaded = bytesToBigInt( - runState.code.subarray(runState.programCounter, runState.programCounter + numToPush) + runState.code.subarray(runState.programCounter, runState.programCounter + numToPush), ) runState.programCounter += numToPush runState.stack.push(loaded) @@ -1045,10 +1045,10 @@ export const handlers: Map = new Map([ trap(ERROR.INVALID_OPCODE) } const toLoad = Number( - bytesToBigInt(runState.code.subarray(runState.programCounter, runState.programCounter + 2)) + bytesToBigInt(runState.code.subarray(runState.programCounter, runState.programCounter + 2)), ) const data = bytesToBigInt( - runState.env.eof!.container.body.dataSection.subarray(toLoad, toLoad + 32) + runState.env.eof!.container.body.dataSection.subarray(toLoad, toLoad + 32), ) runState.stack.push(data) runState.programCounter += 2 @@ -1134,7 +1134,7 @@ export const handlers: Map = new Map([ const jumptableCase = runState.stack.pop() if (jumptableCase <= jumptableEntries) { const rjumpDest = new DataView(code.buffer).getInt16( - runState.programCounter + Number(jumptableCase) * 2 + runState.programCounter + Number(jumptableCase) * 2, ) runState.programCounter += jumptableSize + rjumpDest } else { @@ -1152,7 +1152,7 @@ export const handlers: Map = new Map([ trap(ERROR.INVALID_OPCODE) } const sectionTarget = bytesToInt( - 
runState.code.slice(runState.programCounter, runState.programCounter + 2) + runState.code.slice(runState.programCounter, runState.programCounter + 2), ) const stackItems = runState.stack.length const typeSection = runState.env.eof!.container.body.typeSections[sectionTarget] @@ -1196,7 +1196,7 @@ export const handlers: Map = new Map([ // (and also the return stack overflow check) // It is commented out here const sectionTarget = bytesToInt( - runState.code.slice(runState.programCounter, runState.programCounter + 2) + runState.code.slice(runState.programCounter, runState.programCounter + 2), ) const stackItems = runState.stack.length const typeSection = runState.env.eof!.container.body.typeSections[sectionTarget] @@ -1223,8 +1223,8 @@ export const handlers: Map = new Map([ const toDup = Number( bytesToBigInt( - runState.code.subarray(runState.programCounter, runState.programCounter + 1) - ) + runState.code.subarray(runState.programCounter, runState.programCounter + 1), + ), ) + 1 runState.stack.dup(toDup) runState.programCounter++ @@ -1241,8 +1241,8 @@ export const handlers: Map = new Map([ const toSwap = Number( bytesToBigInt( - runState.code.subarray(runState.programCounter, runState.programCounter + 1) - ) + runState.code.subarray(runState.programCounter, runState.programCounter + 1), + ), ) + 1 runState.stack.swap(toSwap) runState.programCounter++ @@ -1257,7 +1257,7 @@ export const handlers: Map = new Map([ trap(ERROR.INVALID_OPCODE) } const toExchange = Number( - bytesToBigInt(runState.code.subarray(runState.programCounter, runState.programCounter + 1)) + bytesToBigInt(runState.code.subarray(runState.programCounter, runState.programCounter + 1)), ) const n = (toExchange >> 4) + 1 const m = (toExchange & 0x0f) + 1 @@ -1295,7 +1295,7 @@ export const handlers: Map = new Map([ value, containerCode, setLengthLeft(bigIntToBytes(salt), 32), - data + data, ) runState.stack.push(ret) } @@ -1420,7 +1420,7 @@ export const handlers: Map = new Map([ gasLimit, value, data, - setLengthLeft(bigIntToBytes(salt), 32) + setLengthLeft(bigIntToBytes(salt), 32), ) runState.stack.push(ret) }, diff --git a/packages/evm/src/opcodes/gas.ts b/packages/evm/src/opcodes/gas.ts index 7d2bfe179e..dc0ad6e5bf 100644 --- a/packages/evm/src/opcodes/gas.ts +++ b/packages/evm/src/opcodes/gas.ts @@ -98,7 +98,7 @@ export const dynamicGasHandlers: Map gasLimit) { @@ -987,7 +987,7 @@ export const dynamicGasHandlers: Map BIGINT_0) { gas += runState.env.accessWitness!.touchAddressOnWriteAndComputeGas( contractAddress, 0, - VERKLE_BALANCE_LEAF_KEY + VERKLE_BALANCE_LEAF_KEY, ) } @@ -1121,14 +1121,14 @@ export const dynamicGasHandlers: Map BIGINT_0) { selfDestructToColdAccessGas += runState.env.accessWitness!.touchAddressOnWriteAndComputeGas( selfdestructToAddress, 0, - VERKLE_BALANCE_LEAF_KEY + VERKLE_BALANCE_LEAF_KEY, ) } @@ -1142,7 +1142,7 @@ export const dynamicGasHandlers: Map { opts._debug( `Run KZG_POINT_EVALUATION (0x14) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}`, ) } @@ -61,8 +61,8 @@ export async function precompile0a(opts: PrecompileInput): Promise { if (opts._debug !== undefined) { opts._debug( `KZG_POINT_EVALUATION (0x14): proof verification with commitment=${bytesToHex( - commitment - )} z=${bytesToHex(z)} y=${bytesToHex(y)} kzgProof=${bytesToHex(kzgProof)}` + commitment, + )} z=${bytesToHex(z)} y=${bytesToHex(y)} kzgProof=${bytesToHex(kzgProof)}`, ) } try { @@ -89,8 +89,8 @@ export async function 
precompile0a(opts: PrecompileInput): Promise { if (opts._debug !== undefined) { opts._debug( `KZG_POINT_EVALUATION (0x14) return fieldElements=${bytesToHex( - fieldElementsBuffer - )} modulus=${bytesToHex(modulusBuffer)}` + fieldElementsBuffer, + )} modulus=${bytesToHex(modulusBuffer)}`, ) } diff --git a/packages/evm/src/precompiles/bls12_381/constants.ts b/packages/evm/src/precompiles/bls12_381/constants.ts index f5f19a77b6..0ce107c5f3 100644 --- a/packages/evm/src/precompiles/bls12_381/constants.ts +++ b/packages/evm/src/precompiles/bls12_381/constants.ts @@ -2,7 +2,7 @@ import { concatBytes, hexToBytes } from '@ethereumjs/util' // base field modulus as described in the EIP export const BLS_FIELD_MODULUS = BigInt( - '0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab' + '0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab', ) export const BLS_G1_POINT_BYTE_LENGTH = 128 diff --git a/packages/evm/src/precompiles/bls12_381/mcl.ts b/packages/evm/src/precompiles/bls12_381/mcl.ts index e006a0a263..133ef18cbe 100644 --- a/packages/evm/src/precompiles/bls12_381/mcl.ts +++ b/packages/evm/src/precompiles/bls12_381/mcl.ts @@ -226,7 +226,7 @@ export class MCLBLS implements EVMBLSInterface { const p2 = BLS12_381_ToG1Point( input.subarray(BLS_G1_POINT_BYTE_LENGTH, BLS_G1_POINT_BYTE_LENGTH * 2), this._mcl, - false + false, ) const result = this._mcl.add(p1, p2) @@ -250,7 +250,7 @@ export class MCLBLS implements EVMBLSInterface { const p2 = BLS12_381_ToG2Point( input.subarray(BLS_G2_POINT_BYTE_LENGTH, BLS_G2_POINT_BYTE_LENGTH * 2), this._mcl, - false + false, ) const result = this._mcl.add(p1, p2) @@ -296,11 +296,11 @@ export class MCLBLS implements EVMBLSInterface { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) const Fr = BLS12_381_ToFrPoint( input.subarray(pairStart + BLS_G1_POINT_BYTE_LENGTH, pairStart + pairLength), - this._mcl + this._mcl, ) G1Array.push(G1) @@ -322,11 +322,11 @@ export class MCLBLS implements EVMBLSInterface { const pairStart = pairLength * k const G2 = BLS12_381_ToG2Point( input.subarray(pairStart, pairStart + BLS_G2_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) const Fr = BLS12_381_ToFrPoint( input.subarray(pairStart + BLS_G2_POINT_BYTE_LENGTH, pairStart + pairLength), - this._mcl + this._mcl, ) G2Array.push(G2) @@ -344,13 +344,13 @@ export class MCLBLS implements EVMBLSInterface { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) const g2start = pairStart + BLS_G1_POINT_BYTE_LENGTH const G2 = BLS12_381_ToG2Point( input.subarray(g2start, g2start + BLS_G2_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) pairs.push([G1, G2]) diff --git a/packages/evm/src/precompiles/bls12_381/noble.ts b/packages/evm/src/precompiles/bls12_381/noble.ts index 1d2ad640be..4a39590785 100644 --- a/packages/evm/src/precompiles/bls12_381/noble.ts +++ b/packages/evm/src/precompiles/bls12_381/noble.ts @@ -166,7 +166,7 @@ export class NobleBLS implements EVMBLSInterface { addG1(input: Uint8Array): Uint8Array { const p1 = BLS12_381_ToG1Point(input.subarray(0, BLS_G1_POINT_BYTE_LENGTH)) const p2 = BLS12_381_ToG1Point( - input.subarray(BLS_G1_POINT_BYTE_LENGTH, BLS_G1_POINT_BYTE_LENGTH * 2) + input.subarray(BLS_G1_POINT_BYTE_LENGTH, BLS_G1_POINT_BYTE_LENGTH * 2), ) const p = p1.add(p2) @@ -190,7 +190,7 @@ 
export class NobleBLS implements EVMBLSInterface { addG2(input: Uint8Array): Uint8Array { const p1 = BLS12_381_ToG2Point(input.subarray(0, BLS_G2_POINT_BYTE_LENGTH)) const p2 = BLS12_381_ToG2Point( - input.subarray(BLS_G2_POINT_BYTE_LENGTH, BLS_G2_POINT_BYTE_LENGTH * 2) + input.subarray(BLS_G2_POINT_BYTE_LENGTH, BLS_G2_POINT_BYTE_LENGTH * 2), ) const p = p1.add(p2) const result = BLS12_381_FromG2Point(p) @@ -240,10 +240,10 @@ export class NobleBLS implements EVMBLSInterface { for (let k = 0; k < numPairs; k++) { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( - input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH) + input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), ) const Fr = BLS12_381_ToFrPoint( - input.subarray(pairStart + BLS_G1_POINT_BYTE_LENGTH, pairStart + pairLength) + input.subarray(pairStart + BLS_G1_POINT_BYTE_LENGTH, pairStart + pairLength), ) let pMul if (Fr === BIGINT_0) { @@ -272,10 +272,10 @@ export class NobleBLS implements EVMBLSInterface { for (let k = 0; k < numPairs; k++) { const pairStart = pairLength * k const G2 = BLS12_381_ToG2Point( - input.subarray(pairStart, pairStart + BLS_G2_POINT_BYTE_LENGTH) + input.subarray(pairStart, pairStart + BLS_G2_POINT_BYTE_LENGTH), ) const Fr = BLS12_381_ToFrPoint( - input.subarray(pairStart + BLS_G2_POINT_BYTE_LENGTH, pairStart + pairLength) + input.subarray(pairStart + BLS_G2_POINT_BYTE_LENGTH, pairStart + pairLength), ) let pMul if (Fr === BIGINT_0) { @@ -296,7 +296,7 @@ export class NobleBLS implements EVMBLSInterface { for (let k = 0; k < input.length / pairLength; k++) { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( - input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH) + input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), ) const g2start = pairStart + BLS_G1_POINT_BYTE_LENGTH diff --git a/packages/evm/src/precompiles/bls12_381/util.ts b/packages/evm/src/precompiles/bls12_381/util.ts index ca09ef1271..618acca0a7 100644 --- a/packages/evm/src/precompiles/bls12_381/util.ts +++ b/packages/evm/src/precompiles/bls12_381/util.ts @@ -19,7 +19,7 @@ export const gasCheck = (opts: PrecompileInput, gasUsed: bigint, pName: string) opts._debug( `Run ${pName} precompile data=${short(opts.data)} length=${opts.data.length} gasLimit=${ opts.gasLimit - } gasUsed=${gasUsed}` + } gasUsed=${gasUsed}`, ) } if (opts.gasLimit < gasUsed) { @@ -68,7 +68,7 @@ export const equalityLengthCheck = (opts: PrecompileInput, length: number, pName if (opts.data.length !== length) { if (opts._debug !== undefined) { opts._debug( - `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length})` + `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length})`, ) } return false @@ -89,7 +89,7 @@ export const moduloLengthCheck = (opts: PrecompileInput, length: number, pName: if (opts.data.length % length !== 0) { if (opts._debug !== undefined) { opts._debug( - `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length}*k bytes)` + `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length}*k bytes)`, ) } return false @@ -123,12 +123,12 @@ export const leading16ZeroBytesCheck = ( opts: PrecompileInput, zeroByteRanges: number[][], pName: string, - pairStart = 0 + pairStart = 0, ) => { for (const index in zeroByteRanges) { const slicedBuffer = opts.data.subarray( zeroByteRanges[index][0] + pairStart, - zeroByteRanges[index][1] + pairStart + zeroByteRanges[index][1] + pairStart, ) if 
(!(equalsBytes(slicedBuffer, ZERO_BYTES_16) === true)) { if (opts._debug !== undefined) { diff --git a/packages/evm/src/precompiles/index.ts b/packages/evm/src/precompiles/index.ts index 92d57e2f76..c84f429948 100644 --- a/packages/evm/src/precompiles/index.ts +++ b/packages/evm/src/precompiles/index.ts @@ -266,14 +266,14 @@ type CustomPrecompile = AddPrecompile | DeletePrecompile function getActivePrecompiles( common: Common, - customPrecompiles?: CustomPrecompile[] + customPrecompiles?: CustomPrecompile[], ): Map { const precompileMap = new Map() if (customPrecompiles) { for (const precompile of customPrecompiles) { precompileMap.set( bytesToUnprefixedHex(precompile.address.bytes), - 'function' in precompile ? precompile.function : undefined + 'function' in precompile ? precompile.function : undefined, ) } } diff --git a/packages/evm/test/blobVersionedHashes.spec.ts b/packages/evm/test/blobVersionedHashes.spec.ts index 7bc20dcb7f..f8676be1aa 100644 --- a/packages/evm/test/blobVersionedHashes.spec.ts +++ b/packages/evm/test/blobVersionedHashes.spec.ts @@ -32,7 +32,7 @@ describe('BLOBHASH / access blobVersionedHashes in calldata', () => { assert.equal( bytesToHex(unpadBytes(res.execResult.returnValue)), '0xab', - 'retrieved correct versionedHash from runState' + 'retrieved correct versionedHash from runState', ) }) }) @@ -81,7 +81,7 @@ describe(`BLOBHASH: access blobVersionedHashes within contract calls`, () => { assert.equal( bytesToHex(unpadBytes(res.execResult.returnValue)), '0xab', - `retrieved correct versionedHash from runState through callCode=${callCode}` + `retrieved correct versionedHash from runState through callCode=${callCode}`, ) } }) @@ -137,7 +137,7 @@ describe(`BLOBHASH: access blobVersionedHashes in a CREATE/CREATE2 frame`, () => assert.equal( bytesToHex(code), '0x' + 'ab'.padStart(64, '0'), // have to padStart here, since `BLOBHASH` will push 32 bytes on stack - `retrieved correct versionedHash from runState through createOP=${createOP}` + `retrieved correct versionedHash from runState through createOP=${createOP}`, ) } }) diff --git a/packages/evm/test/customCrypto.spec.ts b/packages/evm/test/customCrypto.spec.ts index 7f75682f1b..7338005f82 100644 --- a/packages/evm/test/customCrypto.spec.ts +++ b/packages/evm/test/customCrypto.spec.ts @@ -37,7 +37,7 @@ describe('custom crypto', () => { it('should use custom ecrecover function', async () => { const customEcrecover = (_msg: Uint8Array) => { return hexToBytes( - '0x84b2586da9b582d3cb260e8fd136129c734f3c80453f48a68e8217ea0b81e08342520f318d202f27a548ad8d3f814ca76d0ee621de2cc510c29e2db4d4f39418' + '0x84b2586da9b582d3cb260e8fd136129c734f3c80453f48a68e8217ea0b81e08342520f318d202f27a548ad8d3f814ca76d0ee621de2cc510c29e2db4d4f39418', ) } const customCrypto = { @@ -57,7 +57,7 @@ describe('custom crypto', () => { assert.equal( bytesToHex(result.returnValue), '0x00000000000000000000000063304c5c6884567b84b18f5bc5774d829a32d25d', - 'used custom ecrecover hashing function' + 'used custom ecrecover hashing function', ) }) }) diff --git a/packages/evm/test/customOpcodes.spec.ts b/packages/evm/test/customOpcodes.spec.ts index 807be3c29e..753743c78d 100644 --- a/packages/evm/test/customOpcodes.spec.ts +++ b/packages/evm/test/customOpcodes.spec.ts @@ -117,18 +117,18 @@ describe('VM: custom opcodes', () => { assert.deepEqual( (evmCopy as any)._customOpcodes, (evmCopy as any)._customOpcodes, - 'evm.shallowCopy() successfully copied customOpcodes option' + 'evm.shallowCopy() successfully copied customOpcodes option', ) assert.equal( 
evm.events.listenerCount('beforeMessage'), 2, - 'original EVM instance should have two listeners' + 'original EVM instance should have two listeners', ) assert.equal( evmCopy!.events!.listenerCount('beforeMessage'), 0, - 'copied EVM instance should have zero listeners' + 'copied EVM instance should have zero listeners', ) }) }) diff --git a/packages/evm/test/customPrecompiles.spec.ts b/packages/evm/test/customPrecompiles.spec.ts index 2dd614ce68..ef73bb7efa 100644 --- a/packages/evm/test/customPrecompiles.spec.ts +++ b/packages/evm/test/customPrecompiles.spec.ts @@ -137,12 +137,12 @@ describe('EVM -> custom precompiles', () => { assert.deepEqual( shaResult.execResult.returnValue, shaResult2.execResult.returnValue, - 'restored sha precompile - returndata correct' + 'restored sha precompile - returndata correct', ) assert.equal( shaResult.execResult.executionGasUsed, shaResult2.execResult.executionGasUsed, - 'restored sha precompile - gas correct' + 'restored sha precompile - gas correct', ) }) it('shold copy custom precompiles', async () => { @@ -158,7 +158,7 @@ describe('EVM -> custom precompiles', () => { assert.deepEqual( (evm as any)._customPrecompiles, (evmCopy as any)._customPrecompiles, - 'evm.shallowCopy() successfully copied customPrecompiles option' + 'evm.shallowCopy() successfully copied customPrecompiles option', ) }) }) diff --git a/packages/evm/test/eips/eip-3860.spec.ts b/packages/evm/test/eips/eip-3860.spec.ts index e9f08e022b..394643bb90 100644 --- a/packages/evm/test/eips/eip-3860.spec.ts +++ b/packages/evm/test/eips/eip-3860.spec.ts @@ -29,15 +29,15 @@ describe('EIP 3860 tests', () => { // (since memory which is not allocated/resized to yet is always defaulted to 0) data: concatBytes( hexToBytes( - '0x7F6000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060005260206000F3' + '0x7F6000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060005260206000F3', ), - buffer + buffer, ), } const result = await evm.runCall(runCallArgs) assert.ok( (result.execResult.exceptionError?.error as string) === 'initcode exceeds max initcode size', - 'initcode exceeds max size' + 'initcode exceeds max size', ) }) @@ -65,7 +65,7 @@ describe('EIP 3860 tests', () => { await evm.stateManager.putAccount(contractFactory, contractAccount!) await evmWithout3860.stateManager.putAccount(contractFactory, contractAccount!) const factoryCode = hexToBytes( - '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a8160006000f05a8203600a55806000556001600155505050' + '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a8160006000f05a8203600a55806000556001600155505050', ) await evm.stateManager.putContractCode(contractFactory, factoryCode) @@ -81,7 +81,7 @@ describe('EIP 3860 tests', () => { const res2 = await evmWithout3860.runCall(runCallArgs) assert.ok( res.execResult.executionGasUsed > res2.execResult.executionGasUsed, - 'execution gas used is higher with EIP 3860 active' + 'execution gas used is higher with EIP 3860 active', ) }) @@ -109,7 +109,7 @@ describe('EIP 3860 tests', () => { await evm.stateManager.putAccount(contractFactory, contractAccount!) await evmWithout3860.stateManager.putAccount(contractFactory, contractAccount!) 
const factoryCode = hexToBytes( - '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a60008260006000f55a8203600a55806000556001600155505050' + '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a60008260006000f55a8203600a55806000556001600155505050', ) await evm.stateManager.putContractCode(contractFactory, factoryCode) @@ -125,7 +125,7 @@ describe('EIP 3860 tests', () => { const res2 = await evmWithout3860.runCall(runCallArgs) assert.ok( res.execResult.executionGasUsed > res2.execResult.executionGasUsed, - 'execution gas used is higher with EIP 3860 active' + 'execution gas used is higher with EIP 3860 active', ) }) @@ -151,13 +151,13 @@ describe('EIP 3860 tests', () => { // (since memory which is not allocated/resized to yet is always defaulted to 0) data: concatBytes( hexToBytes(`0x${'00'.repeat(Number(common.param('maxInitCodeSize')) + 1)}`), - bytes + bytes, ), } const result = await evm.runCall(runCallArgs) assert.ok( result.execResult.exceptionError === undefined, - 'succesfully created a contract with data size > MAX_INITCODE_SIZE and allowUnlimitedInitCodeSize active' + 'succesfully created a contract with data size > MAX_INITCODE_SIZE and allowUnlimitedInitCodeSize active', ) }) @@ -207,16 +207,16 @@ describe('EIP 3860 tests', () => { const storageActive = await evm.stateManager.getContractStorage(contractFactory, key0) const storageInactive = await evmDisabled.stateManager.getContractStorage( contractFactory, - key0 + key0, ) assert.ok( !equalsBytes(storageActive, new Uint8Array()), - 'created contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=true' + 'created contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=true', ) assert.ok( equalsBytes(storageInactive, new Uint8Array()), - 'did not create contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=false' + 'did not create contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=false', ) // gas check @@ -236,7 +236,7 @@ describe('EIP 3860 tests', () => { assert.ok( res.execResult.executionGasUsed > res2.execResult.executionGasUsed, - 'charged initcode analysis gas cost on both allowUnlimitedCodeSize=true, allowUnlimitedInitCodeSize=false' + 'charged initcode analysis gas cost on both allowUnlimitedCodeSize=true, allowUnlimitedInitCodeSize=false', ) } }) diff --git a/packages/evm/test/eips/eof-header-validation.ts b/packages/evm/test/eips/eof-header-validation.ts index a2931d1e03..5b51da3ead 100644 --- a/packages/evm/test/eips/eof-header-validation.ts +++ b/packages/evm/test/eips/eof-header-validation.ts @@ -33,7 +33,7 @@ await new Promise((resolve, reject) => { err: Error | undefined, content: string | Uint8Array, fileName: string, - next: Function + next: Function, ) => { if (err) { reject(err) @@ -88,6 +88,6 @@ await new Promise((resolve, reject) => { match: /.json$/, }, fileCallback, - finishedCallback + finishedCallback, ) }) diff --git a/packages/evm/test/memory.spec.ts b/packages/evm/test/memory.spec.ts index 640fe150dd..3bf538966c 100644 --- a/packages/evm/test/memory.spec.ts +++ b/packages/evm/test/memory.spec.ts @@ -38,13 +38,13 @@ describe('Memory', () => { assert.equal( memory._store.length, CONTAINER_SIZE, - 'memory should remain in CONTAINER_SIZE length' + 'memory should remain in CONTAINER_SIZE length', ) memory.write(CONTAINER_SIZE, 1, Uint8Array.from([1])) assert.equal( memory._store.length, 8192 * 2, - 'memory buffer length expanded by CONTAINER_SIZE bytes' + 'memory buffer length 
expanded by CONTAINER_SIZE bytes', ) }) diff --git a/packages/evm/test/opcodes.spec.ts b/packages/evm/test/opcodes.spec.ts index a18b3fa916..aabde1e465 100644 --- a/packages/evm/test/opcodes.spec.ts +++ b/packages/evm/test/opcodes.spec.ts @@ -13,7 +13,7 @@ describe('EVM -> getActiveOpcodes()', () => { assert.equal( evm.getActiveOpcodes().get(CHAINID), undefined, - 'istanbul opcode not exposed (HF: < istanbul (petersburg)' + 'istanbul opcode not exposed (HF: < istanbul (petersburg)', ) }) @@ -23,7 +23,7 @@ describe('EVM -> getActiveOpcodes()', () => { assert.equal( evm.getActiveOpcodes().get(CHAINID)!.name, 'CHAINID', - 'istanbul opcode exposed (HF: istanbul)' + 'istanbul opcode exposed (HF: istanbul)', ) common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.MuirGlacier }) @@ -31,7 +31,7 @@ describe('EVM -> getActiveOpcodes()', () => { assert.equal( evm.getActiveOpcodes().get(CHAINID)!.name, 'CHAINID', - 'istanbul opcode exposed (HF: > istanbul (muirGlacier)' + 'istanbul opcode exposed (HF: > istanbul (muirGlacier)', ) }) @@ -41,7 +41,7 @@ describe('EVM -> getActiveOpcodes()', () => { assert.equal( evm.getActiveOpcodes().get(DIFFICULTY_PREVRANDAO)!.name, 'DIFFICULTY', - 'Opcode x44 named DIFFICULTY pre-Merge' + 'Opcode x44 named DIFFICULTY pre-Merge', ) common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) @@ -49,7 +49,7 @@ describe('EVM -> getActiveOpcodes()', () => { assert.equal( evm.getActiveOpcodes().get(DIFFICULTY_PREVRANDAO)!.name, 'PREVRANDAO', - 'Opcode x44 named PREVRANDAO post-Merge' + 'Opcode x44 named PREVRANDAO post-Merge', ) }) @@ -61,14 +61,14 @@ describe('EVM -> getActiveOpcodes()', () => { assert.equal( evm.getActiveOpcodes().get(CHAINID), undefined, - 'opcode not exposed after HF change (-> < istanbul)' + 'opcode not exposed after HF change (-> < istanbul)', ) common.setHardfork(Hardfork.Istanbul) assert.equal( evm.getActiveOpcodes().get(CHAINID)!.name, 'CHAINID', - 'opcode exposed after HF change (-> istanbul)' + 'opcode exposed after HF change (-> istanbul)', ) }) }) diff --git a/packages/evm/test/precompiles/01-ecrecover.spec.ts b/packages/evm/test/precompiles/01-ecrecover.spec.ts index 1665f66963..bd9a87eac6 100644 --- a/packages/evm/test/precompiles/01-ecrecover.spec.ts +++ b/packages/evm/test/precompiles/01-ecrecover.spec.ts @@ -34,7 +34,7 @@ describe('Precompiles: ECRECOVER', () => { assert.deepEqual( bytesToHex(result.returnValue.slice(-20)), address, - 'should recover expected address' + 'should recover expected address', ) result = await ECRECOVER({ diff --git a/packages/evm/test/precompiles/03-ripemd160.spec.ts b/packages/evm/test/precompiles/03-ripemd160.spec.ts index 8ce317e014..d865095f53 100644 --- a/packages/evm/test/precompiles/03-ripemd160.spec.ts +++ b/packages/evm/test/precompiles/03-ripemd160.spec.ts @@ -29,7 +29,7 @@ describe('Precompiles: RIPEMD160', () => { assert.deepEqual( bytesToHex(result.returnValue), `0x${expected}`, - 'should generate expected value' + 'should generate expected value', ) result = await RIPEMD160({ diff --git a/packages/evm/test/precompiles/08-ecpairing.spec.ts b/packages/evm/test/precompiles/08-ecpairing.spec.ts index d74c3b7e95..7474550547 100644 --- a/packages/evm/test/precompiles/08-ecpairing.spec.ts +++ b/packages/evm/test/precompiles/08-ecpairing.spec.ts @@ -14,7 +14,7 @@ describe('Precompiles: ECPAIRING', () => { const ECPAIRING = getActivePrecompiles(common).get(addressStr)! 
const result = await ECPAIRING({ data: hexToBytes( - '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa' + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa', ), gasLimit: BigInt(0xffffff), common, @@ -24,7 +24,7 @@ describe('Precompiles: ECPAIRING', () => { assert.deepEqual( result.executionGasUsed, BigInt(260000), - 'should use petersburg gas costs (k ^= 2 pairings)' + 'should use petersburg gas costs (k ^= 2 pairings)', ) }) }) diff --git a/packages/evm/test/precompiles/09-blake2f.spec.ts b/packages/evm/test/precompiles/09-blake2f.spec.ts index 0f4e410b66..a502e1fa91 100644 --- a/packages/evm/test/precompiles/09-blake2f.spec.ts +++ b/packages/evm/test/precompiles/09-blake2f.spec.ts @@ -105,7 +105,7 @@ describe('Precompiles: BLAKE2F', () => { assert.equal( bytesToHex(result.returnValue), `0x${t.expected}`, - 'should generate expected value' + 'should generate expected value', ) assert.deepEqual(result.executionGasUsed, BigInt(t.gas), 'should use expected amount of gas') }) diff --git a/packages/evm/test/precompiles/0a-pointevaluation.spec.ts b/packages/evm/test/precompiles/0a-pointevaluation.spec.ts index 67bfd4801f..f5018b03f1 100644 --- a/packages/evm/test/precompiles/0a-pointevaluation.spec.ts +++ b/packages/evm/test/precompiles/0a-pointevaluation.spec.ts @@ -14,7 +14,7 @@ import { createEVM, getActivePrecompiles } from '../../src/index.js' import type { PrecompileInput } from '../../src/index.js' const BLS_MODULUS = BigInt( - '52435875175126190479447740508185965837690552500527637822603658699938581184513' + '52435875175126190479447740508185965837690552500527637822603658699938581184513', ) describe('Precompiles: point evaluation', () => { @@ -37,12 +37,12 @@ describe('Precompiles: point evaluation', () => { const testCase = { commitment: hexToBytes( - '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', ), z: hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000002'), y: 
hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000000'), proof: hexToBytes( - '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', ), } const versionedHash = computeVersionedHash(testCase.commitment, 1) @@ -58,7 +58,7 @@ describe('Precompiles: point evaluation', () => { assert.equal( bytesToBigInt(unpadBytes(res.returnValue.slice(32))), BLS_MODULUS, - 'point evaluation precompile returned expected output' + 'point evaluation precompile returned expected output', ) const optsWithInvalidCommitment: PrecompileInput = { @@ -67,7 +67,7 @@ describe('Precompiles: point evaluation', () => { testCase.z, testCase.y, testCase.commitment, - testCase.proof + testCase.proof, ), gasLimit: 0xfffffffffn, _EVM: evm, @@ -77,7 +77,7 @@ describe('Precompiles: point evaluation', () => { res = await pointEvaluation(optsWithInvalidCommitment) assert.ok( res.exceptionError?.error.match('kzg commitment does not match versioned hash'), - 'precompile throws when commitment does not match versioned hash' + 'precompile throws when commitment does not match versioned hash', ) }) }) diff --git a/packages/evm/test/precompiles/eip-2537-bls.spec.ts b/packages/evm/test/precompiles/eip-2537-bls.spec.ts index be698084e9..138eeeacfe 100644 --- a/packages/evm/test/precompiles/eip-2537-bls.spec.ts +++ b/packages/evm/test/precompiles/eip-2537-bls.spec.ts @@ -79,7 +79,7 @@ for (const bls of [undefined, mclbls]) { assert.deepEqual( '0x' + data.Expected, bytesToHex(result.returnValue), - 'return value should match testVectorResult' + 'return value should match testVectorResult', ) assert.equal(result.executionGasUsed, BigInt(data.Gas)) } catch (e) { diff --git a/packages/evm/test/runCall.spec.ts b/packages/evm/test/runCall.spec.ts index d42a63b5cb..bdd2a3f899 100644 --- a/packages/evm/test/runCall.spec.ts +++ b/packages/evm/test/runCall.spec.ts @@ -36,7 +36,7 @@ describe('RunCall tests', () => { assert.equal( res.createdAddress?.toString(), '0xbd770416a3345f91e4b34576cb804a576fa48eb1', - 'created valid address when FROM account nonce is 0' + 'created valid address when FROM account nonce is 0', ) }) @@ -137,11 +137,11 @@ describe('RunCall tests', () => { assert.ok( byzantiumResult.execResult.exceptionError && byzantiumResult.execResult.exceptionError.error === 'invalid opcode', - 'byzantium cannot accept constantinople opcodes (SHL)' + 'byzantium cannot accept constantinople opcodes (SHL)', ) assert.ok( !constantinopleResult.execResult.exceptionError, - 'constantinople can access the SHL opcode' + 'constantinople can access the SHL opcode', ) }) @@ -178,7 +178,7 @@ describe('RunCall tests', () => { await evm.stateManager.putContractStorage( address, new Uint8Array(32), - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) // setup the call arguments @@ -414,7 +414,7 @@ describe('RunCall tests', () => { assert.deepEqual( storage, emptyBytes, - 'failed to create contract; nonce of creating contract is too high (MAX_UINT64)' + 'failed to create contract; nonce of creating contract is too high (MAX_UINT64)', ) }) @@ -469,7 +469,7 @@ describe('RunCall tests', () => { } catch (err: any) { assert.ok( err.message.includes('value field cannot be negative'), - 'throws on negative call value' + 'throws on negative call value', ) } }) @@ -483,7 +483,7 @@ describe('RunCall tests', () => { const contractAddress = 
Address.fromString('0x000000000000000000000000636F6E7472616374') await evm.stateManager.putContractCode(contractAddress, contractCode) const senderKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const sender = Address.fromPrivateKey(senderKey) @@ -503,7 +503,7 @@ describe('RunCall tests', () => { assert.equal( senderBalance, balance ?? BigInt(0), - 'sender balance should be the same before and after call execution with skipBalance' + 'sender balance should be the same before and after call execution with skipBalance', ) assert.equal(res.execResult.exceptionError, undefined, 'no exceptionError with skipBalance') } @@ -511,7 +511,7 @@ describe('RunCall tests', () => { const res2 = await evm.runCall({ ...runCallArgs, skipBalance: false }) assert.ok( res2.execResult.exceptionError?.error.match('insufficient balance'), - 'runCall reverts when insufficient sender balance and skipBalance is false' + 'runCall reverts when insufficient sender balance and skipBalance is false', ) }) @@ -536,7 +536,7 @@ describe('RunCall tests', () => { assert.equal( result.execResult.exceptionError?.error, ERROR.CODESIZE_EXCEEDS_MAXIMUM, - 'reported error is correct' + 'reported error is correct', ) }) it('runCall() => use BLOBHASH opcode from EIP 4844', async () => { @@ -558,7 +558,7 @@ describe('RunCall tests', () => { assert.equal( bytesToHex(unpadBytes(res.execResult.returnValue)), '0xab', - 'retrieved correct versionedHash from runState' + 'retrieved correct versionedHash from runState', ) // setup the call arguments @@ -572,7 +572,7 @@ describe('RunCall tests', () => { assert.equal( bytesToHex(unpadBytes(res2.execResult.returnValue)), '0x', - 'retrieved no versionedHash when specified versionedHash does not exist in runState' + 'retrieved no versionedHash when specified versionedHash does not exist in runState', ) }) @@ -588,7 +588,7 @@ describe('RunCall tests', () => { assert.equal( evm.getActiveOpcodes().get(BLOBBASEFEE_OPCODE)!.name, 'BLOBBASEFEE', - 'Opcode 0x4a named BLOBBASEFEE' + 'Opcode 0x4a named BLOBBASEFEE', ) const block = defaultBlock() @@ -605,7 +605,7 @@ describe('RunCall tests', () => { assert.equal( bytesToBigInt(unpadBytes(res.execResult.returnValue)), BigInt(119), - 'retrieved correct gas fee' + 'retrieved correct gas fee', ) assert.equal(res.execResult.executionGasUsed, BigInt(6417), 'correct blob gas fee (2) charged') }) @@ -742,7 +742,7 @@ describe('RunCall tests', () => { await evm.runCall(runCallArgs) const callResult = bytesToHex( - await evm.stateManager.getContractStorage(callerAddress, zeros(32)) + await evm.stateManager.getContractStorage(callerAddress, zeros(32)), ) // Expect slot to have value of either: 0 since CALLCODE and CODE did not have enough gas to execute // Or 1, if CALL(CODE) has enough gas to enter the new call frame diff --git a/packages/evm/test/runCode.spec.ts b/packages/evm/test/runCode.spec.ts index f49596a310..c63560fa9f 100644 --- a/packages/evm/test/runCode.spec.ts +++ b/packages/evm/test/runCode.spec.ts @@ -37,7 +37,7 @@ describe('VM.runCode: initial program counter', () => { assert.equal( result.runState?.programCounter, testData.resultPC, - `should start the execution at the specified pc or 0, testCases[${i}]` + `should start the execution at the specified pc or 0, testCases[${i}]`, ) } } catch (e: any) { @@ -118,7 +118,7 @@ describe('VM.runCode: RunCodeOptions', () => { } catch (err: any) { assert.ok( err.message.includes('value field cannot 
be negative'), - 'throws on negative call value' + 'throws on negative call value', ) } }) diff --git a/packages/evm/test/transientStorage.spec.ts b/packages/evm/test/transientStorage.spec.ts index 095b4b66b3..94a29cd8fa 100644 --- a/packages/evm/test/transientStorage.spec.ts +++ b/packages/evm/test/transientStorage.spec.ts @@ -96,9 +96,9 @@ describe('Transient Storage', () => { assert.deepEqual( transientStorage.get( Address.fromString('0xff00000000000000000000000000000000000002'), - new Uint8Array(32).fill(0xff) + new Uint8Array(32).fill(0xff), ), - value + value, ) }) diff --git a/packages/evm/tsconfig.lint.json b/packages/evm/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/evm/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/genesis/.eslintrc.cjs b/packages/genesis/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/genesis/.eslintrc.cjs +++ b/packages/genesis/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/genesis/examples/simple.ts b/packages/genesis/examples/simple.ts index 2f20316aa7..91033c5bf7 100644 --- a/packages/genesis/examples/simple.ts +++ b/packages/genesis/examples/simple.ts @@ -1,9 +1,9 @@ -import { getGenesis } from '@ethereumjs/genesis' import { Chain } from '@ethereumjs/common' // or directly use chain ID +import { getGenesis } from '@ethereumjs/genesis' const mainnetGenesis = getGenesis(Chain.Mainnet) console.log( `This balance for account 0x000d836201318ec6899a67540690382780743280 in this chain's genesis state is ${parseInt( - mainnetGenesis!['0x000d836201318ec6899a67540690382780743280'] as string - )}` + mainnetGenesis!['0x000d836201318ec6899a67540690382780743280'] as string, + )}`, ) diff --git a/packages/genesis/test/index.spec.ts b/packages/genesis/test/index.spec.ts index d3eb6023b4..f321627034 100644 --- a/packages/genesis/test/index.spec.ts +++ b/packages/genesis/test/index.spec.ts @@ -17,15 +17,15 @@ describe('genesis test', () => { const genesisState = getGenesis(Number(chainId)) assert.ok( genesisState !== undefined, - `network=${name} chainId=${chainId} genesis should be found` + `network=${name} chainId=${chainId} genesis should be found`, ) const stateRoot = await genGenesisStateRoot(genesisState!) 
assert.ok( equalsBytes(expectedRoot, stateRoot), `network=${name} chainId=${chainId} stateRoot should match expected=${bytesToHex( - expectedRoot - )} actual=${bytesToHex(stateRoot)}` + expectedRoot, + )} actual=${bytesToHex(stateRoot)}`, ) } }) diff --git a/packages/genesis/tsconfig.lint.json b/packages/genesis/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/genesis/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/rlp/.eslintrc.cjs b/packages/rlp/.eslintrc.cjs index 217a81018c..092291ee94 100644 --- a/packages/rlp/.eslintrc.cjs +++ b/packages/rlp/.eslintrc.cjs @@ -2,7 +2,14 @@ module.exports = { extends: '../../config/eslint.cjs', rules: { '@typescript-eslint/no-use-before-define': 'off', - '@typescript-eslint/no-unused-vars': 'off', - 'no-unused-vars': 'off', }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], } diff --git a/packages/rlp/examples/simple.ts b/packages/rlp/examples/simple.ts index dcd6f7795d..2c7d5f2cb8 100644 --- a/packages/rlp/examples/simple.ts +++ b/packages/rlp/examples/simple.ts @@ -1,5 +1,5 @@ -import assert from 'assert' import { RLP } from '@ethereumjs/rlp' +import assert from 'assert' const nestedList = [[], [[]], [[], [[]]]] const encoded = RLP.encode(nestedList) diff --git a/packages/rlp/src/index.ts b/packages/rlp/src/index.ts index 084ed01399..7d9527bc18 100644 --- a/packages/rlp/src/index.ts +++ b/packages/rlp/src/index.ts @@ -232,7 +232,6 @@ function concatBytes(...arrays: Uint8Array[]): Uint8Array { // Global symbols in both browsers and Node.js since v11 // See https://github.com/microsoft/TypeScript/issues/31535 declare const TextEncoder: any -declare const TextDecoder: any function utf8ToBytes(utf: string): Uint8Array { return new TextEncoder().encode(utf) diff --git a/packages/rlp/test/cli.spec.ts b/packages/rlp/test/cli.spec.ts index dae67540d5..afd4a85d45 100644 --- a/packages/rlp/test/cli.spec.ts +++ b/packages/rlp/test/cli.spec.ts @@ -5,7 +5,7 @@ import type { ChildProcessWithoutNullStreams } from 'child_process' export function cliRunHelper( cliArgs: string[], - onData: (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => void + onData: (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => void, ) { const file = require.resolve('../bin/rlp.cjs') const child = spawn(process.execPath, [file, ...cliArgs]) @@ -46,7 +46,7 @@ describe('rlp CLI', async () => { const onData = ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { assert.ok(message.includes('0x05'), 'cli correctly encoded 5') child.kill(9) diff --git a/packages/rlp/test/dataTypes.spec.ts b/packages/rlp/test/dataTypes.spec.ts index 32f8d70da6..ff95ecb621 100644 --- a/packages/rlp/test/dataTypes.spec.ts +++ b/packages/rlp/test/dataTypes.spec.ts @@ -57,7 +57,7 @@ describe('RLP encoding (string)', () => { it('length of string >55 should return 0xb7+len(len(data)) plus len(data) plus data', () => { const encodedLongString = RLP.encode( - 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss' + 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss', ) assert.deepEqual(72, encodedLongString.length) assert.deepEqual(encodedLongString[0], 184) @@ -92,7 +92,7 @@ describe('RLP encoding (list)', () => { } // Verified with Geth's RLPDump const expected = hexToBytes( 
- 'f85483646f6783676f6483636174b8467a6f6f3235357a6f6f3235357a7a7a7a7a7a7a7a7a7a7a7a73737373737373737373737373737373737373737373737373737373737373737373737373737373737373737373' + 'f85483646f6783676f6483636174b8467a6f6f3235357a6f6f3235357a7a7a7a7a7a7a7a7a7a7a7a73737373737373737373737373737373737373737373737373737373737373737373737373737373737373737373', ) assert.deepEqual(encodedArrayOfStrings, expected) }) @@ -324,7 +324,7 @@ describe('empty values', () => { describe('bad values', () => { it('wrong encoded a zero', () => { const val = hexToBytes( - 'f9005f030182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3' + 'f9005f030182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3', ) let result try { @@ -337,7 +337,7 @@ describe('bad values', () => { it('invalid length', () => { const a = hexToBytes( - 'f86081000182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3' + 'f86081000182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3', ) let result @@ -405,11 +405,8 @@ describe('hex prefix', () => { describe('recursive typings', () => { it('should not throw compilation error', () => { - type IsType = Exclude extends never - ? Exclude extends never - ? true - : false - : false + type IsType = + Exclude extends never ? (Exclude extends never ? 
true : false) : false const assertType = (isTrue: IsType) => { return isTrue } diff --git a/packages/rlp/test/integration.spec.ts b/packages/rlp/test/integration.spec.ts index 107c0dcd47..4cb3a5186d 100644 --- a/packages/rlp/test/integration.spec.ts +++ b/packages/rlp/test/integration.spec.ts @@ -44,7 +44,7 @@ describe.skipIf(isBrowser)('CLI command', () => { assert.deepEqual(encodeResultTrimmed, out.toLowerCase(), `should pass encoding ${testName}`) } }, - { timeout: 10000 } + { timeout: 10000 }, ) }) @@ -54,9 +54,9 @@ describe.skipIf(isBrowser)('Cross-frame', () => { assert.deepEqual( vm.runInNewContext( "Array.from(RLP.encode(['dog', 'god', 'cat'])).map(n => n.toString(16).padStart(2, '0')).join('')", - { RLP } + { RLP }, ), - 'cc83646f6783676f6483636174' + 'cc83646f6783676f6483636174', ) }) }) diff --git a/packages/rlp/test/invalid.spec.ts b/packages/rlp/test/invalid.spec.ts index 06e1a18372..be0280dbf9 100644 --- a/packages/rlp/test/invalid.spec.ts +++ b/packages/rlp/test/invalid.spec.ts @@ -19,7 +19,7 @@ describe('invalid tests', () => { }, undefined, undefined, - `should not decode invalid RLPs, input: ${out}` + `should not decode invalid RLPs, input: ${out}`, ) }) } @@ -27,7 +27,7 @@ describe('invalid tests', () => { it('should pass long string sanity check test', function () { // long string invalid test; string length > 55 const longBufferTest = RLP.encode( - 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss' + 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss', ) // sanity checks assert.ok(longBufferTest[0] > 0xb7) @@ -40,7 +40,7 @@ describe('invalid tests', () => { }, undefined, undefined, - 'string longer than 55 bytes: should throw' + 'string longer than 55 bytes: should throw', ) }) }) @@ -87,7 +87,7 @@ describe('invalid geth tests', () => { }, undefined, undefined, - `should throw: ${gethCase}` + `should throw: ${gethCase}`, ) }) } diff --git a/packages/rlp/test/official.spec.ts b/packages/rlp/test/official.spec.ts index df03b862e6..9f8aef85ef 100644 --- a/packages/rlp/test/official.spec.ts +++ b/packages/rlp/test/official.spec.ts @@ -171,13 +171,13 @@ describe('geth tests', () => { assert.deepEqual( JSON.stringify(arrayOutput), JSON.stringify(gethCase.value!), - `invalid output: ${gethCase.input}` + `invalid output: ${gethCase.input}`, ) } else { assert.deepEqual( bytesToHex(Uint8Array.from(output as any)), gethCase.value, - `invalid output: ${gethCase.input}` + `invalid output: ${gethCase.input}`, ) } }, `should not throw: ${gethCase.input}`) diff --git a/packages/rlp/test/utils.ts b/packages/rlp/test/utils.ts index 4c96e4f216..161f0bb006 100644 --- a/packages/rlp/test/utils.ts +++ b/packages/rlp/test/utils.ts @@ -4,7 +4,6 @@ const { hexToBytes } = utils // Global symbols in both browsers and Node.js since v11 // See https://github.com/microsoft/TypeScript/issues/31535 -declare const TextEncoder: any declare const TextDecoder: any export function bytesToUtf8(bytes: Uint8Array): string { diff --git a/packages/rlp/tsconfig.lint.json b/packages/rlp/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/rlp/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/statemanager/.eslintrc.cjs b/packages/statemanager/.eslintrc.cjs index 91c78776e6..9c5b0dcd15 100644 --- a/packages/statemanager/.eslintrc.cjs +++ b/packages/statemanager/.eslintrc.cjs @@ -5,4 +5,13 @@ module.exports = { 'no-invalid-this': 'off', 'no-restricted-syntax': 'off', }, 
+ overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], } diff --git a/packages/statemanager/examples/basicUsage.ts b/packages/statemanager/examples/basicUsage.ts index d95f08bd73..462e28b8cd 100644 --- a/packages/statemanager/examples/basicUsage.ts +++ b/packages/statemanager/examples/basicUsage.ts @@ -1,6 +1,5 @@ -import { Account, Address } from '@ethereumjs/util' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { hexToBytes } from '@ethereumjs/util' +import { Account, Address, hexToBytes } from '@ethereumjs/util' const main = async () => { const stateManager = new DefaultStateManager() @@ -15,7 +14,7 @@ const main = async () => { console.log( `Account at address ${address.toString()} has balance ${ (await stateManager.getAccount(address))?.balance - }` + }`, ) } -main() +void main() diff --git a/packages/statemanager/examples/evm.ts b/packages/statemanager/examples/evm.ts index bc804f6525..5e767aa137 100644 --- a/packages/statemanager/examples/evm.ts +++ b/packages/statemanager/examples/evm.ts @@ -1,5 +1,5 @@ -import { RPCStateManager, RPCBlockChain } from '@ethereumjs/statemanager' -import { createEVM, EVM } from '@ethereumjs/evm' +import { createEVM } from '@ethereumjs/evm' +import { RPCBlockChain, RPCStateManager } from '@ethereumjs/statemanager' const main = async () => { try { @@ -12,4 +12,4 @@ const main = async () => { console.log(e.message) // fetch would fail because provider url is not real. please replace provider with a valid rpc url string. } } -main() +void main() diff --git a/packages/statemanager/examples/fromProofInstantiation.ts b/packages/statemanager/examples/fromProofInstantiation.ts index df607aef58..97df90f29c 100644 --- a/packages/statemanager/examples/fromProofInstantiation.ts +++ b/packages/statemanager/examples/fromProofInstantiation.ts @@ -1,6 +1,5 @@ -import { Address } from '@ethereumjs/util' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { hexToBytes } from '@ethereumjs/util' +import { Address, hexToBytes } from '@ethereumjs/util' const main = async () => { // setup `stateManager` with some existing address @@ -8,10 +7,10 @@ const main = async () => { const contractAddress = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const byteCode = hexToBytes('0x67ffffffffffffffff600160006000fb') const storageKey1 = hexToBytes( - '0x0000000000000000000000000000000000000000000000000000000000000001' + '0x0000000000000000000000000000000000000000000000000000000000000001', ) const storageKey2 = hexToBytes( - '0x0000000000000000000000000000000000000000000000000000000000000002' + '0x0000000000000000000000000000000000000000000000000000000000000002', ) const storageValue1 = hexToBytes('0x01') const storageValue2 = hexToBytes('0x02') @@ -29,11 +28,11 @@ const main = async () => { console.log(await partialStateManager.getContractCode(contractAddress)) // contract bytecode is not included in proof console.log( await partialStateManager.getContractStorage(contractAddress, storageKey1), - storageValue1 + storageValue1, ) // should match console.log( await partialStateManager.getContractStorage(contractAddress, storageKey2), - storageValue2 + storageValue2, ) // should match const accountFromNewSM = await partialStateManager.getAccount(contractAddress) @@ -45,4 +44,4 @@ const main = async () => { console.log(slot1FromNewSM, storageValue1) // should match console.log(slot2FromNewSM, storageValue2) // should match } -main() +void 
main() diff --git a/packages/statemanager/examples/rpcStateManager.ts b/packages/statemanager/examples/rpcStateManager.ts index 9124f25843..f711f2804b 100644 --- a/packages/statemanager/examples/rpcStateManager.ts +++ b/packages/statemanager/examples/rpcStateManager.ts @@ -1,5 +1,5 @@ -import { Address } from '@ethereumjs/util' import { RPCStateManager } from '@ethereumjs/statemanager' +import { Address } from '@ethereumjs/util' const main = async () => { try { @@ -12,4 +12,4 @@ const main = async () => { console.log(e.message) // fetch fails because provider url is not real. please replace provider with a valid rpc url string. } } -main() +void main() diff --git a/packages/statemanager/examples/simple.ts b/packages/statemanager/examples/simple.ts index b7618b4127..308897ce95 100644 --- a/packages/statemanager/examples/simple.ts +++ b/packages/statemanager/examples/simple.ts @@ -1,6 +1,7 @@ -import { SimpleStateManager } from '../src/index.js' import { Account, Address, randomBytes } from '@ethereumjs/util' +import { SimpleStateManager } from '../src/index.js' + const main = async () => { const sm = new SimpleStateManager() const address = Address.fromPrivateKey(randomBytes(32)) @@ -9,4 +10,4 @@ const main = async () => { console.log(await sm.getAccount(address)) } -main() +void main() diff --git a/packages/statemanager/src/accessWitness.ts b/packages/statemanager/src/accessWitness.ts index 2563c91537..0bbedc7a07 100644 --- a/packages/statemanager/src/accessWitness.ts +++ b/packages/statemanager/src/accessWitness.ts @@ -76,7 +76,7 @@ export class AccessWitness implements AccessWitnessInterface { verkleCrypto?: VerkleCrypto stems?: Map chunks?: Map - } = {} + } = {}, ) { if (opts.verkleCrypto === undefined) { throw new Error('verkle crypto required') @@ -179,7 +179,7 @@ export class AccessWitness implements AccessWitnessInterface { touchCodeChunksRangeOnWriteAndChargeGas( contact: Address, startPc: number, - endPc: number + endPc: number, ): bigint { let gas = BIGINT_0 for (let chunkNum = Math.floor(startPc / 31); chunkNum <= Math.floor(endPc / 31); chunkNum++) { @@ -192,7 +192,7 @@ export class AccessWitness implements AccessWitnessInterface { touchAddressOnWriteAndComputeGas( address: Address, treeIndex: number | bigint, - subIndex: number | Uint8Array + subIndex: number | Uint8Array, ): bigint { return this.touchAddressAndChargeGas(address, treeIndex, subIndex, { isWrite: true }) } @@ -200,7 +200,7 @@ export class AccessWitness implements AccessWitnessInterface { touchAddressOnReadAndComputeGas( address: Address, treeIndex: number | bigint, - subIndex: number | Uint8Array + subIndex: number | Uint8Array, ): bigint { return this.touchAddressAndChargeGas(address, treeIndex, subIndex, { isWrite: false }) } @@ -209,7 +209,7 @@ export class AccessWitness implements AccessWitnessInterface { address: Address, treeIndex: number | bigint, subIndex: number | Uint8Array, - { isWrite }: { isWrite?: boolean } + { isWrite }: { isWrite?: boolean }, ): bigint { let gas = BIGINT_0 @@ -217,7 +217,7 @@ export class AccessWitness implements AccessWitnessInterface { address, treeIndex, subIndex, - { isWrite } + { isWrite }, ) if (stemRead === true) { @@ -238,7 +238,7 @@ export class AccessWitness implements AccessWitnessInterface { } debug( - `touchAddressAndChargeGas=${gas} address=${address} treeIndex=${treeIndex} subIndex=${subIndex}` + `touchAddressAndChargeGas=${gas} address=${address} treeIndex=${treeIndex} subIndex=${subIndex}`, ) return gas @@ -248,7 +248,7 @@ export class AccessWitness implements 
AccessWitnessInterface { address: Address, treeIndex: number | bigint, subIndex: number | Uint8Array, - { isWrite }: { isWrite?: boolean } = {} + { isWrite }: { isWrite?: boolean } = {}, ): AccessEventFlags { let stemRead = false, stemWrite = false, @@ -269,7 +269,7 @@ export class AccessWitness implements AccessWitnessInterface { const accessedChunkKey = getVerkleKey( accessedStemKey, - typeof subIndex === 'number' ? intToBytes(subIndex) : subIndex + typeof subIndex === 'number' ? intToBytes(subIndex) : subIndex, ) const accessedChunkKeyHex = bytesToHex(accessedChunkKey) let accessedChunk = this.chunks.get(accessedChunkKeyHex) @@ -294,7 +294,7 @@ export class AccessWitness implements AccessWitnessInterface { } debug( - `${accessedChunkKeyHex}: isWrite=${isWrite} for steamRead=${stemRead} stemWrite=${stemWrite} chunkRead=${chunkRead} chunkWrite=${chunkWrite} chunkFill=${chunkFill}` + `${accessedChunkKeyHex}: isWrite=${isWrite} for steamRead=${stemRead} stemWrite=${stemWrite} chunkRead=${chunkRead} chunkWrite=${chunkWrite} chunkFill=${chunkFill}`, ) return { stemRead, stemWrite, chunkRead, chunkWrite, chunkFill } } @@ -382,7 +382,7 @@ export function decodeAccessedState(treeIndex: number | bigint, chunkIndex: numb return { type: AccessedStateType.Storage, slot } } else { throw Error( - `Invalid treeIndex=${treeIndex} chunkIndex=${chunkIndex} for verkle tree access` + `Invalid treeIndex=${treeIndex} chunkIndex=${chunkIndex} for verkle tree access`, ) } } diff --git a/packages/statemanager/src/cache/account.ts b/packages/statemanager/src/cache/account.ts index 9100b513ec..216abeb54d 100644 --- a/packages/statemanager/src/cache/account.ts +++ b/packages/statemanager/src/cache/account.ts @@ -68,7 +68,7 @@ export class AccountCache extends Cache { put( address: Address, account: Account | undefined, - couldBeParitalAccount: boolean = false + couldBeParitalAccount: boolean = false, ): void { const addressHex = bytesToUnprefixedHex(address.bytes) this._saveCachePreState(addressHex) diff --git a/packages/statemanager/src/cache/cache.ts b/packages/statemanager/src/cache/cache.ts index 3be3a71215..d80c338a2a 100644 --- a/packages/statemanager/src/cache/cache.ts +++ b/packages/statemanager/src/cache/cache.ts @@ -29,7 +29,7 @@ export class Cache { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this._debug = debugDefault('statemanager:cache') } diff --git a/packages/statemanager/src/cache/storage.ts b/packages/statemanager/src/cache/storage.ts index a9625a1c3b..cf24607219 100644 --- a/packages/statemanager/src/cache/storage.ts +++ b/packages/statemanager/src/cache/storage.ts @@ -88,7 +88,7 @@ export class StorageCache extends Cache { this._debug( `Put storage for ${addressHex}: ${keyHex} -> ${ value !== undefined ? 
bytesToUnprefixedHex(value) : '' - }` + }`, ) } if (this._lruCache) { diff --git a/packages/statemanager/src/rpcStateManager.ts b/packages/statemanager/src/rpcStateManager.ts index 8b6862891a..3c4f9570d5 100644 --- a/packages/statemanager/src/rpcStateManager.ts +++ b/packages/statemanager/src/rpcStateManager.ts @@ -56,7 +56,7 @@ export class RPCStateManager implements EVMStateManagerInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this._debug = debugDefault('statemanager:rpcStateManager') if (typeof opts.provider === 'string' && opts.provider.startsWith('http')) { @@ -306,7 +306,7 @@ export class RPCStateManager implements EVMStateManagerInterface { account?.balance } contract=${account && account.isContract() ? 'yes' : 'no'} empty=${ account && account.isEmpty() ? 'yes' : 'no' - }` + }`, ) } if (account !== undefined) { @@ -333,8 +333,8 @@ export class RPCStateManager implements EVMStateManagerInterface { if (k === 'nonce') return v.toString() return v }, - 2 - ) + 2, + ), ) } let account = await this.getAccount(address) diff --git a/packages/statemanager/src/stateManager.ts b/packages/statemanager/src/stateManager.ts index e4261d9163..25d7c93068 100644 --- a/packages/statemanager/src/stateManager.ts +++ b/packages/statemanager/src/stateManager.ts @@ -207,7 +207,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this._debug = debugDefault('statemanager:statemanager') @@ -297,7 +297,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { account?.balance } contract=${account && account.isContract() ? 'yes' : 'no'} empty=${ account && account.isEmpty() ? 'yes' : 'no' - }` + }`, ) } if (this._accountCacheSettings.deactivate) { @@ -431,7 +431,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { // TODO PR: have a better interface for hashed address pull? 
protected _getStorageTrie( addressOrHash: Address | { bytes: Uint8Array } | Uint8Array, - rootAccount?: Account + rootAccount?: Account, ): Trie { // use hashed key for lookup from storage cache const addressBytes: Uint8Array = @@ -513,7 +513,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { protected async _modifyContractStorage( address: Address, account: Account, - modifyTrie: (storageTrie: Trie, done: Function) => void + modifyTrie: (storageTrie: Trie, done: Function) => void, ): Promise<void> { // eslint-disable-next-line no-async-promise-executor return new Promise(async (resolve) => { @@ -536,7 +536,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { address: Address, account: Account, key: Uint8Array, - value: Uint8Array + value: Uint8Array, ) { await this._modifyContractStorage(address, account, async (storageTrie, done) => { if (value instanceof Uint8Array && value.length) { @@ -741,7 +741,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { return returnValue } const accountProof: PrefixedHexString[] = (await this._trie.createProof(address.bytes)).map( - (p) => bytesToHex(p) + (p) => bytesToHex(p), ) const storageProof: StorageProof[] = [] const storageTrie = this._getStorageTrie(address, account) @@ -780,7 +780,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { static async fromProof( proof: Proof | Proof[], safe: boolean = false, - opts: DefaultStateManagerOpts = {} + opts: DefaultStateManagerOpts = {}, ): Promise<DefaultStateManager> { if (Array.isArray(proof)) { if (proof.length === 0) { @@ -790,7 +790,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { opts.trie ?? (await createTrieFromProof( proof[0].accountProof.map((e) => hexToBytes(e)), - { useKeyHashing: true } + { useKeyHashing: true }, )) const sm = new DefaultStateManager({ ...opts, trie }) const address = Address.fromString(proof[0].address) @@ -818,14 +818,14 @@ export class DefaultStateManager implements EVMStateManagerInterface { storageProof: StorageProof[], storageHash: PrefixedHexString, address: Address, - safe: boolean = false + safe: boolean = false, ) { const trie = this._getStorageTrie(address) trie.root(hexToBytes(storageHash)) for (let i = 0; i < storageProof.length; i++) { await trie.updateFromProof( storageProof[i].proof.map((e) => hexToBytes(e)), - safe + safe, ) } } @@ -841,13 +841,13 @@ export class DefaultStateManager implements EVMStateManagerInterface { for (let i = 0; i < proof.length; i++) { await this._trie.updateFromProof( proof[i].accountProof.map((e) => hexToBytes(e)), - safe + safe, ) await this.addStorageProof( proof[i].storageProof, proof[i].storageHash, Address.fromString(proof[i].address), - safe + safe, ) } } else { @@ -862,7 +862,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { async verifyProof(proof: Proof): Promise<boolean> { const key = hexToBytes(proof.address) const accountProof = proof.accountProof.map((rlpString: PrefixedHexString) => - hexToBytes(rlpString) + hexToBytes(rlpString), ) // This returns the account if the proof is valid. @@ -918,13 +918,13 @@ export class DefaultStateManager implements EVMStateManagerInterface { }) const reportedValue = setLengthLeft( RLP.decode(proofValue ??
new Uint8Array(0)) as Uint8Array, - 32 + 32, ) if (!equalsBytes(reportedValue, storageValue)) { throw new Error( `Reported trie value does not match storage, key: ${stProof.key}, reported: ${bytesToHex( - reportedValue - )}, actual: ${bytesToHex(storageValue)}` + reportedValue, + )}, actual: ${bytesToHex(storageValue)}`, ) } } diff --git a/packages/statemanager/src/statelessVerkleStateManager.ts b/packages/statemanager/src/statelessVerkleStateManager.ts index b3b604d19c..498842f074 100644 --- a/packages/statemanager/src/statelessVerkleStateManager.ts +++ b/packages/statemanager/src/statelessVerkleStateManager.ts @@ -239,7 +239,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } async getTransitionStateRoot(_: DefaultStateManager, __: Uint8Array): Promise { @@ -249,7 +249,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { public initVerkleExecutionWitness( blockNum: bigint, executionWitness?: VerkleExecutionWitness | null, - accessWitness?: AccessWitness + accessWitness?: AccessWitness, ) { this._blockNum = blockNum if (executionWitness === null || executionWitness === undefined) { @@ -316,7 +316,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { async checkChunkWitnessPresent(address: Address, codeOffset: number) { const chunkId = codeOffset / 31 const chunkKey = bytesToHex( - await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto) + await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto), ) return this._state[chunkKey] !== undefined } @@ -389,7 +389,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const chunks = Math.floor(codeSize / 31) + 1 for (let chunkId = 0; chunkId < chunks; chunkId++) { const chunkKey = bytesToHex( - await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto) + await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto), ) const codeChunk = this._state[chunkKey] if (codeChunk === null) { @@ -463,7 +463,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const storageKey = await getVerkleTreeKeyForStorageSlot( address, BigInt(bytesToHex(key)), - this.verkleCrypto + this.verkleCrypto, ) const storageValue = toBytes(this._state[bytesToHex(storageKey)]) @@ -489,7 +489,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const storageKey = await getVerkleTreeKeyForStorageSlot( address, BigInt(bytesToHex(key)), - this.verkleCrypto + this.verkleCrypto, ) this._state[bytesToHex(storageKey)] = bytesToHex(setLengthRight(value, 32)) } @@ -542,7 +542,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { typeof codeHashRaw === 'string' ) { const errorMsg = `Invalid witness for a non existing address=${address} stem=${bytesToHex( - stem + stem, )}` debug(errorMsg) throw Error(errorMsg) @@ -554,7 +554,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // check if codehash is correct 32 bytes prefixed hex string if (codeHashRaw !== undefined && codeHashRaw !== null && codeHashRaw.length !== 66) { const 
errorMsg = `Invalid codeHashRaw=${codeHashRaw} for address=${address} chunkKey=${bytesToHex( - codeHashKey + codeHashKey, )}` debug(errorMsg) throw Error(errorMsg) @@ -584,8 +584,8 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { typeof codeSizeRaw === 'string' ? bytesToInt32(hexToBytes(codeSizeRaw), true) : codeSizeRaw === null - ? 0 - : null, + ? 0 + : null, storageRoot: null, }) @@ -695,7 +695,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const computedValue = this.getComputedValue(accessedState) ?? this._preState[chunkKey] if (computedValue === undefined) { debug( - `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}` + `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}`, ) postFailures++ continue @@ -705,7 +705,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { if (canonicalValue === undefined) { debug( - `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}` + `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}`, ) postFailures++ continue @@ -738,7 +738,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { : `${canonicalValue} (${decodedCanonicalValue})` debug( - `Block accesses mismatch address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}` + `Block accesses mismatch address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}`, ) debug(`expected=${displayCanonicalValue}`) debug(`computed=${displayComputedValue}`) diff --git a/packages/statemanager/test/cache/account.spec.ts b/packages/statemanager/test/cache/account.spec.ts index 6570b3f62f..533f62d095 100644 --- a/packages/statemanager/test/cache/account.spec.ts +++ b/packages/statemanager/test/cache/account.spec.ts @@ -65,7 +65,7 @@ describe('Account Cache: checkpointing', () => { let elem = cache.get(addr) assert.ok( - elem !== undefined && elem.accountRLP && equalsBytes(elem.accountRLP, updatedAccRLP) + elem !== undefined && elem.accountRLP && equalsBytes(elem.accountRLP, updatedAccRLP), ) cache.revert() diff --git a/packages/statemanager/test/checkpointing.account.spec.ts b/packages/statemanager/test/checkpointing.account.spec.ts index 5222174b2c..edc19c25f1 100644 --- a/packages/statemanager/test/checkpointing.account.spec.ts +++ b/packages/statemanager/test/checkpointing.account.spec.ts @@ -12,7 +12,7 @@ import type { Account } from '@ethereumjs/util' const accountEval = async ( sm: StateManagerInterface, address: Address, - compare: bigint | undefined + compare: bigint | undefined, ) => { const account = await sm.getAccount(address) if (compare === undefined) { diff --git a/packages/statemanager/test/checkpointing.code.spec.ts b/packages/statemanager/test/checkpointing.code.spec.ts index 673451c1c3..ddda37eb50 100644 --- a/packages/statemanager/test/checkpointing.code.spec.ts +++ b/packages/statemanager/test/checkpointing.code.spec.ts @@ -8,7 +8,7 @@ const codeEval = async ( sm: StateManagerInterface, address: Address, value: Uint8Array, - root: Uint8Array + root: Uint8Array, ) => { assert.deepEqual(await sm.getContractCode(address), value, 'contract code value should be equal') const accountCMP = await sm.getAccount(address) diff --git a/packages/statemanager/test/checkpointing.storage.spec.ts b/packages/statemanager/test/checkpointing.storage.spec.ts index 
0e6d5a2a1e..21a8613746 100644 --- a/packages/statemanager/test/checkpointing.storage.spec.ts +++ b/packages/statemanager/test/checkpointing.storage.spec.ts @@ -11,12 +11,12 @@ const storageEval = async ( key: Uint8Array, value: Uint8Array, root: Uint8Array, - rootCheck = true + rootCheck = true, ) => { assert.deepEqual( await sm.getContractStorage(address, key), value, - 'storage value should be equal' + 'storage value should be equal', ) if (rootCheck) { const accountCMP = await sm.getAccount(address) diff --git a/packages/statemanager/test/proofStateManager.spec.ts b/packages/statemanager/test/proofStateManager.spec.ts index 1be393a655..9bc74fdd86 100644 --- a/packages/statemanager/test/proofStateManager.spec.ts +++ b/packages/statemanager/test/proofStateManager.spec.ts @@ -101,7 +101,7 @@ describe('ProofStateManager', () => { assert.equal( await stateManager.verifyProof(nonExistenceProof), true, - 'verified proof of non-existence of account' + 'verified proof of non-existence of account', ) }) diff --git a/packages/statemanager/test/rpcStateManager.spec.ts b/packages/statemanager/test/rpcStateManager.spec.ts index e563feacc6..1e425abbe8 100644 --- a/packages/statemanager/test/rpcStateManager.spec.ts +++ b/packages/statemanager/test/rpcStateManager.spec.ts @@ -49,20 +49,20 @@ describe('RPC State Manager initialization tests', async () => { assert.equal( (state as any)._blockTag, '0x1', - 'State manager starts with default block tag of 1' + 'State manager starts with default block tag of 1', ) state = new RPCStateManager({ provider, blockTag: 1n }) assert.equal( (state as any)._blockTag, '0x1', - 'State Manager instantiated with predefined blocktag' + 'State Manager instantiated with predefined blocktag', ) state = new RPCStateManager({ provider: 'https://google.com', blockTag: 1n }) assert.ok( state instanceof RPCStateManager, - 'was able to instantiate state manager with valid url' + 'was able to instantiate state manager with valid url', ) const invalidProvider = 'google.com' @@ -70,7 +70,7 @@ describe('RPC State Manager initialization tests', async () => { () => new RPCStateManager({ provider: invalidProvider as any, blockTag: 1n }), undefined, undefined, - 'cannot instantiate state manager with invalid provider' + 'cannot instantiate state manager with invalid provider', ) }) }) @@ -86,12 +86,12 @@ describe('RPC State Manager API tests', () => { await state.putAccount(vitalikDotEth, account!) 
const retrievedVitalikAccount = createAccountFromRLP( - (state as any)._accountCache.get(vitalikDotEth)!.accountRLP + (state as any)._accountCache.get(vitalikDotEth)!.accountRLP, ) assert.ok(retrievedVitalikAccount.nonce > 0n, 'Vitalik.eth is stored in cache') const doesThisAccountExist = await state.accountExists( - Address.fromString('0xccAfdD642118E5536024675e776d32413728DD07') + Address.fromString('0xccAfdD642118E5536024675e776d32413728DD07'), ) assert.ok(!doesThisAccountExist, 'getAccount returns undefined for non-existent account') @@ -104,12 +104,12 @@ describe('RPC State Manager API tests', () => { await state.putContractCode(UNIerc20ContractAddress, UNIContractCode) assert.ok( typeof (state as any)._contractCache.get(UNIerc20ContractAddress.toString()) !== 'undefined', - 'UNI ERC20 contract code was found in cache' + 'UNI ERC20 contract code was found in cache', ) const storageSlot = await state.getContractStorage( UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(1n), 32) + setLengthLeft(bigIntToBytes(1n), 32), ) assert.ok(storageSlot.length > 0, 'was able to retrieve storage slot 1 for the UNI contract') @@ -120,11 +120,11 @@ describe('RPC State Manager API tests', () => { await state.putContractStorage( UNIerc20ContractAddress, setLengthLeft(bigIntToBytes(2n), 32), - utf8ToBytes('abcd') + utf8ToBytes('abcd'), ) const slotValue = await state.getContractStorage( UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(2n), 32) + setLengthLeft(bigIntToBytes(2n), 32), ) assert.ok(equalsBytes(slotValue, utf8ToBytes('abcd')), 'should retrieve slot 2 value') @@ -144,19 +144,19 @@ describe('RPC State Manager API tests', () => { await state.putContractStorage( UNIerc20ContractAddress, setLengthLeft(bigIntToBytes(2n), 32), - new Uint8Array(0) + new Uint8Array(0), ) await state.modifyAccountFields(vitalikDotEth, { nonce: 39n }) assert.equal( (await state.getAccount(vitalikDotEth))?.nonce, 39n, - 'modified account fields successfully' + 'modified account fields successfully', ) assert.doesNotThrow( async () => state.getAccount(vitalikDotEth), - 'does not call getAccountFromProvider' + 'does not call getAccountFromProvider', ) try { @@ -167,7 +167,7 @@ describe('RPC State Manager API tests', () => { const deletedSlot = await state.getContractStorage( UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(2n), 32) + setLengthLeft(bigIntToBytes(2n), 32), ) assert.equal(deletedSlot.length, 0, 'deleted slot from storage cache') @@ -175,31 +175,31 @@ describe('RPC State Manager API tests', () => { await state.deleteAccount(vitalikDotEth) assert.ok( (await state.getAccount(vitalikDotEth)) === undefined, - 'account should not exist after being deleted' + 'account should not exist after being deleted', ) await state.revert() assert.ok( (await state.getAccount(vitalikDotEth)) !== undefined, - 'account deleted since last checkpoint should exist after revert called' + 'account deleted since last checkpoint should exist after revert called', ) const deletedSlotAfterRevert = await state.getContractStorage( UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(2n), 32) + setLengthLeft(bigIntToBytes(2n), 32), ) assert.equal( deletedSlotAfterRevert.length, 4, - 'slot deleted since last checkpoint should exist in storage cache after revert' + 'slot deleted since last checkpoint should exist in storage cache after revert', ) const cacheStorage = await state.dumpStorage(UNIerc20ContractAddress) assert.equal( 2, Object.keys(cacheStorage).length, - 'should have 2 storage slots in cache before clear' + 
'should have 2 storage slots in cache before clear', ) await state.clearContractStorage(UNIerc20ContractAddress) const clearedStorage = await state.dumpStorage(UNIerc20ContractAddress) @@ -211,14 +211,14 @@ describe('RPC State Manager API tests', () => { } catch (err: any) { assert.ok( err.message.includes('expected blockTag to be block hash, bigint, hex prefixed string'), - 'threw with correct error when invalid blockTag provided' + 'threw with correct error when invalid blockTag provided', ) } assert.equal( (state as any)._contractCache.get(UNIerc20ContractAddress), undefined, - 'should not have any code for contract after cache is reverted' + 'should not have any code for contract after cache is reverted', ) assert.equal((state as any)._blockTag, '0x1', 'blockTag defaults to 1') @@ -240,11 +240,11 @@ describe('runTx custom transaction test', () => { const vitalikDotEth = Address.fromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const tx = create1559FeeMarketTx( { to: vitalikDotEth, value: '0x100', gasLimit: 500000n, maxFeePerGas: 7 }, - { common } + { common }, ).sign(privateKey) const result = await runTx(vm, { @@ -274,7 +274,7 @@ describe('runTx test: replay mainnet transactions', () => { assert.equal( res.totalGasSpent, 21000n, - 'calculated correct total gas spent for simple transfer' + 'calculated correct total gas spent for simple transfer', ) }) }) @@ -305,7 +305,7 @@ describe('runBlock test', () => { assert.equal( res.gasUsed, block.header.gasUsed, - 'should compute correct cumulative gas for block' + 'should compute correct cumulative gas for block', ) } catch (err: any) { assert.fail(`should have successfully ran block; got error ${err.message}`) @@ -325,7 +325,7 @@ describe('blockchain', () => const caller = Address.fromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') await evm.stateManager.setStateRoot( - hexToBytes('0xf8506f559699a58a4724df4fcf2ad4fd242d20324db541823f128f5974feb6c7') + hexToBytes('0xf8506f559699a58a4724df4fcf2ad4fd242d20324db541823f128f5974feb6c7'), ) const block = await createBlockFromJsonRpcProvider(provider, 500000n, { setHardfork: true }) await evm.stateManager.putContractCode(contractAddress, hexToBytes(code)) @@ -338,7 +338,7 @@ describe('blockchain', () => const res = await evm.runCall(runCallArgs) assert.ok( bytesToHex(res.execResult.returnValue), - '0xd5ba853bc7151fc044b9d273a57e3f9ed35e66e0248ab4a571445650cc4fcaa6' + '0xd5ba853bc7151fc044b9d273a57e3f9ed35e66e0248ab4a571445650cc4fcaa6', ) })) @@ -352,7 +352,7 @@ describe('Should return same value as DefaultStateManager when account does not assert.equal( account0, account1, - 'Should return same value as DefaultStateManager when account does not exist' + 'Should return same value as DefaultStateManager when account does not exist', ) }) }) diff --git a/packages/statemanager/test/stateManager.account.spec.ts b/packages/statemanager/test/stateManager.account.spec.ts index 5da058107b..9b4b01989a 100644 --- a/packages/statemanager/test/stateManager.account.spec.ts +++ b/packages/statemanager/test/stateManager.account.spec.ts @@ -62,7 +62,7 @@ describe('StateManager -> General/Account', () => { const contract0 = await stateManager.getContractStorage(address, key) assert.ok( equalsBytes(contract0, value), - "contract key's value is set in the _storageTries cache" + "contract key's value is set in the _storageTries 
cache", ) await stateManager.commit() @@ -135,7 +135,7 @@ describe('StateManager -> General/Account', () => { await stateManager.modifyAccountFields(address, { codeHash: hexToBytes('0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b'), storageRoot: hexToBytes( - '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7' + '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7', ), }) @@ -143,11 +143,11 @@ describe('StateManager -> General/Account', () => { assert.equal( bytesToHex(res3!.codeHash), - '0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b' + '0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b', ) assert.equal( bytesToHex(res3!.storageRoot), - '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7' + '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7', ) }) } diff --git a/packages/statemanager/test/stateManager.code.spec.ts b/packages/statemanager/test/stateManager.code.spec.ts index 3ceea7791c..98e713b23c 100644 --- a/packages/statemanager/test/stateManager.code.spec.ts +++ b/packages/statemanager/test/stateManager.code.spec.ts @@ -84,7 +84,7 @@ describe('StateManager -> Code', () => { const stateManager = new DefaultStateManager({ accountCacheOpts }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const code = hexToBytes( - '0x73095e7baea6a6c7c4c2dfeb977efac326af552d873173095e7baea6a6c7c4c2dfeb977efac326af552d873157' + '0x73095e7baea6a6c7c4c2dfeb977efac326af552d873173095e7baea6a6c7c4c2dfeb977efac326af552d873157', ) const raw: AccountData = { nonce: '0x0', diff --git a/packages/statemanager/test/stateManager.spec.ts b/packages/statemanager/test/stateManager.spec.ts index bd8cf6dc4d..9c1b019f3d 100644 --- a/packages/statemanager/test/stateManager.spec.ts +++ b/packages/statemanager/test/stateManager.spec.ts @@ -24,7 +24,7 @@ function verifyAccount( codeHash: Uint8Array nonce: BigInt storageRoot: Uint8Array - } + }, ) { assert.equal(account.balance, state.balance) assert.equal(account.nonce, state.nonce) @@ -60,7 +60,7 @@ describe('StateManager -> General', () => { assert.equal( JSON.stringify(storage), JSON.stringify(new Uint8Array()), - 'clears contract storage' + 'clears contract storage', ) }) @@ -75,7 +75,7 @@ describe('StateManager -> General', () => { assert.equal( smCopy['_prefixCodeHashes'], sm['_prefixCodeHashes'], - 'should retain non-default values' + 'should retain non-default values', ) sm = new DefaultStateManager({ @@ -92,12 +92,12 @@ describe('StateManager -> General', () => { assert.equal( smCopy['_accountCacheSettings'].type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP account cache on copy()' + 'should switch to ORDERED_MAP account cache on copy()', ) assert.equal( smCopy['_storageCacheSettings'].type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP storage cache on copy()' + 'should switch to ORDERED_MAP storage cache on copy()', ) assert.equal(smCopy['_trie']['_opts'].cacheSize, 0, 'should set trie cache size to 0') @@ -105,17 +105,17 @@ describe('StateManager -> General', () => { assert.equal( smCopy['_accountCacheSettings'].type, CacheType.LRU, - 'should retain account cache type when deactivate cache downleveling' + 'should retain account cache type when deactivate cache downleveling', ) assert.equal( smCopy['_storageCacheSettings'].type, CacheType.LRU, - 'should retain storage cache type when deactivate cache downleveling' + 'should retain storage cache type when deactivate cache downleveling', ) 
assert.equal( smCopy['_trie']['_opts'].cacheSize, 1000, - 'should retain trie cache size when deactivate cache downleveling' + 'should retain trie cache size when deactivate cache downleveling', ) }) @@ -304,12 +304,12 @@ describe('StateManager -> General', () => { const keys = Object.keys(storage) as PrefixedHexString[] const proof = await sm.getProof( address, - keys.map((key) => hexToBytes(key)) + keys.map((key) => hexToBytes(key)), ) const proof2 = await sm.getProof(address2) const newTrie = await createTrieFromProof( proof.accountProof.map((e) => hexToBytes(e)), - { useKeyHashing: false } + { useKeyHashing: false }, ) const partialSM = await DefaultStateManager.fromProof([proof, proof2], true, { trie: newTrie, @@ -317,11 +317,11 @@ describe('StateManager -> General', () => { assert.equal( partialSM['_trie']['_opts'].useKeyHashing, false, - 'trie opts are preserved in new sm' + 'trie opts are preserved in new sm', ) assert.deepEqual( intToBytes(32), - await partialSM.getContractStorage(address, hexToBytes(keys[0])) + await partialSM.getContractStorage(address, hexToBytes(keys[0])), ) assert.equal((await partialSM.getAccount(address2))?.balance, 100n) const partialSM2 = await DefaultStateManager.fromProof(proof, true, { @@ -331,13 +331,13 @@ describe('StateManager -> General', () => { assert.equal( partialSM2['_trie']['_opts'].useKeyHashing, false, - 'trie opts are preserved in new sm' + 'trie opts are preserved in new sm', ) assert.deepEqual( intToBytes(32), - await partialSM2.getContractStorage(address, hexToBytes(keys[0])) + await partialSM2.getContractStorage(address, hexToBytes(keys[0])), ) assert.equal((await partialSM2.getAccount(address2))?.balance, 100n) - } + }, ) }) diff --git a/packages/statemanager/test/statelessVerkleStateManager.spec.ts b/packages/statemanager/test/statelessVerkleStateManager.spec.ts index e78030455e..76c36e0dfd 100644 --- a/packages/statemanager/test/statelessVerkleStateManager.spec.ts +++ b/packages/statemanager/test/statelessVerkleStateManager.spec.ts @@ -33,13 +33,13 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { eips: [2935, 4895, 6800], }) const decodedTxs = verkleBlockJSON.transactions.map((tx) => - createTxFromSerializedData(hexToBytes(tx as PrefixedHexString)) + createTxFromSerializedData(hexToBytes(tx as PrefixedHexString)), ) const block = createBlockFromBlockData( { ...verkleBlockJSON, transactions: decodedTxs } as BlockData, { common, - } + }, ) it('initPreState()', async () => { @@ -54,7 +54,7 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { stateManager.initVerkleExecutionWitness(block.header.number, block.executionWitness) const account = await stateManager.getAccount( - Address.fromString('0x6177843db3138ae69679a54b95cf345ed759450d') + Address.fromString('0x6177843db3138ae69679a54b95cf345ed759450d'), ) assert.equal(account!.balance, 288610978528114322n, 'should have correct balance') @@ -63,7 +63,7 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( bytesToHex(account!.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) @@ -80,7 +80,7 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( e.message.slice(0, 25), 'No witness bundled for ad', - 'should throw on getting account that does not exist in cache and witness' + 'should throw on getting account that does not exist in cache and witness', ) } @@ -92,7 +92,7 @@ 
describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.deepEqual( await stateManager.getAccount(address), account, - 'should return correct account' + 'should return correct account', ) await stateManager.modifyAccountFields(address, { @@ -102,14 +102,14 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.deepEqual( await stateManager.getAccount(address), account, - 'should return correct account' + 'should return correct account', ) await stateManager.deleteAccount(address) assert.isUndefined( await stateManager.getAccount(address), - 'should return undefined for deleted account' + 'should return undefined for deleted account', ) }) @@ -133,12 +133,12 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( account!.balance, bytesToBigInt(hexToBytes(balanceRaw!), true), - 'should have correct balance' + 'should have correct balance', ) assert.equal( account!.nonce, bytesToBigInt(hexToBytes(nonceRaw!), true), - 'should have correct nonce' + 'should have correct nonce', ) assert.equal(bytesToHex(account!.codeHash), codeHash, 'should have correct codeHash') }) @@ -161,12 +161,12 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( (stateManagerCopy as any)['_accountCacheSettings'].type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP account cache on copy()' + 'should switch to ORDERED_MAP account cache on copy()', ) assert.equal( (stateManagerCopy as any)['_storageCacheSettings'].type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP storage cache on copy()' + 'should switch to ORDERED_MAP storage cache on copy()', ) }) @@ -181,11 +181,11 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { await stateManager.putContractStorage( contractAddress, hexToBytes(storageKey), - hexToBytes(storageValue) + hexToBytes(storageValue), ) let contractStorage = await stateManager.getContractStorage( contractAddress, - hexToBytes(storageKey) + hexToBytes(storageKey), ) assert.equal(bytesToHex(contractStorage), storageValue) diff --git a/packages/statemanager/test/testdata/providerData/mockProvider.ts b/packages/statemanager/test/testdata/providerData/mockProvider.ts index 4e56e4d128..c984b0835e 100644 --- a/packages/statemanager/test/testdata/providerData/mockProvider.ts +++ b/packages/statemanager/test/testdata/providerData/mockProvider.ts @@ -17,7 +17,7 @@ export type JsonReturnType = { export const getValues = async ( method: Method, id: number, - params: any[] + params: any[], ): Promise => { switch (method) { case 'eth_getProof': diff --git a/packages/statemanager/test/vmState.spec.ts b/packages/statemanager/test/vmState.spec.ts index 79b73acca5..d486f191ca 100644 --- a/packages/statemanager/test/vmState.spec.ts +++ b/packages/statemanager/test/vmState.spec.ts @@ -20,7 +20,7 @@ describe('stateManager', () => { return } const expectedStateRoot = hexToBytes( - '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544' + '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544', ) const stateManager = new StateManager({}) @@ -30,7 +30,7 @@ describe('stateManager', () => { assert.deepEqual( stateRoot, expectedStateRoot, - `generateCanonicalGenesis should produce correct state root for mainnet from common` + `generateCanonicalGenesis should produce correct state root for mainnet from common`, ) }) @@ -55,7 +55,7 @@ describe('stateManager', () => { assert.deepEqual( stateRoot, expectedStateRoot, - `generateCanonicalGenesis should 
produce correct state root for ${Chain[chain]}` + `generateCanonicalGenesis should produce correct state root for ${Chain[chain]}`, ) } }) diff --git a/packages/statemanager/tsconfig.lint.json b/packages/statemanager/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/statemanager/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/trie/.eslintrc.cjs b/packages/trie/.eslintrc.cjs index 884b3d6ebe..4a13163a7d 100644 --- a/packages/trie/.eslintrc.cjs +++ b/packages/trie/.eslintrc.cjs @@ -1,11 +1,11 @@ module.exports = { extends: '../../config/eslint.cjs', parserOptions: { - project: ['./tsconfig.json', './tsconfig.benchmarks.json'], + project: ['./tsconfig.lint.json'], }, overrides: [ { - files: ['benchmarks/*.ts'], + files: ['benchmarks/*.ts', 'examples/**/*'], rules: { 'no-console': 'off', }, diff --git a/packages/trie/examples/basicUsage.ts b/packages/trie/examples/basicUsage.ts index b669197bf9..5a008b3b8b 100644 --- a/packages/trie/examples/basicUsage.ts +++ b/packages/trie/examples/basicUsage.ts @@ -1,5 +1,5 @@ -import { createTrie, Trie } from '@ethereumjs/trie' -import { bytesToUtf8, MapDB, utf8ToBytes } from '@ethereumjs/util' +import { createTrie } from '@ethereumjs/trie' +import { MapDB, bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' async function test() { const trie = await createTrie({ db: new MapDB() }) @@ -8,4 +8,4 @@ async function test() { console.log(value ? bytesToUtf8(value) : 'not found') // 'one' } -test() +void test() diff --git a/packages/trie/examples/createFromProof.ts b/packages/trie/examples/createFromProof.ts index 4b3cb284d5..f3db4f5867 100644 --- a/packages/trie/examples/createFromProof.ts +++ b/packages/trie/examples/createFromProof.ts @@ -1,6 +1,5 @@ import { Trie, createTrieFromProof } from '@ethereumjs/trie' -import { bytesToUtf8 } from '@ethereumjs/util' -import { utf8ToBytes } from '@ethereumjs/util' +import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' async function main() { const k1 = utf8ToBytes('keyOne') @@ -23,4 +22,4 @@ async function main() { console.log(bytesToUtf8(otherValue!)) // valueTwo } -main() +void main() diff --git a/packages/trie/examples/customLevelDB.ts b/packages/trie/examples/customLevelDB.ts index a4ca04b502..0ed657cb71 100644 --- a/packages/trie/examples/customLevelDB.ts +++ b/packages/trie/examples/customLevelDB.ts @@ -1,7 +1,6 @@ import { Trie } from '@ethereumjs/trie' -import { Level } from 'level' - import { KeyEncoding, ValueEncoding } from '@ethereumjs/util' +import { Level } from 'level' import { MemoryLevel } from 'memory-level' import type { BatchDBOp, DB, DBObject, EncodingOpts } from '@ethereumjs/util' @@ -44,7 +43,7 @@ const getEncodings = (opts: EncodingOpts = {}) => { */ export class LevelDB< TKey extends Uint8Array | string = Uint8Array | string, - TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject + TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject, > implements DB { _leveldb: AbstractLevel @@ -55,7 +54,7 @@ export class LevelDB< * @param leveldb - An abstract-leveldown compliant store */ constructor( - leveldb?: AbstractLevel + leveldb?: AbstractLevel, ) { this._leveldb = leveldb ?? new MemoryLevel() } @@ -126,6 +125,6 @@ export class LevelDB< async function main() { const trie = new Trie({ db: new LevelDB(new Level('MY_TRIE_DB_LOCATION') as any) }) - console.log(await trie.database().db) // LevelDB { ... 
+ console.log(trie.database().db) // LevelDB { ... } -main() +void main() diff --git a/packages/trie/examples/level-legacy.js b/packages/trie/examples/level-legacy.js index 5f2dfeffd9..196a93cb4e 100644 --- a/packages/trie/examples/level-legacy.js +++ b/packages/trie/examples/level-legacy.js @@ -1,9 +1,10 @@ // LevelDB from https://github.com/ethereumjs/ethereumjs-monorepo/blob/ac053e1f9a364f8ae489159fecb79a3d0ddd7053/packages/trie/src/db.ts // eslint-disable-next-line implicit-dependencies/no-implicit +const { utf8ToBytes, bytesToUtf8 } = require('ethereum-cryptography/utils') const level = require('level-mem') -const { Trie } = require('../dist') +const { Trie } = require('../../dist/cjs/index.js') const ENCODING_OPTS = { keyEncoding: 'binary', valueEncoding: 'binary' } @@ -19,7 +20,7 @@ class LevelDB { try { value = await this._leveldb.get(key, ENCODING_OPTS) } catch (error) { - if (error.notFound) { + if (error.notFound !== undefined) { // not found, returning null } else { throw error @@ -48,9 +49,9 @@ class LevelDB { const trie = new Trie({ db: new LevelDB(level('MY_TRIE_DB_LOCATION')) }) async function test() { - await trie.put(Buffer.from('test'), Buffer.from('one')) - const value = await trie.get(Buffer.from('test')) - console.log(value.toString()) // 'one' + await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) + const value = await trie.get(utf8ToBytes('test')) + console.log(bytesToUtf8(value)) // 'one' } -test() +void test() diff --git a/packages/trie/examples/level.js b/packages/trie/examples/level.js index 43edb67780..b67f57f491 100644 --- a/packages/trie/examples/level.js +++ b/packages/trie/examples/level.js @@ -1,7 +1,8 @@ +const { utf8ToBytes, bytesToUtf8 } = require('ethereum-cryptography/utils') const { Level } = require('level') const { MemoryLevel } = require('memory-level') -const { Trie } = require('../dist') +const { Trie } = require('../../dist/cjs/index.js') const ENCODING_OPTS = { keyEncoding: 'view', valueEncoding: 'view' } @@ -46,9 +47,9 @@ class LevelDB { const trie = new Trie({ db: new LevelDB(new Level('MY_TRIE_DB_LOCATION')) }) async function test() { - await trie.put(Buffer.from('test'), Buffer.from('one')) - const value = await trie.get(Buffer.from('test')) - console.log(value.toString()) // 'one' + await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) + const value = await trie.get(utf8ToBytes('test')) + console.log(bytesToUtf8(value)) // 'one' } -test() +void test() diff --git a/packages/trie/examples/lmdb.js b/packages/trie/examples/lmdb.js index dd5bee3438..73da2d99ed 100644 --- a/packages/trie/examples/lmdb.js +++ b/packages/trie/examples/lmdb.js @@ -1,6 +1,7 @@ +const { utf8ToBytes, bytesToUtf8 } = require('ethereum-cryptography/utils') const { open } = require('lmdb') -const { Trie } = require('../dist') +const { Trie } = require('../../dist/cjs/index.js') class LMDB { constructor(path) { @@ -44,9 +45,9 @@ class LMDB { const trie = new Trie({ db: new LMDB('MY_TRIE_DB_LOCATION') }) async function test() { - await trie.put(Buffer.from('test'), Buffer.from('one')) - const value = await trie.get(Buffer.from('test')) - console.log(value.toString()) // 'one' + await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) + const value = await trie.get(utf8ToBytes('test')) + console.log(bytesToUtf8(value)) // 'one' } -test() +void test() diff --git a/packages/trie/examples/logDemo.ts b/packages/trie/examples/logDemo.ts index f3b77fe7a5..536ec7e2a0 100644 --- a/packages/trie/examples/logDemo.ts +++ b/packages/trie/examples/logDemo.ts @@ -1,8 +1,8 @@ /** * 
Run with DEBUG=ethjs,trie:* to see debug log ouput */ -import { utf8ToBytes } from '@ethereumjs/util' import { Trie } from '@ethereumjs/trie' +import { utf8ToBytes } from '@ethereumjs/util' const trie_entries: [string, string | null][] = [ ['do', 'verb'], @@ -27,4 +27,4 @@ const main = async () => { console.log('valid', valid) } -main() +void main() diff --git a/packages/trie/examples/merkle_patricia_trees/example1a.js b/packages/trie/examples/merkle_patricia_trees/example1a.js index 4b70ee7160..4ee751e9fc 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1a.js +++ b/packages/trie/examples/merkle_patricia_trees/example1a.js @@ -1,8 +1,9 @@ /* Example 1a - Creating and Updating a Base Trie*/ -const { Trie } = require('../../dist/cjs') // We import the library required to create a basic Merkle Patricia Tree const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') // We import the library required to create a basic Merkle Patricia Tree + const trie = new Trie() // We create an empty Merkle Patricia Tree console.log('Empty trie root (Bytes): ', bytesToHex(trie.root())) // The trie root (32 bytes) @@ -16,7 +17,7 @@ async function test() { console.log('Updated trie root:', bytesToHex(trie.root())) // The new trie root (32 bytes) } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example1b.js b/packages/trie/examples/merkle_patricia_trees/example1b.js index a7c690b832..fb320edc87 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1b.js +++ b/packages/trie/examples/merkle_patricia_trees/example1b.js @@ -1,9 +1,10 @@ /* Example 1b - Manually Creating and Updating a Secure Trie*/ -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') const { keccak256 } = require('ethereum-cryptography/keccak') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() console.log('Empty trie root (Bytes): ', bytesToHex(trie.root())) // The trie root (32 bytes) @@ -15,7 +16,7 @@ async function test() { console.log('Updated trie root:', bytesToHex(trie.root())) // The new trie root (32 bytes) } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example1c.js b/packages/trie/examples/merkle_patricia_trees/example1c.js index 64f2c02e54..01c175cfbc 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1c.js +++ b/packages/trie/examples/merkle_patricia_trees/example1c.js @@ -1,8 +1,8 @@ /* Example 1c - Creating an empty Merkle Patricia Tree and updating it with a single key-value pair */ - -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie({ useKeyHashing: true }) // We create an empty Merkle Patricia Tree with key hashing enabled console.log('Empty trie root (Bytes): ', bytesToHex(trie.root())) // The trie root (32 bytes) @@ -14,7 +14,7 @@ async function test() { console.log('Updated trie root:', bytesToHex(trie.root())) // The new trie root (32 bytes) } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example1d.js b/packages/trie/examples/merkle_patricia_trees/example1d.js index d205a6f183..892aef301e 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1d.js +++ b/packages/trie/examples/merkle_patricia_trees/example1d.js @@ -1,8 +1,9 @@ /* 
Example 1d - Deleting a Key-Value Pair from a Trie*/ -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() console.log('Empty trie root: ', bytesToHex(trie.root())) // The trie root @@ -20,7 +21,7 @@ async function test() { console.log('Trie root after deletion:', bytesToHex(trie.root())) // Our trie root is back to its initial value } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example2a.js b/packages/trie/examples/merkle_patricia_trees/example2a.js index 5ce8c3f514..d83537fba9 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2a.js +++ b/packages/trie/examples/merkle_patricia_trees/example2a.js @@ -1,8 +1,9 @@ // Example 2a - Creating and looking up a null node -const { Trie } = require('../../dist/cjs') const { utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -10,7 +11,7 @@ async function test() { console.log('Node 1: ', node1.node) // null } -test() +void test() /* Result: diff --git a/packages/trie/examples/merkle_patricia_trees/example2b.js b/packages/trie/examples/merkle_patricia_trees/example2b.js index e607b94e0a..1906d0ab48 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2b.js +++ b/packages/trie/examples/merkle_patricia_trees/example2b.js @@ -1,8 +1,9 @@ // Example 2b - Creating and looking up a branch node -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -31,13 +32,13 @@ async function test() { 'Node 1 branch 3 (hex): path: ', bytesToHex(node1.node._branches[3][0]), ' | value: ', - bytesToHex(node1.node._branches[3][1]) + bytesToHex(node1.node._branches[3][1]), ) console.log( 'Node 1 branch 4 (hex): path: ', bytesToHex(node1.node._branches[4][0]), ' | value:', - bytesToHex(node1.node._branches[4][1]) + bytesToHex(node1.node._branches[4][1]), ) console.log('Value of branch at index 3: ', bytesToUtf8(node1.node._branches[3][1])) @@ -47,4 +48,4 @@ async function test() { console.log('Node 2: ', node2.node) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example2c.js b/packages/trie/examples/merkle_patricia_trees/example2c.js index 3259817050..cb4f9381f6 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2c.js +++ b/packages/trie/examples/merkle_patricia_trees/example2c.js @@ -1,8 +1,9 @@ // Example 2c - Creating and looking up a leaf node -const { Trie } = require('../../dist/cjs') const { bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -14,4 +15,4 @@ async function test() { console.log('Node 1 value: ', bytesToUtf8(node1.node._value)) // The leaf node's value } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example2d.js b/packages/trie/examples/merkle_patricia_trees/example2d.js index ac9933ec3a..6ecaabeec7 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2d.js +++ b/packages/trie/examples/merkle_patricia_trees/example2d.js @@ -1,8 +1,9 @@ // Example 2d - Creating and looking up an extension node -const { Trie } = require('../../dist/cjs') const { bytesToHex, utf8ToBytes } = 
require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -25,4 +26,4 @@ async function test() { console.log(node3) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example3a.js b/packages/trie/examples/merkle_patricia_trees/example3a.js index ea4a8b1747..688fface41 100644 --- a/packages/trie/examples/merkle_patricia_trees/example3a.js +++ b/packages/trie/examples/merkle_patricia_trees/example3a.js @@ -1,9 +1,11 @@ // Example 3a - Generating a hash -const { Trie } = require('../../dist/cjs') const rlp = require('@ethereumjs/rlp') const { bytesToHex, utf8ToBytes } = require('@ethereumjs/util') const { keccak256 } = require('ethereum-cryptography/keccak') + +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -24,7 +26,7 @@ async function test() { 'path: ', bytesToHex(node3._branches[4][0]), ' | value: ', - bytesToHex(node3._branches[4][1]) + bytesToHex(node3._branches[4][1]), ) console.log('Raw node:', bytesToHex(rlp.encode(node2.raw()))) @@ -32,4 +34,4 @@ async function test() { console.log('The extension node hash: ', bytesToHex(node1.node._branches[3])) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example3b.js b/packages/trie/examples/merkle_patricia_trees/example3b.js index bbb2605cd8..6fa833ba58 100644 --- a/packages/trie/examples/merkle_patricia_trees/example3b.js +++ b/packages/trie/examples/merkle_patricia_trees/example3b.js @@ -1,7 +1,9 @@ // Example 3b - Verification using a hash -const { Trie } = require('../../dist/cjs') const { bytesToHex, utf8ToBytes } = require('@ethereumjs/util') + +const { Trie } = require('../../dist/cjs/index.js') + const trie1 = new Trie() const trie2 = new Trie() @@ -27,4 +29,4 @@ async function test() { console.log('Root of trie 2: ', bytesToHex(trie2.root())) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example4a.js b/packages/trie/examples/merkle_patricia_trees/example4a.js index 3d940e470f..c649c661d3 100644 --- a/packages/trie/examples/merkle_patricia_trees/example4a.js +++ b/packages/trie/examples/merkle_patricia_trees/example4a.js @@ -1,8 +1,9 @@ // Example 4a - Retrieving a Transaction from the Ethereum Blockchain -const INFURA_ENDPOINT = require('./infura_endpoint') const https = require('https') +const INFURA_ENDPOINT = require('./infura_endpoint.js') + // Looking up an individual transaction function lookupTransaction(transactionHash) { const data = JSON.stringify({ diff --git a/packages/trie/examples/merkle_patricia_trees/example4b.js b/packages/trie/examples/merkle_patricia_trees/example4b.js index d3fb8574db..a8e0fe3b2a 100644 --- a/packages/trie/examples/merkle_patricia_trees/example4b.js +++ b/packages/trie/examples/merkle_patricia_trees/example4b.js @@ -3,9 +3,10 @@ const rlp = require('@ethereumjs/rlp') const { bytesToHex } = require('@ethereumjs/util') const { keccak256 } = require('ethereum-cryptography/keccak') -const INFURA_ENDPOINT = require('./infura_endpoint') const https = require('https') +const INFURA_ENDPOINT = require('./infura_endpoint.js') + function recomputeTransactionHash(transactionHash) { const data = JSON.stringify({ jsonrpc: '2.0', diff --git a/packages/trie/examples/proofs.ts b/packages/trie/examples/proofs.ts index fffac8b346..4f618a688b 100644 --- a/packages/trie/examples/proofs.ts +++ b/packages/trie/examples/proofs.ts @@ -28,10 +28,10 @@ async function main() { proof = await 
trie.createProof(k2) proof[0].reverse() try { - const value = await trie.verifyProof(trie.root(), k2, proof) // results in error + const _value = await trie.verifyProof(trie.root(), k2, proof) // results in error } catch (err) { console.log(err) } } -main() +void main() diff --git a/packages/trie/examples/rootPersistence.ts b/packages/trie/examples/rootPersistence.ts index dadb55411d..e22841a585 100644 --- a/packages/trie/examples/rootPersistence.ts +++ b/packages/trie/examples/rootPersistence.ts @@ -1,4 +1,4 @@ -import { createTrie, Trie } from '@ethereumjs/trie' +import { createTrie } from '@ethereumjs/trie' import { bytesToHex } from '@ethereumjs/util' async function main() { @@ -9,4 +9,4 @@ async function main() { // this logs the empty root value that has been persisted to the trie db console.log(bytesToHex(trie.root())) // 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421 } -main() +void main() diff --git a/packages/trie/examples/trieWalking.ts b/packages/trie/examples/trieWalking.ts index 7d63c26212..a384fa661f 100644 --- a/packages/trie/examples/trieWalking.ts +++ b/packages/trie/examples/trieWalking.ts @@ -1,4 +1,4 @@ -import { createTrie, Trie } from '@ethereumjs/trie' +import { createTrie } from '@ethereumjs/trie' import { utf8ToBytes } from '@ethereumjs/util' async function main() { @@ -11,4 +11,4 @@ async function main() { console.log({ node, currentKey }) } } -main() +void main() diff --git a/packages/trie/src/constructors.ts b/packages/trie/src/constructors.ts index c8fff50896..42676729ad 100644 --- a/packages/trie/src/constructors.ts +++ b/packages/trie/src/constructors.ts @@ -44,7 +44,7 @@ export async function createTrie(opts?: TrieOpts) { { keyEncoding: KeyEncoding.String, valueEncoding: encoding, - } + }, ) } } diff --git a/packages/trie/src/proof/index.ts b/packages/trie/src/proof/index.ts index a789891216..022acdaf27 100644 --- a/packages/trie/src/proof/index.ts +++ b/packages/trie/src/proof/index.ts @@ -19,7 +19,7 @@ import type { Proof, TrieOpts } from '../index.js' export async function verifyTrieProof( key: Uint8Array, proof: Proof, - opts?: TrieOpts + opts?: TrieOpts, ): Promise { try { const proofTrie = await createTrieFromProof(proof, opts) @@ -51,7 +51,7 @@ export function verifyTrieRangeProof( keys: Uint8Array[], values: Uint8Array[], proof: Uint8Array[] | null, - opts?: TrieOpts + opts?: TrieOpts, ): Promise { return verifyRangeProof( rootHash, @@ -60,7 +60,7 @@ export function verifyTrieRangeProof( keys.map((k) => k).map(bytesToNibbles), values, proof, - opts?.useKeyHashingFunction ?? keccak256 + opts?.useKeyHashingFunction ?? 
keccak256, ) } diff --git a/packages/trie/src/proof/range.ts b/packages/trie/src/proof/range.ts index 98003d2dc1..3c5e9f5566 100644 --- a/packages/trie/src/proof/range.ts +++ b/packages/trie/src/proof/range.ts @@ -27,7 +27,7 @@ async function unset( key: Nibbles, pos: number, removeLeft: boolean, - stack: TrieNode[] + stack: TrieNode[], ): Promise { if (child instanceof BranchNode) { /** @@ -321,7 +321,7 @@ async function verifyProof( rootHash: Uint8Array, key: Uint8Array, proof: Uint8Array[], - useKeyHashingFunction: HashKeysFunction + useKeyHashingFunction: HashKeysFunction, ): Promise<{ value: Uint8Array | null; trie: Trie }> { const proofTrie = await createTrieFromProof(proof, { root: rootHash, @@ -416,7 +416,7 @@ export async function verifyRangeProof( keys: Nibbles[], values: Uint8Array[], proof: Uint8Array[] | null, - useKeyHashingFunction: HashKeysFunction + useKeyHashingFunction: HashKeysFunction, ): Promise { if (keys.length !== values.length) { throw new Error('invalid keys length or values length') @@ -454,7 +454,7 @@ export async function verifyRangeProof( rootHash, nibblesTypeToPackedBytes(firstKey), proof, - useKeyHashingFunction + useKeyHashingFunction, ) if (value !== null || (await hasRightElement(trie, firstKey))) { @@ -467,7 +467,7 @@ export async function verifyRangeProof( if (proof === null || firstKey === null || lastKey === null) { throw new Error( - 'invalid all elements proof: proof, firstKey, lastKey must be null at the same time' + 'invalid all elements proof: proof, firstKey, lastKey must be null at the same time', ) } @@ -477,7 +477,7 @@ export async function verifyRangeProof( rootHash, nibblesTypeToPackedBytes(firstKey), proof, - useKeyHashingFunction + useKeyHashingFunction, ) if (nibblesCompare(firstKey, keys[0]) !== 0) { @@ -496,7 +496,7 @@ export async function verifyRangeProof( } if (firstKey.length !== lastKey.length) { throw new Error( - 'invalid two edge elements proof: the length of firstKey should be equal to the length of lastKey' + 'invalid two edge elements proof: the length of firstKey should be equal to the length of lastKey', ) } diff --git a/packages/trie/src/trie.ts b/packages/trie/src/trie.ts index 1792c30b30..53f8cea120 100644 --- a/packages/trie/src/trie.ts +++ b/packages/trie/src/trie.ts @@ -96,7 +96,7 @@ export class Trie { opts.common?.customCrypto.keccak256 ?? opts.useKeyHashingFunction ?? keccak256 valueEncoding = - opts.db !== undefined ? opts.valueEncoding ?? ValueEncoding.String : ValueEncoding.Bytes + opts.db !== undefined ? (opts.valueEncoding ?? ValueEncoding.String) : ValueEncoding.Bytes } else { // No opts are given, so create a MapDB later on // Use `Bytes` for ValueEncoding @@ -104,7 +104,7 @@ export class Trie { } this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this.debug = this.DEBUG ? 
(message: string, namespaces: string[] = []) => { let log = this._debug @@ -153,7 +153,7 @@ export class Trie { lastKey: Uint8Array | null, keys: Uint8Array[], values: Uint8Array[], - proof: Uint8Array[] | null + proof: Uint8Array[] | null, ): Promise { return verifyRangeProof( rootHash, @@ -162,7 +162,7 @@ export class Trie { keys.map((k) => this.appliedKey(k)).map(bytesToNibbles), values, proof, - this._opts.useKeyHashingFunction + this._opts.useKeyHashingFunction, ) } @@ -228,15 +228,15 @@ export class Trie { async verifyProof( rootHash: Uint8Array, key: Uint8Array, - proof: Proof + proof: Proof, ): Promise { this.DEBUG && this.debug( `Verifying Proof:\n|| Key: ${bytesToHex(key)}\n|| Root: ${bytesToHex( - rootHash + rootHash, )}\n|| Proof: (${proof.length}) nodes `, - ['VERIFY_PROOF'] + ['VERIFY_PROOF'], ) const proofTrie = new Trie({ root: rootHash, @@ -308,7 +308,7 @@ export class Trie { this.DEBUG && this.debug(`Setting root to ${bytesToHex(value)}`) if (value.length !== this._hashLen) { throw new Error( - `Invalid root length. Roots are ${this._hashLen} bytes, got ${value.length} bytes` + `Invalid root length. Roots are ${this._hashLen} bytes, got ${value.length} bytes`, ) } @@ -360,7 +360,7 @@ export class Trie { async put( key: Uint8Array, value: Uint8Array | null, - skipKeyTransform: boolean = false + skipKeyTransform: boolean = false, ): Promise { this.DEBUG && this.debug(`Key: ${bytesToHex(key)}`, ['PUT']) this.DEBUG && this.debug(`Value: ${value === null ? 'null' : bytesToHex(key)}`, ['PUT']) @@ -474,7 +474,7 @@ export class Trie { stack: TrieNode[] } = { stack: [], - } + }, ): Promise { const targetKey = bytesToNibbles(key) const keyLen = targetKey.length @@ -505,9 +505,9 @@ export class Trie { branchNode === null ? 'NULL' : branchNode instanceof Uint8Array - ? `NodeHash: ${bytesToHex(branchNode)}` - : `Raw_Node: ${branchNode.toString()}`, - ['FIND_PATH', 'BranchNode', branchIndex.toString()] + ? `NodeHash: ${bytesToHex(branchNode)}` + : `Raw_Node: ${branchNode.toString()}`, + ['FIND_PATH', 'BranchNode', branchIndex.toString()], ) if (!branchNode) { result = { node: null, remaining: targetKey.slice(progress), stack } @@ -535,13 +535,13 @@ export class Trie { this.debug( `Comparing node key to expected\n|| Node_Key: [${node.key()}]\n|| Expected: [${targetKey.slice( progress, - progress + node.key().length + progress + node.key().length, )}]\n|| Matching: [${ targetKey.slice(progress, progress + node.key().length).toString() === node.key().toString() }] `, - ['FIND_PATH', 'ExtensionNode'] + ['FIND_PATH', 'ExtensionNode'], ) const _progress = progress for (const k of node.key()) { @@ -561,9 +561,9 @@ export class Trie { this.DEBUG && this.debug( `Walking trie from ${startingNode === undefined ? 'ROOT' : 'NODE'}: ${bytesToHex( - start as Uint8Array + start as Uint8Array, )}`, - ['FIND_PATH'] + ['FIND_PATH'], ) await this.walkTrie(start, onFound) } catch (error: any) { @@ -580,7 +580,7 @@ export class Trie { result.node !== null ? 
`Target Node FOUND for ${bytesToNibbles(key)}` : `Target Node NOT FOUND`, - ['FIND_PATH'] + ['FIND_PATH'], ) result.stack = result.stack.filter((e) => e !== undefined) @@ -591,7 +591,7 @@ export class Trie { || Remaining: [${result.remaining}]\n|| Stack: ${result.stack .map((e) => e.constructor.name) .join(', ')}`, - ['FIND_PATH'] + ['FIND_PATH'], ) return result } @@ -631,7 +631,7 @@ export class Trie { undefined, async (node) => { return node instanceof LeafNode || (node instanceof BranchNode && node.value() !== null) - } + }, )) { await onFound(node, currentKey) } @@ -687,7 +687,7 @@ export class Trie { k: Uint8Array, value: Uint8Array, keyRemainder: Nibbles, - stack: TrieNode[] + stack: TrieNode[], ): Promise { const toSave: BatchDBOp[] = [] const lastNode = stack.pop() @@ -792,7 +792,7 @@ export class Trie { branchKey: number, branchNode: TrieNode, parentNode: TrieNode, - stack: TrieNode[] + stack: TrieNode[], ) => { // branchNode is the node ON the branch node not THE branch node if (parentNode === null || parentNode === undefined || parentNode instanceof BranchNode) { @@ -966,7 +966,7 @@ export class Trie { node: TrieNode, topLevel: boolean, opStack: BatchDBOp[], - remove: boolean = false + remove: boolean = false, ): Uint8Array | (EmbeddedNode | null)[] { const encoded = node.serialize() @@ -1053,7 +1053,7 @@ export class Trie { if ( item !== null && bytesToUnprefixedHex( - isRawNode(item) ? controller.trie.appliedKey(RLP.encode(item)) : item + isRawNode(item) ? controller.trie.appliedKey(RLP.encode(item)) : item, ) === dbkey ) { found = true @@ -1117,9 +1117,9 @@ export class Trie { this.DEBUG && this.debug( `Persisting root: \n|| RootHash: ${bytesToHex(this.root())}\n|| RootKey: ${bytesToHex( - this.appliedKey(ROOT_DB_KEY) + this.appliedKey(ROOT_DB_KEY), )}`, - ['PERSIST_ROOT'] + ['PERSIST_ROOT'], ) let key = this.appliedKey(ROOT_DB_KEY) key = this._opts.keyPrefix ? concatBytes(this._opts.keyPrefix, key) : key @@ -1229,7 +1229,7 @@ export class Trie { */ async getValueMap( startKey = BIGINT_0, - limit?: number + limit?: number, ): Promise<{ values: { [key: string]: string }; nextKey: null | string }> { // If limit is undefined, all keys are inRange let inRange = limit !== undefined ? 
false : true diff --git a/packages/trie/src/types.ts b/packages/trie/src/types.ts index 5928c2ee8a..f17ecc424c 100644 --- a/packages/trie/src/types.ts +++ b/packages/trie/src/types.ts @@ -30,7 +30,7 @@ export type FoundNodeFunction = ( nodeRef: Uint8Array, node: TrieNode | null, key: Nibbles, - walkController: WalkController + walkController: WalkController, ) => void export type HashKeysFunction = (msg: Uint8Array) => Uint8Array diff --git a/packages/trie/src/util/asyncWalk.ts b/packages/trie/src/util/asyncWalk.ts index 2074302193..28e57586eb 100644 --- a/packages/trie/src/util/asyncWalk.ts +++ b/packages/trie/src/util/asyncWalk.ts @@ -29,7 +29,7 @@ export async function* _walkTrie( currentKey: number[] = [], onFound: OnFound = async (_trieNode: TrieNode, _key: number[]) => {}, filter: NodeFilter = async (_trieNode: TrieNode, _key: number[]) => true, - visited: Set = new Set() + visited: Set = new Set(), ): AsyncIterable<{ node: TrieNode; currentKey: number[] }> { if (equalsBytes(nodeHash, this.EMPTY_TRIE_ROOT)) { return diff --git a/packages/trie/src/util/encoding.ts b/packages/trie/src/util/encoding.ts index 5212d2f708..1e0708ec75 100644 --- a/packages/trie/src/util/encoding.ts +++ b/packages/trie/src/util/encoding.ts @@ -205,6 +205,6 @@ export const mergeAndFormatKeyPaths = (pathStrings: string[]) => { // full path is keybyte encoded return hexToKeybytes(unprefixedHexToBytes(s)) } - }) + }), ) } diff --git a/packages/trie/src/util/genesisState.ts b/packages/trie/src/util/genesisState.ts index ed8adce049..d6171d8088 100644 --- a/packages/trie/src/util/genesisState.ts +++ b/packages/trie/src/util/genesisState.ts @@ -36,7 +36,7 @@ export async function genesisStateRoot(genesisState: GenesisState) { for (const [k, val] of storage) { const storageKey = isHexString(k) ? hexToBytes(k) : unprefixedHexToBytes(k) const storageVal = RLP.encode( - unpadBytes(isHexString(val) ? hexToBytes(val) : unprefixedHexToBytes(val)) + unpadBytes(isHexString(val) ? hexToBytes(val) : unprefixedHexToBytes(val)), ) await storageTrie.put(storageKey, storageVal) } diff --git a/packages/trie/src/util/walkController.ts b/packages/trie/src/util/walkController.ts index 593bf45683..6c35761308 100644 --- a/packages/trie/src/util/walkController.ts +++ b/packages/trie/src/util/walkController.ts @@ -40,7 +40,7 @@ export class WalkController { onNode: FoundNodeFunction, trie: Trie, root: Uint8Array, - poolSize?: number + poolSize?: number, ): Promise { const strategy = new WalkController(onNode, trie, poolSize ?? 500) await strategy.startWalk(root) @@ -106,7 +106,7 @@ export class WalkController { } taskFinishedCallback() // this marks the current task as finished. If there are any tasks left in the queue, this will immediately execute the first task. 
this.processNode(nodeRef as Uint8Array, childNode as TrieNode, key) - } + }, ) } diff --git a/packages/trie/test/encoding.spec.ts b/packages/trie/test/encoding.spec.ts index d02d80b368..dc4ecace68 100644 --- a/packages/trie/test/encoding.spec.ts +++ b/packages/trie/test/encoding.spec.ts @@ -51,7 +51,7 @@ describe('support for Uint8Array', () => { for (const value of db._database.values()) { assert.ok( typeof value === 'string', - 'if a database is provided, string values will be used internally' + 'if a database is provided, string values will be used internally', ) } }) diff --git a/packages/trie/test/index.spec.ts b/packages/trie/test/index.spec.ts index 1d901575e9..276c51cd0b 100644 --- a/packages/trie/test/index.spec.ts +++ b/packages/trie/test/index.spec.ts @@ -22,7 +22,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { describe('simple save and retrieve', () => { it('should not crash if given a non-existent root', async () => { const root = hexToBytes( - '0x3f4399b08efe68945c1cf90ffe85bbe3ce978959da753f9e649f034015b8817d' + '0x3f4399b08efe68945c1cf90ffe85bbe3ce978959da753f9e649f034015b8817d', ) const trie = new Trie({ root, keyPrefix }) @@ -67,7 +67,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('doge'), utf8ToBytes('coin')) assert.equal( '0xde8a34a8c1d558682eae1528b47523a483dd8685d6db14b291451a66066bf0fc', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) @@ -114,7 +114,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('do'), utf8ToBytes('verb')) assert.equal( '0xf803dfcb7e8f1afd45e88eedb4699a7138d6c07b71243d9ae9bff720c99925f9', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) @@ -122,7 +122,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('done'), utf8ToBytes('finished')) assert.equal( '0x409cff4d820b394ed3fb1cd4497bdd19ffa68d30ae34157337a7043c94a3e8cb', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) }) @@ -142,7 +142,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('done'), utf8ToBytes('finished')) assert.equal( '0x409cff4d820b394ed3fb1cd4497bdd19ffa68d30ae34157337a7043c94a3e8cb', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) }) @@ -158,11 +158,11 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) await trieSetup.trie.del(new Uint8Array([12, 22, 22])) @@ -174,15 +174,15 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 33, 33]), - utf8ToBytes('create the middle branch') + utf8ToBytes('create the middle branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) await trieSetup.trie.del(new Uint8Array([12, 22, 22])) @@ -194,15 +194,15 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new 
Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 33, 33]), - utf8ToBytes('create the middle branch') + utf8ToBytes('create the middle branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) // delete the middle branch @@ -215,15 +215,15 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 33, 33]), - utf8ToBytes('create the middle branch') + utf8ToBytes('create the middle branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) // delete the middle branch await trieSetup.trie.del(new Uint8Array([11, 11, 11])) @@ -259,7 +259,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { assert.ok(path.node === null, 'findPath should not return a node now') assert.ok( path.stack.length === 1, - 'findPath should find the first extension node which is still in the DB' + 'findPath should find the first extension node which is still in the DB', ) }) }) @@ -310,7 +310,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { const v2 = utf8ToBytes('this-is-some-longer-value-to-test-the-delete-operation-value2') const rootAfterK1 = hexToBytes( - '0x809e75931f394603657e113eb7244794f35b8d326cff99407111d600722e9425' + '0x809e75931f394603657e113eb7244794f35b8d326cff99407111d600722e9425', ) const trieSetup = { @@ -325,7 +325,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { assert.equal( await trieSetup.trie.get(k1), null, - 'should return null on latest state root independently from deleteFromDB setting' + 'should return null on latest state root independently from deleteFromDB setting', ) trieSetup.trie.root(rootAfterK1) @@ -341,7 +341,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { return concatBytes( utf8ToBytes('hash_'), new Uint8Array(hashLen - msg.length).fill(0), - msg + msg, ) } else { return concatBytes(utf8ToBytes('hash_'), msg.slice(0, hashLen - 5)) @@ -364,7 +364,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { assert.equal( bytesToHex(trie.root()), - '0xe118db4e01512253df38daafa16fc1d69e03e755595b5847d275d7404ebdc74a' + '0xe118db4e01512253df38daafa16fc1d69e03e755595b5847d275d7404ebdc74a', ) }) }) diff --git a/packages/trie/test/proof.spec.ts b/packages/trie/test/proof.spec.ts index e74313a367..2a5b751830 100644 --- a/packages/trie/test/proof.spec.ts +++ b/packages/trie/test/proof.spec.ts @@ -100,11 +100,11 @@ describe('simple merkle proofs generation and verification', () => { await trie.put( utf8ToBytes('key1aa'), - utf8ToBytes('0123456789012345678901234567890123456789xxx') + utf8ToBytes('0123456789012345678901234567890123456789xxx'), ) await trie.put( utf8ToBytes('key1'), - utf8ToBytes('0123456789012345678901234567890123456789Very_Long') + utf8ToBytes('0123456789012345678901234567890123456789Very_Long'), ) await trie.put(utf8ToBytes('key2bb'), utf8ToBytes('aval3')) await trie.put(utf8ToBytes('key2'), utf8ToBytes('short')) @@ -203,7 +203,7 @@ describe('simple merkle proofs generation and 
verification', () => { const updatedNewSafeValue = await newTrie.get(safeKey) assert.ok( equalsBytes(updatedNewSafeValue!, safeValue), - 'succesfully set the trie to the new root and got the correct value' + 'succesfully set the trie to the new root and got the correct value', ) }) }) diff --git a/packages/trie/test/proof/range.spec.ts b/packages/trie/test/proof/range.spec.ts index 8bde035b5f..3fb45cf464 100644 --- a/packages/trie/test/proof/range.spec.ts +++ b/packages/trie/test/proof/range.spec.ts @@ -83,7 +83,7 @@ async function verify( startKey?: Uint8Array, endKey?: Uint8Array, keys?: Uint8Array[], - vals?: Uint8Array[] + vals?: Uint8Array[], ) { startKey = startKey ?? entries[start][0] endKey = endKey ?? entries[end][0] @@ -94,7 +94,7 @@ async function verify( endKey, keys ?? targetRange.map(([key]) => key), vals ?? targetRange.map(([, val]) => val), - [...(await trie.createProof(startKey)), ...(await trie.createProof(endKey))] + [...(await trie.createProof(startKey)), ...(await trie.createProof(endKey))], ) } @@ -136,7 +136,7 @@ describe('simple merkle range proofs generation and verification', () => { assert.equal( await verify(trie, entries, start, end, startKey, endKey), - end !== entries.length - 1 + end !== entries.length - 1, ) } @@ -195,7 +195,7 @@ describe('simple merkle range proofs generation and verification', () => { // One element with two non-existent edge proofs assert.equal( await verify(trie, entries, start, start, decreasedStartKey, increasedEndKey), - true + true, ) // Test the mini trie with only a single element. @@ -217,9 +217,9 @@ describe('simple merkle range proofs generation and verification', () => { null, entries.map(([key]) => key), entries.map(([, val]) => val), - null + null, ), - false + false, ) // With edge proofs, it should still work. 
@@ -233,9 +233,9 @@ describe('simple merkle range proofs generation and verification', () => { 0, entries.length - 1, hexToBytes(`0x${'00'.repeat(32)}`), - hexToBytes(`0x${'ff'.repeat(32)}`) + hexToBytes(`0x${'ff'.repeat(32)}`), ), - false + false, ) }) @@ -261,7 +261,7 @@ describe('simple merkle range proofs generation and verification', () => { it('create a bad range proof and verify it', async () => { const runTest = async ( - cb: (trie: Trie, entries: [Uint8Array, Uint8Array][]) => Promise + cb: (trie: Trie, entries: [Uint8Array, Uint8Array][]) => Promise, ) => { const { trie, entries } = await randomTrie(new MapDB(), false) @@ -349,7 +349,7 @@ describe('simple merkle range proofs generation and verification', () => { undefined, undefined, targetRange.map(([key]) => key), - targetRange.map(([, val]) => val) + targetRange.map(([, val]) => val), ) result = true } catch (err) { diff --git a/packages/trie/test/trie/checkpoint.spec.ts b/packages/trie/test/trie/checkpoint.spec.ts index c5ea661609..aac6de1dfb 100644 --- a/packages/trie/test/trie/checkpoint.spec.ts +++ b/packages/trie/test/trie/checkpoint.spec.ts @@ -234,11 +234,11 @@ describe('testing checkpoints', () => { assert.equal(bytesToUtf8((await CommittedState.get(KEY))!), '1') assert.equal( bytesToHex((await CommittedState['_db'].get(KEY_ROOT))!), - '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c' + '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c', ) assert.equal( bytesToHex(CommittedState.root()), - '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c' + '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c', ) // From MemoryState, now take the final checkpoint @@ -264,7 +264,7 @@ describe('testing checkpoints', () => { [ hexToBytes('0xd7eba6ee0f011acb031b79554d57001c42fbfabb150eb9fdd3b6d434f7b791eb'), hexToBytes('0xe3a1202418cf7414b1e6c2c8d92b4673eecdb4aac88f7f58623e3be903aefb2fd4655c32'), - ] + ], ) // Verify that the key is updated assert.equal(bytesToUtf8((await CommittedState.get(KEY))!), '2') diff --git a/packages/trie/test/trie/secure.spec.ts b/packages/trie/test/trie/secure.spec.ts index f6b46483e8..2a866250af 100644 --- a/packages/trie/test/trie/secure.spec.ts +++ b/packages/trie/test/trie/secure.spec.ts @@ -128,35 +128,35 @@ describe('secure tests', () => { const trie = new Trie({ useKeyHashing: true, db: new MapDB() }) const a = hexToBytes( - '0xf8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf' + '0xf8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf', ) const ak = hexToBytes('0x095e7baea6a6c7c4c2dfeb977efac326af552d87') const b = hexToBytes( - '0xf844802ea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab' + '0xf844802ea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab', ) const bk = hexToBytes('0x945304eb96065b2a98b57a48a06ae28d285a71b5') const c = hexToBytes( - '0xf84c80880de0b6b3a7640000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84c80880de0b6b3a7640000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) const ck = 
hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') // checkpoint // checkpoint // commit const d = hexToBytes( - '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf' + '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf', ) const dk = hexToBytes('0x095e7baea6a6c7c4c2dfeb977efac326af552d87') const e = hexToBytes( - '0xf8478083010851a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab' + '0xf8478083010851a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab', ) const ek = hexToBytes('0x945304eb96065b2a98b57a48a06ae28d285a71b5') const f = hexToBytes( - '0xf84c01880de0b6b3540df72ca056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84c01880de0b6b3540df72ca056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) const fk = hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') // commit const g = hexToBytes( - '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf' + '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf', ) const gk = hexToBytes('0x095e7baea6a6c7c4c2dfeb977efac326af552d87') diff --git a/packages/trie/test/trie/trie.spec.ts b/packages/trie/test/trie/trie.spec.ts index a3f35ca252..b74d13d717 100644 --- a/packages/trie/test/trie/trie.spec.ts +++ b/packages/trie/test/trie/trie.spec.ts @@ -47,7 +47,7 @@ for (const { constructor, defaults, title } of [ it('creates an instance via createTrie and defaults to `false` with a database', async () => { // TODO: check this test assert.isUndefined( - ((await createTrie({ ...defaults, db: new MapDB() })) as any)._useRootPersistence + ((await createTrie({ ...defaults, db: new MapDB() })) as any)._useRootPersistence, ) }) @@ -60,7 +60,7 @@ for (const { constructor, defaults, title } of [ db: new MapDB(), useRootPersistence: false, })) as any - )._useRootPersistence + )._useRootPersistence, ) }) @@ -73,14 +73,14 @@ for (const { constructor, defaults, title } of [ db: new MapDB(), useRootPersistence: false, })) as any - )._useRootPersistence + )._useRootPersistence, ) }) it('creates an instance via createTrie and defaults to `false` without a database', async () => { // TODO: check this test assert.isUndefined( - ((await createTrie({ ...defaults, db: new MapDB() })) as any)._useRootPersistence + ((await createTrie({ ...defaults, db: new MapDB() })) as any)._useRootPersistence, ) }) @@ -189,7 +189,7 @@ for (const { constructor, defaults, title } of [ for (const root of roots) { assert.isTrue( await trie.checkRoot(unprefixedHexToBytes(root)), - 'Should return true for all nodes in trie' + 'Should return true for all nodes in trie', ) } }) @@ -207,16 +207,16 @@ for (const { constructor, defaults, title } of [ assert.deepEqual(emptyTrie.EMPTY_TRIE_ROOT, emptyTrie.root(), 'Should return empty trie root') assert.isTrue( await emptyTrie.checkRoot(emptyTrie.EMPTY_TRIE_ROOT), - 'Should return true for empty root' + 'Should return true for 
empty root', ) assert.isFalse( await emptyTrie.checkRoot(emptyTrie['appliedKey'](ROOT_DB_KEY)), - 'Should return false for persistence key' + 'Should return false for persistence key', ) for (const root of roots) { assert.isFalse( await emptyTrie.checkRoot(unprefixedHexToBytes(root)), - 'Should always return false' + 'Should always return false', ) } }) @@ -235,7 +235,7 @@ for (const { constructor, defaults, title } of [ assert.notEqual( 'Missing node in DB', e.message, - 'Should throw when error is unrelated to checkroot' + 'Should throw when error is unrelated to checkroot', ) } }) @@ -259,7 +259,7 @@ describe('keyHashingFunction', async () => { assert.equal( bytesToHex(trieWithHashFunction.root()), '0x8001', - 'used hash function from customKeyHashingFunction' + 'used hash function from customKeyHashingFunction', ) assert.equal(bytesToHex(trieWithCommon.root()), '0x80', 'used hash function from common') }) @@ -282,7 +282,7 @@ describe('keyHashingFunction', async () => { assert.equal( bytesToHex(trieWithHashFunctionCopy.root()), '0x8001', - 'used hash function from customKeyHashingFunction' + 'used hash function from customKeyHashingFunction', ) assert.equal(bytesToHex(trieWithCommonCopy.root()), '0x80', 'used hash function from common') }) diff --git a/packages/trie/test/util/encodingUtils.spec.ts b/packages/trie/test/util/encodingUtils.spec.ts index cf09a89d8d..d647c47929 100644 --- a/packages/trie/test/util/encodingUtils.spec.ts +++ b/packages/trie/test/util/encodingUtils.spec.ts @@ -81,7 +81,7 @@ describe('encoding', () => { assert.deepEqual( result, expected, - 'Returned hex-encoded key does not match the expected result' + 'Returned hex-encoded key does not match the expected result', ) }) @@ -98,7 +98,7 @@ describe('encoding', () => { assert.deepEqual( result, expected, - 'Returned hex-encoded key in "keybyte" encoding does not match the expected result' + 'Returned hex-encoded key in "keybyte" encoding does not match the expected result', ) }) @@ -110,7 +110,7 @@ describe('encoding', () => { assert.throws( () => pathToHexKey(path, extension, 'invalid'), Error, - 'retType must be either "keybyte" or "hex"' + 'retType must be either "keybyte" or "hex"', ) }) @@ -122,17 +122,17 @@ describe('encoding', () => { assert.equal( paths.reduce((count, subArray) => count + subArray.length, 0), pathStrings.length, - 'should have correct number of paths' + 'should have correct number of paths', ) assert.deepEqual( paths[0], [Uint8Array.of(26), Uint8Array.of(27), Uint8Array.of(28), Uint8Array.of(29)], - 'should merge paths correctly' + 'should merge paths correctly', ) assert.deepEqual( paths[1], [Uint8Array.of(30), Uint8Array.of(26)], - 'should merge paths correctly' + 'should merge paths correctly', ) assert.deepEqual(paths[2], [Uint8Array.of(31)], 'should merge paths correctly') }) diff --git a/packages/trie/test/util/genesisState.spec.ts b/packages/trie/test/util/genesisState.spec.ts index 77fc886905..9a672da74f 100644 --- a/packages/trie/test/util/genesisState.spec.ts +++ b/packages/trie/test/util/genesisState.spec.ts @@ -15,7 +15,7 @@ describe('[Util/genesisStateRoot]', () => { assert.equal( bytesToHex(stateRoot), '0x52e628c7f35996ba5a0402d02b34535993c89ff7fc4c430b2763ada8554bee62', - 'kiln stateRoot matches' + 'kiln stateRoot matches', ) }) }) @@ -25,6 +25,6 @@ it('should correctly derive mainnet stateRoot from ethereumjs genesis', async () assert.equal( bytesToHex(stateRoot), '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544', - 'mainnet stateRoot matches' + 'mainnet 
stateRoot matches', ) }) diff --git a/packages/trie/tsconfig.lint.json b/packages/trie/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/trie/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/tx/.eslintrc.cjs b/packages/tx/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/tx/.eslintrc.cjs +++ b/packages/tx/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/tx/examples/blobTx.ts b/packages/tx/examples/blobTx.ts index 03f38924fe..fd4fc5ccaa 100644 --- a/packages/tx/examples/blobTx.ts +++ b/packages/tx/examples/blobTx.ts @@ -36,4 +36,4 @@ const main = async () => { console.log(bytesToHex(tx.hash())) //0x3c3e7c5e09c250d2200bcc3530f4a9088d7e3fb4ea3f4fccfd09f535a3539e84 } -main() +void main() diff --git a/packages/tx/examples/custom-chain-id-tx.ts b/packages/tx/examples/custom-chain-id-tx.ts index 30d5baf394..1402244dc9 100644 --- a/packages/tx/examples/custom-chain-id-tx.ts +++ b/packages/tx/examples/custom-chain-id-tx.ts @@ -1,9 +1,9 @@ +import { Hardfork, createCustomCommon } from '@ethereumjs/common' import { createLegacyTxFromRLP } from '@ethereumjs/tx' import { toBytes } from '@ethereumjs/util' -import { createCustomCommon, Hardfork } from '@ethereumjs/common' const txData = toBytes( - '0xf9010b82930284d09dc30083419ce0942d18de92e0f9aee1a29770c3b15c6cf8ac5498e580b8a42f43f4fb0000000000000000000000000000000000000000000000000000016b78998da900000000000000000000000000000000000000000000000000000000000cb1b70000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000001363e4f00000000000000000000000000000000000000000000000000000000000186a029a0fac36e66d329af0e831b2e61179b3ec8d7c7a8a2179e303cfed3364aff2bc3e4a07cb73d56e561ccbd838818dd3dea5fa0b5158577ffc61c0e6ec1f0ed55716891' + '0xf9010b82930284d09dc30083419ce0942d18de92e0f9aee1a29770c3b15c6cf8ac5498e580b8a42f43f4fb0000000000000000000000000000000000000000000000000000016b78998da900000000000000000000000000000000000000000000000000000000000cb1b70000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000001363e4f00000000000000000000000000000000000000000000000000000000000186a029a0fac36e66d329af0e831b2e61179b3ec8d7c7a8a2179e303cfed3364aff2bc3e4a07cb73d56e561ccbd838818dd3dea5fa0b5158577ffc61c0e6ec1f0ed55716891', ) const common = createCustomCommon({ chainId: 3 }) diff --git a/packages/tx/examples/custom-chain-tx.ts b/packages/tx/examples/custom-chain-tx.ts index 8ac85170fd..8d111ff918 100644 --- a/packages/tx/examples/custom-chain-tx.ts +++ b/packages/tx/examples/custom-chain-tx.ts @@ -1,7 +1,6 @@ -import { Address } from '@ethereumjs/util' import { createCustomCommon } from '@ethereumjs/common' import { createLegacyTx } from '@ethereumjs/tx' -import { hexToBytes } from '@ethereumjs/util' +import { Address, hexToBytes } from '@ethereumjs/util' // In this example we create a transaction for a custom network. 
@@ -15,7 +14,7 @@ const customCommon = createCustomCommon( { baseChain: 'mainnet', hardfork: 'petersburg', - } + }, ) // We pass our custom Common object whenever we create a transaction @@ -27,7 +26,7 @@ const tx = createLegacyTx( gasLimit: 1000000000, value: 100000, }, - opts + opts, ) // Once we created the transaction using the custom Common object, we can use it as a normal tx. diff --git a/packages/tx/examples/initKzg.ts b/packages/tx/examples/initKzg.ts index de6276d97f..9ed4303631 100644 --- a/packages/tx/examples/initKzg.ts +++ b/packages/tx/examples/initKzg.ts @@ -1,5 +1,5 @@ -import { loadKZG } from 'kzg-wasm' import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { loadKZG } from 'kzg-wasm' const main = async () => { const kzg = await loadKZG() @@ -14,4 +14,4 @@ const main = async () => { console.log(common.customCrypto.kzg) // should output the KZG API as an object } -main() +void main() diff --git a/packages/tx/examples/l2tx.ts b/packages/tx/examples/l2tx.ts index 5b9dd1ad55..15977fb777 100644 --- a/packages/tx/examples/l2tx.ts +++ b/packages/tx/examples/l2tx.ts @@ -1,4 +1,4 @@ -import { createCustomCommon, CustomChain } from '@ethereumjs/common' +import { CustomChain, createCustomCommon } from '@ethereumjs/common' import { createLegacyTx } from '@ethereumjs/tx' import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' diff --git a/packages/tx/examples/legacyTx.ts b/packages/tx/examples/legacyTx.ts index 69dd8220c1..7e96043564 100644 --- a/packages/tx/examples/legacyTx.ts +++ b/packages/tx/examples/legacyTx.ts @@ -1,9 +1,10 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' import { createLegacyTx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' +import { hexToBytes } from 'ethereum-cryptography/utils' const txParams = { - nonce: '0x00', + nonce: '0x0', gasPrice: '0x09184e72a000', gasLimit: '0x2710', to: '0x0000000000000000000000000000000000000000', @@ -14,12 +15,9 @@ const txParams = { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) const tx = createLegacyTx(txParams, { common }) -const privateKey = Buffer.from( - 'e331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', - 'hex' -) +const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109') const signedTx = tx.sign(privateKey) -const serializedTx = signedTx.serialize() +const _serializedTx = signedTx.serialize() console.log(bytesToHex(signedTx.hash())) // 0x894b72d87f8333fccd29d1b3aca39af69d97a6bc281e7e7a3a60640690a3cd2b diff --git a/packages/tx/examples/londonTx.ts b/packages/tx/examples/londonTx.ts index 00bb618fe7..bff302f2ba 100644 --- a/packages/tx/examples/londonTx.ts +++ b/packages/tx/examples/londonTx.ts @@ -1,5 +1,5 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { create1559FeeMarketTx, FeeMarketEIP1559Transaction } from '@ethereumjs/tx' +import { create1559FeeMarketTx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) diff --git a/packages/tx/examples/transactions.ts b/packages/tx/examples/transactions.ts index 4104b91435..8d7509bde3 100644 --- a/packages/tx/examples/transactions.ts +++ b/packages/tx/examples/transactions.ts @@ -2,8 +2,8 @@ // You can run them with tsx, as this project is developed in TypeScript. 
// Install the dependencies and run `npx tsx examples/transactions.ts` -import { bytesToHex, toBytes, hexToBytes } from '@ethereumjs/util' import { createLegacyTx, createLegacyTxFromBytesArray } from '@ethereumjs/tx' +import { bytesToHex, hexToBytes, toBytes } from '@ethereumjs/util' // We create an unsigned transaction. // Notice we don't set the `to` field because we are creating a new contract. diff --git a/packages/tx/examples/txFactory.ts b/packages/tx/examples/txFactory.ts index dd967de644..fb23713107 100644 --- a/packages/tx/examples/txFactory.ts +++ b/packages/tx/examples/txFactory.ts @@ -1,5 +1,7 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Capability, createTxFromTxData, EIP1559CompatibleTx } from '@ethereumjs/tx' +import { Capability, createTxFromTxData } from '@ethereumjs/tx' + +import type { EIP1559CompatibleTx } from '@ethereumjs/tx' const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) @@ -8,6 +10,6 @@ const tx = createTxFromTxData(txData, { common }) if (tx.supports(Capability.EIP1559FeeMarket)) { console.log( - `The max fee per gas for this transaction is ${(tx as EIP1559CompatibleTx).maxFeePerGas}` + `The max fee per gas for this transaction is ${(tx as EIP1559CompatibleTx).maxFeePerGas}`, ) } diff --git a/packages/tx/src/1559/constructors.ts b/packages/tx/src/1559/constructors.ts index 6972a8bdfd..2465046fd5 100644 --- a/packages/tx/src/1559/constructors.ts +++ b/packages/tx/src/1559/constructors.ts @@ -31,11 +31,11 @@ export function create1559FeeMarketTx(txData: TxData, opts: TxOptions = {}) { */ export function createEIP1559FeeMarketTxFromBytesArray( values: TxValuesArray, - opts: TxOptions = {} + opts: TxOptions = {}, ) { if (values.length !== 9 && values.length !== 12) { throw new Error( - 'Invalid EIP-1559 transaction. Only expecting 9 values (for unsigned tx) or 12 values (for signed tx).' + 'Invalid EIP-1559 transaction. Only expecting 9 values (for unsigned tx) or 12 values (for signed tx).', ) } @@ -72,7 +72,7 @@ export function createEIP1559FeeMarketTxFromBytesArray( r, s, }, - opts + opts, ) } @@ -89,7 +89,7 @@ export function create1559FeeMarketTxFromRLP(serialized: Uint8Array, opts: TxOpt throw new Error( `Invalid serialized tx input: not an EIP-1559 transaction (wrong tx type, expected: ${ TransactionType.FeeMarketEIP1559 - }, received: ${bytesToHex(serialized.subarray(0, 1))}` + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, ) } diff --git a/packages/tx/src/1559/tx.ts b/packages/tx/src/1559/tx.ts index df4a3f155a..464b255b2a 100644 --- a/packages/tx/src/1559/tx.ts +++ b/packages/tx/src/1559/tx.ts @@ -90,7 +90,7 @@ export class FeeMarketEIP1559Transaction extends BaseTransaction { if (!(blobVersionedHashes.length === blobs.length && blobs.length === commitments.length)) { throw new Error('Number of blobVersionedHashes, blobs, and commitments not all equal') @@ -73,7 +73,7 @@ const validateBlobTransactionNetworkWrapper = ( export function create4844BlobTx(txData: TxData, opts?: TxOptions) { if (opts?.common?.customCrypto?.kzg === undefined) { throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', ) } const kzg = opts!.common!.customCrypto!.kzg! 
@@ -96,7 +96,7 @@ export function create4844BlobTx(txData: TxData, opts?: TxOptions) { txData.kzgProofs = blobsToProofs( kzg, txData.blobs as Uint8Array[], - txData.kzgCommitments as Uint8Array[] + txData.kzgCommitments as Uint8Array[], ) } @@ -112,13 +112,13 @@ export function create4844BlobTx(txData: TxData, opts?: TxOptions) { export function create4844BlobTxFromBytesArray(values: TxValuesArray, opts: TxOptions = {}) { if (opts.common?.customCrypto?.kzg === undefined) { throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', ) } if (values.length !== 11 && values.length !== 14) { throw new Error( - 'Invalid EIP-4844 transaction. Only expecting 11 values (for unsigned tx) or 14 values (for signed tx).' + 'Invalid EIP-4844 transaction. Only expecting 11 values (for unsigned tx) or 14 values (for signed tx).', ) } @@ -169,7 +169,7 @@ export function create4844BlobTxFromBytesArray(values: TxValuesArray, opts: TxOp r, s, }, - opts + opts, ) } @@ -182,7 +182,7 @@ export function create4844BlobTxFromBytesArray(values: TxValuesArray, opts: TxOp export function create4844BlobTxFromRLP(serialized: Uint8Array, opts: TxOptions = {}) { if (opts.common?.customCrypto?.kzg === undefined) { throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', ) } @@ -190,7 +190,7 @@ export function create4844BlobTxFromRLP(serialized: Uint8Array, opts: TxOptions throw new Error( `Invalid serialized tx input: not an EIP-4844 transaction (wrong tx type, expected: ${ TransactionType.BlobEIP4844 - }, received: ${bytesToHex(serialized.subarray(0, 1))}` + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, ) } @@ -211,7 +211,7 @@ export function create4844BlobTxFromRLP(serialized: Uint8Array, opts: TxOptions */ export function create4844BlobTxFromSerializedNetworkWrapper( serialized: Uint8Array, - opts?: TxOptions + opts?: TxOptions, ): BlobEIP4844Transaction { if (!opts || !opts.common) { throw new Error('common instance required to validate versioned hashes') @@ -219,7 +219,7 @@ export function create4844BlobTxFromSerializedNetworkWrapper( if (opts.common?.customCrypto?.kzg === undefined) { throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', ) } @@ -227,7 +227,7 @@ export function create4844BlobTxFromSerializedNetworkWrapper( throw new Error( `Invalid serialized tx input: not an EIP-4844 transaction (wrong tx type, expected: ${ TransactionType.BlobEIP4844 - }, received: ${bytesToHex(serialized.subarray(0, 1))}` + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, ) } @@ -252,7 +252,7 @@ export function create4844BlobTxFromSerializedNetworkWrapper( kzgCommitments, kzgProofs, version, - opts.common.customCrypto.kzg + opts.common.customCrypto.kzg, ) // set the network blob data on the tx @@ -278,11 +278,11 @@ export function create4844BlobTxFromSerializedNetworkWrapper( */ export function createMinimal4844TxFromNetworkWrapper( txData: BlobEIP4844Transaction, - opts?: TxOptions + opts?: TxOptions, ): BlobEIP4844Transaction { if (opts?.common?.customCrypto?.kzg === undefined) { throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 
blob tx' + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', ) } @@ -291,7 +291,7 @@ export function createMinimal4844TxFromNetworkWrapper( ...txData, ...{ blobs: undefined, kzgCommitments: undefined, kzgProofs: undefined }, }, - opts + opts, ) return tx } @@ -305,7 +305,7 @@ export function createMinimal4844TxFromNetworkWrapper( */ export function blobTxNetworkWrapperToJSON( serialized: Uint8Array, - opts?: TxOptions + opts?: TxOptions, ): JsonBlobTxNetworkWrapper { const tx = create4844BlobTxFromSerializedNetworkWrapper(serialized, opts) diff --git a/packages/tx/src/4844/tx.ts b/packages/tx/src/4844/tx.ts index 549ccd79ff..f6c69b2ad5 100644 --- a/packages/tx/src/4844/tx.ts +++ b/packages/tx/src/4844/tx.ts @@ -100,13 +100,13 @@ export class BlobEIP4844Transaction extends BaseTransaction toBytes(vh)) @@ -132,7 +132,7 @@ export class BlobEIP4844Transaction extends BaseTransaction if (this.getIntrinsicGas() > this.gasLimit) { errors.push( - `gasLimit is too low. given ${this.gasLimit}, need at least ${this.getIntrinsicGas()}` + `gasLimit is too low. given ${this.gasLimit}, need at least ${this.getIntrinsicGas()}`, ) } @@ -367,7 +367,7 @@ export abstract class BaseTransaction v: bigint, r: Uint8Array | bigint, s: Uint8Array | bigint, - convertV?: boolean + convertV?: boolean, ): Transaction[T] /** @@ -385,7 +385,7 @@ export abstract class BaseTransaction if (common) { if (common.chainId() !== chainIdBigInt) { const msg = this._errorMsg( - `The chain ID does not match the chain ID of Common. Got: ${chainIdBigInt}, expected: ${common.chainId()}` + `The chain ID does not match the chain ID of Common. Got: ${chainIdBigInt}, expected: ${common.chainId()}`, ) throw new Error(msg) } @@ -405,7 +405,7 @@ export abstract class BaseTransaction name: 'custom-chain', chainId: chainIdBigInt, }, - { baseChain: this.DEFAULT_CHAIN } + { baseChain: this.DEFAULT_CHAIN }, ) } } @@ -425,7 +425,7 @@ export abstract class BaseTransaction protected _validateCannotExceedMaxInteger( values: { [key: string]: bigint | undefined }, bits = 256, - cannotEqual = false + cannotEqual = false, ) { for (const [key, value] of Object.entries(values)) { switch (bits) { @@ -433,7 +433,7 @@ export abstract class BaseTransaction if (cannotEqual) { if (value !== undefined && value >= MAX_UINT64) { const msg = this._errorMsg( - `${key} cannot equal or exceed MAX_UINT64 (2^64-1), given ${value}` + `${key} cannot equal or exceed MAX_UINT64 (2^64-1), given ${value}`, ) throw new Error(msg) } @@ -448,14 +448,14 @@ export abstract class BaseTransaction if (cannotEqual) { if (value !== undefined && value >= MAX_INTEGER) { const msg = this._errorMsg( - `${key} cannot equal or exceed MAX_INTEGER (2^256-1), given ${value}` + `${key} cannot equal or exceed MAX_INTEGER (2^256-1), given ${value}`, ) throw new Error(msg) } } else { if (value !== undefined && value > MAX_INTEGER) { const msg = this._errorMsg( - `${key} cannot exceed MAX_INTEGER (2^256-1), given ${value}` + `${key} cannot exceed MAX_INTEGER (2^256-1), given ${value}`, ) throw new Error(msg) } diff --git a/packages/tx/src/capabilities/eip1559.ts b/packages/tx/src/capabilities/eip1559.ts index d76c744f39..7c9047ff9a 100644 --- a/packages/tx/src/capabilities/eip1559.ts +++ b/packages/tx/src/capabilities/eip1559.ts @@ -10,7 +10,7 @@ export function getUpfrontCost(tx: EIP1559CompatibleTx, baseFee: bigint): bigint export function getEffectivePriorityFee( tx: EIP1559CompatibleTx, - baseFee: bigint | undefined + baseFee: bigint | undefined, ): bigint 
{ if (baseFee === undefined || baseFee > tx.maxFeePerGas) { throw new Error('Tx cannot pay baseFee') diff --git a/packages/tx/src/capabilities/eip7702.ts b/packages/tx/src/capabilities/eip7702.ts index 52c1b3bc91..3839485a0b 100644 --- a/packages/tx/src/capabilities/eip7702.ts +++ b/packages/tx/src/capabilities/eip7702.ts @@ -10,7 +10,7 @@ import type { EIP7702CompatibleTx } from '../types.js' export function getDataGas(tx: EIP7702CompatibleTx): bigint { const eip2930Cost = BigInt(AccessLists.getDataGasEIP2930(tx.accessList, tx.common)) const eip7702Cost = BigInt( - tx.authorizationList.length * Number(tx.common.param('perAuthBaseGas')) + tx.authorizationList.length * Number(tx.common.param('perAuthBaseGas')), ) return Legacy.getDataGas(tx, eip2930Cost + eip7702Cost) } diff --git a/packages/tx/src/capabilities/legacy.ts b/packages/tx/src/capabilities/legacy.ts index 342acd39a4..34ebd741dc 100644 --- a/packages/tx/src/capabilities/legacy.ts +++ b/packages/tx/src/capabilities/legacy.ts @@ -66,7 +66,7 @@ export function validateHighS(tx: LegacyTxInterface): void { if (tx.common.gteHardfork('homestead') && s !== undefined && s > SECP256K1_ORDER_DIV_2) { const msg = errorMsg( tx, - 'Invalid Signature: s-values greater than secp256k1n/2 are considered invalid' + 'Invalid Signature: s-values greater than secp256k1n/2 are considered invalid', ) throw new Error(msg) } @@ -90,7 +90,7 @@ export function getSenderPublicKey(tx: LegacyTxInterface): Uint8Array { v!, bigIntToUnpaddedBytes(r!), bigIntToUnpaddedBytes(s!), - tx.supports(Capability.EIP155ReplayProtection) ? tx.common.chainId() : undefined + tx.supports(Capability.EIP155ReplayProtection) ? tx.common.chainId() : undefined, ) if (Object.isFrozen(tx)) { tx.cache.senderPubKey = sender diff --git a/packages/tx/src/legacy/constructors.ts b/packages/tx/src/legacy/constructors.ts index 002d32bd55..35657d6897 100644 --- a/packages/tx/src/legacy/constructors.ts +++ b/packages/tx/src/legacy/constructors.ts @@ -28,7 +28,7 @@ export function createLegacyTxFromBytesArray(values: TxValuesArray, opts: TxOpti // This happens if you get the RLP data from `raw()` if (values.length !== 6 && values.length !== 9) { throw new Error( - 'Invalid transaction. Only expecting 6 values (for unsigned tx) or 9 values (for signed tx).' + 'Invalid transaction. Only expecting 6 values (for unsigned tx) or 9 values (for signed tx).', ) } @@ -48,7 +48,7 @@ export function createLegacyTxFromBytesArray(values: TxValuesArray, opts: TxOpti r, s, }, - opts + opts, ) } diff --git a/packages/tx/src/legacy/tx.ts b/packages/tx/src/legacy/tx.ts index 29ec2c64d2..ffec6db20f 100644 --- a/packages/tx/src/legacy/tx.ts +++ b/packages/tx/src/legacy/tx.ts @@ -219,7 +219,7 @@ export class LegacyTransaction extends BaseTransaction { v: bigint, r: Uint8Array | bigint, s: Uint8Array | bigint, - convertV: boolean = false + convertV: boolean = false, ): LegacyTransaction { r = toBytes(r) s = toBytes(s) @@ -241,7 +241,7 @@ export class LegacyTransaction extends BaseTransaction { r: bytesToBigInt(r), s: bytesToBigInt(s), }, - opts + opts, ) } @@ -268,7 +268,7 @@ export class LegacyTransaction extends BaseTransaction { // v is 2. 
not matching the classic v=27 or v=28 case if (v < 37 && v !== 27 && v !== 28) { throw new Error( - `Legacy txs need either v = 27/28 or v >= 37 (EIP-155 replay protection), got v = ${v}` + `Legacy txs need either v = 27/28 or v >= 37 (EIP-155 replay protection), got v = ${v}`, ) } } @@ -284,7 +284,7 @@ export class LegacyTransaction extends BaseTransaction { if (common) { if (!meetsEIP155(BigInt(v), common.chainId())) { throw new Error( - `Incompatible EIP155-based V ${v} and chain id ${common.chainId()}. See the Common parameter of the Transaction constructor to set the chain id.` + `Incompatible EIP155-based V ${v} and chain id ${common.chainId()}. See the Common parameter of the Transaction constructor to set the chain id.`, ) } } else { diff --git a/packages/tx/src/transactionFactory.ts b/packages/tx/src/transactionFactory.ts index 22ca0a7021..e543320f1d 100644 --- a/packages/tx/src/transactionFactory.ts +++ b/packages/tx/src/transactionFactory.ts @@ -29,7 +29,7 @@ import type { EthersProvider } from '@ethereumjs/util' */ export function createTxFromTxData( txData: TypedTxData, - txOptions: TxOptions = {} + txOptions: TxOptions = {}, ): Transaction[T] { if (!('type' in txData) || txData.type === undefined) { // Assume legacy transaction @@ -59,7 +59,7 @@ export function createTxFromTxData( */ export function createTxFromSerializedData( data: Uint8Array, - txOptions: TxOptions = {} + txOptions: TxOptions = {}, ): Transaction[T] { if (data[0] <= 0x7f) { // Determine the type. @@ -91,7 +91,7 @@ export function createTxFromSerializedData( */ export function createTxFromBlockBodyData( data: Uint8Array | Uint8Array[], - txOptions: TxOptions = {} + txOptions: TxOptions = {}, ) { if (data instanceof Uint8Array) { return createTxFromSerializedData(data, txOptions) @@ -112,7 +112,7 @@ export function createTxFromBlockBodyData( */ export async function createTxFromRPC( txData: TxData[T], - txOptions: TxOptions = {} + txOptions: TxOptions = {}, ): Promise { return createTxFromTxData(normalizeTxParams(txData), txOptions) } @@ -127,7 +127,7 @@ export async function createTxFromRPC( export async function createTxFromJsonRpcProvider( provider: string | EthersProvider, txHash: string, - txOptions?: TxOptions + txOptions?: TxOptions, ) { const prov = getProvider(provider) const txData = await fetchFromProvider(prov, { diff --git a/packages/tx/src/types.ts b/packages/tx/src/types.ts index 792fbe0427..f361532663 100644 --- a/packages/tx/src/types.ts +++ b/packages/tx/src/types.ts @@ -118,7 +118,7 @@ export function isAccessList(input: AccessListBytes | AccessList): input is Acce } export function isAuthorizationListBytes( - input: AuthorizationListBytes | AuthorizationList + input: AuthorizationListBytes | AuthorizationList, ): input is AuthorizationListBytes { if (input.length === 0) { return true @@ -131,7 +131,7 @@ export function isAuthorizationListBytes( } export function isAuthorizationList( - input: AuthorizationListBytes | AuthorizationList + input: AuthorizationListBytes | AuthorizationList, ): input is AuthorizationList { return !isAuthorizationListBytes(input) // This is exactly the same method, except the output is negated. } @@ -447,7 +447,7 @@ type AccessListEIP2930TxValuesArray = [ AccessListBytes, Uint8Array?, Uint8Array?, - Uint8Array? + Uint8Array?, ] /** @@ -465,7 +465,7 @@ type FeeMarketEIP1559TxValuesArray = [ AccessListBytes, Uint8Array?, Uint8Array?, - Uint8Array? 
+ Uint8Array?, ] /** @@ -484,7 +484,7 @@ type EOACodeEIP7702TxValuesArray = [ AuthorizationListBytes, Uint8Array?, Uint8Array?, - Uint8Array? + Uint8Array?, ] /** @@ -504,14 +504,14 @@ type BlobEIP4844TxValuesArray = [ Uint8Array[], Uint8Array?, Uint8Array?, - Uint8Array? + Uint8Array?, ] export type BlobEIP4844NetworkValuesArray = [ BlobEIP4844TxValuesArray, Uint8Array[], Uint8Array[], - Uint8Array[] + Uint8Array[], ] type JsonAccessListItem = { address: string; storageKeys: string[] } diff --git a/packages/tx/src/util.ts b/packages/tx/src/util.ts index ab3594dc8e..2079045428 100644 --- a/packages/tx/src/util.ts +++ b/packages/tx/src/util.ts @@ -23,8 +23,8 @@ export function checkMaxInitCodeSize(common: Common, length: number) { if (maxInitCodeSize && BigInt(length) > maxInitCodeSize) { throw new Error( `the initcode size of this transaction is too large: it is ${length} while the max is ${common.param( - 'maxInitCodeSize' - )}` + 'maxInitCodeSize', + )}`, ) } } @@ -80,7 +80,7 @@ export class AccessLists { const storageSlots = accessListItem[1] if ((accessListItem)[2] !== undefined) { throw new Error( - 'Access list item cannot have 3 elements. It can only have an address, and an array of storage slots.' + 'Access list item cannot have 3 elements. It can only have an address, and an array of storage slots.', ) } if (address.length !== 20) { @@ -130,7 +130,7 @@ export class AccessLists { export class AuthorizationLists { public static getAuthorizationListData( - authorizationList: AuthorizationListBytes | AuthorizationList + authorizationList: AuthorizationListBytes | AuthorizationList, ) { let AuthorizationListJSON let bufferAuthorizationList diff --git a/packages/tx/test/base.spec.ts b/packages/tx/test/base.spec.ts index 6c93a58b3c..1f0d405449 100644 --- a/packages/tx/test/base.spec.ts +++ b/packages/tx/test/base.spec.ts @@ -119,7 +119,7 @@ describe('[BaseTransaction]', () => { assert.equal( tx.common.hardfork(), 'london', - `${txType.name}: should initialize with correct HF provided` + `${txType.name}: should initialize with correct HF provided`, ) assert.ok(Object.isFrozen(tx), `${txType.name}: tx should be frozen by default`) @@ -131,20 +131,20 @@ describe('[BaseTransaction]', () => { assert.equal( tx.common.hardfork(), 'london', - `${txType.name}: should initialize with correct HF provided` + `${txType.name}: should initialize with correct HF provided`, ) initCommon.setHardfork(Hardfork.Byzantium) assert.equal( tx.common.hardfork(), 'london', - `${txType.name}: should stay on correct HF if outer common HF changes` + `${txType.name}: should stay on correct HF if outer common HF changes`, ) tx = txType.create.txData({}, { common, freeze: false }) assert.ok( !Object.isFrozen(tx), - `${txType.name}: tx should not be frozen when freeze deactivated in options` + `${txType.name}: tx should not be frozen when freeze deactivated in options`, ) // Perform the same test as above, but now using a different construction method. 
This also implies that passing on the @@ -156,7 +156,7 @@ describe('[BaseTransaction]', () => { assert.equal( tx.type, txType.type, - `${txType.name}: fromSerializedTx() -> should initialize correctly` + `${txType.name}: fromSerializedTx() -> should initialize correctly`, ) assert.ok(Object.isFrozen(tx), `${txType.name}: tx should be frozen by default`) @@ -164,7 +164,7 @@ describe('[BaseTransaction]', () => { tx = txType.create.rlp(rlpData, { common, freeze: false }) assert.ok( !Object.isFrozen(tx), - `${txType.name}: tx should not be frozen when freeze deactivated in options` + `${txType.name}: tx should not be frozen when freeze deactivated in options`, ) tx = txType.create.bytesArray(txType.values as any, { common }) @@ -173,7 +173,7 @@ describe('[BaseTransaction]', () => { tx = txType.create.bytesArray(txType.values as any, { common, freeze: false }) assert.ok( !Object.isFrozen(tx), - `${txType.name}: tx should not be frozen when freeze deactivated in options` + `${txType.name}: tx should not be frozen when freeze deactivated in options`, ) } }) @@ -187,7 +187,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('nonce cannot have leading zeroes'), - 'should throw with nonce with leading zeroes' + 'should throw with nonce with leading zeroes', ) } rlpData[0] = toBytes('0x') @@ -198,7 +198,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('v cannot have leading zeroes'), - 'should throw with v with leading zeroes' + 'should throw with v with leading zeroes', ) } rlpData = eip2930Txs[0].raw() @@ -209,7 +209,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('gasLimit cannot have leading zeroes'), - 'should throw with gasLimit with leading zeroes' + 'should throw with gasLimit with leading zeroes', ) } rlpData = eip1559Txs[0].raw() @@ -220,7 +220,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('maxPriorityFeePerGas cannot have leading zeroes'), - 'should throw with maxPriorityFeePerGas with leading zeroes' + 'should throw with maxPriorityFeePerGas with leading zeroes', ) } }) @@ -230,11 +230,11 @@ describe('[BaseTransaction]', () => { for (const tx of txType.txs) { assert.ok( txType.create.rlp(tx.serialize(), { common }), - `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()` + `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()`, ) assert.ok( txType.create.rlp(tx.serialize(), { common }), - `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()` + `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()`, ) } } @@ -246,13 +246,13 @@ describe('[BaseTransaction]', () => { for (const activeCapability of txType.activeCapabilities) { assert.ok( tx.supports(activeCapability), - `${txType.name}: should recognize all supported capabilities` + `${txType.name}: should recognize all supported capabilities`, ) } for (const notActiveCapability of txType.notActiveCapabilities) { assert.notOk( tx.supports(notActiveCapability), - `${txType.name}: should reject non-active existing and not existing capabilities` + `${txType.name}: should reject non-active existing and not existing capabilities`, ) } } @@ -264,7 +264,7 @@ describe('[BaseTransaction]', () => { for (const tx of txType.txs) { assert.ok( txType.create.bytesArray(tx.raw() as any, { common }), - `${txType.name}: should do roundtrip raw() -> fromValuesArray()` + `${txType.name}: should do roundtrip 
raw() -> fromValuesArray()`, ) } } @@ -287,7 +287,7 @@ describe('[BaseTransaction]', () => { assert.equal(tx.verifySignature(), false, `${txType.name}: signature should not be valid`) assert.ok( tx.getValidationErrors().includes('Invalid Signature'), - `${txType.name}: should return an error string about not verifying signatures` + `${txType.name}: should return an error string about not verifying signatures`, ) assert.notOk(tx.isValid(), `${txType.name}: should not validate correctly`) } @@ -306,7 +306,7 @@ describe('[BaseTransaction]', () => { () => tx.sign(utf8ToBytes('invalid')), undefined, undefined, - `${txType.name}: should fail with invalid PK` + `${txType.name}: should fail with invalid PK`, ) } } @@ -323,14 +323,14 @@ describe('[BaseTransaction]', () => { v: undefined, r: undefined, s: undefined, - }) + }), ), ] for (const tx of txs) { assert.equal( tx.isSigned(), tx.v !== undefined && tx.r !== undefined && tx.s !== undefined, - 'isSigned() returns correctly' + 'isSigned() returns correctly', ) } } @@ -345,7 +345,7 @@ describe('[BaseTransaction]', () => { assert.equal( signedTx.getSenderAddress().toString(), `0x${sendersAddress}`, - `${txType.name}: should get sender's address after signing it` + `${txType.name}: should get sender's address after signing it`, ) } } @@ -362,7 +362,7 @@ describe('[BaseTransaction]', () => { const pubKeyFromPriv = privateToPublic(hexToBytes(`0x${privateKey}`)) assert.ok( equalsBytes(txPubKey, pubKeyFromPriv), - `${txType.name}: should get sender's public key after signing it` + `${txType.name}: should get sender's public key after signing it`, ) } } @@ -385,7 +385,7 @@ describe('[BaseTransaction]', () => { }, undefined, undefined, - 'should throw when s-value is greater than secp256k1n/2' + 'should throw when s-value is greater than secp256k1n/2', ) } } @@ -435,7 +435,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('equal or exceed MAX_INTEGER'), - 'throws when value equals or exceeds MAX_INTEGER' + 'throws when value equals or exceeds MAX_INTEGER', ) } try { @@ -448,7 +448,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('unimplemented bits value'), - 'throws when bits value other than 64 or 256 provided' + 'throws when bits value other than 64 or 256 provided', ) } try { @@ -461,7 +461,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('2^64'), - 'throws when 64 bit integer equals or exceeds MAX_UINT64' + 'throws when 64 bit integer equals or exceeds MAX_UINT64', ) } }) diff --git a/packages/tx/test/eip1559.spec.ts b/packages/tx/test/eip1559.spec.ts index 8588d7929b..6f9e7f08df 100644 --- a/packages/tx/test/eip1559.spec.ts +++ b/packages/tx/test/eip1559.spec.ts @@ -75,7 +75,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { gasLimit: 100, value: 6, }, - { common } + { common }, ) assert.equal(tx.getUpfrontCost(), BigInt(806), 'correct upfront cost with default base fee') let baseFee = BigInt(0) @@ -84,7 +84,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { assert.equal( tx.getUpfrontCost(baseFee), BigInt(1006), - 'correct upfront cost with cost-changing base fee value' + 'correct upfront cost with cost-changing base fee value', ) }) @@ -94,7 +94,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { maxFeePerGas: 10, maxPriorityFeePerGas: 8, }, - { common } + { common }, ) assert.equal(tx.getEffectivePriorityFee(BigInt(10)), BigInt(0)) assert.equal(tx.getEffectivePriorityFee(BigInt(9)), BigInt(1)) @@ 
-114,7 +114,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { const rlpSerialized = RLP.encode(Uint8Array.from(signed.serialize())) assert.ok( equalsBytes(rlpSerialized, hexToBytes(data.signedTransactionRLP as PrefixedHexString)), - 'Should sign txs correctly' + 'Should sign txs correctly', ) } }) @@ -160,11 +160,11 @@ describe('[FeeMarketEIP1559Transaction]', () => { let txn = create1559FeeMarketTx(data as FeeMarketEIP1559TxData, { common }) let signed = txn.sign(pkey) const expectedHash = hexToBytes( - '0x2e564c87eb4b40e7f469b2eec5aa5d18b0b46a24e8bf0919439cfb0e8fcae446' + '0x2e564c87eb4b40e7f469b2eec5aa5d18b0b46a24e8bf0919439cfb0e8fcae446', ) assert.ok( equalsBytes(signed.hash(), expectedHash), - 'Should provide the correct hash when frozen' + 'Should provide the correct hash when frozen', ) txn = create1559FeeMarketTx(data as FeeMarketEIP1559TxData, { common, @@ -173,7 +173,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { signed = txn.sign(pkey) assert.ok( equalsBytes(signed.hash(), expectedHash), - 'Should provide the correct hash when not frozen' + 'Should provide the correct hash when not frozen', ) }) @@ -209,7 +209,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { const signedTxn = txn.sign(pkey) assert.ok( signedTxn.common.hardfork() === Hardfork.Paris, - 'signed tx common is taken from tx.common' + 'signed tx common is taken from tx.common', ) }) @@ -221,20 +221,20 @@ describe('[FeeMarketEIP1559Transaction]', () => { accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) const expectedHash = hexToBytes( - '0xfa81814f7dd57bad435657a05eabdba2815f41e3f15ddd6139027e7db56b0dea' + '0xfa81814f7dd57bad435657a05eabdba2815f41e3f15ddd6139027e7db56b0dea', ) assert.deepEqual(unsignedTx.getHashedMessageToSign(), expectedHash), 'correct hashed version' const expectedSerialization = hexToBytes( - '0x02f85904808080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101' + '0x02f85904808080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101', ) assert.deepEqual( unsignedTx.getMessageToSign(), expectedSerialization, - 'correct serialized unhashed version' + 'correct serialized unhashed version', ) }) @@ -273,7 +273,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { gasLimit: 1, value: 6, }, - { common } + { common }, ) }, 'fee can be 2^256 - 1') assert.throws( @@ -285,12 +285,12 @@ describe('[FeeMarketEIP1559Transaction]', () => { gasLimit: 100, value: 6, }, - { common } + { common }, ) }, undefined, undefined, - 'fee must be less than 2^256' + 'fee must be less than 2^256', ) assert.throws( () => { @@ -301,12 +301,12 @@ describe('[FeeMarketEIP1559Transaction]', () => { gasLimit: 100, value: 6, }, - { common } + { common }, ) }, undefined, undefined, - 'total fee must be the larger of the two' + 'total fee must be the larger of the two', ) }) }) diff --git a/packages/tx/test/eip3860.spec.ts b/packages/tx/test/eip3860.spec.ts index 8a7a2fbf73..8230db40aa 100644 --- a/packages/tx/test/eip3860.spec.ts +++ b/packages/tx/test/eip3860.spec.ts @@ -88,15 +88,15 @@ describe('[EIP3860 tests]', () => { for (const txType of txTypes) { const eip3860ActiveTx = createTxFromTxData( { data, type: txType }, - { common, allowUnlimitedInitCodeSize: true } + { common, allowUnlimitedInitCodeSize: true }, ) const eip3860DeactivedTx = 
createTxFromTxData( { data, type: txType }, - { common, allowUnlimitedInitCodeSize: false } + { common, allowUnlimitedInitCodeSize: false }, ) assert.ok( eip3860ActiveTx.getDataGas() === eip3860DeactivedTx.getDataGas(), - 'charged initcode analysis gas' + 'charged initcode analysis gas', ) } }) diff --git a/packages/tx/test/eip4844.spec.ts b/packages/tx/test/eip4844.spec.ts index 4bf52bfe47..d7e8c4d363 100644 --- a/packages/tx/test/eip4844.spec.ts +++ b/packages/tx/test/eip4844.spec.ts @@ -49,7 +49,7 @@ describe('EIP4844 addSignature tests', () => { to: Address.zero(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) const signedTx = tx.sign(privateKey) const addSignatureTx = tx.addSignature(signedTx.v!, signedTx.r!, signedTx.s!) @@ -64,7 +64,7 @@ describe('EIP4844 addSignature tests', () => { to: Address.zero(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) const msgHash = tx.getHashedMessageToSign() @@ -83,7 +83,7 @@ describe('EIP4844 addSignature tests', () => { to: Address.zero(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) const msgHash = tx.getHashedMessageToSign() @@ -129,7 +129,7 @@ describe('EIP4844 constructor tests - valid scenarios', () => { assert.equal( decodedTx.getSenderAddress().toString(), sender, - 'signature and sender were deserialized correctly' + 'signature and sender were deserialized correctly', ) }) }) @@ -188,17 +188,17 @@ describe('fromTxData using from a json', () => { assert.deepEqual( { ...txData, accessList: [] }, { gasPrice: null, ...jsonData }, - 'toJSON should give correct json' + 'toJSON should give correct json', ) const fromSerializedTx = create4844BlobTxFromRLP( hexToBytes(txMeta.serialized as PrefixedHexString), - { common: c } + { common: c }, ) assert.equal( bytesToHex(fromSerializedTx.hash()), txMeta.hash, - 'fromSerializedTx hash should match' + 'fromSerializedTx hash should match', ) } catch (e) { assert.fail('failed to parse json data') @@ -240,7 +240,7 @@ describe('EIP4844 constructor tests - invalid scenarios', () => { } catch (err: any) { assert.ok( err.message.includes('versioned hash is invalid length'), - 'throws on invalid versioned hash length' + 'throws on invalid versioned hash length', ) } try { @@ -248,7 +248,7 @@ describe('EIP4844 constructor tests - invalid scenarios', () => { } catch (err: any) { assert.ok( err.message.includes('does not start with KZG commitment'), - 'throws on invalid commitment version' + 'throws on invalid commitment version', ) } try { @@ -256,7 +256,7 @@ describe('EIP4844 constructor tests - invalid scenarios', () => { } catch (err: any) { assert.ok( err.message.includes('tx can contain at most'), - 'throws on too many versioned hashes' + 'throws on too many versioned hashes', ) } }) @@ -288,7 +288,7 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) const signedTx = unsignedTx.sign(pk) @@ -305,7 +305,7 @@ describe('Network wrapper tests', () => { assert.equal( jsonData.kzgCommitments.length, signedTx.kzgCommitments!.length, - 'contains the correct number of commitments' + 'contains the correct number of commitments', ) for (let i = 0; i < jsonData.kzgCommitments.length; i++) { const c1 = jsonData.kzgCommitments[i] @@ -315,7 +315,7 @@ describe('Network wrapper tests', () => { assert.equal( jsonData.kzgProofs?.length, signedTx.kzgProofs!.length, - 'contains the 
correct number of proofs' + 'contains the correct number of proofs', ) for (let i = 0; i < jsonData.kzgProofs.length; i++) { const p1 = jsonData.kzgProofs[i] @@ -330,19 +330,19 @@ describe('Network wrapper tests', () => { assert.equal( deserializedTx.type, 0x03, - 'successfully deserialized a blob transaction network wrapper' + 'successfully deserialized a blob transaction network wrapper', ) assert.equal(deserializedTx.blobs?.length, blobs.length, 'contains the correct number of blobs') assert.equal( deserializedTx.getSenderAddress().toString(), sender, - 'decoded sender address correctly' + 'decoded sender address correctly', ) const minimalTx = createMinimal4844TxFromNetworkWrapper(deserializedTx, { common }) assert.ok(minimalTx.blobs === undefined, 'minimal representation contains no blobs') assert.ok( equalsBytes(minimalTx.hash(), deserializedTx.hash()), - 'has the same hash as the network wrapper version' + 'has the same hash as the network wrapper version', ) const simpleBlobTx = create4844BlobTx( @@ -352,13 +352,13 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) assert.equal( bytesToHex(unsignedTx.blobVersionedHashes[0]), bytesToHex(simpleBlobTx.blobVersionedHashes[0]), - 'tx versioned hash for simplified blob txData constructor matches fully specified versioned hashes' + 'tx versioned hash for simplified blob txData constructor matches fully specified versioned hashes', ) assert.throws( @@ -371,11 +371,11 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'encoded blobs', undefined, - 'throws on blobsData and blobs in txData' + 'throws on blobsData and blobs in txData', ) assert.throws( @@ -388,11 +388,11 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'KZG commitments', undefined, - 'throws on blobsData and KZG commitments in txData' + 'throws on blobsData and KZG commitments in txData', ) assert.throws( @@ -405,11 +405,11 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'versioned hashes', undefined, - 'throws on blobsData and versioned hashes in txData' + 'throws on blobsData and versioned hashes in txData', ) assert.throws( @@ -422,11 +422,11 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'KZG proofs', undefined, - 'throws on blobsData and KZG proofs in txData' + 'throws on blobsData and KZG proofs in txData', ) assert.throws( @@ -441,12 +441,12 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) }, 'tx should contain at least one blob', undefined, - 'throws a transaction with no blobs' + 'throws a transaction with no blobs', ) const txWithMissingBlob = create4844BlobTx( @@ -459,7 +459,7 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) const serializedWithMissingBlob = txWithMissingBlob.serializeNetworkWrapper() @@ -471,7 +471,7 @@ describe('Network wrapper tests', () => { }), 'Number of blobVersionedHashes, blobs, and commitments not all equal', undefined, - 'throws when blobs/commitments/hashes mismatch' + 'throws when blobs/commitments/hashes mismatch', ) const mangledValue = commitments[0][0] @@ -487,7 +487,7 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: 
randomBytes(20), }, - { common } + { common }, ) const serializedWithInvalidCommitment = txWithInvalidCommitment.serializeNetworkWrapper() @@ -499,7 +499,7 @@ describe('Network wrapper tests', () => { }), 'KZG proof cannot be verified from blobs/commitments', undefined, - 'throws when kzg proof cant be verified' + 'throws when kzg proof cant be verified', ) blobVersionedHashes[0][1] = 2 @@ -515,7 +515,7 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) const serializedWithInvalidVersionedHashes = @@ -527,7 +527,7 @@ describe('Network wrapper tests', () => { }), 'commitment for blob at index 0 does not match versionedHash', undefined, - 'throws when versioned hashes dont match kzg commitments' + 'throws when versioned hashes dont match kzg commitments', ) }) }) @@ -562,21 +562,21 @@ describe('hash() and signature verification', () => { ], to: Address.zero(), }, - { common } + { common }, ) assert.equal( bytesToHex(unsignedTx.getHashedMessageToSign()), '0x02560c5173b0d793ce019cfa515ece6a04a4b3f3d67eab67fbca78dd92d4ed76', - 'produced the correct transaction hash' + 'produced the correct transaction hash', ) const signedTx = unsignedTx.sign( - hexToBytes('0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8') + hexToBytes('0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8'), ) assert.equal( signedTx.getSenderAddress().toString(), '0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b', - 'was able to recover sender address' + 'was able to recover sender address', ) assert.ok(signedTx.verifySignature(), 'signature is valid') }) @@ -596,7 +596,7 @@ it('getEffectivePriorityFee()', async () => { to: Address.zero(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) assert.equal(tx.getEffectivePriorityFee(BigInt(10)), BigInt(0)) assert.equal(tx.getEffectivePriorityFee(BigInt(9)), BigInt(1)) @@ -665,7 +665,7 @@ describe('Network wrapper deserialization test', () => { assert.ok(equalsBytes(deserializedTx.blobs![0], blobs[0]), 'blobs should match') assert.ok( equalsBytes(deserializedTx.kzgCommitments![0], commitments[0]), - 'commitments should match' + 'commitments should match', ) assert.ok(equalsBytes(deserializedTx.kzgProofs![0], proofs[0]), 'proofs should match') @@ -685,7 +685,7 @@ describe('Network wrapper deserialization test', () => { sender, networkSerializedHexLength: networkSerialized.length, }, - 'txMeta should match' + 'txMeta should match', ) }) }) diff --git a/packages/tx/test/eip7702.spec.ts b/packages/tx/test/eip7702.spec.ts index 6213a1085f..47222ce5c4 100644 --- a/packages/tx/test/eip7702.spec.ts +++ b/packages/tx/test/eip7702.spec.ts @@ -27,7 +27,7 @@ describe('[EOACodeEIP7702Transaction]', () => { to: Address.zero(), data: new Uint8Array(1), }, - { common } + { common }, ) const signed = txn.sign(pkey) assert.ok(signed.getSenderAddress().equals(addr)) @@ -50,7 +50,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'address length should be 20 bytes') @@ -68,7 +68,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'nonce list should consist of at most 1 item') @@ -86,7 +86,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 's is not defined') @@ -104,7 +104,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'r is not defined') @@ -122,7 +122,7 @@ 
describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'yParity is not defined') @@ -140,7 +140,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'nonce is not defined') @@ -158,7 +158,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'address is not defined') @@ -176,7 +176,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }, 'chainId is not defined') @@ -194,7 +194,7 @@ describe('[EOACodeEIP7702Transaction]', () => { }, ], }, - { common } + { common }, ) }) }) diff --git a/packages/tx/test/fromRpc.spec.ts b/packages/tx/test/fromRpc.spec.ts index 171d7f14b5..45106f6871 100644 --- a/packages/tx/test/fromRpc.spec.ts +++ b/packages/tx/test/fromRpc.spec.ts @@ -64,7 +64,7 @@ describe('[fromJsonRpcProvider]', () => { } catch (err: any) { assert.ok( err.message.includes('No data returned from provider'), - 'throws correct error when no tx returned' + 'throws correct error when no tx returned', ) } global.fetch = realFetch diff --git a/packages/tx/test/inputValue.spec.ts b/packages/tx/test/inputValue.spec.ts index 9d718a3cbd..8fa64c4612 100644 --- a/packages/tx/test/inputValue.spec.ts +++ b/packages/tx/test/inputValue.spec.ts @@ -162,21 +162,21 @@ describe('[Invalid Array Input values]', () => { switch (txType) { case TransactionType.Legacy: assert.throws(() => - createLegacyTxFromBytesArray(rawValues as TxValuesArray[TransactionType.Legacy]) + createLegacyTxFromBytesArray(rawValues as TxValuesArray[TransactionType.Legacy]), ) break case TransactionType.AccessListEIP2930: assert.throws(() => create2930AccessListTxFromBytesArray( - rawValues as TxValuesArray[TransactionType.AccessListEIP2930] - ) + rawValues as TxValuesArray[TransactionType.AccessListEIP2930], + ), ) break case TransactionType.FeeMarketEIP1559: assert.throws(() => createEIP1559FeeMarketTxFromBytesArray( - rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559] - ) + rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559], + ), ) break } @@ -243,15 +243,15 @@ describe('[Invalid Access Lists]', () => { case TransactionType.AccessListEIP2930: assert.throws(() => create2930AccessListTxFromBytesArray( - rawValues as TxValuesArray[TransactionType.AccessListEIP2930] - ) + rawValues as TxValuesArray[TransactionType.AccessListEIP2930], + ), ) break case TransactionType.FeeMarketEIP1559: assert.throws(() => createEIP1559FeeMarketTxFromBytesArray( - rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559] - ) + rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559], + ), ) break } diff --git a/packages/tx/test/legacy.spec.ts b/packages/tx/test/legacy.spec.ts index 0f23043122..181400ba56 100644 --- a/packages/tx/test/legacy.spec.ts +++ b/packages/tx/test/legacy.spec.ts @@ -62,7 +62,7 @@ describe('[Transaction]', () => { const nonEIP2930Common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) assert.ok( createLegacyTx({}, { common: nonEIP2930Common }), - 'should initialize on a pre-Berlin Harfork (EIP-2930 not activated)' + 'should initialize on a pre-Berlin Harfork (EIP-2930 not activated)', ) const txData = txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) @@ -70,14 +70,14 @@ describe('[Transaction]', () => { let tx = createLegacyTxFromBytesArray(txData) assert.ok( tx.common.chainId() === BigInt(5), - 'should initialize Common with chain ID (supported) derived from v value (v with 0-parity)' + 'should initialize 
Common with chain ID (supported) derived from v value (v with 0-parity)', ) txData[6] = intToBytes(46) // v with 1-parity and chain ID 5 tx = createLegacyTxFromBytesArray(txData) assert.ok( tx.common.chainId() === BigInt(5), - 'should initialize Common with chain ID (supported) derived from v value (v with 1-parity)' + 'should initialize Common with chain ID (supported) derived from v value (v with 1-parity)', ) txData[6] = intToBytes(2033) // v with 0-parity and chain ID 999 @@ -85,7 +85,7 @@ describe('[Transaction]', () => { assert.equal( tx.common.chainId(), BigInt(999), - 'should initialize Common with chain ID (unsupported) derived from v value (v with 0-parity)' + 'should initialize Common with chain ID (unsupported) derived from v value (v with 0-parity)', ) txData[6] = intToBytes(2034) // v with 1-parity and chain ID 999 @@ -93,7 +93,7 @@ describe('[Transaction]', () => { assert.equal( tx.common.chainId(), BigInt(999), - 'should initialize Common with chain ID (unsupported) derived from v value (v with 1-parity)' + 'should initialize Common with chain ID (unsupported) derived from v value (v with 1-parity)', ) }) @@ -191,13 +191,13 @@ describe('[Transaction]', () => { assert.equal(tx.getDataGas(), BigInt(0)) tx = createLegacyTxFromBytesArray( - txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) + txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), ) assert.equal(tx.getDataGas(), BigInt(1716)) tx = createLegacyTxFromBytesArray( txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), - { freeze: false } + { freeze: false }, ) assert.equal(tx.getDataGas(), BigInt(1716)) }) @@ -211,7 +211,7 @@ describe('[Transaction]', () => { txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) assert.equal(tx.getDataGas(), BigInt(1716)) }) @@ -222,7 +222,7 @@ describe('[Transaction]', () => { txFixtures[0].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) assert.equal(tx.getDataGas(), BigInt(656)) tx.common.setHardfork(Hardfork.Istanbul) @@ -277,7 +277,7 @@ describe('[Transaction]', () => { txFixtures[3].raw.slice(0, 6).map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) assert.throws( () => { @@ -285,44 +285,44 @@ describe('[Transaction]', () => { }, undefined, undefined, - 'should throw calling hash with unsigned tx' + 'should throw calling hash with unsigned tx', ) tx = createLegacyTxFromBytesArray( txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) assert.deepEqual( tx.hash(), - hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa') + hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa'), ) assert.deepEqual( tx.getHashedMessageToSign(), - hexToBytes('0x61e1ec33764304dddb55348e7883d4437426f44ab3ef65e6da1e025734c03ff0') + hexToBytes('0x61e1ec33764304dddb55348e7883d4437426f44ab3ef65e6da1e025734c03ff0'), ) assert.equal(tx.getMessageToSign().length, 6) assert.deepEqual( tx.hash(), - hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa') + hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa'), ) }) it('hash() -> with defined chainId', () => { const tx = createLegacyTxFromBytesArray( - txFixtures[4].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) + txFixtures[4].raw.map((rawTxData) => hexToBytes(rawTxData as 
PrefixedHexString)), ) assert.equal( bytesToHex(tx.hash()), - '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4' + '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4', ) assert.equal( bytesToHex(tx.hash()), - '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4' + '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4', ) assert.equal( bytesToHex(tx.getHashedMessageToSign()), - '0xf97c73fdca079da7652dbc61a46cd5aeef804008e057be3e712c43eac389aaf0' + '0xf97c73fdca079da7652dbc61a46cd5aeef804008e057be3e712c43eac389aaf0', ) }) @@ -346,10 +346,10 @@ describe('[Transaction]', () => { '0x', ] const privateKey = hexToBytes( - '0x4646464646464646464646464646464646464646464646464646464646464646' + '0x4646464646464646464646464646464646464646464646464646464646464646', ) const pt = createLegacyTxFromBytesArray( - txRaw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) + txRaw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), ) // Note that Vitalik's example has a very similar value denoted "signing data". @@ -357,16 +357,16 @@ describe('[Transaction]', () => { // We don't have a getter for such a value in LegacyTransaction. assert.equal( bytesToHex(pt.serialize()), - '0xec098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a764000080808080' + '0xec098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a764000080808080', ) const signedTx = pt.sign(privateKey) assert.equal( bytesToHex(signedTx.getHashedMessageToSign()), - '0xdaf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53' + '0xdaf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53', ) assert.equal( bytesToHex(signedTx.serialize()), - '0xf86c098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a76400008025a028ef61340bd939bc2195fe537567866003e1a15d3c71ff63e1590620aa636276a067cbe9d8997f761aecb703304b3800ccf555c9f3dc64214b297fb1966a3b6d83' + '0xf86c098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a76400008025a028ef61340bd939bc2195fe537567866003e1a15d3c71ff63e1590620aa636276a067cbe9d8997f761aecb703304b3800ccf555c9f3dc64214b297fb1966a3b6d83', ) }) @@ -377,7 +377,7 @@ describe('[Transaction]', () => { txData.raw.slice(0, 6).map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) const privKey = hexToBytes(`0x${txData.privateKey}`) @@ -386,7 +386,7 @@ describe('[Transaction]', () => { assert.equal( txSigned.getSenderAddress().toString(), '0x' + txData.sendersAddress, - "computed sender address should equal the fixture's one" + "computed sender address should equal the fixture's one", ) } }) @@ -401,17 +401,17 @@ describe('[Transaction]', () => { '0x', ] const privateKey = hexToBytes( - '0xDE3128752F183E8930D7F00A2AAA302DCB5E700B2CBA2D8CA5795660F07DEFD5' + '0xDE3128752F183E8930D7F00A2AAA302DCB5E700B2CBA2D8CA5795660F07DEFD5', ) const common = createCustomCommon({ chainId: 3 }) const tx = createLegacyTxFromBytesArray( txRaw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), - { common } + { common }, ) const signedTx = tx.sign(privateKey) assert.equal( bytesToHex(signedTx.serialize()), - '0xf86c018502540be40082520894d7250824390ec5c8b71d856b5de895e271170d9d880de0b6b3a76400008029a0d3512c68099d184ccf54f44d9d6905bff303128574b663dcf10b4c726ddd8133a0628acc8f481dea593f13309dfc5f0340f83fdd40cf9fbe47f782668f6f3aec74' + 
'0xf86c018502540be40082520894d7250824390ec5c8b71d856b5de895e271170d9d880de0b6b3a76400008029a0d3512c68099d184ccf54f44d9d6905bff303128574b663dcf10b4c726ddd8133a0628acc8f481dea593f13309dfc5f0340f83fdd40cf9fbe47f782668f6f3aec74', ) }) @@ -426,7 +426,7 @@ describe('[Transaction]', () => { } const privateKey = hexToBytes( - '0x4646464646464646464646464646464646464646464646464646464646464646' + '0x4646464646464646464646464646464646464646464646464646464646464646', ) const common = new Common({ @@ -457,7 +457,7 @@ describe('[Transaction]', () => { assert.isTrue(signedWithoutEIP155.verifySignature()) assert.isTrue( signedWithoutEIP155.v?.toString(16) === '1c' || signedWithoutEIP155.v?.toString(16) === '1b', - "v shouldn't be EIP155 encoded" + "v shouldn't be EIP155 encoded", ) signedWithoutEIP155 = createLegacyTx(txData, { @@ -467,7 +467,7 @@ describe('[Transaction]', () => { assert.isTrue(signedWithoutEIP155.verifySignature()) assert.isTrue( signedWithoutEIP155.v?.toString(16) === '1c' || signedWithoutEIP155.v?.toString(16) === '1b', - "v shouldn't be EIP155 encoded" + "v shouldn't be EIP155 encoded", ) }) @@ -525,7 +525,7 @@ describe('[Transaction]', () => { const signedTxn = txn.sign(pkey) assert.ok( signedTxn.common.hardfork() === Hardfork.Paris, - 'signed tx common is taken from tx.common' + 'signed tx common is taken from tx.common', ) }) @@ -542,7 +542,7 @@ describe('[Transaction]', () => { value: '0x0', } const privateKey = hexToBytes( - '0x4646464646464646464646464646464646464646464646464646464646464646' + '0x4646464646464646464646464646464646464646464646464646464646464646', ) tx = createLegacyTx(txData) assert.notOk(tx.isSigned()) diff --git a/packages/tx/test/testLoader.ts b/packages/tx/test/testLoader.ts index 62bcaa6adf..eea8a394fe 100644 --- a/packages/tx/test/testLoader.ts +++ b/packages/tx/test/testLoader.ts @@ -23,7 +23,7 @@ export async function getTests( fileFilter: RegExp | string[] = /.json$/, skipPredicate: (...args: any[]) => boolean = falsePredicate, directory: string, - excludeDir: RegExp | string[] = [] + excludeDir: RegExp | string[] = [], ): Promise { const options = { match: fileFilter, @@ -41,7 +41,7 @@ export async function getTests( err: Error | undefined, content: string | Uint8Array, fileName: string, - next: Function + next: Function, ) => { if (err) { reject(err) diff --git a/packages/tx/test/transactionFactory.spec.ts b/packages/tx/test/transactionFactory.spec.ts index ddb7c6f5ea..90cdeac4ba 100644 --- a/packages/tx/test/transactionFactory.spec.ts +++ b/packages/tx/test/transactionFactory.spec.ts @@ -65,7 +65,7 @@ describe('[TransactionFactory]: Basic functions', () => { assert.equal( factoryTx.constructor.name, txType.class.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) } }) @@ -82,7 +82,7 @@ describe('[TransactionFactory]: Basic functions', () => { }, undefined, undefined, - `should throw when trying to create typed tx when not allowed in Common (${txType.name})` + `should throw when trying to create typed tx when not allowed in Common (${txType.name})`, ) assert.throws( @@ -93,7 +93,7 @@ describe('[TransactionFactory]: Basic functions', () => { }, undefined, undefined, - `should throw when trying to create typed tx with wrong type (${txType.name})` + `should throw when trying to create typed tx with wrong type (${txType.name})`, ) } } @@ -111,19 +111,19 @@ describe('[TransactionFactory]: Basic functions', () => { assert.equal( tx.constructor.name, txType.name, - `should return the right type 
(${txType.name})` + `should return the right type (${txType.name})`, ) if (txType.eip2718) { assert.deepEqual( tx.serialize(), rawTx, - `round-trip serialization should match (${txType.name})` + `round-trip serialization should match (${txType.name})`, ) } else { assert.deepEqual( tx.raw(), rawTx as Uint8Array[], - `round-trip raw() creation should match (${txType.name})` + `round-trip raw() creation should match (${txType.name})`, ) } } @@ -135,14 +135,14 @@ describe('[TransactionFactory]: Basic functions', () => { assert.equal( tx.constructor.name, txType.class.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) if (!txType.eip2718) { const tx = createTxFromTxData({}) assert.equal( tx.constructor.name, txType.class.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) } } diff --git a/packages/tx/test/transactionRunner.spec.ts b/packages/tx/test/transactionRunner.spec.ts index ac0a458161..cade96c4a5 100644 --- a/packages/tx/test/transactionRunner.spec.ts +++ b/packages/tx/test/transactionRunner.spec.ts @@ -52,7 +52,7 @@ describe('TransactionTests', async () => { _filename: string, subDir: string, testName: string, - testData: OfficialTransactionTestData + testData: OfficialTransactionTestData, ) => { it(testName, () => { for (const forkName of forkNames) { @@ -83,7 +83,7 @@ describe('TransactionTests', async () => { } else { assert.ok( hashAndSenderAreCorrect && txIsValid, - `Transaction should be valid on ${forkName}` + `Transaction should be valid on ${forkName}`, ) } } catch (e: any) { @@ -98,6 +98,6 @@ describe('TransactionTests', async () => { }, fileFilterRegex, undefined, - 'TransactionTests' + 'TransactionTests', ) }) diff --git a/packages/tx/test/typedTxsAndEIP2930.spec.ts b/packages/tx/test/typedTxsAndEIP2930.spec.ts index 5edf37d877..dbf8d5136c 100644 --- a/packages/tx/test/typedTxsAndEIP2930.spec.ts +++ b/packages/tx/test/typedTxsAndEIP2930.spec.ts @@ -71,7 +71,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }) assert.ok( tx.common.chainId() === BigInt(5), - 'should initialize Common with chain ID provided (supported chain ID)' + 'should initialize Common with chain ID provided (supported chain ID)', ) tx = txType.create.txData({ @@ -79,7 +79,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }) assert.ok( tx.common.chainId() === BigInt(99999), - 'should initialize Common with chain ID provided (unsupported chain ID)' + 'should initialize Common with chain ID provided (unsupported chain ID)', ) const nonEIP2930Common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) @@ -89,7 +89,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - `should throw on a pre-Berlin Hardfork (EIP-2930 not activated) (${txType.name})` + `should throw on a pre-Berlin Hardfork (EIP-2930 not activated) (${txType.name})`, ) assert.throws( @@ -98,12 +98,12 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 { chainId: chainId + BigInt(1), }, - { common } + { common }, ) }, undefined, undefined, - `should reject transactions with wrong chain ID (${txType.name})` + `should reject transactions with wrong chain ID (${txType.name})`, ) assert.throws( @@ -112,12 +112,12 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 { v: 2, }, - { common } + { common }, ) }, undefined, undefined, - 
`should reject transactions with invalid yParity (v) values (${txType.name})` + `should reject transactions with invalid yParity (v) values (${txType.name})`, ) } }) @@ -168,7 +168,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 } catch (e: any) { assert.ok( e.message.includes('wrong tx type'), - `should throw on wrong tx type (${txType.name})` + `should throw on wrong tx type (${txType.name})`, ) } @@ -179,7 +179,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 } catch (e: any) { assert.ok( e.message.includes('must be array'), - `should throw when RLP payload not an array (${txType.name})` + `should throw when RLP payload not an array (${txType.name})`, ) } @@ -190,7 +190,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 } catch (e: any) { assert.ok( e.message.includes('values (for unsigned tx)'), - `should throw with invalid number of values (${txType.name})` + `should throw with invalid number of values (${txType.name})`, ) } } @@ -209,7 +209,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 accessList: access, chainId: Chain.Mainnet, }, - { common } + { common }, ) // Check if everything is converted @@ -229,7 +229,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 accessList: bytes, chainId: Chain.Mainnet, }, - { common } + { common }, ) const JSONRaw = txnRaw.AccessListJSON @@ -253,7 +253,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - txType.name + txType.name, ) accessList = [ @@ -271,7 +271,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - txType.name + txType.name, ) accessList = [[]] // Address does not exist @@ -282,7 +282,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - txType.name + txType.name, ) accessList = [[validAddress]] // Slots does not exist @@ -293,7 +293,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - txType.name + txType.name, ) accessList = [[validAddress, validSlot]] // Slots is not an array @@ -304,7 +304,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - txType.name + txType.name, ) accessList = [[validAddress, [], []]] // 3 items where 2 are expected @@ -315,7 +315,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - txType.name + txType.name, ) } }) @@ -329,13 +329,13 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) let signed = tx.sign(pKey) const signedAddress = signed.getSenderAddress() assert.ok( equalsBytes(signedAddress.bytes, address), - `should sign a transaction (${txType.name})` + `should sign a transaction (${txType.name})`, ) signed.verifySignature() // If this throws, test will not end. 
@@ -345,7 +345,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 assert.deepEqual( tx.accessList, [], - `should create and sign transactions without passing access list value (${txType.name})` + `should create and sign transactions without passing access list value (${txType.name})`, ) assert.deepEqual(signed.accessList, []) @@ -357,7 +357,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - `should throw calling hash with unsigned tx (${txType.name})` + `should throw calling hash with unsigned tx (${txType.name})`, ) assert.throws(() => { @@ -373,7 +373,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - `should throw with invalid s value (${txType.name})` + `should throw with invalid s value (${txType.name})`, ) } }) @@ -434,7 +434,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { const tx = create2930AccessListTx({}, { common }) assert.ok( create2930AccessListTx(tx, { common }), - 'should initialize correctly from its own data' + 'should initialize correctly from its own data', ) const validAddress = hexToBytes(`0x${'01'.repeat(20)}`) @@ -450,12 +450,12 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { gasLimit: MAX_UINT64, gasPrice: MAX_INTEGER, }, - { common } + { common }, ) } catch (err: any) { assert.ok( err.message.includes('gasLimit * gasPrice cannot exceed MAX_INTEGER'), - 'throws when gasLimit * gasPrice exceeds MAX_INTEGER' + 'throws when gasLimit * gasPrice exceeds MAX_INTEGER', ) } }) @@ -468,12 +468,12 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { const aclBytes: AccessListBytesItem = [address, storageKeys] create2930AccessListTxFromBytesArray( [bytes, bytes, bytes, bytes, bytes, bytes, bytes, [aclBytes], bytes], - {} + {}, ) }, undefined, undefined, - 'should throw with values array with length different than 8 or 11' + 'should throw with values array with length different than 8 or 11', ) it(`should return right upfront cost`, () => { @@ -484,7 +484,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) // Cost should be: // Base fee + 2*TxDataNonZero + TxDataZero + AccessListAddressCost + AccessListSlotCost @@ -502,8 +502,8 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { txDataZero + baseFee + accessListAddressCost + - accessListStorageKeyCost - ) + accessListStorageKeyCost, + ), ) // In this Tx, `to` is `undefined`, so we should charge homestead creation gas. 
@@ -513,7 +513,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) assert.ok( @@ -524,8 +524,8 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { creationFee + baseFee + accessListAddressCost + - accessListStorageKeyCost - ) + accessListStorageKeyCost, + ), ) // Explicitly check that even if we have duplicates in our list, we still charge for those @@ -538,12 +538,12 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { ], chainId, }, - { common } + { common }, ) assert.ok( tx.getIntrinsicGas() === - BigInt(baseFee + accessListAddressCost * 2 + accessListStorageKeyCost * 3) + BigInt(baseFee + accessListAddressCost * 2 + accessListStorageKeyCost * 3), ) }) @@ -565,7 +565,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { gasLimit: 10000000, value: 42, }, - { common } + { common }, ) assert.equal(tx.getUpfrontCost(), BigInt(10000000042)) }) @@ -578,20 +578,20 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) const expectedHash = hexToBytes( - '0x78528e2724aa359c58c13e43a7c467eb721ce8d410c2a12ee62943a3aaefb60b' + '0x78528e2724aa359c58c13e43a7c467eb721ce8d410c2a12ee62943a3aaefb60b', ) assert.deepEqual(unsignedTx.getHashedMessageToSign(), expectedHash), 'correct hashed version' const expectedSerialization = hexToBytes( - '0x01f858018080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101' + '0x01f858018080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101', ) assert.deepEqual( unsignedTx.getMessageToSign(), expectedSerialization, - 'correct serialized unhashed version' + 'correct serialized unhashed version', ) }) @@ -624,21 +624,21 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { usedCommon.setEIPs([2718, 2929, 2930]) const expectedUnsignedRaw = hexToBytes( - '0x01f86587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a00000000000000000000000000000000000000000000000000000000000000000808080' + '0x01f86587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a00000000000000000000000000000000000000000000000000000000000000000808080', ) const pkey = hexToBytes('0xfad9c8855b740a0b7ed4c221dbad0f33a83a49cad6b3fe8d5817ac83d38b6a19') const expectedSigned = hexToBytes( - '0x01f8a587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a0000000000000000000000000000000000000000000000000000000000000000080a0294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938da00be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d' + '0x01f8a587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a0000000000000000000000000000000000000000000000000000000000000000080a0294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938da00be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d', ) const expectedHash = hexToBytes( - 
'0xbbd570a3c6acc9bb7da0d5c0322fe4ea2a300db80226f7df4fef39b2d6649eec' + '0xbbd570a3c6acc9bb7da0d5c0322fe4ea2a300db80226f7df4fef39b2d6649eec', ) const v = BigInt(0) const r = bytesToBigInt( - hexToBytes('0x294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938d') + hexToBytes('0x294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938d'), ) const s = bytesToBigInt( - hexToBytes('0x0be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d') + hexToBytes('0x0be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d'), ) const unsignedTx = create2930AccessListTx(txData, { common: usedCommon }) @@ -647,7 +647,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { assert.ok( equalsBytes(expectedUnsignedRaw, serializedMessageRaw), - 'serialized unsigned message correct' + 'serialized unsigned message correct', ) const signed = unsignedTx.sign(pkey) @@ -701,7 +701,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { const signedTxn = txn.sign(pKey) assert.ok( signedTxn.common.hardfork() === Hardfork.Paris, - 'signed tx common is taken from tx.common' + 'signed tx common is taken from tx.common', ) }) }) diff --git a/packages/tx/tsconfig.lint.json b/packages/tx/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/tx/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/util/.eslintrc.cjs b/packages/util/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/util/.eslintrc.cjs +++ b/packages/util/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/util/src/account.ts b/packages/util/src/account.ts index 2ade70908d..081288cba3 100644 --- a/packages/util/src/account.ts +++ b/packages/util/src/account.ts @@ -135,7 +135,7 @@ export class Account { storageRoot: Uint8Array | null = KECCAK256_RLP, codeHash: Uint8Array | null = KECCAK256_NULL, codeSize: number | null = null, - version: number | null = 0 + version: number | null = 0, ) { this._nonce = nonce this._balance = balance @@ -280,7 +280,7 @@ export function createAccount(accountData: AccountData) { nonce !== undefined ? bytesToBigInt(toBytes(nonce)) : undefined, balance !== undefined ? bytesToBigInt(toBytes(balance)) : undefined, storageRoot !== undefined ? toBytes(storageRoot) : undefined, - codeHash !== undefined ? toBytes(codeHash) : undefined + codeHash !== undefined ? toBytes(codeHash) : undefined, ) } @@ -310,7 +310,7 @@ export function createPartialAccount(partialAccountData: PartialAccountData) { storageRoot !== undefined && storageRoot !== null ? toBytes(storageRoot) : storageRoot, codeHash !== undefined && codeHash !== null ? toBytes(codeHash) : codeHash, codeSize !== undefined && codeSize !== null ? bytesToInt(toBytes(codeSize)) : codeSize, - version !== undefined && version !== null ? bytesToInt(toBytes(version)) : version + version !== undefined && version !== null ? 
bytesToInt(toBytes(version)) : version, ) } @@ -439,7 +439,7 @@ export const isValidAddress = function (hexAddress: string): hexAddress is Prefi */ export const toChecksumAddress = function ( hexAddress: string, - eip1191ChainId?: BigIntLike + eip1191ChainId?: BigIntLike, ): PrefixedHexString { assertIsHexString(hexAddress) const address = stripHexPrefix(hexAddress).toLowerCase() @@ -472,7 +472,7 @@ export const toChecksumAddress = function ( */ export const isValidChecksumAddress = function ( hexAddress: string, - eip1191ChainId?: BigIntLike + eip1191ChainId?: BigIntLike, ): boolean { return isValidAddress(hexAddress) && toChecksumAddress(hexAddress, eip1191ChainId) === hexAddress } @@ -505,7 +505,7 @@ export const generateAddress = function (from: Uint8Array, nonce: Uint8Array): U export const generateAddress2 = function ( from: Uint8Array, salt: Uint8Array, - initCode: Uint8Array + initCode: Uint8Array, ): Uint8Array { assertIsBytes(from) assertIsBytes(salt) diff --git a/packages/util/src/asyncEventEmitter.ts b/packages/util/src/asyncEventEmitter.ts index 7085b0a672..e5eefc1ffe 100644 --- a/packages/util/src/asyncEventEmitter.ts +++ b/packages/util/src/asyncEventEmitter.ts @@ -18,7 +18,7 @@ export interface EventMap { async function runInSeries( context: any, tasks: Array<(data: unknown, callback?: (error?: Error) => void) => void>, - data: unknown + data: unknown, ): Promise { let error: Error | undefined for await (const task of tasks) { @@ -132,7 +132,7 @@ export class AsyncEventEmitter extends EventEmitter { event: E & string, target: T[E], listener: T[E], - beforeOrAfter?: string + beforeOrAfter?: string, ) { let listeners = (this as any)._events[event] ?? [] let i diff --git a/packages/util/src/bytes.ts b/packages/util/src/bytes.ts index 8ab36fb35e..7226c5b4b5 100644 --- a/packages/util/src/bytes.ts +++ b/packages/util/src/bytes.ts @@ -125,7 +125,7 @@ export const hexToBytes = (hex: PrefixedHexString): Uint8Array => { const unprefixedHex = hex.slice(2) return _unprefixedHexToBytes( - unprefixedHex.length % 2 === 0 ? unprefixedHex : padToEven(unprefixedHex) + unprefixedHex.length % 2 === 0 ? unprefixedHex : padToEven(unprefixedHex), ) } @@ -226,7 +226,7 @@ export const setLengthRight = (msg: Uint8Array, length: number): Uint8Array => { * @return {Uint8Array|number[]|string} */ const stripZeros = ( - a: T + a: T, ): T => { let first = a[0] while (a.length > 0 && first.toString() === '0') { @@ -296,7 +296,7 @@ export const toBytes = (v: ToBytesInputTypes): Uint8Array => { if (typeof v === 'string') { if (!isHexString(v)) { throw new Error( - `Cannot convert string to Uint8Array. toBytes only supports 0x-prefixed hex strings and this string was given: ${v}` + `Cannot convert string to Uint8Array. toBytes only supports 0x-prefixed hex strings and this string was given: ${v}`, ) } return hexToBytes(v) diff --git a/packages/util/src/constants.ts b/packages/util/src/constants.ts index d1c8110465..b1587c9bc7 100644 --- a/packages/util/src/constants.ts +++ b/packages/util/src/constants.ts @@ -11,7 +11,7 @@ export const MAX_UINT64 = BigInt('0xffffffffffffffff') * The max integer that the evm can handle (2^256-1) */ export const MAX_INTEGER = BigInt( - '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', ) /** @@ -20,7 +20,7 @@ export const MAX_INTEGER = BigInt( * We use literal value instead of calculated value for compatibility issue. 
*/ export const MAX_INTEGER_BIGINT = BigInt( - '115792089237316195423570985008687907853269984665640564039457584007913129639935' + '115792089237316195423570985008687907853269984665640564039457584007913129639935', ) export const SECP256K1_ORDER = secp256k1.CURVE.n @@ -30,7 +30,7 @@ export const SECP256K1_ORDER_DIV_2 = secp256k1.CURVE.n / BigInt(2) * 2^256 */ export const TWO_POW256 = BigInt( - '0x10000000000000000000000000000000000000000000000000000000000000000' + '0x10000000000000000000000000000000000000000000000000000000000000000', ) /** diff --git a/packages/util/src/db.ts b/packages/util/src/db.ts index 9bdfabcd90..cc0e8a2a21 100644 --- a/packages/util/src/db.ts +++ b/packages/util/src/db.ts @@ -3,7 +3,7 @@ export type DBObject = { } export type BatchDBOp< TKey extends Uint8Array | string | number = Uint8Array, - TValue extends Uint8Array | string | DBObject = Uint8Array + TValue extends Uint8Array | string | DBObject = Uint8Array, > = PutBatch | DelBatch export enum KeyEncoding { @@ -24,7 +24,7 @@ export type EncodingOpts = { } export interface PutBatch< TKey extends Uint8Array | string | number = Uint8Array, - TValue extends Uint8Array | string | DBObject = Uint8Array + TValue extends Uint8Array | string | DBObject = Uint8Array, > { type: 'put' key: TKey @@ -40,7 +40,7 @@ export interface DelBatch { /** * Retrieves a raw value from db. diff --git a/packages/util/src/genesis.ts b/packages/util/src/genesis.ts index 8ad5426d6f..4cfebc2f70 100644 --- a/packages/util/src/genesis.ts +++ b/packages/util/src/genesis.ts @@ -9,7 +9,7 @@ export type AccountState = [ balance: PrefixedHexString, code: PrefixedHexString, storage: Array, - nonce: PrefixedHexString + nonce: PrefixedHexString, ] /** diff --git a/packages/util/src/internal.ts b/packages/util/src/internal.ts index 6333fe40a5..4af836eaa4 100644 --- a/packages/util/src/internal.ts +++ b/packages/util/src/internal.ts @@ -93,16 +93,16 @@ export function getBinarySize(str: string) { export function arrayContainsArray( superset: unknown[], subset: unknown[], - some?: boolean + some?: boolean, ): boolean { if (Array.isArray(superset) !== true) { throw new Error( - `[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'` + `[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'`, ) } if (Array.isArray(subset) !== true) { throw new Error( - `[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'` + `[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'`, ) } @@ -179,7 +179,7 @@ export function getKeys(params: Record[], key: string, allowEmpt } if (typeof key !== 'string') { throw new Error( - `[getKeys] method expects input 'key' to be type 'string', got ${typeof params}` + `[getKeys] method expects input 'key' to be type 'string', got ${typeof params}`, ) } diff --git a/packages/util/src/kzg.ts b/packages/util/src/kzg.ts index 4930b161ca..256d2e35a4 100644 --- a/packages/util/src/kzg.ts +++ b/packages/util/src/kzg.ts @@ -14,12 +14,12 @@ export interface Kzg { polynomialKzg: Uint8Array, z: Uint8Array, y: Uint8Array, - kzgProof: Uint8Array + kzgProof: Uint8Array, ): boolean verifyBlobKzgProofBatch( blobs: Uint8Array[], expectedKzgCommitments: Uint8Array[], - kzgProofs: Uint8Array[] + kzgProofs: Uint8Array[], ): boolean } diff --git a/packages/util/src/mapDB.ts b/packages/util/src/mapDB.ts index 4df7954fdb..88957cb19a 100644 --- a/packages/util/src/mapDB.ts +++ 
b/packages/util/src/mapDB.ts @@ -4,7 +4,7 @@ import type { BatchDBOp, DB, DBObject } from './db.js' export class MapDB< TKey extends Uint8Array | string | number, - TValue extends Uint8Array | string | DBObject + TValue extends Uint8Array | string | DBObject, > implements DB { _database: Map diff --git a/packages/util/src/provider.ts b/packages/util/src/provider.ts index bc8dfa009b..28bc74a766 100644 --- a/packages/util/src/provider.ts +++ b/packages/util/src/provider.ts @@ -47,8 +47,8 @@ export const fetchFromProvider = async (url: string, params: rpcParams) => { }), }, null, - 2 - )}` + 2, + )}`, ) } const json = await res.json() diff --git a/packages/util/src/requests.ts b/packages/util/src/requests.ts index 576ae52856..3490e06977 100644 --- a/packages/util/src/requests.ts +++ b/packages/util/src/requests.ts @@ -96,7 +96,7 @@ export class DepositRequest extends CLRequest { public readonly withdrawalCredentials: Uint8Array, public readonly amount: bigint, public readonly signature: Uint8Array, - public readonly index: bigint + public readonly index: bigint, ) { super(CLRequestType.Deposit) } @@ -124,7 +124,13 @@ export class DepositRequest extends CLRequest { return concatBytes( Uint8Array.from([this.type]), - RLP.encode([this.pubkey, this.withdrawalCredentials, amountBytes, this.signature, indexBytes]) + RLP.encode([ + this.pubkey, + this.withdrawalCredentials, + amountBytes, + this.signature, + indexBytes, + ]), ) } @@ -140,7 +146,7 @@ export class DepositRequest extends CLRequest { public static deserialize(bytes: Uint8Array): DepositRequest { const [pubkey, withdrawalCredentials, amount, signature, index] = RLP.decode( - bytes.slice(1) + bytes.slice(1), ) as [Uint8Array, Uint8Array, Uint8Array, Uint8Array, Uint8Array] return this.fromRequestData({ pubkey, @@ -156,7 +162,7 @@ export class WithdrawalRequest extends CLRequest { constructor( public readonly sourceAddress: Uint8Array, public readonly validatorPubkey: Uint8Array, - public readonly amount: bigint + public readonly amount: bigint, ) { super(CLRequestType.Withdrawal) } @@ -180,7 +186,7 @@ export class WithdrawalRequest extends CLRequest { return concatBytes( Uint8Array.from([this.type]), - RLP.encode([this.sourceAddress, this.validatorPubkey, amountBytes]) + RLP.encode([this.sourceAddress, this.validatorPubkey, amountBytes]), ) } @@ -196,7 +202,7 @@ export class WithdrawalRequest extends CLRequest { const [sourceAddress, validatorPubkey, amount] = RLP.decode(bytes.slice(1)) as [ Uint8Array, Uint8Array, - Uint8Array + Uint8Array, ] return this.fromRequestData({ sourceAddress, @@ -210,7 +216,7 @@ export class ConsolidationRequest extends CLRequest constructor( public readonly sourceAddress: Uint8Array, public readonly sourcePubkey: Uint8Array, - public readonly targetPubkey: Uint8Array + public readonly targetPubkey: Uint8Array, ) { super(CLRequestType.Consolidation) } @@ -232,7 +238,7 @@ export class ConsolidationRequest extends CLRequest serialize() { return concatBytes( Uint8Array.from([this.type]), - RLP.encode([this.sourceAddress, this.sourcePubkey, this.targetPubkey]) + RLP.encode([this.sourceAddress, this.sourcePubkey, this.targetPubkey]), ) } @@ -248,7 +254,7 @@ export class ConsolidationRequest extends CLRequest const [sourceAddress, sourcePubkey, targetPubkey] = RLP.decode(bytes.slice(1)) as [ Uint8Array, Uint8Array, - Uint8Array + Uint8Array, ] return this.fromRequestData({ sourceAddress, diff --git a/packages/util/src/signature.ts b/packages/util/src/signature.ts index 9dd8466836..03d43da741 100644 --- 
a/packages/util/src/signature.ts +++ b/packages/util/src/signature.ts @@ -37,7 +37,7 @@ export interface ECDSASignature { export function ecsign( msgHash: Uint8Array, privateKey: Uint8Array, - chainId?: bigint + chainId?: bigint, ): ECDSASignature { const sig = secp256k1.sign(msgHash, privateKey) const buf = sig.toCompactRawBytes() @@ -75,7 +75,7 @@ export const ecrecover = function ( v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ): Uint8Array { const signature = concatBytes(setLengthLeft(r, 32), setLengthLeft(s, 32)) const recovery = calculateSigRecovery(v, chainId) @@ -97,7 +97,7 @@ export const toRpcSig = function ( v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ): string { const recovery = calculateSigRecovery(v, chainId) if (!isValidSigRecovery(recovery)) { @@ -118,7 +118,7 @@ export const toCompactSig = function ( v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ): string { const recovery = calculateSigRecovery(v, chainId) if (!isValidSigRecovery(recovery)) { @@ -183,7 +183,7 @@ export const isValidSignature = function ( r: Uint8Array, s: Uint8Array, homesteadOrLater: boolean = true, - chainId?: bigint + chainId?: bigint, ): boolean { if (r.length !== 32 || s.length !== 32) { return false diff --git a/packages/util/src/types.ts b/packages/util/src/types.ts index f81a7ed70e..2cc0df8f9c 100644 --- a/packages/util/src/types.ts +++ b/packages/util/src/types.ts @@ -79,11 +79,11 @@ export function toType(input: null, outputType: T): null export function toType(input: undefined, outputType: T): undefined export function toType( input: ToBytesInputTypes, - outputType: T + outputType: T, ): TypeOutputReturnType[T] export function toType( input: ToBytesInputTypes, - outputType: T + outputType: T, ): TypeOutputReturnType[T] | undefined | null { if (input === null) { return null @@ -96,7 +96,7 @@ export function toType( throw new Error(`A string must be provided with a 0x-prefix, given: ${input}`) } else if (typeof input === 'number' && !Number.isSafeInteger(input)) { throw new Error( - 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)' + 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)', ) } @@ -111,7 +111,7 @@ export function toType( const bigInt = bytesToBigInt(output) if (bigInt > BigInt(Number.MAX_SAFE_INTEGER)) { throw new Error( - 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative output type)' + 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative output type)', ) } return Number(bigInt) as TypeOutputReturnType[T] diff --git a/packages/util/src/units.ts b/packages/util/src/units.ts index dfbca2d2de..5465baf5b1 100644 --- a/packages/util/src/units.ts +++ b/packages/util/src/units.ts @@ -5,7 +5,7 @@ export const GWEI_TO_WEI = BigInt(1000000000) export function formatBigDecimal( numerator: bigint, denominator: bigint, - maxDecimalFactor: bigint + maxDecimalFactor: bigint, ): string { if (denominator === BIGINT_0) { denominator = BIGINT_1 diff --git a/packages/util/src/verkle.ts b/packages/util/src/verkle.ts index 959559f701..7f6e2dc3cf 100644 --- a/packages/util/src/verkle.ts +++ b/packages/util/src/verkle.ts @@ -26,7 +26,7 @@ export interface VerkleCrypto { commitment: Uint8Array, commitmentIndex: number, oldScalarValue: Uint8Array, - newScalarValue: Uint8Array + newScalarValue: Uint8Array, ) => Uint8Array // Commitment zeroCommitment: Uint8Array 
verifyExecutionWitnessPreState: (prestateRoot: string, execution_witness_json: string) => boolean @@ -45,7 +45,7 @@ export interface VerkleCrypto { export function getVerkleStem( ffi: VerkleCrypto, address: Address, - treeIndex: number | bigint = 0 + treeIndex: number | bigint = 0, ): Uint8Array { const address32 = setLengthLeft(address.toBytes(), 32) @@ -71,11 +71,11 @@ export function getVerkleStem( export function verifyVerkleProof( ffi: VerkleCrypto, prestateRoot: Uint8Array, - executionWitness: VerkleExecutionWitness + executionWitness: VerkleExecutionWitness, ): boolean { return ffi.verifyExecutionWitnessPreState( bytesToHex(prestateRoot), - JSON.stringify(executionWitness) + JSON.stringify(executionWitness), ) } @@ -190,7 +190,7 @@ export function getVerkleTreeIndicesForCodeChunk(chunkId: number) { export const getVerkleTreeKeyForCodeChunk = async ( address: Address, chunkId: number, - verkleCrypto: VerkleCrypto + verkleCrypto: VerkleCrypto, ) => { const { treeIndex, subIndex } = getVerkleTreeIndicesForCodeChunk(chunkId) return concatBytes(getVerkleStem(verkleCrypto, address, treeIndex), toBytes(subIndex)) @@ -209,7 +209,7 @@ export const chunkifyCode = (code: Uint8Array) => { export const getVerkleTreeKeyForStorageSlot = async ( address: Address, storageKey: bigint, - verkleCrypto: VerkleCrypto + verkleCrypto: VerkleCrypto, ) => { const { treeIndex, subIndex } = getVerkleTreeIndexesForStorageSlot(storageKey) diff --git a/packages/util/src/withdrawal.ts b/packages/util/src/withdrawal.ts index 1724aef448..d8f2a133e7 100644 --- a/packages/util/src/withdrawal.ts +++ b/packages/util/src/withdrawal.ts @@ -45,7 +45,7 @@ export class Withdrawal { /** * withdrawal amount in Gwei to match the CL repesentation and eventually ssz withdrawalsRoot */ - public readonly amount: bigint + public readonly amount: bigint, ) {} public static fromWithdrawalData(withdrawalData: WithdrawalData) { diff --git a/packages/util/test/account.spec.ts b/packages/util/test/account.spec.ts index b7a37eb53d..b9ef96541d 100644 --- a/packages/util/test/account.spec.ts +++ b/packages/util/test/account.spec.ts @@ -48,12 +48,12 @@ describe('Account', () => { assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have storageRoot equal to KECCAK256_RLP' + 'should have storageRoot equal to KECCAK256_RLP', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have codeHash equal to KECCAK256_NULL' + 'should have codeHash equal to KECCAK256_NULL', ) }) @@ -71,12 +71,12 @@ describe('Account', () => { assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have correct storageRoot' + 'should have correct storageRoot', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) @@ -93,18 +93,18 @@ describe('Account', () => { assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have correct storageRoot' + 'should have correct storageRoot', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) it('from RLP data', () => { const accountRlp = hexToBytes( - 
'0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) const account = createAccountFromRLP(accountRlp) assert.equal(account.nonce, BigInt(2), 'should have correct nonce') @@ -112,12 +112,12 @@ describe('Account', () => { assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have correct storageRoot' + 'should have correct storageRoot', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) @@ -136,7 +136,7 @@ describe('Account', () => { it('isContract', () => { const accountRlp = hexToBytes( - '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) let account = createAccountFromRLP(accountRlp) assert.notOk(account.isContract(), 'should return false for a non-contract account') @@ -172,7 +172,7 @@ describe('Account', () => { }, undefined, undefined, - 'should only accept length 32 buffer for storageRoot' + 'should only accept length 32 buffer for storageRoot', ) assert.throws( @@ -181,7 +181,7 @@ describe('Account', () => { }, undefined, undefined, - 'should only accept length 32 buffer for codeHash' + 'should only accept length 32 buffer for codeHash', ) const data = { balance: BigInt(5) } @@ -191,7 +191,7 @@ describe('Account', () => { }, undefined, undefined, - 'should only accept an array in fromRlpSerializedAccount' + 'should only accept an array in fromRlpSerializedAccount', ) assert.throws( @@ -200,7 +200,7 @@ describe('Account', () => { }, undefined, undefined, - 'should not accept nonce less than 0' + 'should not accept nonce less than 0', ) assert.throws( @@ -209,7 +209,7 @@ describe('Account', () => { }, undefined, undefined, - 'should not accept balance less than 0' + 'should not accept balance less than 0', ) }) }) @@ -223,66 +223,66 @@ describe('Utility Functions', () => { assert.notOk( isValidPrivate( hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' - ) + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', + ), ), - 'should fail on too big input' + 'should fail on too big input', ) assert.notOk( isValidPrivate(('WRONG_INPUT_TYPE') as Uint8Array), - 'should fail on wrong input type' + 'should fail on wrong input type', ) assert.notOk( isValidPrivate( - hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000000') + hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000000'), ), - 'should fail on invalid curve (zero)' + 'should fail on invalid curve (zero)', ) assert.notOk( isValidPrivate(hexToBytes(`0x${SECP256K1_N.toString(16)}`)), - 'should fail on invalid curve (== N)' + 'should fail on invalid curve (== N)', ) assert.notOk( isValidPrivate(hexToBytes(`0x${(SECP256K1_N + BigInt(1)).toString(16)}`)), - 'should fail on invalid curve (>= N)' + 'should fail on invalid curve 
(>= N)', ) assert.ok( isValidPrivate(hexToBytes(`0x${(SECP256K1_N - BigInt(1)).toString(16)}`)), - 'should work otherwise (< N)' + 'should work otherwise (< N)', ) }) it('isValidPublic', () => { let pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744', ) assert.notOk(isValidPublic(pubKey), 'should fail on too short input') pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d00' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d00', ) assert.notOk(isValidPublic(pubKey), 'should fail on too big input') pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.notOk(isValidPublic(pubKey), 'should fail on SEC1 key') pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.ok( isValidPublic(pubKey, true), - "shouldn't fail on SEC1 key wt.testh sant.testize enabled" + "shouldn't fail on SEC1 key wt.testh sant.testize enabled", ) pubKey = hexToBytes( - '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.notOk(isValidPublic(pubKey), 'should fail wt.testh an invalid SEC1 public key') @@ -290,28 +290,28 @@ describe('Utility Functions', () => { assert.notOk(isValidPublic(pubKey), 'should fail an invalid 33-byte public key') pubKey = hexToBytes( - '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001' + '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001', ) assert.notOk(isValidPublic(pubKey), 'should fail an invalid 64-byte public key') pubKey = hexToBytes( - '0x04fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001' + '0x04fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001', ) assert.notOk(isValidPublic(pubKey, true), 'should fail an invalid 65-byte public key') pubKey = hexToBytes('0x033a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a') assert.ok( isValidPublic(pubKey, true), - 'should work wt.testh compressed keys wt.testh sant.testize enabled' + 'should work wt.testh compressed keys wt.testh sant.testize enabled', ) pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + 
'0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.ok(isValidPublic(pubKey, true), 'should work wt.testh sant.testize enabled') pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.ok(isValidPublic(pubKey), 'should work otherwise') @@ -322,7 +322,7 @@ describe('Utility Functions', () => { } catch (err: any) { assert.ok( err.message.includes('This method only supports Uint8Array'), - 'should throw if input is not Uint8Array' + 'should throw if input is not Uint8Array', ) } }) @@ -335,34 +335,34 @@ describe('Utility Functions', () => { bytesToHex( importPublic( hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' - ) - ) + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', + ), + ), ), pubKey, - 'should work wt.testh an Ethereum public key' + 'should work wt.testh an Ethereum public key', ) assert.equal( bytesToHex( importPublic( hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' - ) - ) + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', + ), + ), ), pubKey, - 'should work wt.testh uncompressed SEC1 keys' + 'should work wt.testh uncompressed SEC1 keys', ) assert.equal( bytesToHex( importPublic( - hexToBytes('0x033a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a') - ) + hexToBytes('0x033a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a'), + ), ), pubKey, - 'should work wt.testh compressed SEC1 keys' + 'should work wt.testh compressed SEC1 keys', ) assert.throws( @@ -371,27 +371,27 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'should throw if input is not Uint8Array' + 'should throw if input is not Uint8Array', ) }) it('publicToAddress', () => { let pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) let address = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' let r = publicToAddress(pubKey) assert.equal(bytesToHex(r), address, 'should produce an address given a public key') pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) address = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' r = publicToAddress(pubKey, true) assert.equal(bytesToHex(r), address, 'should produce an address given a SEC1 public key') pubKey = hexToBytes( - '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + 
'0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.throws( function () { @@ -399,11 +399,11 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce an address given an invalid SEC1 public key" + "shouldn't produce an address given an invalid SEC1 public key", ) pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744', ) assert.throws( function () { @@ -411,7 +411,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce an address given an invalid public key" + "shouldn't produce an address given an invalid public key", ) pubKey = @@ -422,7 +422,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'should throw if input is not a Uint8Array' + 'should throw if input is not a Uint8Array', ) }) @@ -430,7 +430,7 @@ describe('Utility Functions', () => { const pubKey = '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' let privateKey = hexToBytes( - '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f' + '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f', ) const r = privateToPublic(privateKey) assert.equal(bytesToHex(r), pubKey, 'should produce a public key given a private key') @@ -442,7 +442,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce a public key given an invalid private key" + "shouldn't produce a public key given an invalid private key", ) privateKey = hexToBytes('0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c') @@ -452,7 +452,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce a public key given an invalid private key" + "shouldn't produce a public key given an invalid private key", ) privateKey = '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f' as any @@ -461,7 +461,7 @@ describe('Utility Functions', () => { } catch (err: any) { assert.ok( err.message.includes('This method only supports Uint8Array'), - 'should throw if private key is not Uint8Array' + 'should throw if private key is not Uint8Array', ) assert.ok(err.message.includes(privateKey), 'should throw if private key is not Uint8Array') } @@ -471,7 +471,7 @@ describe('Utility Functions', () => { const address = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' // Our private key const privateKey = hexToBytes( - '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f' + '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f', ) const r = privateToAddress(privateKey) assert.equal(bytesToHex(r), address, 'should produce an address given a private key') @@ -480,12 +480,12 @@ describe('Utility Functions', () => { it('generateAddress', () => { const addr = generateAddress( utf8ToBytes('990ccf8a0de58091c028d6ff76bb235ee67c1c39'), - toBytes(14) + toBytes(14), ) assert.equal( bytesToHex(addr), '0x936a4295d8d74e310c0c95f0a63e53737b998d12', - 'should produce an address given a public key' + 'should produce an address given a public key', ) }) @@ -494,7 +494,7 @@ describe('Utility Functions', () => { assert.equal( bytesToHex(addr), '0xd658a4b8247c14868f3c512fa5cbb6e458e4a989', - 'should produce an 
address given a public key' + 'should produce an address given a public key', ) }) @@ -503,7 +503,7 @@ describe('Utility Functions', () => { assert.equal( bytesToHex(addr), '0xbfa69ba91385206bfdd2d8b9c1a5d6c10097a85b', - 'should produce an address given a public key' + 'should produce an address given a public key', ) }) @@ -512,24 +512,24 @@ describe('Utility Functions', () => { function () { generateAddress( ('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') as Uint8Array, - toBytes(0) + toBytes(0), ) }, undefined, undefined, - 'should throw if address is not Uint8Array' + 'should throw if address is not Uint8Array', ) assert.throws( function () { generateAddress( toBytes('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39'), - (0) as Uint8Array + (0) as Uint8Array, ) }, undefined, undefined, - 'should throw if nonce is not Uint8Array' + 'should throw if nonce is not Uint8Array', ) }) @@ -539,7 +539,7 @@ describe('Utility Functions', () => { const addr = generateAddress2( hexToBytes(address as PrefixedHexString), hexToBytes(salt as PrefixedHexString), - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) assert.equal(bytesToHex(addr), result, `${comment}: should generate the addresses provided`) } @@ -553,12 +553,12 @@ describe('Utility Functions', () => { generateAddress2( (address) as Uint8Array, hexToBytes(salt as PrefixedHexString), - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) }, undefined, undefined, - 'should throw if address is not Uint8Array' + 'should throw if address is not Uint8Array', ) assert.throws( @@ -566,12 +566,12 @@ describe('Utility Functions', () => { generateAddress2( hexToBytes(address as PrefixedHexString), (salt) as Uint8Array, - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) }, undefined, undefined, - 'should throw if salt is not Uint8Array' + 'should throw if salt is not Uint8Array', ) assert.throws( @@ -579,12 +579,12 @@ describe('Utility Functions', () => { generateAddress2( hexToBytes(address as PrefixedHexString), hexToBytes(salt as PrefixedHexString), - (initCode) as Uint8Array + (initCode) as Uint8Array, ) }, undefined, undefined, - 'should throw if initCode is not Uint8Array' + 'should throw if initCode is not Uint8Array', ) }) @@ -657,17 +657,17 @@ describe('Utility Functions', () => { assert.equal( toChecksumAddress( addr.toLowerCase(), - hexToBytes(`0x${padToEven(chainId)}`) + hexToBytes(`0x${padToEven(chainId)}`), ).toLowerCase(), - addr.toLowerCase() + addr.toLowerCase(), ) assert.equal( toChecksumAddress(addr.toLowerCase(), BigInt(chainId)).toLowerCase(), - addr.toLowerCase() + addr.toLowerCase(), ) assert.equal( toChecksumAddress(addr.toLowerCase(), `0x${padToEven(chainId)}`).toLowerCase(), - addr.toLowerCase() + addr.toLowerCase(), ) } } @@ -688,7 +688,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'Should throw when the address is not hex-prefixed' + 'Should throw when the address is not hex-prefixed', ) assert.throws( @@ -697,7 +697,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'Should throw when the chainId is not hex-prefixed' + 'Should throw when the chainId is not hex-prefixed', ) }) }) @@ -718,7 +718,7 @@ describe('Utility Functions', () => { assert.ok(isValidChecksumAddress(addr, intToBytes(parseInt(chainId)))) assert.ok(isValidChecksumAddress(addr, BigInt(chainId))) assert.ok( - isValidChecksumAddress(addr, `0x${padToEven(intToHex(parseInt(chainId)).slice(2))}`) + 
isValidChecksumAddress(addr, `0x${padToEven(intToHex(parseInt(chainId)).slice(2))}`), ) } } @@ -773,12 +773,12 @@ describe('Utility Functions', () => { assert.equal( JSON.stringify(result[2]), JSON.stringify(KECCAK256_RLP), - 'Empty storageRoot should be changed to hash of RLP of null' + 'Empty storageRoot should be changed to hash of RLP of null', ) assert.equal( JSON.stringify(result[3]), JSON.stringify(KECCAK256_NULL), - 'Empty codeRoot should be changed to hash of RLP of null' + 'Empty codeRoot should be changed to hash of RLP of null', ) }) diff --git a/packages/util/test/address.spec.ts b/packages/util/test/address.spec.ts index c0fc0b0e76..05e57fa3e2 100644 --- a/packages/util/test/address.spec.ts +++ b/packages/util/test/address.spec.ts @@ -38,7 +38,7 @@ describe('Address', () => { it('should instantiate from public key', () => { const pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' const addr = Address.fromPublicKey(pubKey) @@ -47,7 +47,7 @@ describe('Address', () => { it('should fail to instantiate from invalid public key', () => { const pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744', ) assert.throws(() => Address.fromPublicKey(pubKey)) }) @@ -75,7 +75,7 @@ describe('Address', () => { const nonPrecompile = Address.fromString('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') assert.isFalse( nonPrecompile.isPrecompileOrSystemAddress(), - 'should detect non-precompile address' + 'should detect non-precompile address', ) }) @@ -86,7 +86,7 @@ describe('Address', () => { const addr = Address.generate2( from, hexToBytes(salt as PrefixedHexString), - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) assert.equal(addr.toString(), result) } diff --git a/packages/util/test/bytes.spec.ts b/packages/util/test/bytes.spec.ts index 3d6954b2b6..f08ce15ea3 100644 --- a/packages/util/test/bytes.spec.ts +++ b/packages/util/test/bytes.spec.ts @@ -242,7 +242,7 @@ describe('toBytes', () => { return Uint8Array.from([1]) }, }), - Uint8Array.from([1]) + Uint8Array.from([1]), ) }) it('should fail', () => { @@ -287,7 +287,7 @@ describe('intToBytes', () => { () => intToBytes(Number.MAX_SAFE_INTEGER + 1), undefined, undefined, - 'throws on unsafe integers' + 'throws on unsafe integers', ) }) @@ -316,7 +316,7 @@ describe('intToHex', () => { () => intToHex(Number.MAX_SAFE_INTEGER + 1), undefined, undefined, - 'throws on unsafe integers' + 'throws on unsafe integers', ) }) it('should pass on correct input', () => { @@ -349,19 +349,19 @@ describe('validateNoLeadingZeroes', () => { it('should pass on correct input', () => { assert.doesNotThrow( () => validateNoLeadingZeroes(noLeadingZeroes), - 'does not throw when no leading zeroes' + 'does not throw when no leading zeroes', ) assert.doesNotThrow( () => validateNoLeadingZeroes(emptyBuffer), - 'does not throw with empty buffer' + 'does not throw with empty buffer', ) assert.doesNotThrow( () => validateNoLeadingZeroes(undefinedValue), - 'does not throw when undefined passed in' + 'does not throw when 
undefined passed in', ) assert.doesNotThrow( () => validateNoLeadingZeroes(noleadingZeroBytes), - 'does not throw when value has leading zero bytes' + 'does not throw when value has leading zero bytes', ) }) @@ -370,13 +370,13 @@ describe('validateNoLeadingZeroes', () => { () => validateNoLeadingZeroes(leadingZeroBytes), undefined, undefined, - 'throws when value has leading zero bytes' + 'throws when value has leading zero bytes', ) assert.throws( () => validateNoLeadingZeroes(onlyZeroes), undefined, undefined, - 'throws when value has only zeroes' + 'throws when value has only zeroes', ) }) }) diff --git a/packages/util/test/constants.spec.ts b/packages/util/test/constants.spec.ts index dc2c9f758e..8b6e56f969 100644 --- a/packages/util/test/constants.spec.ts +++ b/packages/util/test/constants.spec.ts @@ -16,47 +16,47 @@ describe('constants', () => { it('should match constants', () => { assert.equal( MAX_INTEGER.toString(16), - 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', ) assert.equal( TWO_POW256.toString(16), - '10000000000000000000000000000000000000000000000000000000000000000' + '10000000000000000000000000000000000000000000000000000000000000000', ) assert.equal( TWO_POW256.toString(16), - '10000000000000000000000000000000000000000000000000000000000000000' + '10000000000000000000000000000000000000000000000000000000000000000', ) assert.equal( KECCAK256_NULL_S, - '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) assert.equal( bytesToHex(KECCAK256_NULL), - '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) assert.equal( KECCAK256_RLP_ARRAY_S, - '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347' + '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347', ) assert.equal( bytesToHex(KECCAK256_RLP_ARRAY), - '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347' + '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347', ) assert.equal( KECCAK256_RLP_S, - '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421' + '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', ) assert.equal( bytesToHex(KECCAK256_RLP), - '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421' + '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', ) }) }) diff --git a/packages/util/test/genesis.spec.ts b/packages/util/test/genesis.spec.ts index 053a998f0e..43f89947a0 100644 --- a/packages/util/test/genesis.spec.ts +++ b/packages/util/test/genesis.spec.ts @@ -12,10 +12,10 @@ describe('[Util/genesis]', () => { assert.equal( genesisState['0x4242424242424242424242424242424242424242'][1].includes( // sample data check - '0x60806040526004361061003' + '0x60806040526004361061003', ), true, - 'should have deposit contract' + 'should have deposit contract', ) }) }) diff --git a/packages/util/test/internal.spec.ts b/packages/util/test/internal.spec.ts index 29f71ef84a..c8c94faf49 100644 --- a/packages/util/test/internal.spec.ts +++ b/packages/util/test/internal.spec.ts @@ -51,9 +51,9 @@ describe('internal', () => { { a: '1', b: '2' }, { a: '3', b: '4' }, ], - 'a' + 'a', ), - ['1', '3'] + ['1', '3'], ) assert.deepEqual( getKeys( @@ -62,9 +62,9 @@ describe('internal', () => { { a: '3', b: '4' }, ], 'a', - true + true, ), - ['', '3'] + ['', '3'], ) }) 
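Note on the recurring change in the test-file diffs above and below: nearly every hunk is a mechanical reformat from the Prettier 2 -> 3 upgrade, whose default `trailingComma` option changed from "es5" to "all", so the final argument of each multi-line call now gains a trailing comma. A minimal before/after sketch, reusing one assertion shape from constants.spec.ts for illustration only (it is not an additional change in this PR):

// Prettier 2.x output: no comma after the last argument of a multi-line call
assert.equal(
  bytesToHex(KECCAK256_RLP),
  '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421'
)

// Prettier 3.x output with the default `trailingComma: "all"`: trailing comma added
assert.equal(
  bytesToHex(KECCAK256_RLP),
  '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421',
)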
diff --git a/packages/util/test/provider.spec.ts b/packages/util/test/provider.spec.ts index f8a40b5b7a..bd20df314a 100644 --- a/packages/util/test/provider.spec.ts +++ b/packages/util/test/provider.spec.ts @@ -18,13 +18,13 @@ describe('getProvider', () => { assert.equal( getProvider(fakeEthersProvider), fakeEthersProvider._getConnection().url, - 'returned correct provider url string' + 'returned correct provider url string', ) assert.throws( () => getProvider(1), 'Must provide valid provider URL or Web3Provider', undefined, - 'throws correct error' + 'throws correct error', ) }) }) diff --git a/packages/util/test/requests.spec.ts b/packages/util/test/requests.spec.ts index c2f51e7f47..96b4a49f57 100644 --- a/packages/util/test/requests.spec.ts +++ b/packages/util/test/requests.spec.ts @@ -49,7 +49,7 @@ describe('Requests', () => { for (const [requestName, requestData, requestType, RequestInstanceType] of testCases) { it(`${requestName}`, () => { const requestObject = RequestInstanceType.fromRequestData( - requestData + requestData, ) as CLRequest const requestJSON = requestObject.toJSON() const serialized = requestObject.serialize() diff --git a/packages/util/test/signature.spec.ts b/packages/util/test/signature.spec.ts index b7bc655ccf..7e05c34295 100644 --- a/packages/util/test/signature.spec.ts +++ b/packages/util/test/signature.spec.ts @@ -24,11 +24,11 @@ describe('ecsign', () => { const sig = ecsign(echash, ecprivkey) assert.deepEqual( sig.r, - hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') + hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9'), ) assert.deepEqual( sig.s, - hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') + hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66'), ) assert.equal(sig.v, BigInt(27)) }) @@ -37,21 +37,21 @@ describe('ecsign', () => { const sig = ecsign(echash, ecprivkey, chainId) assert.deepEqual( sig.r, - hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') + hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9'), ) assert.deepEqual( sig.s, - hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') + hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66'), ) assert.equal(sig.v, BigInt(41)) }) it('should produce a signature for chainId=150', () => { const expectedSigR = hexToBytes( - '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9' + '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9', ) const expectedSigS = hexToBytes( - '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66' + '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66', ) const sig = ecsign(echash, ecprivkey, BigInt(150)) @@ -63,10 +63,10 @@ describe('ecsign', () => { it('should produce a signature for a high number chainId greater than MAX_SAFE_INTEGER', () => { const chainIDBuffer = hexToBytes('0x796f6c6f763378') const expectedSigR = hexToBytes( - '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9' + '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9', ) const expectedSigS = hexToBytes( - '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66' + '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66', ) const expectedSigV = BigInt('68361967398315795') @@ -144,7 +144,7 @@ describe('ecrecover', () => { } */ const senderPubKey = hexToBytes( - 
'0x78988201fbceed086cfca7b64e382d08d0bd776898731443d2907c097745b7324c54f522087f5964412cddba019f192de0fd57a0ffa63f098c2b200e53594b15' + '0x78988201fbceed086cfca7b64e382d08d0bd776898731443d2907c097745b7324c54f522087f5964412cddba019f192de0fd57a0ffa63f098c2b200e53594b15', ) const msgHash = hexToBytes('0x8ae8cb685a7a9f29494b07b287c3f6a103b73fa178419d10d1184861a40f6afe') @@ -163,7 +163,7 @@ describe('hashPersonalMessage', () => { const h = hashPersonalMessage(utf8ToBytes('Hello world')) assert.deepEqual( h, - hexToBytes('0x8144a6fa26be252b86456491fbcd43c1de7e022241845ffea1c3df066f7cfede') + hexToBytes('0x8144a6fa26be252b86456491fbcd43c1de7e022241845ffea1c3df066f7cfede'), ) }) it('should throw if input is not a Uint8Array', () => { @@ -198,7 +198,7 @@ describe('isValidSignature', () => { }) it('should fail when on homestead and s > secp256k1n/2', () => { const SECP256K1_N_DIV_2 = BigInt( - '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0' + '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0', ) const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') @@ -209,7 +209,7 @@ describe('isValidSignature', () => { }) it('should not fail when not on homestead but s > secp256k1n/2', () => { const SECP256K1_N_DIV_2 = BigInt( - '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0' + '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0', ) const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') @@ -336,7 +336,7 @@ describe('message sig', () => { }) assert.throws(function () { fromRpcSig( - '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca' + '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca', ) }) }) @@ -344,7 +344,7 @@ describe('message sig', () => { it('pad short r and s values', () => { assert.equal( toRpcSig(BigInt(27), r.slice(20), s.slice(20)), - '0x00000000000000000000000000000000000000004a1579cf389ef88b20a1abe90000000000000000000000000000000000000000326fa689f228040429e3ca661b' + '0x00000000000000000000000000000000000000004a1579cf389ef88b20a1abe90000000000000000000000000000000000000000326fa689f228040429e3ca661b', ) }) diff --git a/packages/util/test/verkle.spec.ts b/packages/util/test/verkle.spec.ts index 3a66f1244a..8eed055405 100644 --- a/packages/util/test/verkle.spec.ts +++ b/packages/util/test/verkle.spec.ts @@ -27,24 +27,24 @@ describe('Verkle cryptographic helpers', () => { // Empty address assert.equal( bytesToHex( - getVerkleStem(verkle, Address.fromString('0x0000000000000000000000000000000000000000')) + getVerkleStem(verkle, Address.fromString('0x0000000000000000000000000000000000000000')), ), - '0x1a100684fd68185060405f3f160e4bb6e034194336b547bdae323f888d5332' + '0x1a100684fd68185060405f3f160e4bb6e034194336b547bdae323f888d5332', ) // Non-empty address assert.equal( bytesToHex( - getVerkleStem(verkle, Address.fromString('0x71562b71999873DB5b286dF957af199Ec94617f7')) + getVerkleStem(verkle, Address.fromString('0x71562b71999873DB5b286dF957af199Ec94617f7')), ), - '0x1540dfad7755b40be0768c6aa0a5096fbf0215e0e8cf354dd928a178346466' + '0x1540dfad7755b40be0768c6aa0a5096fbf0215e0e8cf354dd928a178346466', ) }) it('verifyVerkleProof(): should verify verkle proofs', () => { // Src: Kaustinen6 testnet, block 71 state root (parent of block 72) const prestateRoot = hexToBytes( - 
'0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510' + '0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510', ) const executionWitness = verkleBlockJSON.executionWitness as VerkleExecutionWitness assert.isTrue(verifyVerkleProof(verkle, prestateRoot, executionWitness)) diff --git a/packages/util/test/withdrawal.spec.ts b/packages/util/test/withdrawal.spec.ts index 502d654e1f..b090ac361c 100644 --- a/packages/util/test/withdrawal.spec.ts +++ b/packages/util/test/withdrawal.spec.ts @@ -70,7 +70,7 @@ describe('Withdrawal', () => { const gethWithdrawalsRlp = bytesToHex(encode(gethWithdrawalsBuffer)) it('fromWithdrawalData and toBytesArray', () => { const withdrawals = withdrawalsGethVector.map((withdrawal) => - Withdrawal.fromWithdrawalData(withdrawal as WithdrawalData) + Withdrawal.fromWithdrawalData(withdrawal as WithdrawalData), ) const withdrawalstoBytesArr = withdrawals.map((wt) => wt.raw()) const withdrawalsToRlp = bytesToHex(encode(withdrawalstoBytesArr)) @@ -79,7 +79,7 @@ describe('Withdrawal', () => { it('toBytesArray from withdrawalData', () => { const withdrawalsDatatoBytesArr = withdrawalsGethVector.map((withdrawal) => - Withdrawal.toBytesArray(withdrawal as WithdrawalData) + Withdrawal.toBytesArray(withdrawal as WithdrawalData), ) const withdrawalsDataToRlp = bytesToHex(encode(withdrawalsDatatoBytesArr)) assert.equal(gethWithdrawalsRlp, withdrawalsDataToRlp, 'The withdrawals to buffer should match') @@ -93,7 +93,7 @@ describe('Withdrawal', () => { const withdrawalsValue = withdrawals.map((wt) => wt.toValue()) assert.deepEqual( withdrawalsValue.map((wt) => bytesToHex(wt.address)), - withdrawalsJson.map((wt) => wt.address) + withdrawalsJson.map((wt) => wt.address), ) }) }) diff --git a/packages/util/tsconfig.lint.json b/packages/util/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/util/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/verkle/.eslintrc.cjs b/packages/verkle/.eslintrc.cjs index 1fa27a8fea..887e31b0e6 100644 --- a/packages/verkle/.eslintrc.cjs +++ b/packages/verkle/.eslintrc.cjs @@ -1,12 +1,12 @@ module.exports = { extends: '../../config/eslint.cjs', parserOptions: { - project: ['./tsconfig.json', './tsconfig.benchmarks.json'], + project: ['./tsconfig.lint.json'], }, ignorePatterns: ['src/rust-verkle-wasm/rust_verkle_wasm.js', '**/vendor/*.js'], overrides: [ { - files: ['benchmarks/*.ts'], + files: ['benchmarks/*.ts', 'examples/*.ts'], rules: { 'no-console': 'off', }, diff --git a/packages/verkle/src/node/internalNode.ts b/packages/verkle/src/node/internalNode.ts index 2a401dc8d0..86a16a500e 100644 --- a/packages/verkle/src/node/internalNode.ts +++ b/packages/verkle/src/node/internalNode.ts @@ -33,7 +33,7 @@ export class InternalNode extends BaseVerkleNode { childIndex, // The hashed child commitments are used when updating the internal node commitment this.verkleCrypto.hashCommitment(oldChildReference.commitment), - this.verkleCrypto.hashCommitment(child.commitment) + this.verkleCrypto.hashCommitment(child.commitment), ) } diff --git a/packages/verkle/src/node/leafNode.ts b/packages/verkle/src/node/leafNode.ts index 8592fa4d38..ceac0b02f4 100644 --- a/packages/verkle/src/node/leafNode.ts +++ b/packages/verkle/src/node/leafNode.ts @@ -38,7 +38,7 @@ export class LeafNode extends BaseVerkleNode { static async create( stem: Uint8Array, verkleCrypto: VerkleCrypto, - values?: (Uint8Array | VerkleLeafNodeValue)[] + values?: (Uint8Array | 
VerkleLeafNodeValue)[], ): Promise { // Generate the value arrays for c1 and c2 values = values !== undefined ? values : createDefaultLeafValues() @@ -66,13 +66,13 @@ export class LeafNode extends BaseVerkleNode { verkleCrypto.zeroCommitment, 0, new Uint8Array(32), - setLengthLeft(intToBytes(1), 32) + setLengthLeft(intToBytes(1), 32), ) commitment = verkleCrypto.updateCommitment( commitment, 1, new Uint8Array(32), - setLengthRight(stem, 32) + setLengthRight(stem, 32), ) commitment = verkleCrypto.updateCommitment( commitment, @@ -80,13 +80,13 @@ export class LeafNode extends BaseVerkleNode { new Uint8Array(32), // We hash the commitment when using in the leaf node commitment since c1 is 64 bytes long // and we need a 32 byte input for the scalar value in `updateCommitment` - verkleCrypto.hashCommitment(c1) + verkleCrypto.hashCommitment(c1), ) commitment = verkleCrypto.updateCommitment( commitment, 3, new Uint8Array(32), - verkleCrypto.hashCommitment(c2) + verkleCrypto.hashCommitment(c2), ) return new LeafNode({ stem, @@ -164,7 +164,7 @@ export class LeafNode extends BaseVerkleNode { commitmentIndex, cValues[commitmentIndex], // Right pad the value with zeroes since commitments require 32 byte scalars - setLengthRight(val.slice(0, 16), 32) + setLengthRight(val.slice(0, 16), 32), ) // Update the commitment for the second 16 bytes of the value cCommitment = this.verkleCrypto.updateCommitment( @@ -172,7 +172,7 @@ export class LeafNode extends BaseVerkleNode { commitmentIndex + 1, cValues[commitmentIndex + 1], // Right pad the value with zeroes since commitments require 32 byte scalars - setLengthRight(val.slice(16), 32) + setLengthRight(val.slice(16), 32), ) // Update the cCommitment corresponding to the index let oldCCommitment: Uint8Array | undefined @@ -191,7 +191,7 @@ export class LeafNode extends BaseVerkleNode { this.commitment, cIndex, this.verkleCrypto.hashCommitment(oldCCommitment!), - this.verkleCrypto.hashCommitment(cCommitment) + this.verkleCrypto.hashCommitment(cCommitment), ) } diff --git a/packages/verkle/src/verkleTree.ts b/packages/verkle/src/verkleTree.ts index efebeaee67..c1839b966a 100644 --- a/packages/verkle/src/verkleTree.ts +++ b/packages/verkle/src/verkleTree.ts @@ -86,7 +86,7 @@ export class VerkleTree { this.verkleCrypto = opts?.verkleCrypto this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this.debug = this.DEBUG ? (message: string, namespaces: string[] = []) => { let log = this._debug @@ -237,7 +237,7 @@ export class VerkleTree { // Sanity check to verify we have the right node type if (!isLeafNode(foundPath.node)) { throw new Error( - `expected leaf node found at ${bytesToHex(stem)}. Got internal node instead` + `expected leaf node found at ${bytesToHex(stem)}. Got internal node instead`, ) } leafNode = foundPath.node @@ -245,8 +245,8 @@ export class VerkleTree { if (!equalsBytes(leafNode.stem, stem)) { throw new Error( `invalid leaf node found. 
Expected stem: ${bytesToHex(stem)}; got ${bytesToHex( - foundPath.node.stem - )}` + foundPath.node.stem, + )}`, ) } } else { @@ -265,7 +265,7 @@ export class VerkleTree { this.DEBUG && this.debug( `Updating value for suffix: ${suffix} at leaf node with stem: ${bytesToHex(stem)}`, - ['PUT'] + ['PUT'], ) putStack.push([leafNode.hash(), leafNode]) @@ -296,9 +296,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Updating child reference for node with path: ${bytesToHex( - lastPath + lastPath, )} at index ${childIndex} in internal node at path ${bytesToHex(nextPath)}`, - ['PUT'] + ['PUT'], ) // Hold onto `path` to current node for updating next parent node child index lastPath = nextPath @@ -318,7 +318,7 @@ export class VerkleTree { `Updating child reference for node with path: ${bytesToHex(lastPath)} at index ${ lastPath[0] } in root node`, - ['PUT'] + ['PUT'], ) this.DEBUG && this.debug(`Updating root node hash to ${bytesToHex(this._root)}`, ['PUT']) putStack.push([this._root, rootNode]) @@ -342,7 +342,7 @@ export class VerkleTree { updateParent( leafNode: LeafNode, nearestNode: VerkleNode, - pathToNode: Uint8Array + pathToNode: Uint8Array, ): { node: InternalNode; lastPath: Uint8Array } { // Compute the portion of leafNode.stem and nearestNode.path that match (i.e. the partial path closest to leafNode.stem) const partialMatchingStemIndex = matchingBytesLength(leafNode.stem, pathToNode) @@ -375,13 +375,13 @@ export class VerkleTree { this.DEBUG && this.debug( `Updating child reference for leaf node with stem: ${bytesToHex( - leafNode.stem + leafNode.stem, )} at index ${ leafNode.stem[partialMatchingStemIndex] } in internal node at path ${bytesToHex( - leafNode.stem.slice(0, partialMatchingStemIndex) + leafNode.stem.slice(0, partialMatchingStemIndex), )}`, - ['PUT'] + ['PUT'], ) } return { node: internalNode, lastPath: pathToNode } @@ -440,9 +440,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Path ${bytesToHex(key)} - found full path to node ${bytesToHex( - decodedNode.hash() + decodedNode.hash(), )}.`, - ['FIND_PATH'] + ['FIND_PATH'], ) result.node = decodedNode result.remaining = new Uint8Array() @@ -455,9 +455,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Path ${bytesToHex(pathToNearestNode)} - found path to nearest node ${bytesToHex( - decodedNode.hash() + decodedNode.hash(), )} but target node not found.`, - ['FIND_PATH'] + ['FIND_PATH'], ) result.stack.push([decodedNode, pathToNearestNode]) return result @@ -467,9 +467,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Partial Path ${bytesToHex( - key.slice(0, matchingKeyLength) + key.slice(0, matchingKeyLength), )} - found next node in path ${bytesToHex(decodedNode.hash())}.`, - ['FIND_PATH'] + ['FIND_PATH'], ) // Get the next child node in the path const childIndex = key[matchingKeyLength] @@ -478,9 +478,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Found partial path ${key.slice( - 31 - result.remaining.length + 31 - result.remaining.length, )} but sought node is not present in trie.`, - ['FIND_PATH'] + ['FIND_PATH'], ) return result } @@ -547,7 +547,7 @@ export class VerkleTree { async verifyProof( _rootHash: Uint8Array, _key: Uint8Array, - _proof: Proof + _proof: Proof, ): Promise { throw new Error('Not implemented') } diff --git a/packages/verkle/test/internalNode.spec.ts b/packages/verkle/test/internalNode.spec.ts index 328658d9f4..f8fd34102f 100644 --- a/packages/verkle/test/internalNode.spec.ts +++ b/packages/verkle/test/internalNode.spec.ts @@ -22,7 +22,7 @@ describe('verkle 
node - internal', () => { assert.equal(node.children.length, NODE_WIDTH, 'number of children should equal verkle width') assert.ok( node.children.every((child) => child === null), - 'every children should be null' + 'every children should be null', ) }) @@ -33,14 +33,14 @@ describe('verkle node - internal', () => { assert.deepEqual( node.commitment, verkleCrypto.zeroCommitment, - 'commitment should be set to point identity' + 'commitment should be set to point identity', ) // Children nodes should all default to null. assert.equal(node.children.length, NODE_WIDTH, 'number of children should equal verkle width') assert.ok( node.children.every((child) => child === null), - 'every children should be null' + 'every children should be null', ) }) it('should serialize and deserialize a node', async () => { diff --git a/packages/verkle/test/leafNode.spec.ts b/packages/verkle/test/leafNode.spec.ts index ff62de67ba..b9addecde5 100644 --- a/packages/verkle/test/leafNode.spec.ts +++ b/packages/verkle/test/leafNode.spec.ts @@ -35,14 +35,14 @@ describe('verkle node - leaf', () => { assert.equal(node.type, VerkleNodeType.Leaf, 'type should be set') assert.ok( equalsBytes(node.commitment as unknown as Uint8Array, commitment), - 'commitment should be set' + 'commitment should be set', ) assert.ok(equalsBytes(node.c1 as unknown as Uint8Array, c1), 'c1 should be set') assert.ok(equalsBytes(node.c2 as unknown as Uint8Array, c2), 'c2 should be set') assert.ok(equalsBytes(node.stem, stem), 'stem should be set') assert.ok( values.every((value, index) => equalsBytes(value, node.values[index] as Uint8Array)), - 'values should be set' + 'values should be set', ) }) diff --git a/packages/verkle/test/verkle.spec.ts b/packages/verkle/test/verkle.spec.ts index 9cbfb15fe6..c023ec604f 100644 --- a/packages/verkle/test/verkle.spec.ts +++ b/packages/verkle/test/verkle.spec.ts @@ -97,7 +97,7 @@ describe('Verkle tree', () => { assert.deepEqual( verkleCrypto.serializeCommitment(pathToNonExistentNode.stack[0][0].commitment), tree.root(), - 'contains the root node in the stack' + 'contains the root node in the stack', ) }) @@ -264,7 +264,7 @@ describe('Verkle tree', () => { assert.ok(res.node !== null) assert.deepEqual( (res.node as LeafNode).values[hexToBytes(keys[0])[31]], - VerkleLeafNodeValue.Deleted + VerkleLeafNodeValue.Deleted, ) }) }) diff --git a/packages/verkle/tsconfig.lint.json b/packages/verkle/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/verkle/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/vm/.eslintrc.cjs b/packages/vm/.eslintrc.cjs index 6aaac545e9..811a41ecc2 100644 --- a/packages/vm/.eslintrc.cjs +++ b/packages/vm/.eslintrc.cjs @@ -1,14 +1,16 @@ module.exports = { extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, rules: { '@typescript-eslint/no-use-before-define': 'off', 'no-invalid-this': 'off', 'no-restricted-syntax': 'off', - 'import/extensions': 'off', }, overrides: [ { - files: ['test/util.ts', 'test/tester/**/*.ts'], + files: ['test/util.ts', 'test/tester/**/*.ts', 'examples/**/*.ts'], rules: { 'no-console': 'off', }, diff --git a/packages/vm/examples/buildBlock.ts b/packages/vm/examples/buildBlock.ts index c94159478b..b6c181746d 100644 --- a/packages/vm/examples/buildBlock.ts +++ b/packages/vm/examples/buildBlock.ts @@ -2,7 +2,7 @@ import { createBlockFromBlockData } from '@ethereumjs/block' import { Chain, Common } from '@ethereumjs/common' 
import { createLegacyTx } from '@ethereumjs/tx' import { Account, Address, bytesToHex, hexToBytes } from '@ethereumjs/util' -import { buildBlock, VM } from '@ethereumjs/vm' +import { VM, buildBlock } from '@ethereumjs/vm' const main = async () => { const common = new Common({ chain: Chain.Mainnet }) @@ -10,7 +10,7 @@ const main = async () => { const parentBlock = createBlockFromBlockData( { header: { number: 1n } }, - { skipConsensusFormatValidation: true } + { skipConsensusFormatValidation: true }, ) const headerData = { number: 2n, @@ -39,4 +39,4 @@ const main = async () => { console.log(`Built a block with hash ${bytesToHex(block.hash())}`) } -main() +void main() diff --git a/packages/vm/examples/helpers/account-utils.ts b/packages/vm/examples/helpers/account-utils.ts index 97d56a10af..55e6fc6fe3 100644 --- a/packages/vm/examples/helpers/account-utils.ts +++ b/packages/vm/examples/helpers/account-utils.ts @@ -1,5 +1,6 @@ -import { VM } from '@ethereumjs/vm' -import { Account, createAccount, Address } from '@ethereumjs/util' +import { Address, createAccount } from '@ethereumjs/util' + +import type { VM } from '@ethereumjs/vm' export const keyPair = { secretKey: '0x3cd7232cd6f3fc66a57a6bedc1a8ed6c228fff0a327e169c2bcc5e869ed49511', diff --git a/packages/vm/examples/helpers/tx-builder.ts b/packages/vm/examples/helpers/tx-builder.ts index b00be99f7b..c859cb3152 100644 --- a/packages/vm/examples/helpers/tx-builder.ts +++ b/packages/vm/examples/helpers/tx-builder.ts @@ -1,12 +1,13 @@ -import { Interface, defaultAbiCoder as AbiCoder } from '@ethersproject/abi' -import { LegacyTxData } from '@ethereumjs/tx' +import { defaultAbiCoder as AbiCoder, Interface } from '@ethersproject/abi' + +import type { LegacyTxData } from '@ethereumjs/tx' export const encodeFunction = ( method: string, params?: { types: any[] values: unknown[] - } + }, ): string => { const parameters = params?.types ?? [] const methodWithParameters = `function ${method}(${parameters.join(',')})` @@ -21,7 +22,7 @@ export const encodeDeployment = ( params?: { types: any[] values: unknown[] - } + }, ) => { const deploymentData = '0x' + bytecode if (params) { diff --git a/packages/vm/examples/run-blockchain.ts b/packages/vm/examples/run-blockchain.ts index b50a1d44e3..92aa4b6f69 100644 --- a/packages/vm/examples/run-blockchain.ts +++ b/packages/vm/examples/run-blockchain.ts @@ -6,31 +6,25 @@ // 4. Puts the blocks from ../utils/blockchain-mock-data "blocks" attribute into the Blockchain // 5. Runs the Blockchain on the VM. 
+import { createBlockFromBlockData, createBlockFromRLPSerializedBlock } from '@ethereumjs/block' +import { EthashConsensus, createBlockchain } from '@ethereumjs/blockchain' +import { Common, ConsensusAlgorithm, ConsensusType } from '@ethereumjs/common' +import { Ethash } from '@ethereumjs/ethash' import { Address, - toBytes, - setLengthLeft, bytesToHex, - hexToBytes, createAccount, + hexToBytes, + setLengthLeft, + toBytes, } from '@ethereumjs/util' -import { - Block, - createBlockFromBlockData, - createBlockFromRLPSerializedBlock, -} from '@ethereumjs/block' -import { - Blockchain, - ConsensusDict, - createBlockchain, - EthashConsensus, -} from '@ethereumjs/blockchain' -import { Common, ConsensusAlgorithm, ConsensusType } from '@ethereumjs/common' -import { Ethash } from '@ethereumjs/ethash' -import { runBlock, VM } from '@ethereumjs/vm' +import { VM, runBlock } from '@ethereumjs/vm' import testData from './helpers/blockchain-mock-data.json' +import type { Block } from '@ethereumjs/block' +import type { Blockchain, ConsensusDict } from '@ethereumjs/blockchain' + async function main() { const common = new Common({ chain: 1, hardfork: testData.network.toLowerCase() }) const validatePow = common.consensusType() === ConsensusType.ProofOfWork @@ -54,7 +48,7 @@ async function main() { await putBlocks(blockchain, common, testData) - await blockchain.iterator('vm', async (block: Block, reorg: boolean) => { + await blockchain.iterator('vm', async (block: Block, _reorg: boolean) => { const parentBlock = await blockchain!.getBlock(block.header.parentHash) const parentState = parentBlock.header.stateRoot // run block diff --git a/packages/vm/examples/run-solidity-contract.ts b/packages/vm/examples/run-solidity-contract.ts index 4e7b928c00..efb53c03dd 100644 --- a/packages/vm/examples/run-solidity-contract.ts +++ b/packages/vm/examples/run-solidity-contract.ts @@ -2,12 +2,13 @@ import { createBlockFromBlockData } from '@ethereumjs/block' import { Chain, Common, Hardfork } from '@ethereumjs/common' import { createLegacyTx } from '@ethereumjs/tx' import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' -import { runTx, VM } from '@ethereumjs/vm' +import { VM, runTx } from '@ethereumjs/vm' import { defaultAbiCoder as AbiCoder, Interface } from '@ethersproject/abi' import { readFileSync } from 'fs' import path from 'path' import solc from 'solc' import { fileURLToPath } from 'url' + import { getAccountNonce, insertAccount } from './helpers/account-utils.js' import { buildTransaction, encodeDeployment, encodeFunction } from './helpers/tx-builder.js' @@ -65,7 +66,7 @@ function compileContracts() { let compilationFailed = false - if (output.errors) { + if (output.errors !== undefined) { for (const error of output.errors) { if (error.severity === 'error') { console.error(error.formattedMessage) @@ -91,7 +92,7 @@ async function deployContract( vm: VM, senderPrivateKey: Uint8Array, deploymentBytecode: string, - greeting: string + greeting: string, ): Promise
{ // Contracts are deployed by sending their deployment bytecode to the address 0 // The contract params should be abi-encoded and appended to the deployment bytecode. @@ -120,7 +121,7 @@ async function setGreeting( vm: VM, senderPrivateKey: Uint8Array, contractAddress: Address, - greeting: string + greeting: string, ) { const data = encodeFunction('setGreeting', { types: ['string'], @@ -147,7 +148,7 @@ async function getGreeting(vm: VM, contractAddress: Address, caller: Address) { const greetResult = await vm.evm.runCall({ to: contractAddress, - caller: caller, + caller, origin: caller, // The tx.origin is also the caller here data: hexToBytes(sigHash), block, @@ -194,7 +195,7 @@ async function main() { if (greeting !== INITIAL_GREETING) throw new Error( - `initial greeting not equal, received ${greeting}, expected ${INITIAL_GREETING}` + `initial greeting not equal, received ${greeting}, expected ${INITIAL_GREETING}`, ) console.log('Changing greeting...') diff --git a/packages/vm/examples/runGoerliBlock.ts b/packages/vm/examples/runGoerliBlock.ts index 4f821fcdcb..0acc1c9bbe 100644 --- a/packages/vm/examples/runGoerliBlock.ts +++ b/packages/vm/examples/runGoerliBlock.ts @@ -1,9 +1,11 @@ -import { Block, createBlockFromRPC } from '@ethereumjs/block' +import { createBlockFromRPC } from '@ethereumjs/block' import { Chain, Common } from '@ethereumjs/common' -import { bytesToHex, hexToBytes } from '@ethereumjs/util' +import { bytesToHex } from '@ethereumjs/util' + +import { runBlock } from '../src/index.js' import { VM } from '../src/vm.js' + import goerliBlock2 from './testData/goerliBlock2.json' -import { runBlock } from '../src/index.js' const main = async () => { const common = new Common({ chain: Chain.Goerli, hardfork: 'london' }) @@ -14,4 +16,4 @@ const main = async () => { console.log(`The state root for Goerli block 2 is ${bytesToHex(result.stateRoot)}`) } -main() +void main() diff --git a/packages/vm/examples/runTx.ts b/packages/vm/examples/runTx.ts index cb76ff24f6..09bec35f7a 100644 --- a/packages/vm/examples/runTx.ts +++ b/packages/vm/examples/runTx.ts @@ -1,7 +1,7 @@ import { Chain, Common, Hardfork } from '@ethereumjs/common' import { createLegacyTx } from '@ethereumjs/tx' import { Address } from '@ethereumjs/util' -import { runTx, VM } from '@ethereumjs/vm' +import { VM, runTx } from '@ethereumjs/vm' const main = async () => { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) @@ -20,4 +20,4 @@ const main = async () => { console.log(res.totalGasSpent) // 21000n - gas cost for simple ETH transfer } -main() +void main() diff --git a/packages/vm/examples/vmWith4844.ts b/packages/vm/examples/vmWith4844.ts index 24e68d436e..b751850d84 100644 --- a/packages/vm/examples/vmWith4844.ts +++ b/packages/vm/examples/vmWith4844.ts @@ -1,4 +1,5 @@ -import { Common, Chain, Hardfork } from '@ethereumjs/common' +import { Chain, Common, Hardfork } from '@ethereumjs/common' + import { VM } from '../src/vm.js' const main = async () => { @@ -7,4 +8,4 @@ const main = async () => { console.log(`4844 is active in the VM - ${vm.common.isActivatedEIP(4844)}`) } -main() +void main() diff --git a/packages/vm/examples/vmWithEIPs.ts b/packages/vm/examples/vmWithEIPs.ts index 3720a66a52..6446dd37c3 100644 --- a/packages/vm/examples/vmWithEIPs.ts +++ b/packages/vm/examples/vmWithEIPs.ts @@ -6,4 +6,4 @@ const main = async () => { const vm = await VM.create({ common }) console.log(`EIP 3074 is active in the VM - ${vm.common.isActivatedEIP(3074)}`) } -main() +void main() diff --git 
a/packages/vm/examples/vmWithGenesisState.ts b/packages/vm/examples/vmWithGenesisState.ts index 387bd11953..c8c9e1d21b 100644 --- a/packages/vm/examples/vmWithGenesisState.ts +++ b/packages/vm/examples/vmWithGenesisState.ts @@ -10,12 +10,12 @@ const main = async () => { const blockchain = await createBlockchain({ genesisState }) const vm = await VM.create({ blockchain, genesisState }) const account = await vm.stateManager.getAccount( - Address.fromString('0x000d836201318ec6899a67540690382780743280') + Address.fromString('0x000d836201318ec6899a67540690382780743280'), ) console.log( `This balance for account 0x000d836201318ec6899a67540690382780743280 in this chain's genesis state is ${Number( - account?.balance - )}` + account?.balance, + )}`, ) } -main() +void main() diff --git a/packages/vm/src/buildBlock.ts b/packages/vm/src/buildBlock.ts index 8c89a53ba7..4e79c1e197 100644 --- a/packages/vm/src/buildBlock.ts +++ b/packages/vm/src/buildBlock.ts @@ -210,7 +210,7 @@ export class BlockBuilder { */ async addTransaction( tx: TypedTransaction, - { skipHardForkValidation }: { skipHardForkValidation?: boolean } = {} + { skipHardForkValidation }: { skipHardForkValidation?: boolean } = {}, ) { this.checkStatus() diff --git a/packages/vm/src/emitEVMProfile.ts b/packages/vm/src/emitEVMProfile.ts index 819874c027..c06140ca3a 100644 --- a/packages/vm/src/emitEVMProfile.ts +++ b/packages/vm/src/emitEVMProfile.ts @@ -111,7 +111,7 @@ export function emitEVMProfile(logs: EVMPerformanceLogOutput[], profileTitle: st console.log( `+== Calls: ${calls}, Total time: ${ Math.round(totalMs * 1e3) / 1e3 - }ms, Total gas: ${totalGas}, MGas/s: ${mGasSAvg}, Blocks per Slot (BpS): ${bpSAvg} ==+` + }ms, Total gas: ${totalGas}, MGas/s: ${mGasSAvg}, Blocks per Slot (BpS): ${bpSAvg} ==+`, ) // Generate and write the header diff --git a/packages/vm/src/requests.ts b/packages/vm/src/requests.ts index 36c4214262..10100698c0 100644 --- a/packages/vm/src/requests.ts +++ b/packages/vm/src/requests.ts @@ -25,7 +25,7 @@ import type { CLRequest, CLRequestType } from '@ethereumjs/util' */ export const accumulateRequests = async ( vm: VM, - txResults: RunTxResult[] + txResults: RunTxResult[], ): Promise[]> => { const requests: CLRequest[] = [] const common = vm.common @@ -58,12 +58,12 @@ export const accumulateRequests = async ( const accumulateEIP7002Requests = async ( vm: VM, - requests: CLRequest[] + requests: CLRequest[], ): Promise => { // Partial withdrawals logic const addressBytes = setLengthLeft( bigIntToBytes(vm.common.param('withdrawalRequestPredeployAddress')), - 20 + 20, ) const withdrawalsAddress = Address.fromString(bytesToHex(addressBytes)) @@ -71,7 +71,7 @@ const accumulateEIP7002Requests = async ( if (code.length === 0) { throw new Error( - 'Attempt to accumulate EIP-7002 requests failed: the contract does not exist. Ensure the deployment tx has been run, or that the required contract code is stored' + 'Attempt to accumulate EIP-7002 requests failed: the contract does not exist. 
Ensure the deployment tx has been run, or that the required contract code is stored', ) } @@ -108,12 +108,12 @@ const accumulateEIP7002Requests = async ( const accumulateEIP7251Requests = async ( vm: VM, - requests: CLRequest[] + requests: CLRequest[], ): Promise => { // Partial withdrawals logic const addressBytes = setLengthLeft( bigIntToBytes(vm.common.param('consolidationRequestPredeployAddress')), - 20 + 20, ) const consolidationsAddress = Address.fromString(bytesToHex(addressBytes)) @@ -121,7 +121,7 @@ const accumulateEIP7251Requests = async ( if (code.length === 0) { throw new Error( - 'Attempt to accumulate EIP-7251 requests failed: the contract does not exist. Ensure the deployment tx has been run, or that the required contract code is stored' + 'Attempt to accumulate EIP-7251 requests failed: the contract does not exist. Ensure the deployment tx has been run, or that the required contract code is stored', ) } @@ -145,7 +145,7 @@ const accumulateEIP7251Requests = async ( const sourcePubkey = slicedBytes.slice(20, 68) // 48 Bytes const targetPubkey = slicedBytes.slice(68, 116) // 48 bytes requests.push( - ConsolidationRequest.fromRequestData({ sourceAddress, sourcePubkey, targetPubkey }) + ConsolidationRequest.fromRequestData({ sourceAddress, sourcePubkey, targetPubkey }), ) } } @@ -161,7 +161,7 @@ const accumulateEIP7251Requests = async ( const accumulateDeposits = async ( depositContractAddress: string, txResults: RunTxResult[], - requests: CLRequest[] + requests: CLRequest[], ) => { for (const [_, tx] of txResults.entries()) { for (let i = 0; i < tx.receipt.logs.length; i++) { @@ -179,7 +179,7 @@ const accumulateDeposits = async ( const pubKeySize = bytesToInt(log[2].slice(pubKeyIdx, pubKeyIdx + 32)) const withdrawalCredsIdx = bytesToInt(log[2].slice(32, 64)) const withdrawalCredsSize = bytesToInt( - log[2].slice(withdrawalCredsIdx, withdrawalCredsIdx + 32) + log[2].slice(withdrawalCredsIdx, withdrawalCredsIdx + 32), ) const amountIdx = bytesToInt(log[2].slice(64, 96)) const amountSize = bytesToInt(log[2].slice(amountIdx, amountIdx + 32)) @@ -190,7 +190,7 @@ const accumulateDeposits = async ( const pubkey = log[2].slice(pubKeyIdx + 32, pubKeyIdx + 32 + pubKeySize) const withdrawalCredentials = log[2].slice( withdrawalCredsIdx + 32, - withdrawalCredsIdx + 32 + withdrawalCredsSize + withdrawalCredsIdx + 32 + withdrawalCredsSize, ) const amountBytes = log[2].slice(amountIdx + 32, amountIdx + 32 + amountSize) const amountBytesBigEndian = new Uint8Array([ @@ -228,7 +228,7 @@ const accumulateDeposits = async ( amount, signature, index, - }) + }), ) } } diff --git a/packages/vm/src/runBlock.ts b/packages/vm/src/runBlock.ts index 5365822f59..3715a65d78 100644 --- a/packages/vm/src/runBlock.ts +++ b/packages/vm/src/runBlock.ts @@ -51,7 +51,7 @@ import type { CLRequest, CLRequestType, PrefixedHexString } from '@ethereumjs/ut const debug = debugDefault('vm:block') const parentBeaconBlockRootAddress = Address.fromString( - '0x000F3df6D732807Ef1319fB7B8bB8522d0Beac02' + '0x000F3df6D732807Ef1319fB7B8bB8522d0Beac02', ) let enableProfiler = false @@ -126,7 +126,7 @@ export async function runBlock(vm: VM, opts: RunBlockOpts): Promise { function calculateOmmerReward( ommerBlockNumber: bigint, blockNumber: bigint, - minerReward: bigint + minerReward: bigint, ): bigint { const heightDiff = blockNumber - ommerBlockNumber let reward = ((BIGINT_8 - heightDiff) * minerReward) / BIGINT_8 @@ -734,7 +734,7 @@ export async function rewardAccount( evm: EVMInterface, address: Address, reward: bigint, - 
common?: Common + common?: Common, ): Promise { let account = await evm.stateManager.getAccount(address) if (account === undefined) { @@ -752,7 +752,7 @@ export async function rewardAccount( // use vm utility to build access but the computed gas is not charged and hence free ;(evm.stateManager as StatelessVerkleStateManager).accessWitness!.touchTxTargetAndComputeGas( address, - { sendsValue: true } + { sendsValue: true }, ) } return account diff --git a/packages/vm/src/runTx.ts b/packages/vm/src/runTx.ts index 40c58cdc5a..128860c0fa 100644 --- a/packages/vm/src/runTx.ts +++ b/packages/vm/src/runTx.ts @@ -72,7 +72,7 @@ const entireTxLabel = 'Entire tx' */ function execHardfork( hardfork: Hardfork | string, - preMergeHf: Hardfork | string + preMergeHf: Hardfork | string, ): string | Hardfork { return hardfork !== Hardfork.Paris ? hardfork : preMergeHf } @@ -160,7 +160,7 @@ export async function runTx(vm: VM, opts: RunTxOpts): Promise { 'Cannot run transaction: EIP 2930 is not activated.', vm, opts.block, - opts.tx + opts.tx, ) throw new Error(msg) } @@ -170,7 +170,7 @@ export async function runTx(vm: VM, opts: RunTxOpts): Promise { 'Cannot run transaction: EIP 1559 is not activated.', vm, opts.block, - opts.tx + opts.tx, ) throw new Error(msg) } @@ -250,7 +250,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { debug( `New tx run hash=${ opts.tx.isSigned() ? bytesToHex(opts.tx.hash()) : 'unsigned' - } sender=${caller}` + } sender=${caller}`, ) } @@ -276,11 +276,11 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { if (gasLimit < intrinsicGas) { const msg = _errorMsg( `tx gas limit ${Number(gasLimit)} is lower than the minimum gas limit of ${Number( - intrinsicGas + intrinsicGas, )}`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -302,7 +302,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { } (${maxFeePerGas}) is less than the block's baseFeePerGas (${baseFeePerGas})`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -343,7 +343,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { `sender doesn't have enough funds to send tx. The upfront cost is: ${upFrontCost} and the sender's account (${caller}) only has: ${balance}`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -378,7 +378,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { `Block option must be supplied to compute blob gas price`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -388,7 +388,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { `Transaction's maxFeePerBlobGas ${castTx.maxFeePerBlobGas}) is less than block blobGasPrice (${blobGasPrice}).`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -404,7 +404,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { `sender doesn't have enough funds to send tx. The max cost is: ${maxCost} and the sender's account (${caller}) only has: ${balance}`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -416,7 +416,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { `the tx doesn't have the correct nonce. account has nonce of: ${nonce} tx has nonce of: ${tx.nonce}`, vm, block, - tx + tx, ) throw new Error(msg) } @@ -523,7 +523,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { tx.isSigned() ? bytesToHex(tx.hash()) : 'unsigned' } with caller=${caller} gasLimit=${gasLimit} to=${ to?.toString() ?? 
'none' - } value=${value} data=${short(data)}` + } value=${value} data=${short(data)}`, ) } @@ -562,7 +562,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { debug( `Received tx execResult: [ executionGasUsed=${executionGasUsed} exceptionError=${ exceptionError !== undefined ? `'${exceptionError.error}'` : 'none' - } returnValue=${short(returnValue)} gasRefund=${results.gasRefund ?? 0} ]` + } returnValue=${short(returnValue)} gasRefund=${results.gasRefund ?? 0} ]`, ) } @@ -615,7 +615,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { await vm.evm.journal.putAccount(caller, fromAccount) if (vm.DEBUG) { debug( - `Refunded txCostDiff (${txCostDiff}) to fromAccount (caller) balance (-> ${fromAccount.balance})` + `Refunded txCostDiff (${txCostDiff}) to fromAccount (caller) balance (-> ${fromAccount.balance})`, ) } @@ -744,7 +744,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { results, cumulativeGasUsed, totalblobGas, - blobGasPrice + blobGasPrice, ) if (enableProfiler) { @@ -765,7 +765,7 @@ async function _runTx(vm: VM, opts: RunTxOpts): Promise { debug( `tx run finished hash=${ opts.tx.isSigned() ? bytesToHex(opts.tx.hash()) : 'unsigned' - } sender=${caller}` + } sender=${caller}`, ) } @@ -808,7 +808,7 @@ export async function generateTxReceipt( txResult: RunTxResult, cumulativeGasUsed: bigint, blobGasUsed?: bigint, - blobGasPrice?: bigint + blobGasPrice?: bigint, ): Promise { const baseReceipt: BaseTxReceipt = { cumulativeBlockGasUsed: cumulativeGasUsed, @@ -823,7 +823,7 @@ export async function generateTxReceipt( tx.type } cumulativeBlockGasUsed=${cumulativeGasUsed} bitvector=${short(baseReceipt.bitvector)} (${ baseReceipt.bitvector.length - } bytes) logs=${baseReceipt.logs.length}` + } bytes) logs=${baseReceipt.logs.length}`, ) } diff --git a/packages/vm/src/vm.ts b/packages/vm/src/vm.ts index b41b7e8f12..4fce2f5cc3 100644 --- a/packages/vm/src/vm.ts +++ b/packages/vm/src/vm.ts @@ -96,7 +96,7 @@ export class VM { const profilerOpts = opts.profilerOpts if (profilerOpts.reportAfterBlock === true && profilerOpts.reportAfterTx === true) { throw new Error( - 'Cannot have `reportProfilerAfterBlock` and `reportProfilerAfterTx` set to `true` at the same time' + 'Cannot have `reportProfilerAfterBlock` and `reportProfilerAfterTx` set to `true` at the same time', ) } } @@ -171,7 +171,7 @@ export class VM { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false } /** diff --git a/packages/vm/test/api/EIPs/eip-1153.spec.ts b/packages/vm/test/api/EIPs/eip-1153.spec.ts index f27569126c..894cfc57b0 100644 --- a/packages/vm/test/api/EIPs/eip-1153.spec.ts +++ b/packages/vm/test/api/EIPs/eip-1153.spec.ts @@ -32,13 +32,13 @@ describe('EIP 1153: transient storage', () => { assert.equal( step.opcode.name, test.steps[i].expectedOpcode, - `Expected Opcode: ${test.steps[i].expectedOpcode}` + `Expected Opcode: ${test.steps[i].expectedOpcode}`, ) assert.deepEqual( step.stack.map((e: bigint) => e.toString()), test.steps[i].expectedStack.map((e: bigint) => e.toString()), - `Expected stack: ${step.stack}` + `Expected stack: ${step.stack}`, ) if (i > 0) { @@ -47,7 +47,7 @@ describe('EIP 1153: transient storage', () => { gasUsed === expectedGasUsed, `Opcode: ${ test.steps[i - 1].expectedOpcode - }, Gas Used: ${gasUsed}, Expected: ${expectedGasUsed}` + }, Gas Used: ${gasUsed}, Expected: ${expectedGasUsed}`, ) } i++ diff --git a/packages/vm/test/api/EIPs/eip-1559-FeeMarket.spec.ts b/packages/vm/test/api/EIPs/eip-1559-FeeMarket.spec.ts index 406f5fa7a3..6a4d50708e 100644 --- a/packages/vm/test/api/EIPs/eip-1559-FeeMarket.spec.ts +++ b/packages/vm/test/api/EIPs/eip-1559-FeeMarket.spec.ts @@ -63,7 +63,7 @@ function makeBlock(baseFee: bigint, transaction: TypedTransaction, txType: Trans }, transactions: [json], }, - { common } + { common }, ) return block } @@ -79,7 +79,7 @@ describe('EIP1559 tests', () => { }, { common, - } + }, ) const block = makeBlock(GWEI, tx, 2) const vm = await VM.create({ common }) @@ -116,7 +116,7 @@ describe('EIP1559 tests', () => { gasPrice: GWEI * BigInt(5), to: Address.zero(), }, - { common } + { common }, ) const block2 = makeBlock(GWEI, tx2, 1) await vm.stateManager.modifyAccountFields(sender, { balance }) @@ -144,7 +144,7 @@ describe('EIP1559 tests', () => { gasPrice: GWEI * BigInt(5), to: Address.zero(), }, - { common } + { common }, ) const block3 = makeBlock(GWEI, tx3, 0) await vm.stateManager.modifyAccountFields(sender, { balance }) @@ -178,7 +178,7 @@ describe('EIP1559 tests', () => { }, { common, - } + }, ) const block = makeBlock(GWEI, tx, 2) const vm = await VM.create({ common }) diff --git a/packages/vm/test/api/EIPs/eip-2565-modexp-gas-cost.spec.ts b/packages/vm/test/api/EIPs/eip-2565-modexp-gas-cost.spec.ts index 1a11905235..07a61287dd 100644 --- a/packages/vm/test/api/EIPs/eip-2565-modexp-gas-cost.spec.ts +++ b/packages/vm/test/api/EIPs/eip-2565-modexp-gas-cost.spec.ts @@ -26,7 +26,7 @@ describe('EIP-2565 ModExp gas cost tests', () => { if (result.execResult.executionGasUsed !== BigInt(test.Gas)) { assert.fail( - `[${testName}]: Gas usage incorrect, expected ${test.Gas}, got ${result.execResult.executionGasUsed}` + `[${testName}]: Gas usage incorrect, expected ${test.Gas}, got ${result.execResult.executionGasUsed}`, ) continue } @@ -40,7 +40,7 @@ describe('EIP-2565 ModExp gas cost tests', () => { assert.fail( `[${testName}]: Return value not the expected value (expected: ${ test.Expected - }, received: ${bytesToHex(result.execResult.returnValue)})` + }, received: ${bytesToHex(result.execResult.returnValue)})`, ) continue } diff --git a/packages/vm/test/api/EIPs/eip-2929.spec.ts b/packages/vm/test/api/EIPs/eip-2929.spec.ts index 91f3a7baca..e1c0f4967d 100644 --- a/packages/vm/test/api/EIPs/eip-2929.spec.ts +++ b/packages/vm/test/api/EIPs/eip-2929.spec.ts @@ -26,7 +26,7 @@ describe('EIP 2929: gas cost tests', () => { assert.equal( step.opcode.name, test.steps[i].expectedOpcode, - `Expected Opcode: 
${test.steps[i].expectedOpcode}` + `Expected Opcode: ${test.steps[i].expectedOpcode}`, ) // Validates the gas consumption of the (i - 1)th opcode @@ -40,7 +40,7 @@ describe('EIP 2929: gas cost tests', () => { gasUsed === expectedGasUsed, `Opcode: ${ test.steps[i - 1].expectedOpcode - }, Gas Used: ${gasUsed}, Expected: ${expectedGasUsed}` + }, Gas Used: ${gasUsed}, Expected: ${expectedGasUsed}`, ) } } @@ -66,7 +66,7 @@ describe('EIP 2929: gas cost tests', () => { const runCodeTest = async function (code: PrefixedHexString, expectedGasUsed: bigint) { // setup the accounts for this test const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const contractAddress = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) @@ -90,7 +90,7 @@ describe('EIP 2929: gas cost tests', () => { const account = await vm.stateManager.getAccount(address) await vm.stateManager.putAccount( address, - createAccount({ ...account, balance: initialBalance }) + createAccount({ ...account, balance: initialBalance }), ) const result = await runTx(vm, { tx, skipHardForkValidation: true }) @@ -275,7 +275,7 @@ describe('EIP 2929: gas cost tests', () => { // call to contract, call 0xFF..FF, revert, call 0xFF..FF (should be cold) await runCodeTest( `0x341515601557${callFF}600080FD5B600080808080305AF1${callFF}00`, - BigInt(26414) + BigInt(26414), ) }) }) diff --git a/packages/vm/test/api/EIPs/eip-2930-accesslists.spec.ts b/packages/vm/test/api/EIPs/eip-2930-accesslists.spec.ts index 379696aede..042377e11b 100644 --- a/packages/vm/test/api/EIPs/eip-2930-accesslists.spec.ts +++ b/packages/vm/test/api/EIPs/eip-2930-accesslists.spec.ts @@ -33,7 +33,7 @@ describe('EIP-2930 Optional Access Lists tests', () => { gasLimit: BigInt(100000), to: contractAddress, }, - { common } + { common }, ).sign(privateKey) const txnWithoutAccessList = create2930AccessListTx( { @@ -42,7 +42,7 @@ describe('EIP-2930 Optional Access Lists tests', () => { gasLimit: BigInt(100000), to: contractAddress, }, - { common } + { common }, ).sign(privateKey) const vm = await VM.create({ common }) @@ -56,7 +56,7 @@ describe('EIP-2930 Optional Access Lists tests', () => { const account = await vm.stateManager.getAccount(address) await vm.stateManager.putAccount( address, - createAccount({ ...account, balance: initialBalance }) + createAccount({ ...account, balance: initialBalance }), ) let trace: any = [] diff --git a/packages/vm/test/api/EIPs/eip-2935-historical-block-hashes.spec.ts b/packages/vm/test/api/EIPs/eip-2935-historical-block-hashes.spec.ts index ad658433fc..b312f056c2 100644 --- a/packages/vm/test/api/EIPs/eip-2935-historical-block-hashes.spec.ts +++ b/packages/vm/test/api/EIPs/eip-2935-historical-block-hashes.spec.ts @@ -192,7 +192,7 @@ describe('EIP 2935: historical block hashes', () => { const storage = await vm.stateManager.getContractStorage( historyAddress, - setLengthLeft(bigIntToBytes(BigInt(0)), 32) + setLengthLeft(bigIntToBytes(BigInt(0)), 32), ) assert.ok(equalsBytes(storage, genesis.hash())) }) @@ -248,7 +248,7 @@ describe('EIP 2935: historical block hashes', () => { const block = await blockchain.getBlock(i) const storage = await vm.stateManager.getContractStorage( historyAddress, - setLengthLeft(bigIntToBytes(BigInt(i) % historyServeWindow), 32) + setLengthLeft(bigIntToBytes(BigInt(i) % historyServeWindow), 32), ) // we will evaluate on lastBlock where 7709 is active and BLOCKHASH @@ -288,7 +288,7 @@ 
describe('EIP 2935: historical block hashes', () => { number: blocksToBuild, }, }, - { common } + { common }, ) // should be able to resolve blockhash via contract code but from the blocksActivation -1 onwards diff --git a/packages/vm/test/api/EIPs/eip-3074-authcall.spec.ts b/packages/vm/test/api/EIPs/eip-3074-authcall.spec.ts index f5913e816d..e912290e6d 100644 --- a/packages/vm/test/api/EIPs/eip-3074-authcall.spec.ts +++ b/packages/vm/test/api/EIPs/eip-3074-authcall.spec.ts @@ -41,7 +41,7 @@ const block = createBlockFromBlockData( baseFeePerGas: BigInt(7), }, }, - { common } + { common }, ) const callerPrivateKey = hexToBytes(`0x${'44'.repeat(32)}`) @@ -71,7 +71,7 @@ function signMessage( commitUnpadded: Uint8Array, address: Address, privateKey: Uint8Array, - nonce: bigint = BIGINT_0 + nonce: bigint = BIGINT_0, ) { const commit = setLengthLeft(commitUnpadded, 32) const paddedInvokerAddress = setLengthLeft(address.bytes, 32) @@ -82,7 +82,7 @@ function signMessage( chainId, noncePadded, paddedInvokerAddress, - commit + commit, ) const msgHash = keccak256(message) return ecsign(msgHash, privateKey) @@ -99,7 +99,7 @@ function getAuthCode( commitUnpadded: Uint8Array, signature: ECDSASignature, address: Address, - msizeBuffer?: Uint8Array + msizeBuffer?: Uint8Array, ) { const commit = setLengthLeft(commitUnpadded, 32) let v: Uint8Array @@ -154,7 +154,7 @@ function getAuthCode( hexToBytes('0x6000'), PUSH32, addressBuffer, - AUTH + AUTH, ) } @@ -180,7 +180,7 @@ function MSTORE(position: Uint8Array, value: Uint8Array) { setLengthLeft(value, 32), hexToBytes('0x7F'), setLengthLeft(position, 32), - hexToBytes('0x52') + hexToBytes('0x52'), ) } @@ -361,7 +361,7 @@ describe('EIP-3074 AUTH', () => { const code = concatBytes( getAuthCode(message, signature, authAddress), getAuthCode(message, signature2, callerAddress), - RETURNTOP + RETURNTOP, ) await vm.stateManager.putContractCode(contractAddress, code) @@ -387,7 +387,7 @@ describe('EIP-3074 AUTH', () => { const signature = signMessage(message, contractAddress, privateKey) const code = concatBytes( getAuthCode(message, signature, authAddress, hexToBytes('0x60')), - RETURNTOP + RETURNTOP, ) await vm.stateManager.putContractCode(contractAddress, code) @@ -430,13 +430,13 @@ describe('EIP-3074 AUTH', () => { assert.deepEqual( result.execResult.returnValue.slice(31), hexToBytes('0x80'), - 'reported msize is correct' + 'reported msize is correct', ) const gas = result.execResult.executionGasUsed const code2 = concatBytes( getAuthCode(message, signature, authAddress, hexToBytes('0x90')), - RETURNMEMSIZE + RETURNMEMSIZE, ) await vm.stateManager.putContractCode(contractAddress, code2) @@ -454,7 +454,7 @@ describe('EIP-3074 AUTH', () => { assert.deepEqual( result2.execResult.returnValue.slice(31), hexToBytes('0xa0'), - 'reported msize is correct' + 'reported msize is correct', ) assert.ok(result2.execResult.executionGasUsed > gas, 'charged more gas for memory expansion') }) @@ -481,7 +481,7 @@ describe('EIP-3074 AUTHCALL', () => { getAuthCallCode({ address: contractStorageAddress, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -508,7 +508,7 @@ describe('EIP-3074 AUTHCALL', () => { getAuthCallCode({ address: contractStorageAddress, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -529,7 +529,7 @@ describe('EIP-3074 AUTHCALL', () => { const gasUsed = await vm.stateManager.getContractStorage( contractStorageAddress, - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) const gasBigInt = 
bytesToBigInt(gasUsed) const preGas = @@ -549,7 +549,7 @@ describe('EIP-3074 AUTHCALL', () => { getAuthCallCode({ address: contractStorageAddress, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -570,7 +570,7 @@ describe('EIP-3074 AUTHCALL', () => { const gasUsed = await vm.stateManager.getContractStorage( contractStorageAddress, - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) const gasBigInt = bytesToBigInt(gasUsed) const preGas = gas! - common.param('warmstoragereadGas')! @@ -587,7 +587,7 @@ describe('EIP-3074 AUTHCALL', () => { address: new Address(hexToBytes(`0x${'cc'.repeat(20)}`)), value: 1n, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) const account = new Account(BIGINT_0, BIGINT_1) @@ -632,7 +632,7 @@ describe('EIP-3074 AUTHCALL', () => { address: contractStorageAddress, value: 1n, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) const authAccount = new Account(BIGINT_0, BIGINT_1) @@ -659,7 +659,7 @@ describe('EIP-3074 AUTHCALL', () => { const gasUsed = await vm.stateManager.getContractStorage( contractStorageAddress, - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) const gasBigInt = bytesToBigInt(gasUsed) const preGas = @@ -691,7 +691,7 @@ describe('EIP-3074 AUTHCALL', () => { address: contractStorageAddress, value: 1n, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -715,7 +715,7 @@ describe('EIP-3074 AUTHCALL', () => { getAuthCallCode({ address: contractStorageAddress, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -729,7 +729,7 @@ describe('EIP-3074 AUTHCALL', () => { assert.equal( result.execResult.exceptionError?.error, EVMErrorMessage.AUTHCALL_UNSET, - 'threw with right error' + 'threw with right error', ) assert.equal(result.amountSpent, tx.gasPrice * tx.gasLimit, 'spent all gas') }) @@ -751,7 +751,7 @@ describe('EIP-3074 AUTHCALL', () => { getAuthCallCode({ address: contractStorageAddress, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -765,7 +765,7 @@ describe('EIP-3074 AUTHCALL', () => { assert.equal( result.execResult.exceptionError?.error, EVMErrorMessage.AUTHCALL_UNSET, - 'threw with right error' + 'threw with right error', ) assert.equal(result.amountSpent, tx.gasPrice * tx.gasLimit, 'spent all gas') }) @@ -779,7 +779,7 @@ describe('EIP-3074 AUTHCALL', () => { address: contractStorageAddress, gasLimit: 10000000n, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -794,7 +794,7 @@ describe('EIP-3074 AUTHCALL', () => { assert.equal( result.execResult.exceptionError?.error, EVMErrorMessage.OUT_OF_GAS, - 'correct error type' + 'correct error type', ) }) @@ -807,7 +807,7 @@ describe('EIP-3074 AUTHCALL', () => { address: contractStorageAddress, gasLimit: 700000n, }), - RETURNTOP + RETURNTOP, ) const vm = await setupVM(code) @@ -820,7 +820,7 @@ describe('EIP-3074 AUTHCALL', () => { await runTx(vm, { tx, block, skipHardForkValidation: true }) const gas = await vm.stateManager.getContractStorage( contractStorageAddress, - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) const gasBigInt = bytesToBigInt(gas) assert.equal(gasBigInt, BigInt(700000 - 2), 'forwarded the right amount of gas') // The 2 is subtracted due to the GAS opcode base fee @@ -840,7 +840,7 @@ describe('EIP-3074 AUTHCALL', () => { retOffset: 64n, retLength: 32n, }), - hexToBytes('0x60206040F3') // PUSH 32 PUSH 64 RETURN -> This returns the 32 bytes at memory position 64 + hexToBytes('0x60206040F3'), // 
PUSH 32 PUSH 64 RETURN -> This returns the 32 bytes at memory position 64 ) const vm = await setupVM(code) @@ -853,7 +853,7 @@ describe('EIP-3074 AUTHCALL', () => { const result = await runTx(vm, { tx, block, skipHardForkValidation: true }) const callInput = await vm.stateManager.getContractStorage( contractStorageAddress, - hexToBytes(`0x${'00'.repeat(31)}02`) + hexToBytes(`0x${'00'.repeat(31)}02`), ) assert.deepEqual(callInput, input, 'authcall input ok') assert.deepEqual(result.execResult.returnValue, input, 'authcall output ok') diff --git a/packages/vm/test/api/EIPs/eip-3198-BaseFee.spec.ts b/packages/vm/test/api/EIPs/eip-3198-BaseFee.spec.ts index 1557b8fd7b..8c8562b133 100644 --- a/packages/vm/test/api/EIPs/eip-3198-BaseFee.spec.ts +++ b/packages/vm/test/api/EIPs/eip-3198-BaseFee.spec.ts @@ -53,7 +53,7 @@ function makeBlock(baseFee: bigint, transaction: TypedTransaction) { }, transactions: [json], }, - { common } + { common }, ) return block } @@ -72,7 +72,7 @@ describe('EIP3198 tests', () => { }, { common, - } + }, ) const block = makeBlock(fee, tx) const vm = await VM.create({ common }) diff --git a/packages/vm/test/api/EIPs/eip-3529.spec.ts b/packages/vm/test/api/EIPs/eip-3529.spec.ts index 0e7d90cab0..1a80cd793f 100644 --- a/packages/vm/test/api/EIPs/eip-3529.spec.ts +++ b/packages/vm/test/api/EIPs/eip-3529.spec.ts @@ -135,7 +135,7 @@ describe('EIP-3529 tests', () => { await vm.stateManager.putContractStorage( address, key, - hexToBytes(`0x${testCase.original.toString().padStart(64, '0')}`) + hexToBytes(`0x${testCase.original.toString().padStart(64, '0')}`), ) await vm.stateManager.getContractStorage(address, key) diff --git a/packages/vm/test/api/EIPs/eip-3651-warm-coinbase.spec.ts b/packages/vm/test/api/EIPs/eip-3651-warm-coinbase.spec.ts index 47583ee3b1..8a43198746 100644 --- a/packages/vm/test/api/EIPs/eip-3651-warm-coinbase.spec.ts +++ b/packages/vm/test/api/EIPs/eip-3651-warm-coinbase.spec.ts @@ -24,7 +24,7 @@ const block = createBlockFromBlockData( coinbase, }, }, - { common } + { common }, ) const code = hexToBytes('0x60008080806001415AF100') @@ -63,7 +63,7 @@ describe('EIP 3651 tests', () => { new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London, - }) + }), ) const result2 = await runTx(vm2, { block, tx, skipHardForkValidation: true }) @@ -71,7 +71,7 @@ describe('EIP 3651 tests', () => { assert.equal( result2.totalGasSpent - result.totalGasSpent, expectedDiff, - 'gas difference is correct' + 'gas difference is correct', ) }) }) diff --git a/packages/vm/test/api/EIPs/eip-3860.spec.ts b/packages/vm/test/api/EIPs/eip-3860.spec.ts index 18fac79da6..5ac7f20567 100644 --- a/packages/vm/test/api/EIPs/eip-3860.spec.ts +++ b/packages/vm/test/api/EIPs/eip-3860.spec.ts @@ -30,18 +30,18 @@ describe('EIP 3860 tests', () => { const tx = create1559FeeMarketTx( { data: `0x7F6000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060005260206000F3${bytesToHex( - bytes + bytes, ).slice(2)}`, gasLimit: 100000000000, maxFeePerGas: 7, nonce: 0, }, - { common: txCommon } + { common: txCommon }, ).sign(pkey) const result = await runTx(vm, { tx }) assert.ok( (result.execResult.exceptionError?.error as string) === 'initcode exceeds max initcode size', - 'initcode exceeds max size' + 'initcode exceeds max size', ) }) }) diff --git a/packages/vm/test/api/EIPs/eip-4399-supplant-difficulty-opcode-with-prevrando.spec.ts b/packages/vm/test/api/EIPs/eip-4399-supplant-difficulty-opcode-with-prevrando.spec.ts index 2d1a1cb9d6..db4327f6ed 100644 --- 
a/packages/vm/test/api/EIPs/eip-4399-supplant-difficulty-opcode-with-prevrando.spec.ts +++ b/packages/vm/test/api/EIPs/eip-4399-supplant-difficulty-opcode-with-prevrando.spec.ts @@ -21,7 +21,7 @@ describe('EIP-4399 -> 0x44 (DIFFICULTY) should return PREVRANDAO', () => { } let block = createBlockFromBlockData( { header }, - { common, calcDifficultyFromHeader: genesis.header } + { common, calcDifficultyFromHeader: genesis.header }, ) // Track stack @@ -48,7 +48,7 @@ describe('EIP-4399 -> 0x44 (DIFFICULTY) should return PREVRANDAO', () => { mixHash: prevRandao, }, }, - { common } + { common }, ) await vm.evm.runCode!({ ...runCodeArgs, block }) assert.equal(stack[0], prevRandao, '0x44 returns PREVRANDAO (Merge)') diff --git a/packages/vm/test/api/EIPs/eip-4788-beaconroot.spec.ts b/packages/vm/test/api/EIPs/eip-4788-beaconroot.spec.ts index c4479ee1b8..1e00713d3c 100644 --- a/packages/vm/test/api/EIPs/eip-4788-beaconroot.spec.ts +++ b/packages/vm/test/api/EIPs/eip-4788-beaconroot.spec.ts @@ -23,8 +23,7 @@ import { } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { VM } from '../../../src' -import { runBlock as runBlockVM } from '../../../src/index.js' +import { VM, runBlock as runBlockVM } from '../../../src/index.js' import type { Block } from '@ethereumjs/block' import type { BigIntLike, PrefixedHexString } from '@ethereumjs/util' @@ -41,7 +40,7 @@ const contractAddress = Address.fromString('0x' + 'c0de'.repeat(10)) function beaconrootBlock( blockroot: bigint, timestamp: BigIntLike, - transactions: Array + transactions: Array, ) { const newTxData = [] @@ -62,7 +61,7 @@ function beaconrootBlock( parentBeaconBlockRoot: root, timestamp, }, - { common, freeze: false } + { common, freeze: false }, ) const block = createBlockFromBlockData( { @@ -72,7 +71,7 @@ function beaconrootBlock( { common, freeze: false, - } + }, ) return block } @@ -144,7 +143,7 @@ async function runBlockTest(input: { const data = setLengthRight( setLengthLeft(bigIntToBytes(timestamp), input.extLeft ?? 32), - input.extRight ?? 32 + input.extRight ?? 
32, ) const block = beaconrootBlock(blockRoot, timestampBlock, [ { diff --git a/packages/vm/test/api/EIPs/eip-4844-blobs.spec.ts b/packages/vm/test/api/EIPs/eip-4844-blobs.spec.ts index efcf4aab0f..4376394f26 100644 --- a/packages/vm/test/api/EIPs/eip-4844-blobs.spec.ts +++ b/packages/vm/test/api/EIPs/eip-4844-blobs.spec.ts @@ -34,7 +34,7 @@ describe('EIP4844 tests', () => { }) const genesisBlock = createBlockFromBlockData( { header: { gasLimit: 50000, parentBeaconBlockRoot: zeros(32) } }, - { common } + { common }, ) const blockchain = await createBlockchain({ genesisBlock, @@ -76,7 +76,7 @@ describe('EIP4844 tests', () => { gasLimit: 0xffffn, to: hexToBytes('0xffb38a7a99e3e2335be83fc74b7faa19d5531243'), }, - { common } + { common }, ) const signedTx = unsignedTx.sign(pk) @@ -87,7 +87,7 @@ describe('EIP4844 tests', () => { assert.equal( bytesToHex(block.transactions[0].hash()), bytesToHex(signedTx.hash()), - 'blob transaction should be same' + 'blob transaction should be same', ) const blobGasPerBlob = common.param('blobGasPerBlob') diff --git a/packages/vm/test/api/EIPs/eip-4895-withdrawals.spec.ts b/packages/vm/test/api/EIPs/eip-4895-withdrawals.spec.ts index 166b55288a..fb313c63bd 100644 --- a/packages/vm/test/api/EIPs/eip-4895-withdrawals.spec.ts +++ b/packages/vm/test/api/EIPs/eip-4895-withdrawals.spec.ts @@ -64,7 +64,7 @@ describe('EIP4895 tests', () => { */ await vm.stateManager.putContractCode( contractAddress, - hexToBytes(`0x73${addresses[0]}3160005260206000F3`) + hexToBytes(`0x73${addresses[0]}3160005260206000F3`), ) const transaction = create1559FeeMarketTx({ @@ -95,16 +95,16 @@ describe('EIP4895 tests', () => { header: { baseFeePerGas: BigInt(7), withdrawalsRoot: hexToBytes( - '0x267414525d22e2be123b619719b92c561f31e0cdd40959148230f5713aecd6b8' + '0x267414525d22e2be123b619719b92c561f31e0cdd40959148230f5713aecd6b8', ), transactionsTrie: hexToBytes( - '0x9a744e8acc2886e5809ff013e3b71bf8ec97f9941cafbd7730834fc8f76391ba' + '0x9a744e8acc2886e5809ff013e3b71bf8ec97f9941cafbd7730834fc8f76391ba', ), }, transactions: [transaction], withdrawals, }, - { common: vm.common } + { common: vm.common }, ) let result: Uint8Array @@ -135,12 +135,12 @@ describe('EIP4895 tests', () => { assert.equal( preState, '0xca3149fa9e37db08d1cd49c9061db1002ef1cd58db2210f2115c8c989b2bdf45', - 'preState should be correct' + 'preState should be correct', ) const gethBlockBufferArray = decode(hexToBytes(gethWithdrawals8BlockRlp)) const withdrawals = (gethBlockBufferArray[3] as WithdrawalBytes[]).map((wa) => - Withdrawal.fromValuesArray(wa) + Withdrawal.fromValuesArray(wa), ) assert.equal(withdrawals[0].amount, BigInt(0), 'withdrawal 0 should have 0 amount') let block: Block @@ -157,7 +157,7 @@ describe('EIP4895 tests', () => { transactions: [], withdrawals: withdrawals.slice(0, 1), }, - { common: vm.common } + { common: vm.common }, ) postState = bytesToHex(await vm.stateManager.getStateRoot()) @@ -165,7 +165,7 @@ describe('EIP4895 tests', () => { assert.equal( postState, '0xca3149fa9e37db08d1cd49c9061db1002ef1cd58db2210f2115c8c989b2bdf45', - 'post state should not change' + 'post state should not change', ) // construct a block with all the withdrawals @@ -179,14 +179,14 @@ describe('EIP4895 tests', () => { transactions: [], withdrawals, }, - { common: vm.common } + { common: vm.common }, ) await runBlock(vm, { block, generate: true }) postState = bytesToHex(await vm.stateManager.getStateRoot()) assert.equal( postState, '0x23eadd91fca55c0e14034e4d63b2b3ed43f2e807b6bf4d276b784ac245e7fa3f', - 'post state should 
match' + 'post state should match', ) }) @@ -205,7 +205,7 @@ describe('EIP4895 tests', () => { assert.equal( bytesToHex(genesisBlock.header.stateRoot), '0xca3149fa9e37db08d1cd49c9061db1002ef1cd58db2210f2115c8c989b2bdf45', - 'correct state root should be generated' + 'correct state root should be generated', ) const vm = await VM.create({ common, blockchain }) await vm.stateManager.generateCanonicalGenesis(parseGethGenesisState(genesisJSON)) @@ -213,7 +213,7 @@ describe('EIP4895 tests', () => { const gethBlockBufferArray = decode(hexToBytes(gethWithdrawals8BlockRlp)) const withdrawals = (gethBlockBufferArray[3] as WithdrawalBytes[]).map((wa) => - Withdrawal.fromValuesArray(wa) + Withdrawal.fromValuesArray(wa), ) const td = await blockchain.getTotalDifficulty(genesisBlock.hash()) @@ -232,7 +232,7 @@ describe('EIP4895 tests', () => { assert.equal( bytesToHex(block.header.stateRoot), '0x23eadd91fca55c0e14034e4d63b2b3ed43f2e807b6bf4d276b784ac245e7fa3f', - 'correct state root should be generated' + 'correct state root should be generated', ) // block should successfully execute with VM.runBlock and have same outputs diff --git a/packages/vm/test/api/EIPs/eip-6110.spec.ts b/packages/vm/test/api/EIPs/eip-6110.spec.ts index 2dd631d6a9..d7d1425bd0 100644 --- a/packages/vm/test/api/EIPs/eip-6110.spec.ts +++ b/packages/vm/test/api/EIPs/eip-6110.spec.ts @@ -12,7 +12,7 @@ import type { DepositRequest } from '../../../../util/src/requests.js' import type { PrefixedHexString } from '@ethereumjs/util' const depositContractByteCode = hexToBytes( - '0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260
04018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610
a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b
5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b60408051600880825281830190925260609160208201818036833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a2646970667358221220dceca8706b29e917dacf25fceef95ac
ac8d90d765ac926663ce4096195952b6164736f6c634300060b0033' + '0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b01925090819
0849084905b838110156106ef5781810151838201526020016106d7565b50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b505160
4080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b5051925
0505090565b60408051600880825281830190925260609160208201818036833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a2646970667358221220dceca8706b29e917dacf25fceef95acac8d90d765ac926663ce4096195952b6164736f6c634300060b0033', ) const common = new Common({ chain: Chain.Mainnet, @@ -53,7 +53,7 @@ describe('EIP-6110 runBlock tests', () => { { transactions: [depositTx], }, - { common } + { common }, ) const res = await runBlock(vm, { block, generate: true, skipBlockValidation: true }) assert.equal(res.requests?.length, 1) diff --git a/packages/vm/test/api/EIPs/eip-6780-selfdestruct-same-tx.spec.ts b/packages/vm/test/api/EIPs/eip-6780-selfdestruct-same-tx.spec.ts index 9364c2653b..236c3cb238 100644 --- a/packages/vm/test/api/EIPs/eip-6780-selfdestruct-same-tx.spec.ts +++ b/packages/vm/test/api/EIPs/eip-6780-selfdestruct-same-tx.spec.ts @@ -63,7 +63,7 @@ describe('EIP 6780 tests', () => { (await vm.stateManager.getAccount(Address.fromString('0x' + '00'.repeat(19) + '01')))! .balance, BigInt(value), - 'balance sent to target' + 'balance sent to target', ) }) @@ -99,7 +99,7 @@ describe('EIP 6780 tests', () => { (await vm.stateManager.getAccount(Address.fromString('0x' + '00'.repeat(19) + '01')))! 
.balance, BigInt(value), - 'balance sent to target' + 'balance sent to target', ) }) }) diff --git a/packages/vm/test/api/EIPs/eip-6800-verkle.spec.ts b/packages/vm/test/api/EIPs/eip-6800-verkle.spec.ts index d767053055..142bfb2a3f 100644 --- a/packages/vm/test/api/EIPs/eip-6800-verkle.spec.ts +++ b/packages/vm/test/api/EIPs/eip-6800-verkle.spec.ts @@ -8,8 +8,7 @@ import { loadVerkleCrypto } from 'verkle-cryptography-wasm' import { describe, it } from 'vitest' import * as verkleBlockJSON from '../../../../statemanager/test/testdata/verkleKaustinen6Block72.json' -import { VM } from '../../../src' -import { runBlock } from '../../../src/index.js' +import { VM, runBlock } from '../../../src/index.js' import type { BlockData } from '@ethereumjs/block' import type { PrefixedHexString } from '@ethereumjs/util' @@ -20,18 +19,18 @@ const common = createCustomCommon(customChainParams, { eips: [2935, 4895, 6800], }) const decodedTxs = verkleBlockJSON.transactions.map((tx) => - createTxFromSerializedData(hexToBytes(tx as PrefixedHexString)) + createTxFromSerializedData(hexToBytes(tx as PrefixedHexString)), ) const parentStateRoot = hexToBytes( - '0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510' + '0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510', ) const block = createBlockFromBlockData( { ...verkleBlockJSON, transactions: decodedTxs } as BlockData, { common, - } + }, ) describe('EIP 6800 tests', () => { diff --git a/packages/vm/test/api/EIPs/eip-7002.spec.ts b/packages/vm/test/api/EIPs/eip-7002.spec.ts index db84982302..120a2ddea8 100644 --- a/packages/vm/test/api/EIPs/eip-7002.spec.ts +++ b/packages/vm/test/api/EIPs/eip-7002.spec.ts @@ -33,7 +33,7 @@ const deploymentTxData = { gasLimit: BigInt('0x3d090'), gasPrice: BigInt('0xe8d4a51000'), data: hexToBytes( - '0x61049d5f5561013280600f5f395ff33373fffffffffffffffffffffffffffffffffffffffe146090573615156028575f545f5260205ff35b366038141561012e5760115f54600182026001905f5b5f82111560595781019083028483029004916001019190603e565b90939004341061012e57600154600101600155600354806003026004013381556001015f3581556001016020359055600101600355005b6003546002548082038060101160a4575060105b5f5b81811460dd5780604c02838201600302600401805490600101805490600101549160601b83528260140152906034015260010160a6565b910180921460ed579060025560f8565b90505f6002555f6003555b5f548061049d141561010757505f5b60015460028282011161011c5750505f610122565b01600290035b5f555f600155604c025ff35b5f5ffd' + '0x61049d5f5561013280600f5f395ff33373fffffffffffffffffffffffffffffffffffffffe146090573615156028575f545f5260205ff35b366038141561012e5760115f54600182026001905f5b5f82111560595781019083028483029004916001019190603e565b90939004341061012e57600154600101600155600354806003026004013381556001015f3581556001016020359055600101600355005b6003546002548082038060101160a4575060105b5f5b81811460dd5780604c02838201600302600401805490600101805490600101549160601b83528260140152906034015260010160a6565b910180921460ed579060025560f8565b90505f6002555f6003555b5f548061049d141561010757505f5b60015460028282011161011c5750505f610122565b01600290035b5f555f600155604c025ff35b5f5ffd', ), v: BigInt('0x1b'), r: BigInt('0x539'), @@ -53,7 +53,7 @@ const amountBytes = setLengthLeft(bigIntToBytes(amount), 8) function generateTx(nonce: bigint) { const addressBytes = setLengthLeft( bigIntToBytes(common.param('withdrawalRequestPredeployAddress')), - 20 + 20, ) const withdrawalsAddress = Address.fromString(bytesToHex(addressBytes)) @@ -77,7 +77,7 @@ describe('EIP-7002 tests', () => { }, transactions: [deploymentTx], }, - { 
common } + { common }, ) await vm.stateManager.putAccount(sender, acc) await vm.stateManager.putAccount(addr, acc) @@ -103,7 +103,7 @@ describe('EIP-7002 tests', () => { }, transactions: [tx], }, - { common } + { common }, ) let generatedBlock: Block @@ -149,7 +149,7 @@ describe('EIP-7002 tests', () => { }, transactions: [tx2, tx3], }, - { common } + { common }, ) await runBlock(vm, { @@ -172,7 +172,7 @@ describe('EIP-7002 tests', () => { number: 1, }, }, - { common } + { common }, ) try { await runBlock(vm, { diff --git a/packages/vm/test/api/EIPs/eip-7685.spec.ts b/packages/vm/test/api/EIPs/eip-7685.spec.ts index c910d73f23..887a137f54 100644 --- a/packages/vm/test/api/EIPs/eip-7685.spec.ts +++ b/packages/vm/test/api/EIPs/eip-7685.spec.ts @@ -16,7 +16,7 @@ import { setupVM } from '../utils.js' import type { CLRequest, CLRequestType } from '@ethereumjs/util' const invalidRequestsRoot = hexToBytes( - '0xc98048d6605eb79ecc08d90b8817f44911ec474acd8d11688453d2c6ef743bc5' + '0xc98048d6605eb79ecc08d90b8817f44911ec474acd8d11688453d2c6ef743bc5', ) function getRandomDepositRequest(): CLRequest { const depositRequestData = { @@ -46,12 +46,12 @@ describe('EIP-7685 runBlock tests', () => { const emptyBlock = createBlockFromBlockData( { header: { requestsRoot: invalidRequestsRoot } }, - { common } + { common }, ) await expect(async () => runBlock(vm, { block: emptyBlock, - }) + }), ).rejects.toThrow('invalid requestsRoot') }) it('should not throw invalid requestsRoot error when valid requests are provided', async () => { @@ -63,7 +63,7 @@ describe('EIP-7685 runBlock tests', () => { requests: [request], header: { requestsRoot }, }, - { common } + { common }, ) await expect(async () => runBlock(vm, { block })).rejects.toThrow(/invalid requestsRoot/) }) @@ -75,7 +75,7 @@ describe('EIP-7685 runBlock tests', () => { requests: [request], header: { requestsRoot: invalidRequestsRoot }, }, - { common } + { common }, ) await expect(() => runBlock(vm, { block })).rejects.toThrow('invalid requestsRoot') }) @@ -90,7 +90,7 @@ describe('EIP 7685 buildBlock tests', () => { }) const genesisBlock = createBlockFromBlockData( { header: { gasLimit: 50000, baseFeePerGas: 100 } }, - { common } + { common }, ) const blockchain = await createBlockchain({ genesisBlock, common, validateConsensus: false }) const vm = await VM.create({ common, blockchain }) diff --git a/packages/vm/test/api/EIPs/eip-7702.spec.ts b/packages/vm/test/api/EIPs/eip-7702.spec.ts index 566dcdc599..bb94ba5226 100644 --- a/packages/vm/test/api/EIPs/eip-7702.spec.ts +++ b/packages/vm/test/api/EIPs/eip-7702.spec.ts @@ -62,7 +62,7 @@ async function runTest( authorizationListOpts: GetAuthListOpts[], expect: Uint8Array, vm?: VM, - skipEmptyCode?: boolean + skipEmptyCode?: boolean, ) { vm = vm ?? (await VM.create({ common })) const authList = authorizationListOpts.map((opt) => getAuthorizationListItem(opt)) @@ -74,7 +74,7 @@ async function runTest( to: defaultAuthAddr, value: BIGINT_1, }, - { common } + { common }, ).sign(defaultSenderPkey) const code1 = hexToBytes('0x600160015500') @@ -109,7 +109,7 @@ describe('EIP 7702: set code to EOA accounts', () => { address: code1Addr, }, ], - new Uint8Array([1]) + new Uint8Array([1]), ) // Try to set code to two different addresses @@ -123,7 +123,7 @@ describe('EIP 7702: set code to EOA accounts', () => { address: code2Addr, }, ], - new Uint8Array([1]) + new Uint8Array([1]), ) // Chain id check: is chain id 1 also valid? 
@@ -138,7 +138,7 @@ describe('EIP 7702: set code to EOA accounts', () => { address: code2Addr, }, ], - new Uint8Array([2]) + new Uint8Array([2]), ) // Check if chain id 2 is ignored @@ -152,7 +152,7 @@ describe('EIP 7702: set code to EOA accounts', () => { address: code2Addr, }, ], - new Uint8Array([2]) + new Uint8Array([2]), ) // Check if nonce is ignored in case the nonce is incorrect @@ -166,7 +166,7 @@ describe('EIP 7702: set code to EOA accounts', () => { address: code2Addr, }, ], - new Uint8Array([2]) + new Uint8Array([2]), ) }) @@ -181,7 +181,7 @@ describe('EIP 7702: set code to EOA accounts', () => { ], new Uint8Array(), vm, - true + true, ) }) @@ -201,7 +201,7 @@ describe('EIP 7702: set code to EOA accounts', () => { // 1x warm call: 100 // Total: 115 const checkAddressWarmCode = hexToBytes( - `0x5F5F5F5F5F73${defaultAuthAddr.toString().slice(2)}5AF1` + `0x5F5F5F5F5F73${defaultAuthAddr.toString().slice(2)}5AF1`, ) const checkAddressWarm = Address.fromString(`0x${'FA'.repeat(20)}`) @@ -215,7 +215,7 @@ describe('EIP 7702: set code to EOA accounts', () => { to: checkAddressWarm, value: BIGINT_1, }, - { common } + { common }, ).sign(defaultSenderPkey) const code1 = hexToBytes('0x') @@ -248,7 +248,7 @@ describe('EIP 7702: set code to EOA accounts', () => { // value: BIGINT_1 // Note, by enabling this line, the account will not get deleted // Therefore, this test will pass }, - { common } + { common }, ).sign(defaultSenderPkey) // Store value 1 in storage slot 1 diff --git a/packages/vm/test/api/bloom.spec.ts b/packages/vm/test/api/bloom.spec.ts index a83323843e..7c52154a50 100644 --- a/packages/vm/test/api/bloom.spec.ts +++ b/packages/vm/test/api/bloom.spec.ts @@ -2,7 +2,7 @@ import * as utils from '@ethereumjs/util' import { bytesToHex, hexToBytes, utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Bloom } from '../../src/bloom' +import { Bloom } from '../../src/bloom/index.js' const byteSize = 256 @@ -17,7 +17,7 @@ describe('bloom', () => { () => new Bloom(utils.zeros(byteSize / 2)), /bitvectors must be 2048 bits long/, undefined, - 'should fail for invalid length' + 'should fail for invalid length', ) }) @@ -69,7 +69,7 @@ describe('bloom', () => { bloom.add(hexToBytes('0x0000000000000000000000001dc4c1cefef38a777b15aa20260a54e584b16c48')) assert.equal( bytesToHex(bloom.bitvector), - '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000081100200000000000000000000000000000000000000000000000000000000008000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000002000000000000000004000000000000000000000' + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000081100200000000000000000000000000000000000000000000000000000000008000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000002000000000000000004000000000000000000000', ) }) }) diff --git a/packages/vm/test/api/buildBlock.spec.ts b/packages/vm/test/api/buildBlock.spec.ts index 52ca540938..c1519ba2de 100644 --- 
a/packages/vm/test/api/buildBlock.spec.ts +++ b/packages/vm/test/api/buildBlock.spec.ts @@ -42,7 +42,7 @@ describe('BlockBuilder', () => { // Set up tx const tx = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 1 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) await blockBuilder.addTransaction(tx) @@ -50,7 +50,7 @@ describe('BlockBuilder', () => { assert.equal( blockBuilder.transactionReceipts.length, 1, - 'should have the correct number of tx receipts' + 'should have the correct number of tx receipts', ) const result = await runBlock(vmCopy, { block }) assert.equal(result.gasUsed, block.header.gasUsed) @@ -73,7 +73,7 @@ describe('BlockBuilder', () => { } catch (error: any) { if ( (error.message as string).includes( - 'tx has a higher gas limit than the remaining gas in the block' + 'tx has a higher gas limit than the remaining gas in the block', ) ) { assert.ok(true, 'correct error thrown') @@ -84,7 +84,7 @@ describe('BlockBuilder', () => { assert.equal( blockBuilder.transactionReceipts.length, 0, - 'should have the correct number of tx receipts' + 'should have the correct number of tx receipts', ) }) @@ -112,7 +112,7 @@ describe('BlockBuilder', () => { // Set up tx const tx = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 1 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) await blockBuilder.addTransaction(tx) @@ -133,7 +133,7 @@ describe('BlockBuilder', () => { address: new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), privateKey: hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), publicKey: hexToBytes( - '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195' + '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195', ), } @@ -194,7 +194,7 @@ describe('BlockBuilder', () => { const cliqueSigner = signer.privateKey const genesisBlock = createBlockFromBlockData( { header: { gasLimit: 50000, extraData } }, - { common, cliqueSigner } + { common, cliqueSigner }, ) const blockchain = await createBlockchain({ genesisBlock, common }) const vm = await VM.create({ common, blockchain }) @@ -211,7 +211,7 @@ describe('BlockBuilder', () => { // Set up tx const tx = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 1 }, - { common, freeze: false } + { common, freeze: false }, ).sign(signer.privateKey) await blockBuilder.addTransaction(tx) @@ -222,7 +222,7 @@ describe('BlockBuilder', () => { assert.deepEqual( block.header.cliqueSigner(), signer.address, - 'should recover the correct signer address' + 'should recover the correct signer address', ) }) @@ -241,7 +241,7 @@ describe('BlockBuilder', () => { const tx = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 1 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) await blockBuilder.addTransaction(tx) @@ -252,7 +252,7 @@ describe('BlockBuilder', () => { assert.equal( blockBuilder.getStatus().status, 'reverted', - 'block should be in reverted status' + 'block should be in reverted status', ) } catch (error: any) { assert.fail('shoud not throw') @@ -262,7 +262,7 @@ describe('BlockBuilder', () => { const tx2 = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 1, nonce: 1 }, - { common, freeze: false } + { common, freeze: false 
}, ).sign(privateKey) await blockBuilder.addTransaction(tx2) @@ -273,7 +273,7 @@ describe('BlockBuilder', () => { assert.equal( blockBuilder.getStatus().status, 'reverted', - 'block should be in reverted status' + 'block should be in reverted status', ) } catch (error: any) { assert.fail('shoud not throw') @@ -306,7 +306,7 @@ describe('BlockBuilder', () => { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London, eips: [1559] }) const genesisBlock = createBlockFromBlockData( { header: { gasLimit: 50000, baseFeePerGas: 100 } }, - { common } + { common }, ) const blockchain = await createBlockchain({ genesisBlock, common, validateConsensus: false }) const vm = await VM.create({ common, blockchain }) @@ -324,12 +324,12 @@ describe('BlockBuilder', () => { // Set up underpriced txs to test error response const tx1 = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 1 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) const tx2 = create1559FeeMarketTx( { to: Address.zero(), value: 1000, gasLimit: 21000, maxFeePerGas: 10 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) for (const tx of [tx1, tx2]) { @@ -339,7 +339,7 @@ describe('BlockBuilder', () => { } catch (error: any) { assert.ok( (error.message as string).includes("is less than the block's baseFeePerGas"), - 'should fail with appropriate error' + 'should fail with appropriate error', ) } } @@ -347,12 +347,12 @@ describe('BlockBuilder', () => { // Set up correctly priced txs const tx3 = createLegacyTx( { to: Address.zero(), value: 1000, gasLimit: 21000, gasPrice: 101 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) const tx4 = create1559FeeMarketTx( { to: Address.zero(), value: 1000, gasLimit: 21000, maxFeePerGas: 101, nonce: 1 }, - { common, freeze: false } + { common, freeze: false }, ).sign(privateKey) for (const tx of [tx3, tx4]) { @@ -364,12 +364,12 @@ describe('BlockBuilder', () => { assert.equal( blockBuilder.transactionReceipts.length, 2, - 'should have the correct number of tx receipts' + 'should have the correct number of tx receipts', ) assert.ok( block.header.baseFeePerGas! 
=== genesisBlock.header.calcNextBaseFee(), - "baseFeePerGas should equal parentHeader's calcNextBaseFee" + "baseFeePerGas should equal parentHeader's calcNextBaseFee", ) const result = await runBlock(vmCopy, { block }) diff --git a/packages/vm/test/api/copy.spec.ts b/packages/vm/test/api/copy.spec.ts index 3250cda902..49f74969d5 100644 --- a/packages/vm/test/api/copy.spec.ts +++ b/packages/vm/test/api/copy.spec.ts @@ -1,7 +1,7 @@ import { Address, createAccount } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { setupVM } from './utils' +import { setupVM } from './utils.js' describe('VM Copy Test', () => { it('should pass copy of state manager', async () => { @@ -15,13 +15,13 @@ describe('VM Copy Test', () => { assert.ok( (await vm.stateManager.getAccount(address)) !== undefined, - 'account exists before copy' + 'account exists before copy', ) const vmCopy = await vm.shallowCopy() assert.isUndefined( await vmCopy.stateManager.getAccount(address), - 'non-committed checkpoints will not be copied' + 'non-committed checkpoints will not be copied', ) await vm.stateManager.checkpoint() @@ -31,7 +31,7 @@ describe('VM Copy Test', () => { assert.ok( (await vmCopy2.stateManager.getAccount(address)) !== undefined, - 'committed checkpoints will be copied' + 'committed checkpoints will be copied', ) }) }) diff --git a/packages/vm/test/api/customChain.spec.ts b/packages/vm/test/api/customChain.spec.ts index f70127f53e..be4387d97a 100644 --- a/packages/vm/test/api/customChain.spec.ts +++ b/packages/vm/test/api/customChain.spec.ts @@ -58,7 +58,7 @@ const block = createBlockFromBlockData( }, { common, - } + }, ) const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109') @@ -77,7 +77,7 @@ describe('VM initialized with custom state', () => { }, { common, - } + }, ).sign(privateKey) const result = await runTx(vm, { tx, @@ -95,7 +95,7 @@ describe('VM initialized with custom state', () => { common.setHardfork(Hardfork.London) const vm = await VM.create({ blockchain, common, genesisState }) const sigHash = new Interface(['function retrieve()']).getSighash( - 'retrieve' + 'retrieve', ) as PrefixedHexString const callResult = await vm.evm.runCall({ diff --git a/packages/vm/test/api/events.spec.ts b/packages/vm/test/api/events.spec.ts index 580f3c97c0..9feecc0b46 100644 --- a/packages/vm/test/api/events.spec.ts +++ b/packages/vm/test/api/events.spec.ts @@ -174,7 +174,7 @@ describe('VM events', () => { assert.equal( bytesToHex(emitted.code), - '0x7f410000000000000000000000000000000000000000000000000000000000000060005260016000f3' + '0x7f410000000000000000000000000000000000000000000000000000000000000060005260016000f3', ) }) }) diff --git a/packages/vm/test/api/index.spec.ts b/packages/vm/test/api/index.spec.ts index d4b9d7e6a5..074141908f 100644 --- a/packages/vm/test/api/index.spec.ts +++ b/packages/vm/test/api/index.spec.ts @@ -4,14 +4,14 @@ import { Account, Address, KECCAK256_RLP, hexToBytes } from '@ethereumjs/util' import * as util from 'util' // eslint-disable-line @typescript-eslint/no-unused-vars import { assert, describe, it } from 'vitest' -import { VM } from '../../src/vm' +import { VM } from '../../src/vm.js' import * as testnet from './testdata/testnet.json' import * as testnet2 from './testdata/testnet2.json' import * as testnetMerge from './testdata/testnetMerge.json' -import { setupVM } from './utils' +import { setupVM } from './utils.js' -import type { VMOpts } from '../../src' +import type { VMOpts } from '../../src/index.js' 
import type { ChainConfig } from '@ethereumjs/common' import type { DefaultStateManager } from '@ethereumjs/statemanager' @@ -37,7 +37,7 @@ describe('VM -> basic instantiation / boolean switches', () => { assert.deepEqual( (vm.stateManager as DefaultStateManager)['_trie'].root(), KECCAK256_RLP, - 'it has default trie' + 'it has default trie', ) assert.equal(vm.common.hardfork(), Hardfork.Shanghai, 'it has correct default HF') }) @@ -47,7 +47,7 @@ describe('VM -> basic instantiation / boolean switches', () => { assert.notDeepEqual( (vm.stateManager as DefaultStateManager)['_trie'].root(), KECCAK256_RLP, - 'it has different root' + 'it has different root', ) }) }) @@ -73,7 +73,7 @@ describe('VM -> Default EVM / Custom EVM Opts', () => { const copiedVM = await vm.shallowCopy() assert.isTrue( (copiedVM.evm as EVM).allowUnlimitedContractSize, - 'allowUnlimitedContractSize=true (for shallowCopied VM)' + 'allowUnlimitedContractSize=true (for shallowCopied VM)', ) }) @@ -86,7 +86,7 @@ describe('VM -> Default EVM / Custom EVM Opts', () => { assert.equal( (copiedVM.evm as EVM).common.hardfork(), 'byzantium', - 'use modfied HF from VM common (for shallowCopied VM)' + 'use modfied HF from VM common (for shallowCopied VM)', ) }) @@ -99,7 +99,7 @@ describe('VM -> Default EVM / Custom EVM Opts', () => { assert.equal( (copiedVM.evm as EVM).common.hardfork(), 'byzantium', - 'use modfied HF from evmOpts (for shallowCopied VM)' + 'use modfied HF from evmOpts (for shallowCopied VM)', ) }) }) @@ -218,7 +218,7 @@ describe('VM -> setHardfork, state (deprecated), blockchain', () => { assert.deepEqual( (vm.stateManager as DefaultStateManager)['_trie'].root(), KECCAK256_RLP, - 'it has default trie' + 'it has default trie', ) }) @@ -245,12 +245,12 @@ describe('VM -> setHardfork, state (deprecated), blockchain', () => { assert.deepEqual( (vmCopy as any)._setHardfork, true, - 'copy() correctly passes setHardfork option' + 'copy() correctly passes setHardfork option', ) assert.deepEqual( (vm as any)._setHardfork, (vmCopy as any)._setHardfork, - 'setHardfork options match' + 'setHardfork options match', ) // @@ -263,12 +263,12 @@ describe('VM -> setHardfork, state (deprecated), blockchain', () => { assert.deepEqual( (vmCopy as any)._setHardfork, BigInt(5001), - 'copy() correctly passes setHardfork option' + 'copy() correctly passes setHardfork option', ) assert.deepEqual( (vm as any)._setHardfork, (vmCopy as any)._setHardfork, - 'setHardfork options match' + 'setHardfork options match', ) }) describe('Ensure that precompile activation creates non-empty accounts', () => { diff --git a/packages/vm/test/api/istanbul/eip-1108.spec.ts b/packages/vm/test/api/istanbul/eip-1108.spec.ts index ce7a429b1d..8facba9487 100644 --- a/packages/vm/test/api/istanbul/eip-1108.spec.ts +++ b/packages/vm/test/api/istanbul/eip-1108.spec.ts @@ -45,7 +45,7 @@ describe('Istanbul: EIP-1108 tests', () => { const result = await ECPAIRING({ data: hexToBytes( - 
'0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa' + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa', ), gasLimit: BigInt(0xffffff), common, @@ -55,7 +55,7 @@ describe('Istanbul: EIP-1108 tests', () => { assert.deepEqual( result.executionGasUsed, BigInt(113000), - 'should use petersburg gas costs (k ^= 2 pairings)' + 'should use petersburg gas costs (k ^= 2 pairings)', ) }) }) diff --git a/packages/vm/test/api/istanbul/eip-2200.spec.ts b/packages/vm/test/api/istanbul/eip-2200.spec.ts index 66a12856c2..dd3b2948c5 100644 --- a/packages/vm/test/api/istanbul/eip-2200.spec.ts +++ b/packages/vm/test/api/istanbul/eip-2200.spec.ts @@ -59,7 +59,7 @@ describe('Istanbul: EIP-2200', () => { await vm.stateManager.putContractStorage( addr, key, - hexToBytes(`0x${testCase.original.toString(16)}`) + hexToBytes(`0x${testCase.original.toString(16)}`), ) } diff --git a/packages/vm/test/api/level.ts b/packages/vm/test/api/level.ts index 88e77b7671..39ba1e1a96 100644 --- a/packages/vm/test/api/level.ts +++ b/packages/vm/test/api/level.ts @@ -18,7 +18,7 @@ export class LevelDB implements DB { * @param leveldb - An abstract-leveldown compliant store */ constructor( - leveldb?: AbstractLevel + leveldb?: AbstractLevel, ) { this._leveldb = leveldb ?? 
new MemoryLevel(ENCODING_OPTS) } diff --git a/packages/vm/test/api/runBlock.spec.ts b/packages/vm/test/api/runBlock.spec.ts index 56d7d43f36..0d554e61f5 100644 --- a/packages/vm/test/api/runBlock.spec.ts +++ b/packages/vm/test/api/runBlock.spec.ts @@ -33,8 +33,8 @@ import { keccak256 } from 'ethereum-cryptography/keccak' import { assert, describe, it } from 'vitest' import { runBlock } from '../../src/index.js' -import { VM } from '../../src/vm' -import { getDAOCommon, setupPreConditions } from '../util' +import { VM } from '../../src/vm.js' +import { getDAOCommon, setupPreConditions } from '../util.js' import * as testData from './testdata/blockchain.json' import * as testnet from './testdata/testnet.json' @@ -67,7 +67,7 @@ describe('runBlock() -> successful API parameter usage', async () => { assert.deepEqual( (vm.stateManager as DefaultStateManager)['_trie'].root(), genesis.header.stateRoot, - 'genesis state root should match calculated state root' + 'genesis state root should match calculated state root', ) const res = await runBlock(vm, { @@ -80,7 +80,7 @@ describe('runBlock() -> successful API parameter usage', async () => { assert.equal( res.results[0].totalGasSpent.toString(16), '5208', - 'actual gas used should equal blockHeader gasUsed' + 'actual gas used should equal blockHeader gasUsed', ) } @@ -120,13 +120,13 @@ describe('runBlock() -> successful API parameter usage', async () => { }) const uncleReward = (await vm.stateManager.getAccount( - Address.fromString('0xb94f5374fce5ed0000000097c15331677e6ebf0b') + Address.fromString('0xb94f5374fce5ed0000000097c15331677e6ebf0b'), ))!.balance.toString(16) assert.equal( `0x${uncleReward}`, testData.postState['0xb94f5374fce5ed0000000097c15331677e6ebf0b'].balance, - 'calculated balance should equal postState balance' + 'calculated balance should equal postState balance', ) } @@ -170,7 +170,7 @@ describe('runBlock() -> successful API parameter usage', async () => { }) const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) function getBlock(common: Common): Block { @@ -185,11 +185,11 @@ describe('runBlock() -> successful API parameter usage', async () => { data: '0x600154', // PUSH 01 SLOAD gasLimit: BigInt(100000), }, - { common } + { common }, ).sign(privateKey), ], }, - { common } + { common }, ) } @@ -209,12 +209,12 @@ describe('runBlock() -> successful API parameter usage', async () => { assert.equal( txResultChainstart.results[0].totalGasSpent, BigInt(21000) + BigInt(68) * BigInt(3) + BigInt(3) + BigInt(50), - 'tx charged right gas on chainstart hard fork' + 'tx charged right gas on chainstart hard fork', ) assert.equal( txResultMuirGlacier.results[0].totalGasSpent, BigInt(21000) + BigInt(32000) + BigInt(16) * BigInt(3) + BigInt(3) + BigInt(800), - 'tx charged right gas on muir glacier hard fork' + 'tx charged right gas on muir glacier hard fork', ) }) }) @@ -258,7 +258,7 @@ describe('runBlock() -> API parameter usage/data errors', async () => { .catch((e) => { assert.ok( e.message.includes('not found in DB'), - 'block failed validation due to no parent header' + 'block failed validation due to no parent header', ) }) }) @@ -274,7 +274,7 @@ describe('runBlock() -> API parameter usage/data errors', async () => { assert.equal( err.message, 'cannot validate header: blockchain has no `validateHeader` method', - 'should error' + 'should error', ) } }) @@ -291,7 +291,7 @@ describe('runBlock() -> API parameter usage/data 
errors', async () => { const opts = { common: block.common } block.transactions[0] = new LegacyTransaction( { nonce, gasPrice, gasLimit, to, value, data, v, r, s }, - opts + opts, ) await runBlock(vm, { block, skipBlockValidation: true }) @@ -317,14 +317,14 @@ describe('runBlock() -> runtime behavior', async () => { const fundBalance1 = BigInt('0x1111') const accountFunded1 = createAccountWithDefaults(BigInt(0), fundBalance1) const DAOFundedContractAddress1 = new Address( - hexToBytes('0xd4fe7bc31cedb7bfb8a345f31e668033056b2728') + hexToBytes('0xd4fe7bc31cedb7bfb8a345f31e668033056b2728'), ) await vm.stateManager.putAccount(DAOFundedContractAddress1, accountFunded1) const fundBalance2 = BigInt('0x2222') const accountFunded2 = createAccountWithDefaults(BigInt(0), fundBalance2) const DAOFundedContractAddress2 = new Address( - hexToBytes('0xb3fb0e5aba0e20e5c49d252dfd30e102b171a425') + hexToBytes('0xb3fb0e5aba0e20e5c49d252dfd30e102b171a425'), ) await vm.stateManager.putAccount(DAOFundedContractAddress2, accountFunded2) @@ -361,7 +361,7 @@ describe('runBlock() -> runtime behavior', async () => { address: new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), privateKey: hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), publicKey: hexToBytes( - '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195' + '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195', ), } @@ -369,7 +369,7 @@ describe('runBlock() -> runtime behavior', async () => { address: new Address(hexToBytes('0x6f62d8382bf2587361db73ceca28be91b2acb6df')), privateKey: hexToBytes('0x2a6e9ad5a6a8e4f17149b8bc7128bf090566a11dbd63c30e5a0ee9f161309cd6'), publicKey: hexToBytes( - '0xca0a55f6e81cb897aee6a1c390aa83435c41048faa0564b226cfc9f3df48b73e846377fb0fd606df073addc7bd851f22547afbbdd5c3b028c91399df802083a2' + '0xca0a55f6e81cb897aee6a1c390aa83435c41048faa0564b226cfc9f3df48b73e846377fb0fd606df073addc7bd851f22547afbbdd5c3b028c91399df802083a2', ), } @@ -377,13 +377,13 @@ describe('runBlock() -> runtime behavior', async () => { await vm.stateManager.putAccount(otherUser.address, new Account(BigInt(0), BigInt(42000))) const tx = createLegacyTx( { to: Address.zero(), gasLimit: 21000, gasPrice: 1 }, - { common } + { common }, ).sign(otherUser.privateKey) // create block with the signer and txs const block = createBlockFromBlockData( { header: { extraData: new Uint8Array(97) }, transactions: [tx, tx] }, - { common, cliqueSigner: signer.privateKey } + { common, cliqueSigner: signer.privateKey }, ) await runBlock(vm, { block, skipNonce: true, skipBlockValidation: true, generate: true }) @@ -391,14 +391,14 @@ describe('runBlock() -> runtime behavior', async () => { assert.equal( account!.balance, BigInt(42000), - 'beneficiary balance should equal the cost of the txs' + 'beneficiary balance should equal the cost of the txs', ) }) }) async function runBlockAndGetAfterBlockEvent( vm: VM, - runBlockOpts: RunBlockOpts + runBlockOpts: RunBlockOpts, ): Promise { let results: AfterBlockEvent function handler(event: AfterBlockEvent) { @@ -464,14 +464,14 @@ describe('runBlock() -> API return values', () => { assert.equal( (res.receipts[0] as PostByzantiumTxReceipt).status, 1, - 'should return correct post-Byzantium receipt format' + 'should return correct post-Byzantium receipt format', ) res = await runWithHf('spuriousDragon') assert.deepEqual( (res.receipts[0] 
as PreByzantiumTxReceipt).stateRoot, hexToBytes('0x4477e2cfaf9fd2eed4f74426798b55d140f6a9612da33413c4745f57d7a97fcc'), - 'should return correct pre-Byzantium receipt format' + 'should return correct pre-Byzantium receipt format', ) }) }) @@ -504,7 +504,7 @@ describe('runBlock() -> tx types', async () => { res.receipts .map((r) => r.cumulativeBlockGasUsed) .reduce((prevValue: bigint, currValue: bigint) => prevValue + currValue, BigInt(0)), - "gas used should equal transaction's total gasUsed" + "gas used should equal transaction's total gasUsed", ) } @@ -533,7 +533,7 @@ describe('runBlock() -> tx types', async () => { const tx = create2930AccessListTx( { gasLimit: 53000, value: 1, v: 1, r: 1, s: 1 }, - { common, freeze: false } + { common, freeze: false }, ) tx.getSenderAddress = () => { @@ -552,7 +552,7 @@ describe('runBlock() -> tx types', async () => { const tx = create1559FeeMarketTx( { maxFeePerGas: 10, maxPriorityFeePerGas: 4, gasLimit: 100000, value: 6 }, - { common, freeze: false } + { common, freeze: false }, ) tx.getSenderAddress = () => { @@ -645,7 +645,7 @@ describe('runBlock() -> tx types', async () => { to: defaultAuthAddr, value: BIGINT_1, }, - { common } + { common }, ).sign(defaultSenderPkey) const tx2 = create7702EOACodeTx( { @@ -657,13 +657,13 @@ describe('runBlock() -> tx types', async () => { value: BIGINT_1, nonce: 1, }, - { common } + { common }, ).sign(defaultSenderPkey) const block = createBlockFromBlockData( { transactions: [tx1, tx2], }, - { common, setHardfork: false, skipConsensusFormatValidation: true } + { common, setHardfork: false, skipConsensusFormatValidation: true }, ) await runBlock(vm, { block, skipBlockValidation: true, generate: true }) diff --git a/packages/vm/test/api/runTx.spec.ts b/packages/vm/test/api/runTx.spec.ts index dd716efc46..f3b6920f11 100644 --- a/packages/vm/test/api/runTx.spec.ts +++ b/packages/vm/test/api/runTx.spec.ts @@ -60,7 +60,7 @@ describe('runTx() -> successful API parameter usage', async () => { // Setup block with correct extraData for POA block = createBlockFromBlockData( { header: { extraData: new Uint8Array(97) } }, - { common: vm.common } + { common: vm.common }, ) } @@ -117,7 +117,7 @@ describe('runTx() -> successful API parameter usage', async () => { assert.equal( (e as Error).message.includes('block has a different hardfork than the vm'), true, - 'block has a different hardfork than the vm' + 'block has a different hardfork than the vm', ) assert.ok(true, 'vm/tx mismatched hardfork correctly failed') } @@ -131,7 +131,7 @@ describe('runTx() -> successful API parameter usage', async () => { assert.equal( (e as Error).message.includes('block has a different hardfork than the vm'), true, - 'block has a different hardfork than the vm' + 'block has a different hardfork than the vm', ) assert.ok(true, 'vm/tx mismatched hardfork correctly failed') } @@ -177,7 +177,7 @@ describe('runTx() -> successful API parameter usage', async () => { assert.equal( res.receipt.cumulativeBlockGasUsed, blockGasUsed + res.totalGasSpent, - 'receipt.gasUsed should equal block gas used + tx gas used' + 'receipt.gasUsed should equal block gas used + tx gas used', ) }) @@ -194,7 +194,7 @@ describe('runTx() -> successful API parameter usage', async () => { const res = await runTx(vm, { tx }) assert.isTrue( res.totalGasSpent > BigInt(0), - `mainnet (PoW), istanbul HF, default SM - should run without errors (${TRANSACTION_TYPES[0].name})` + `mainnet (PoW), istanbul HF, default SM - should run without errors (${TRANSACTION_TYPES[0].name})`, ) }) @@ 
-203,7 +203,7 @@ describe('runTx() -> successful API parameter usage', async () => { const vm = await VM.create({ common }) const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) const address = Address.fromPrivateKey(privateKey) const initialBalance = BigInt(10) ** BigInt(18) @@ -211,7 +211,7 @@ describe('runTx() -> successful API parameter usage', async () => { const account = await vm.stateManager.getAccount(address) await vm.stateManager.putAccount( address, - createAccount({ ...account, balance: initialBalance }) + createAccount({ ...account, balance: initialBalance }), ) const transferCost = 21000 @@ -225,7 +225,7 @@ describe('runTx() -> successful API parameter usage', async () => { maxPriorityFeePerGas: 50, maxFeePerGas: 50, } as TypedTxData, - { common } + { common }, ) const tx = unsignedTx.sign(privateKey) @@ -238,7 +238,7 @@ describe('runTx() -> successful API parameter usage', async () => { baseFeePerGas: 7, }, }, - { common } + { common }, ) const result = await runTx(vm, { @@ -264,13 +264,13 @@ describe('runTx() -> successful API parameter usage', async () => { assert.equal( coinbaseAccount!.balance, expectedCoinbaseBalance, - `should use custom block (${txType.name})` + `should use custom block (${txType.name})`, ) assert.equal( result.execResult.exceptionError, undefined, - `should run ${txType.name} without errors` + `should run ${txType.name} without errors`, ) } }) @@ -284,7 +284,7 @@ describe('runTx() -> API parameter usage/data errors', () => { const tx = getTransaction( new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }), 1, - true + true, ) const caller = tx.getSenderAddress() @@ -298,7 +298,7 @@ describe('runTx() -> API parameter usage/data errors', () => { } catch (e: any) { assert.ok( e.message.includes('(EIP-2718) not activated'), - `should fail for ${TRANSACTION_TYPES[1].name}` + `should fail for ${TRANSACTION_TYPES[1].name}`, ) } }) @@ -315,7 +315,7 @@ describe('runTx() -> API parameter usage/data errors', () => { const res = await runTx(vm, { tx, reportAccessList: true }) assert.isTrue( res.totalGasSpent > BigInt(0), - `mainnet (PoW), istanbul HF, default SM - should run without errors (${TRANSACTION_TYPES[0].name})` + `mainnet (PoW), istanbul HF, default SM - should run without errors (${TRANSACTION_TYPES[0].name})`, ) assert.deepEqual(res.accessList, []) }) @@ -349,7 +349,7 @@ describe('runTx() -> API parameter usage/data errors', () => { assert.ok( e.message.includes('not signed') === true || e.message.includes('Invalid Signature') === true, - `should fail for ${txType.name}` + `should fail for ${txType.name}`, ) } } @@ -364,7 +364,7 @@ describe('runTx() -> API parameter usage/data errors', () => { } catch (e: any) { assert.ok( e.message.toLowerCase().includes('enough funds'), - `should fail for ${txType.name}` + `should fail for ${txType.name}`, ) } } @@ -378,7 +378,7 @@ describe('runTx() -> API parameter usage/data errors', () => { const maxCost: bigint = tx.gasLimit * tx.maxFeePerGas await vm.stateManager.putAccount( address, - createAccountWithDefaults(BigInt(0), maxCost - BigInt(1)) + createAccountWithDefaults(BigInt(0), maxCost - BigInt(1)), ) try { await runTx(vm, { tx }) @@ -386,7 +386,7 @@ describe('runTx() -> API parameter usage/data errors', () => { } catch (e: any) { assert.ok( e.message.toLowerCase().includes('max cost'), - `should fail if max cost exceeds balance` + `should fail if max cost exceeds balance`, ) } 
// set sufficient balance @@ -445,7 +445,7 @@ describe('runTx() -> API parameter usage/data errors', () => { } catch (e: any) { assert.ok( e.message.includes("is less than the block's baseFeePerGas"), - 'should fail with appropriate error' + 'should fail with appropriate error', ) } } @@ -458,7 +458,7 @@ describe('runTx() -> runtime behavior', () => { const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }) const vm = await VM.create({ common }) const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) /* Code which is deployed here: PUSH1 01 @@ -472,7 +472,7 @@ describe('runTx() -> runtime behavior', () => { await vm.stateManager.putContractStorage( address, hexToBytes(`0x${'00'.repeat(32)}`), - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) const txParams: any = { nonce: '0x00', @@ -494,7 +494,7 @@ describe('runTx() -> runtime behavior', () => { assert.equal( (vm.stateManager).originalStorageCache.map.size, 0, - `should clear storage cache after every ${txType.name}` + `should clear storage cache after every ${txType.name}`, ) } }) @@ -518,12 +518,12 @@ describe('runTx() -> runtime errors', () => { assert.equal( res.execResult!.exceptionError!.error, 'value overflow', - `result should have 'value overflow' error set (${txType.name})` + `result should have 'value overflow' error set (${txType.name})`, ) assert.equal( (vm.stateManager)._checkpointCount, 0, - `checkpoint count should be 0 (${txType.name})` + `checkpoint count should be 0 (${txType.name})`, ) } }) @@ -546,12 +546,12 @@ describe('runTx() -> runtime errors', () => { assert.equal( res.execResult!.exceptionError!.error, 'value overflow', - `result should have 'value overflow' error set (${txType.name})` + `result should have 'value overflow' error set (${txType.name})`, ) assert.equal( (vm.stateManager)._checkpointCount, 0, - `checkpoint count should be 0 (${txType.name})` + `checkpoint count should be 0 (${txType.name})`, ) } }) @@ -572,17 +572,17 @@ describe('runTx() -> API return values', () => { assert.equal( res.execResult.executionGasUsed, BigInt(0), - `execution result -> gasUsed -> 0 (${txType.name})` + `execution result -> gasUsed -> 0 (${txType.name})`, ) assert.equal( res.execResult.exceptionError, undefined, - `execution result -> exception error -> undefined (${txType.name})` + `execution result -> exception error -> undefined (${txType.name})`, ) assert.deepEqual( res.execResult.returnValue, Uint8Array.from([]), - `execution result -> return value -> empty Uint8Array (${txType.name})` + `execution result -> return value -> empty Uint8Array (${txType.name})`, ) assert.equal(res.gasRefund, BigInt(0), `gasRefund -> 0 (${txType.name})`) } @@ -602,7 +602,7 @@ describe('runTx() -> API return values', () => { assert.equal( res.totalGasSpent, tx.getIntrinsicGas(), - `runTx result -> gasUsed -> tx.getIntrinsicGas() (${txType.name})` + `runTx result -> gasUsed -> tx.getIntrinsicGas() (${txType.name})`, ) if (tx instanceof FeeMarketEIP1559Transaction) { const baseFee = BigInt(7) @@ -614,35 +614,35 @@ describe('runTx() -> API return values', () => { assert.equal( res.amountSpent, res.totalGasSpent * gasPrice, - `runTx result -> amountSpent -> gasUsed * gasPrice (${txType.name})` + `runTx result -> amountSpent -> gasUsed * gasPrice (${txType.name})`, ) } else { assert.equal( res.amountSpent, res.totalGasSpent * (tx).gasPrice, - `runTx result -> amountSpent -> 
gasUsed * gasPrice (${txType.name})` + `runTx result -> amountSpent -> gasUsed * gasPrice (${txType.name})`, ) } assert.deepEqual( res.bloom.bitvector, hexToBytes(`0x${'00'.repeat(256)}`), - `runTx result -> bloom.bitvector -> should be empty (${txType.name})` + `runTx result -> bloom.bitvector -> should be empty (${txType.name})`, ) assert.equal( res.receipt.cumulativeBlockGasUsed, res.totalGasSpent, - `runTx result -> receipt.gasUsed -> result.gasUsed (${txType.name})` + `runTx result -> receipt.gasUsed -> result.gasUsed (${txType.name})`, ) assert.deepEqual( res.receipt.bitvector, res.bloom.bitvector, - `runTx result -> receipt.bitvector -> result.bloom.bitvector (${txType.name})` + `runTx result -> receipt.bitvector -> result.bloom.bitvector (${txType.name})`, ) assert.deepEqual( res.receipt.logs, [], - `runTx result -> receipt.logs -> empty array (${txType.name})` + `runTx result -> receipt.logs -> empty array (${txType.name})`, ) } }) @@ -726,7 +726,7 @@ describe('runTx() -> consensus bugs', () => { assert.equal( result.totalGasSpent, BigInt(66382), - 'should use the right amount of gas and not consume all' + 'should use the right amount of gas and not consume all', ) }) }) @@ -751,7 +751,7 @@ describe('runTx() -> RunTxOptions', () => { } catch (err: any) { assert.ok( err.message.includes('value field cannot be negative'), - 'throws on negative call value' + 'throws on negative call value', ) } } @@ -781,7 +781,7 @@ it('runTx() -> skipBalance behavior', async () => { assert.equal( afterTxBalance, balance !== undefined ? balance - 1n : BigInt(0), - `sender balance should be >= 0 after transaction with skipBalance` + `sender balance should be >= 0 after transaction with skipBalance`, ) assert.equal(res.execResult.exceptionError, undefined, 'no exceptionError with skipBalance') } @@ -847,7 +847,7 @@ it('Validate CALL does not charge new account gas when calling CALLER and caller assert.equal( (await runTx(vm, { tx, skipHardForkValidation: true })).totalGasSpent, BigInt(27818), - 'did not charge callNewAccount' + 'did not charge callNewAccount', ) }) @@ -878,7 +878,7 @@ it('Validate SELFDESTRUCT does not charge new account gas when calling CALLER an assert.equal( (await runTx(vm, { tx, skipHardForkValidation: true })).totalGasSpent, BigInt(13001), - 'did not charge callNewAccount' + 'did not charge callNewAccount', ) }) @@ -909,13 +909,13 @@ describe('EIP 4844 transaction tests', () => { { common, skipConsensusFormatValidation: true, - } + }, ), }, { common, skipConsensusFormatValidation: true, - } + }, ) } const blockchain = await createBlockchain({ @@ -937,10 +937,10 @@ describe('EIP 4844 transaction tests', () => { { common, skipConsensusFormatValidation: true, - } + }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const res = await runTx(vm, { tx, block, skipBalance: true }) assert.ok(res.execResult.exceptionError === undefined, 'simple blob tx run succeeds') diff --git a/packages/vm/test/api/state/accountExists.spec.ts b/packages/vm/test/api/state/accountExists.spec.ts index 8e3da9c557..71ddc7c518 100644 --- a/packages/vm/test/api/state/accountExists.spec.ts +++ b/packages/vm/test/api/state/accountExists.spec.ts @@ -30,14 +30,14 @@ describe('correctly apply new account gas fee on pre-Spurious Dragon hardforks', await vm.stateManager.putContractStorage( contractAddress, hexToBytes('0xd08f588b94e47566eea77acec87441cecca23f61aea9ed8eb086c062d3837605'), - 
hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001') + hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001'), ) // setup the call arguments const runCallArgs = { caller, // call address gasLimit: BigInt(174146 - 22872), // tx gas limit minus the tx fee (21000) and data fee (1872) to represent correct gas costs data: hexToBytes( - '0xa9059cbb000000000000000000000000f48a1bdc65d9ccb4b569ffd4bffff415b90783d60000000000000000000000000000000000000000000000000000000000000001' + '0xa9059cbb000000000000000000000000f48a1bdc65d9ccb4b569ffd4bffff415b90783d60000000000000000000000000000000000000000000000000000000000000001', ), to: contractAddress, // call to the contract address value: BigInt(0), @@ -47,7 +47,7 @@ describe('correctly apply new account gas fee on pre-Spurious Dragon hardforks', assert.equal( result.execResult.executionGasUsed, BigInt(53552), - 'vm correctly applies new account gas price' + 'vm correctly applies new account gas price', ) }) }) @@ -73,20 +73,20 @@ describe('do not apply new account gas fee for empty account in DB on pre-Spurio const emptyAccount = (await vm.stateManager.getAccount(emptyAddress)) as Account await (vm.stateManager as DefaultStateManager)['_trie'].put( toBytes(emptyAddress), - emptyAccount.serialize() + emptyAccount.serialize(), ) await vm.stateManager.putContractCode(contractAddress, hexToBytes(code)) // setup the contract code await vm.stateManager.putContractStorage( contractAddress, hexToBytes('0xd08f588b94e47566eea77acec87441cecca23f61aea9ed8eb086c062d3837605'), - hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001') + hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001'), ) // setup the call arguments const runCallArgs = { caller, // call address gasLimit: BigInt(174146 - 22872), // tx gas limit minus the tx fee (21000) and data fee (1872) to represent correct gas costs data: hexToBytes( - '0xa9059cbb000000000000000000000000f48a1bdc65d9ccb4b569ffd4bffff415b90783d60000000000000000000000000000000000000000000000000000000000000001' + '0xa9059cbb000000000000000000000000f48a1bdc65d9ccb4b569ffd4bffff415b90783d60000000000000000000000000000000000000000000000000000000000000001', ), to: contractAddress, // call to the contract address value: BigInt(0), @@ -96,7 +96,7 @@ describe('do not apply new account gas fee for empty account in DB on pre-Spurio assert.equal( result.execResult.executionGasUsed, BigInt(28552), - 'new account price not applied as empty account exists' + 'new account price not applied as empty account exists', ) }) }) diff --git a/packages/vm/test/api/tester/tester.config.spec.ts b/packages/vm/test/api/tester/tester.config.spec.ts index fe478b233e..1513b3c7c5 100644 --- a/packages/vm/test/api/tester/tester.config.spec.ts +++ b/packages/vm/test/api/tester/tester.config.spec.ts @@ -1,7 +1,7 @@ import { Hardfork } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' -import { getCommon } from '../../tester/config' +import { getCommon } from '../../tester/config.js' describe('bloom', () => { it('should initialize common with the right hardfork', () => { diff --git a/packages/vm/test/api/utils.ts b/packages/vm/test/api/utils.ts index 3e88786b3c..ebcea00bb4 100644 --- a/packages/vm/test/api/utils.ts +++ b/packages/vm/test/api/utils.ts @@ -9,11 +9,11 @@ import { } from '@ethereumjs/util' import { MemoryLevel } from 'memory-level' -import { VM } from '../../src/vm' +import { VM } from '../../src/vm.js' -import { LevelDB } from 
'./level' +import { LevelDB } from './level.js' -import type { VMOpts } from '../../src/types' +import type { VMOpts } from '../../src/types.js' import type { Block } from '@ethereumjs/block' import type { Common } from '@ethereumjs/common' import type { Address } from '@ethereumjs/util' @@ -53,7 +53,7 @@ export function getTransaction( sign = false, value = '0x00', createContract = false, - nonce = 0 + nonce = 0, ) { let to: string | undefined = '0x0000000000000000000000000000000000000000' let data = '0x7f7465737432000000000000000000000000000000000000000000000000000000600057' @@ -105,11 +105,11 @@ export function getTransaction( txParams['kzgProofs'] = txParams['blobs'].map((blob: Uint8Array, ctx: number) => common.customCrypto!.kzg!.computeBlobKzgProof( blob, - txParams['kzgCommitments'][ctx] as Uint8Array - ) + txParams['kzgCommitments'][ctx] as Uint8Array, + ), ) txParams['blobVersionedHashes'] = txParams['kzgCommitments'].map((commitment: Uint8Array) => - computeVersionedHash(commitment, 0x1) + computeVersionedHash(commitment, 0x1), ) } @@ -117,7 +117,7 @@ export function getTransaction( if (sign) { const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) return tx.sign(privateKey) } diff --git a/packages/vm/test/tester/config.ts b/packages/vm/test/tester/config.ts index 3aa1f00701..42eff75264 100644 --- a/packages/vm/test/tester/config.ts +++ b/packages/vm/test/tester/config.ts @@ -224,7 +224,7 @@ function setupCommonWithNetworks(network: string, ttd?: number, timestamp?: numb // normal hard fork, return the common with this hard fork // find the right upper/lowercased version const hfName = normalHardforks.reduce((previousValue, currentValue) => - currentValue.toLowerCase() === networkLowercase ? currentValue : previousValue + currentValue.toLowerCase() === networkLowercase ? 
currentValue : previousValue, ) const mainnetCommon = new Common({ chain: Chain.Mainnet, hardfork: hfName }) const hardforks = mainnetCommon.hardforks() @@ -270,7 +270,7 @@ function setupCommonWithNetworks(network: string, ttd?: number, timestamp?: numb hardforks: testHardforks, defaultHardfork: hfName, }, - { eips: [3607], customCrypto: { kzg } } + { eips: [3607], customCrypto: { kzg } }, ) // Activate EIPs const eips = network.match(/(?<=\+)(.\d+)/g) @@ -358,7 +358,7 @@ export function getCommon(network: string, kzg?: Kzg): Common { hardfork: transitionForks.startFork, eips: [3607], customCrypto: { kzg }, - } + }, ) return common } @@ -422,7 +422,7 @@ const expectedTestsFull: { */ export function getExpectedTests( fork: string, - name: 'BlockchainTests' | 'GeneralStateTests' + name: 'BlockchainTests' | 'GeneralStateTests', ): number | undefined { if (expectedTestsFull[name] === undefined) { return diff --git a/packages/vm/test/tester/index.ts b/packages/vm/test/tester/index.ts index 060f37f3e6..864c22eaac 100755 --- a/packages/vm/test/tester/index.ts +++ b/packages/vm/test/tester/index.ts @@ -15,10 +15,10 @@ import { getRequiredForkConfigAlias, getSkipTests, getTestDirs, -} from './config' -import { runBlockchainTest } from './runners/BlockchainTestsRunner' -import { runStateTest } from './runners/GeneralStateTestsRunner' -import { getTestFromSource, getTestsFromArgs } from './testLoader' +} from './config.js' +import { runBlockchainTest } from './runners/BlockchainTestsRunner.js' +import { runStateTest } from './runners/GeneralStateTestsRunner.js' +import { getTestFromSource, getTestsFromArgs } from './testLoader.js' import type { Common } from '@ethereumjs/common' import type { EVMBLSInterface } from '@ethereumjs/evm/dist/cjs/types' @@ -169,8 +169,8 @@ async function runTests() { argv['verify-test-amount-alltests'] > 0 ? getExpectedTests(FORK_CONFIG_VM, name) : argv['expected-test-amount'] !== undefined && argv['expected-test-amount'] > 0 - ? argv['expected-test-amount'] - : undefined + ? argv['expected-test-amount'] + : undefined /** * Initialization output to console @@ -186,7 +186,7 @@ async function runTests() { .filter(([_k, v]) => typeof v === 'string' || (Array.isArray(v) && v.length !== 0)) .map(([k, v]) => ({ [k]: Array.isArray(v) && v.length > 0 ? 
v.length : v, - })) + })), ) } const formattedGetterArgs = formatArgs(testGetterArgs) @@ -268,7 +268,7 @@ async function runTests() { await runner(runnerArgs, test, t) } }, - testGetterArgs + testGetterArgs, ) .then(() => { resolve() diff --git a/packages/vm/test/tester/runners/BlockchainTestsRunner.ts b/packages/vm/test/tester/runners/BlockchainTestsRunner.ts index 10af87c5cb..15d2a4c1b0 100644 --- a/packages/vm/test/tester/runners/BlockchainTestsRunner.ts +++ b/packages/vm/test/tester/runners/BlockchainTestsRunner.ts @@ -110,7 +110,7 @@ export async function runBlockchainTest(options: any, testData: any, t: tape.Tes t.deepEquals( await vm.stateManager.getStateRoot(), genesisBlock.header.stateRoot, - 'correct pre stateRoot' + 'correct pre stateRoot', ) async function handleError(error: string | undefined, expectException: string | boolean) { @@ -245,7 +245,7 @@ export async function runBlockchainTest(options: any, testData: any, t: tape.Tes t.equal( bytesToHex((blockchain as any)._headHeaderHash), '0x' + testData.lastblockhash, - 'correct last header block' + 'correct last header block', ) const end = Date.now() diff --git a/packages/vm/test/tester/runners/GeneralStateTestsRunner.ts b/packages/vm/test/tester/runners/GeneralStateTestsRunner.ts index 94f70a2007..1c592cd73f 100644 --- a/packages/vm/test/tester/runners/GeneralStateTestsRunner.ts +++ b/packages/vm/test/tester/runners/GeneralStateTestsRunner.ts @@ -15,7 +15,7 @@ function parseTestCases( testData: any, data: string | undefined, gasLimit: string | undefined, - value: string | undefined + value: string | undefined, ) { let testCases = [] @@ -183,7 +183,7 @@ export async function runStateTest(options: any, testData: any, t: tape.Test) { testData, options.data, options.gasLimit, - options.value + options.value, ) if (testCases.length === 0) { t.comment(`No ${options.forkConfigTestSuite} post state defined, skip test`) diff --git a/packages/vm/test/tester/testLoader.ts b/packages/vm/test/tester/testLoader.ts index 35d96be47f..a7ad7fb22d 100644 --- a/packages/vm/test/tester/testLoader.ts +++ b/packages/vm/test/tester/testLoader.ts @@ -2,7 +2,7 @@ import * as fs from 'fs' import * as dir from 'node-dir' import * as path from 'path' -import { DEFAULT_TESTS_PATH } from './config' +import { DEFAULT_TESTS_PATH } from './config.js' const falsePredicate = () => false @@ -20,7 +20,7 @@ export async function getTests( fileFilter: RegExp | string[] = /.json$/, skipPredicate: (...args: any[]) => boolean = falsePredicate, directory: string, - excludeDir: RegExp | string[] = [] + excludeDir: RegExp | string[] = [], ): Promise { const options = { match: fileFilter, @@ -38,7 +38,7 @@ export async function getTests( err: Error | undefined, content: string | Uint8Array, fileName: string, - next: Function + next: Function, ) => { if (err) { reject(err) diff --git a/packages/vm/test/util.ts b/packages/vm/test/util.ts index 626019ac52..96ed73a1ba 100644 --- a/packages/vm/test/util.ts +++ b/packages/vm/test/util.ts @@ -125,7 +125,7 @@ export function format(a: any, toZero: boolean = false, isHex: boolean = false): */ export function makeTx( txData: any, - opts?: TxOptions + opts?: TxOptions, ): | EOACodeEIP7702Transaction | BlobEIP4844Transaction @@ -215,7 +215,7 @@ export function verifyAccountPostConditions( address: string, account: Account, acctData: any, - t: tape.Test + t: tape.Test, ) { return new Promise((resolve) => { t.comment('Account: ' + address) @@ -223,12 +223,12 @@ export function verifyAccountPostConditions( t.comment( `Expected balance 
of ${bytesToBigInt(format(acctData.balance, true))}, but got ${ account.balance - }` + }`, ) } if (!equalsBytes(format(account.nonce, true), format(acctData.nonce, true))) { t.comment( - `Expected nonce of ${bytesToBigInt(format(acctData.nonce, true))}, but got ${account.nonce}` + `Expected nonce of ${bytesToBigInt(format(acctData.nonce, true))}, but got ${account.nonce}`, ) } @@ -258,7 +258,7 @@ export function verifyAccountPostConditions( t.comment( `Expected storage key ${bytesToHex(data.key)} at address ${address} to have value ${ hashedStorage[key] ?? '0x' - }, but got ${val}}` + }, but got ${val}}`, ) } delete hashedStorage[key] @@ -332,7 +332,7 @@ export function makeBlockHeader(data: any, opts?: BlockOptions) { gasUsed: parentGasUsed, baseFeePerGas: parentBaseFee, }, - { common: opts.common } + { common: opts.common }, ) headerData['baseFeePerGas'] = parentBlockHeader.calcNextBaseFee() } @@ -429,7 +429,7 @@ export function getDAOCommon(activationBlock: number) { { baseChain: 'mainnet', hardfork: Hardfork.Dao, - } + }, ) return DAOCommon } diff --git a/packages/vm/tsconfig.lint.json b/packages/vm/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/vm/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/wallet/.eslintrc.cjs b/packages/wallet/.eslintrc.cjs index 015c06f88c..38a6d05f92 100644 --- a/packages/wallet/.eslintrc.cjs +++ b/packages/wallet/.eslintrc.cjs @@ -1,14 +1,15 @@ module.exports = { extends: '../../config/eslint.cjs', parserOptions: { - project: ['./tsconfig.json'], + project: ['./tsconfig.lint.json'], }, overrides: [ { - files: ['test/index.spec.ts'], + files: ['test/index.spec.ts', "examples/**/*"], rules: { 'github/array-foreach': 'warn', 'no-prototype-builtins': 'warn', + 'no-console': 'off', }, }, ], diff --git a/packages/wallet/examples/hdKey.js b/packages/wallet/examples/hdKey.cjs similarity index 100% rename from packages/wallet/examples/hdKey.js rename to packages/wallet/examples/hdKey.cjs diff --git a/packages/wallet/examples/hdKey.ts b/packages/wallet/examples/hdKey.ts index a4d99922ec..f33ae1dd20 100644 --- a/packages/wallet/examples/hdKey.ts +++ b/packages/wallet/examples/hdKey.ts @@ -1,6 +1,6 @@ import { hdkey } from '@ethereumjs/wallet' const wallet = hdkey.EthereumHDKey.fromMnemonic( - 'clown galaxy face oxygen birth round modify fame correct stumble kind excess' + 'clown galaxy face oxygen birth round modify fame correct stumble kind excess', ) console.log(wallet.getWallet().getAddressString()) // Should print an Ethereum address diff --git a/packages/wallet/examples/thirdparty.js b/packages/wallet/examples/thirdparty.cjs similarity index 100% rename from packages/wallet/examples/thirdparty.js rename to packages/wallet/examples/thirdparty.cjs diff --git a/packages/wallet/examples/wallet.js b/packages/wallet/examples/wallet.cjs similarity index 100% rename from packages/wallet/examples/wallet.js rename to packages/wallet/examples/wallet.cjs diff --git a/packages/wallet/src/thirdparty.ts b/packages/wallet/src/thirdparty.ts index a47dfbb738..756e3aae15 100644 --- a/packages/wallet/src/thirdparty.ts +++ b/packages/wallet/src/thirdparty.ts @@ -117,7 +117,7 @@ export interface EtherWalletOptions { */ export async function fromEtherWallet( input: string | EtherWalletOptions, - password: string + password: string, ): Promise { const json: EtherWalletOptions = typeof input === 'object' ? 
input : JSON.parse(input) diff --git a/packages/wallet/src/wallet.ts b/packages/wallet/src/wallet.ts index c778a48a2d..b49cb56c98 100644 --- a/packages/wallet/src/wallet.ts +++ b/packages/wallet/src/wallet.ts @@ -99,7 +99,7 @@ function validateBytes(paramName: string, bytes: Uint8Array, length?: number) { typeof length === 'number' ? `${length * 2}` : 'empty or a non-zero even number of' const howManyBytes = typeof length === 'number' ? ` (${length} bytes)` : '' throw new Error( - `Invalid ${paramName}, must be a string (${howManyHex} hex characters) or Uint8Array${howManyBytes}` + `Invalid ${paramName}, must be a string (${howManyHex} hex characters) or Uint8Array${howManyBytes}`, ) } if (typeof length === 'number' && bytes.length !== length) { @@ -265,7 +265,7 @@ interface EthSaleKeystore { export class Wallet { constructor( private readonly privateKey?: Uint8Array | undefined, - private publicKey: Uint8Array | undefined = undefined + private publicKey: Uint8Array | undefined = undefined, ) { if (privateKey && publicKey) { throw new Error('Cannot supply both a private and a public key to the constructor') @@ -393,7 +393,7 @@ export class Wallet { ciphertext, keccak256(derivedKey.subarray(0, 16)).subarray(0, 16), unprefixedHexToBytes(json.Crypto.IV), - 'aes-128-cbc' + 'aes-128-cbc', ) return new Wallet(seed) } @@ -407,7 +407,7 @@ export class Wallet { public static async fromV3( input: string | V3Keystore, password: string, - nonStrict = false + nonStrict = false, ): Promise { const json: V3Keystore = typeof input === 'object' ? input : JSON.parse(nonStrict ? input.toLowerCase() : input) @@ -433,7 +433,7 @@ export class Wallet { unprefixedHexToBytes(kdfparams.salt), kdfparams.c, kdfparams.dklen, - 'sha256' + 'sha256', ) } else { throw new Error('Unsupported key derivation scheme') @@ -449,7 +449,7 @@ export class Wallet { ciphertext, derivedKey.subarray(0, 16), unprefixedHexToBytes(json.crypto.cipherparams.iv), - json.crypto.cipher + json.crypto.cipher, ) return new Wallet(seed) } @@ -464,7 +464,7 @@ export class Wallet { */ public static async fromEthSale( input: string | EthSaleKeystore, - password: string + password: string, ): Promise { const json: EthSaleKeystore = typeof input === 'object' ? 
input : JSON.parse(input)
@@ -482,7 +482,7 @@ export class Wallet {
       derivedKey,
       encseed.subarray(0, 16),
       'aes-128-cbc',
-      true
+      true,
     )
 
     const wallet = new Wallet(keccak256(seed))
@@ -587,7 +587,7 @@ export class Wallet {
           kdfParams.salt,
           kdfParams.c,
           kdfParams.dklen,
-          'sha256'
+          'sha256',
         )
         break
       case KDFFunctions.Scrypt:
@@ -604,7 +604,7 @@ export class Wallet {
       derivedKey.subarray(0, 16),
       v3Params.iv,
       v3Params.cipher,
-      false
+      false,
     )
 
     const mac = keccak256(concatBytes(derivedKey.subarray(16, 32), ciphertext))
diff --git a/packages/wallet/test/hdkey.spec.ts b/packages/wallet/test/hdkey.spec.ts
index 5b8a99c40f..7b70f6aa25 100644
--- a/packages/wallet/test/hdkey.spec.ts
+++ b/packages/wallet/test/hdkey.spec.ts
@@ -5,7 +5,7 @@ import { EthereumHDKey } from '../src/hdkey.js'
 
 // from BIP39 mnemonic: awake book subject inch gentle blur grant damage process float month clown
 const fixtureseed = hexToBytes(
-  '0x747f302d9c916698912d5f70be53a6cf53bc495803a5523d3a7c3afa2afba94ec3803f838b3e1929ab5481f9da35441372283690fdcf27372c38f40ba134fe03'
+  '0x747f302d9c916698912d5f70be53a6cf53bc495803a5523d3a7c3afa2afba94ec3803f838b3e1929ab5481f9da35441372283690fdcf27372c38f40ba134fe03',
 )
 const fixturehd = EthereumHDKey.fromMasterSeed(fixtureseed)
 const fixtureMnemonic = 'awake book subject inch gentle blur grant damage process float month clown'
@@ -30,44 +30,44 @@ describe('HD Key tests', () => {
   it('.privateExtendedKey()', () => {
     assert.deepEqual(
       fixturehd.privateExtendedKey(),
-      'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY'
+      'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY',
     )
   }, 30000)
 
   it('.publicExtendedKey()', () => {
     assert.deepEqual(
       fixturehd.publicExtendedKey(),
-      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ'
+      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ',
     )
   }, 30000)
 
   it('.fromExtendedKey()', () => {
     const onlyPublicExtendedKey = EthereumHDKey.fromExtendedKey(
-      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ'
+      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ',
     )
     assert.deepEqual(
       onlyPublicExtendedKey.publicExtendedKey(),
-      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ'
+      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ',
     )
     assert.throws(
       function () {
         onlyPublicExtendedKey.privateExtendedKey()
       },
       /^No private key$/,
-      'throws when trying to access private extended key with no private key provided'
+      'throws when trying to access private extended key with no private key provided',
     )
     const fullExtendedKey = EthereumHDKey.fromExtendedKey(
-      'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY'
+      'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY',
     )
     assert.deepEqual(
       fullExtendedKey.publicExtendedKey(),
       'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ',
-      'successfully generated key from extended private key'
+      'successfully generated key from extended private key',
     )
     assert.deepEqual(
       fullExtendedKey.privateExtendedKey(),
       'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY',
-      'successfully generated key from extended private key'
+      'successfully generated key from extended private key',
     )
   }, 30000)
 
@@ -75,7 +75,7 @@ describe('HD Key tests', () => {
     const hdnode = fixturehd.deriveChild(1)
     assert.deepEqual(
       hdnode.privateExtendedKey(),
-      'xprv9vYSvrg3eR5FaKbQE4Ao2vHdyvfFL27aWMyH6X818mKWMsqqQZAN6HmRqYDGDPLArzaqbLExRsxFwtx2B2X2QKkC9uoKsiBNi22tLPKZHNS'
+      'xprv9vYSvrg3eR5FaKbQE4Ao2vHdyvfFL27aWMyH6X818mKWMsqqQZAN6HmRqYDGDPLArzaqbLExRsxFwtx2B2X2QKkC9uoKsiBNi22tLPKZHNS',
     )
   }, 30000)
 
@@ -84,27 +84,27 @@ describe('HD Key tests', () => {
     assert.deepEqual(
       hdnode1.privateExtendedKey(),
       'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY',
-      'should work with m'
+      'should work with m',
     )
     const hdnode2 = fixturehd.derivePath("m/44'/0'/0/1")
     assert.deepEqual(
       hdnode2.privateExtendedKey(),
       'xprvA1ErCzsuXhpB8iDTsbmgpkA2P8ggu97hMZbAXTZCdGYeaUrDhyR8fEw47BNEgLExsWCVzFYuGyeDZJLiFJ9kwBzGojQ6NB718tjVJrVBSrG',
-      "should work with m/44'/0'/0/1"
+      "should work with m/44'/0'/0/1",
     )
   }, 30000)
 
   it('.getWallet()', () => {
     assert.deepEqual(
       fixturehd.getWallet().getPrivateKeyString(),
-      '0x26cc9417b89cd77c4acdbe2e3cd286070a015d8e380f9cd1244ae103b7d89d81'
+      '0x26cc9417b89cd77c4acdbe2e3cd286070a015d8e380f9cd1244ae103b7d89d81',
     )
     assert.deepEqual(
       fixturehd.getWallet().getPublicKeyString(),
-      '0x0639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973defa5cb69df462bcc6d73c31e1c663c225650e80ef14a507b203f2a12aea55bc1'
+      '0x0639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973defa5cb69df462bcc6d73c31e1c663c225650e80ef14a507b203f2a12aea55bc1',
     )
     const hdnode = EthereumHDKey.fromExtendedKey(
-      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ'
+      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ',
     )
     assert.throws(function () {
       hdnode.getWallet().getPrivateKeyString()
@@ -112,7 +112,7 @@ describe('HD Key tests', () => {
     assert.deepEqual(
       hdnode.getWallet().getPublicKeyString(),
       '0x0639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973defa5cb69df462bcc6d73c31e1c663c225650e80ef14a507b203f2a12aea55bc1',
-      'should work with public nodes'
+      'should work with public nodes',
     )
   }, 30000)
 })
diff --git a/packages/wallet/test/index.spec.ts b/packages/wallet/test/index.spec.ts
index 96bb85a648..49c386e32c 100644
--- a/packages/wallet/test/index.spec.ts
+++ b/packages/wallet/test/index.spec.ts
@@ -49,7 +49,7 @@ describe('Wallet tests', () => {
   it('.getAddress()', () => {
     assert.deepEqual(
       bytesToUnprefixedHex(fixtureWallet.getAddress()),
-      'b14ab53e38da1c172f877dbc6d65e4a1b0474c3c'
+      'b14ab53e38da1c172f877dbc6d65e4a1b0474c3c',
     )
   }, 30000)
 
@@ -60,7 +60,7 @@ describe('Wallet tests', () => {
   it('.getChecksumAddressString()', () => {
     assert.deepEqual(
       fixtureWallet.getChecksumAddressString(),
-      '0xB14Ab53E38DA1C172f877DBC6d65e4a1B0474C3c'
+      '0xB14Ab53E38DA1C172f877DBC6d65e4a1B0474C3c',
     )
   }, 30000)
 
@@ -71,7 +71,7 @@ describe('Wallet tests', () => {
     assert.deepEqual(
       fixtureWallet.verifyPublicKey(new Uint8Array(64)),
       false,
-      'should return false if publicKey, privateKey pair is invalid'
+      'should return false if publicKey, privateKey pair is invalid',
     )
   }, 30000)
 
@@ -80,32 +80,34 @@ describe('Wallet tests', () => {
     assert.deepEqual(
       bytesToUnprefixedHex(Wallet.fromPublicKey(pubKey).getPublicKey()),
       fixturePublicKey,
-      '.fromPublicKey() should work'
+      '.fromPublicKey() should work',
     )
 
     assert.throws(
       function () {
         Wallet.fromPublicKey(
-          unprefixedHexToBytes('030639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973d')
+          unprefixedHexToBytes(
+            '030639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973d',
+          ),
         )
       },
       'Invalid public key',
-      '.fromPublicKey() should not accept compressed keys in strict mode'
+      '.fromPublicKey() should not accept compressed keys in strict mode',
     )
 
     const tmp = unprefixedHexToBytes(
-      '030639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973d'
+      '030639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973d',
     )
     assert.deepEqual(
       bytesToUnprefixedHex(Wallet.fromPublicKey(tmp, true).getPublicKey()),
       '0639797f6cc72aea0f3d309730844a9e67d9f1866e55845c5f7e0ab48402973defa5cb69df462bcc6d73c31e1c663c225650e80ef14a507b203f2a12aea55bc1',
-      '.fromPublicKey() should accept compressed keys in non-strict mode'
+      '.fromPublicKey() should accept compressed keys in non-strict mode',
     )
     assert.deepEqual(
       bytesToUnprefixedHex(Wallet.fromPublicKey(pubKey).getAddress()),
       'b14ab53e38da1c172f877dbc6d65e4a1b0474c3c',
-      '.getAddress() should work'
+      '.getAddress() should work',
     )
 
     assert.throws(
@@ -113,7 +115,7 @@ describe('Wallet tests', () => {
         Wallet.fromPublicKey(pubKey).getPrivateKey()
       },
       'This is a public key only wallet',
-      '.getPrivateKey() should fail'
+      '.getPrivateKey() should fail',
     )
   }, 30000)
 
@@ -123,7 +125,7 @@ describe('Wallet tests', () => {
     } catch (err: any) {
       assert.ok(
         err.message.includes('This is a public key only wallet'),
-        'fails to generate V3 when no private key present'
+        'fails to generate V3 when no private key present',
       )
     }
   }, 30000)
 
@@ -133,7 +135,7 @@ describe('Wallet tests', () => {
       'xprv9s21ZrQH143K4KqQx9Zrf1eN8EaPQVFxM2Ast8mdHn7GKiDWzNEyNdduJhWXToy8MpkGcKjxeFWd8oBSvsz4PCYamxR7TX49pSpp3bmHVAY'
     assert.deepEqual(
       Wallet.fromExtendedPrivateKey(xprv).getAddressString(),
-      '0xb800bf5435f67c7ee7d83c3a863269969a57c57c'
+      '0xb800bf5435f67c7ee7d83c3a863269969a57c57c',
     )
   }, 30000)
 
@@ -142,7 +144,7 @@ describe('Wallet tests', () => {
      'xpub661MyMwAqRbcGout4B6s29b6gGQsowyoiF6UgXBEr7eFCWYfXuZDvRxP9zEh1Kwq3TLqDQMbkbaRpSnoC28oWvjLeshoQz1StZ9YHM1EpcJ'
     assert.deepEqual(
       Wallet.fromExtendedPublicKey(xpub).getAddressString(),
-      '0xb800bf5435f67c7ee7d83c3a863269969a57c57c'
+      '0xb800bf5435f67c7ee7d83c3a863269969a57c57c',
     )
   }, 30000)
 
@@ -155,7 +157,7 @@ describe('Wallet tests', () => {
     assert.equal(
       BigInt('0x' + addr) <= max,
       true,
-      'should generate an account compatible with ICAP Direct'
+      'should generate an account compatible with ICAP Direct',
     )
   }, 30000)
 
@@ -166,7 +168,7 @@ describe('Wallet tests', () => {
     assert.deepEqual(
       wallet.getAddress()[1] >>> 4,
       0,
-      'should generate an account with 000 prefix (object)'
+      'should generate an account with 000 prefix (object)',
     )
 
     wallet = Wallet.generateVanityAddress('^000')
@@ -175,14 +177,14 @@ describe('Wallet tests', () => {
     assert.deepEqual(
       wallet.getAddress()[1] >>> 4,
       0,
-      'should generate an account with 000 prefix (string)'
+      'should generate an account with 000 prefix (string)',
     )
   }, 30000)
 
   it('.getV3Filename()', () => {
     assert.deepEqual(
       fixtureWallet.getV3Filename(1457917509265),
-      'UTC--2016-03-14T01-05-09.265Z--b14ab53e38da1c172f877dbc6d65e4a1b0474c3c'
+      'UTC--2016-03-14T01-05-09.265Z--b14ab53e38da1c172f877dbc6d65e4a1b0474c3c',
     )
   }, 30000)
 
@@ -218,7 +220,7 @@ describe('Wallet tests', () => {
           acc.add(key)
         })
         return acc
-      }, new Set())
+      }, new Set()),
     )
     const radix = objs.length
     const numPermus = radix ** keys.length
@@ -369,8 +371,8 @@ describe('Wallet tests', () => {
     } catch (err: any) {
       assert.ok(
         err.message.includes(
-          'Invalid salt, must be a string (empty or a non-zero even number of hex characters) or Uint8Array'
-        )
+          'Invalid salt, must be a string (empty or a non-zero even number of hex characters) or Uint8Array',
+        ),
       )
     }
   }, 30000)
 
@@ -383,7 +385,7 @@ describe('Wallet tests', () => {
     assert.equal(salt, w.crypto.kdfparams.salt)
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w, pw)).getPrivateKeyString(),
     )
 
     salt = '0x'
@@ -392,7 +394,7 @@ describe('Wallet tests', () => {
     assert.equal('', w.crypto.kdfparams.salt)
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w, pw)).getPrivateKeyString(),
     )
 
     salt = unprefixedHexToBytes('')
@@ -401,7 +403,7 @@ describe('Wallet tests', () => {
     assert.equal('', w.crypto.kdfparams.salt)
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w, pw)).getPrivateKeyString(),
     )
 
     salt = ''
@@ -424,18 +426,18 @@ describe('Wallet tests', () => {
         salt: '0x' + salt,
         iv: '0x' + iv,
         uuid: '0x' + uuid,
-      }
+      },
     )
 
     assert.deepEqual(salt, JSON.parse(wStr).crypto.kdfparams.salt)
     assert.deepEqual(JSON.parse(wStr), JSON.parse(wEthersStr.toLowerCase()))
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(JSON.parse(wStr), pw)).getPrivateKeyString()
+      (await Wallet.fromV3(JSON.parse(wStr), pw)).getPrivateKeyString(),
     )
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await ethersWallet.fromEncryptedJson(wEthersStr, pw)).privateKey
+      (await ethersWallet.fromEncryptedJson(wEthersStr, pw)).privateKey,
     )
 
     salt = '0x'
@@ -458,18 +460,18 @@ describe('Wallet tests', () => {
         salt,
         iv,
         uuid,
-      }
+      },
     )
 
     assert.equal('', JSON.parse(wStr).crypto.kdfparams.salt)
     assert.deepEqual(JSON.parse(wStr), JSON.parse(wEthersStr.toLowerCase()))
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(JSON.parse(wStr), pw)).getPrivateKeyString()
+      (await Wallet.fromV3(JSON.parse(wStr), pw)).getPrivateKeyString(),
     )
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await ethersWallet.fromEncryptedJson(wEthersStr, pw)).privateKey
+      (await ethersWallet.fromEncryptedJson(wEthersStr, pw)).privateKey,
     )
 
     salt = unprefixedHexToBytes('')
@@ -490,18 +492,18 @@ describe('Wallet tests', () => {
         salt,
         iv,
         uuid,
-      }
+      },
     )
 
     assert.equal('', JSON.parse(wStr).crypto.kdfparams.salt)
     assert.deepEqual(JSON.parse(wStr), JSON.parse(wEthersStr.toLowerCase()))
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(JSON.parse(wStr), pw)).getPrivateKeyString()
+      (await Wallet.fromV3(JSON.parse(wStr), pw)).getPrivateKeyString(),
     )
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await ethersWallet.fromEncryptedJson(wEthersStr, pw)).privateKey
+      (await ethersWallet.fromEncryptedJson(wEthersStr, pw)).privateKey,
     )
   }, 120000)
 
@@ -563,8 +565,8 @@ describe('Wallet tests', () => {
     } catch (err: any) {
       assert.ok(
         err.message.includes(
-          'Invalid iv, must be a string (32 hex characters) or Uint8Array (16 bytes)'
-        )
+          'Invalid iv, must be a string (32 hex characters) or Uint8Array (16 bytes)',
+        ),
       )
     }
   }, 30000)
 
@@ -625,8 +627,8 @@ describe('Wallet tests', () => {
     } catch (err: any) {
       assert.ok(
         err.message.includes(
-          'Invalid uuid, must be a string (32 hex characters) or Uint8Array (16 bytes)'
-        )
+          'Invalid uuid, must be a string (32 hex characters) or Uint8Array (16 bytes)',
+        ),
       )
     }
   }, 30000)
 
@@ -654,11 +656,11 @@ describe('Wallet tests', () => {
     assert.equal(w.id, w2.id)
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w, pw)).getPrivateKeyString(),
     )
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w2, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w2, pw)).getPrivateKeyString(),
     )
 
     w = await fixtureWallet.toV3(pw, {
@@ -679,11 +681,11 @@ describe('Wallet tests', () => {
     assert.equal(w.id, w2.id)
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w, pw)).getPrivateKeyString(),
     )
     assert.equal(
       fixtureWallet.getPrivateKeyString(),
-      (await Wallet.fromV3(w2, pw)).getPrivateKeyString()
+      (await Wallet.fromV3(w2, pw)).getPrivateKeyString(),
     )
   }, 60000)
 
@@ -703,11 +705,11 @@ describe('Wallet tests', () => {
     assert.deepEqual(wallet.getAddressString(), '0x008aeeda4d805471df9b2a5b0f38a0c3bcba786b')
     assert.deepEqual(
       wallet.getAddressString(),
-      (await ethersWallet.fromEncryptedJson(wEthersCompat, pw)).address.toLowerCase()
+      (await ethersWallet.fromEncryptedJson(wEthersCompat, pw)).address.toLowerCase(),
     )
     assert.deepEqual(
       walletRandom.getAddressString(),
-      (await ethersWallet.fromEncryptedJson(wRandom, pw)).address.toLowerCase()
+      (await ethersWallet.fromEncryptedJson(wRandom, pw)).address.toLowerCase(),
     )
   }, 30000)
 
@@ -722,11 +724,11 @@ describe('Wallet tests', () => {
     assert.deepEqual(wallet.getAddressString(), '0x2f91eb73a6cd5620d7abb50889f24eea7a6a4feb')
     assert.deepEqual(
       wallet.getAddressString(),
-      (await ethersWallet.fromEncryptedJson(sample, pw)).address.toLowerCase()
+      (await ethersWallet.fromEncryptedJson(sample, pw)).address.toLowerCase(),
     )
     assert.deepEqual(
       walletRandom.getAddressString(),
-      (await ethersWallet.fromEncryptedJson(sampleRandom, pw)).address.toLowerCase()
+      (await ethersWallet.fromEncryptedJson(sampleRandom, pw)).address.toLowerCase(),
     )
   })
   it.skip("should work with 'unencrypted' wallets", async () => {
@@ -830,7 +832,7 @@ describe('Wallet tests', () => {
     assert.equal(
       wallet.getAddressString(),
       '0x182b6ca390224c455f11b6337d74119305014ed4',
-      'should work with seed text'
+      'should work with seed text',
     )
   }, 30000)
 
@@ -845,7 +847,7 @@ describe('Wallet tests', () => {
         new Wallet(fixturePrivateKeyBuffer, fixturePublicKeyBuffer)
       },
       'Cannot supply both a private and a public key to the constructor',
-      'should fail when both priv and pub key provided'
+      'should fail when both priv and pub key provided',
     )
   }, 30000)
 })
diff --git a/packages/wallet/tsconfig.lint.json b/packages/wallet/tsconfig.lint.json
new file mode 100644
index 0000000000..3698f4f0be
--- /dev/null
+++ b/packages/wallet/tsconfig.lint.json
@@ -0,0 +1,3 @@
+{
+  "extends": "../../config/tsconfig.lint.json"
+}