diff --git a/.github/workflows/cspell.yml b/.github/workflows/cspell.yml new file mode 100644 index 0000000000..9fb463b5f5 --- /dev/null +++ b/.github/workflows/cspell.yml @@ -0,0 +1,25 @@ +name: Spell check (cspell) +on: + push: + branches: [master, develop] + tags: ['*'] + pull_request: + types: [opened, reopened, synchronize] + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + run-spellcheck-cspell: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: 18 + cache: 'npm' + + - run: npm run cspell diff --git a/.gitignore b/.gitignore index 4616e4fc08..f6a429995e 100644 --- a/.gitignore +++ b/.gitignore @@ -115,3 +115,6 @@ stats.html ## Vitest __snapshots__ + +# CSpell +.cspellcache \ No newline at end of file diff --git a/config/cspell-md.json b/config/cspell-md.json new file mode 100644 index 0000000000..a00979bf8d --- /dev/null +++ b/config/cspell-md.json @@ -0,0 +1,318 @@ +{ + "language": "en-US", + "ignoreRegExpList": ["/0x[0-9A-Fa-f]+/"], + "words": [ + "Dencun", + "Hardfork", + "acolytec", + "hardfork", + "Holesky", + "MCOPY", + "SELFDESTRUCT", + "BLOBBASEFEE", + "keccak", + "Verkle", + "devnet", + "devp2p", + "hardforks", + "renamings", + "chainsafe", + "secp256k1", + "eips", + "extradata", + "devnets", + "tada", + "statemanager", + "ethash", + "NOTURN", + "subclassing", + "Randao", + "PREVRANDAO", + "Ghorbanian", + "merkle", + "backports", + "behaviour", + "Ethash", + "Chainstart", + "Promisification", + "promisified", + "chainstart", + "callbackify", + "vpulim", + "chaindata", + "leveldb", + "Gitter", + "ethjs", + "ruleset", + "polkadot", + "stabilizations", + "holesky", + "newpayload", + "bootnodes", + "bootnode", + "vmexecution", + "sszify", + "libp2p", + "forkchoice", + "blockhash", + "Dockerfiles", + "blockfetcher", + "txpool", + "subchain", + "enode", + "unpadded", + "Ints", + "reverseblockfetcher", + "forkhash", + "Logvinov", + "Taunas", + "helprpc", + "loglevel", + "Logfile", + "prefund", + "Yolov", + "Calaveras", + "Vinay", + "Pulim", + "Rlpx", + "rlpx", + "hiveview", + "Prysm", + "prysm", + "prysmctl", + "datadir", + "jwtsecret", + "rustup", + "DATADIR", + "maxpeers", + "syncmode", + "nodiscover", + "sealhash", + "Nethermind", + "Merkle", + "ethpandaops", + "Teku", + "prefunded", + "etherbase", + "findneighbour", + "testnetworks", + "testnests", + "DATAFEE", + "MULTIPEER", + "syncpeer", + "Beaconsync", + "datadirs", + "NETWORKID", + "ELCLIENT", + "beaconsync", + "testvectors", + "Snapsync", + "snapsync", + "beaconchain", + "VZWK", + "Rwlk", + "USVGK", + "Kmhd", + "EAAAEAIAAAAAAAAAI", + "Ahtu", + "NBHJMH", + "Nwgi", + "multiaddrs", + "teku", + "nethermind", + "besu", + "Forkid", + "initcode", + "erigon", + "Lvmc", + "DATAHASH", + "BLOBHASH", + "timebased", + "Renamings", + "Authcall", + "Tyneway", + "SLOAD", + "SSTORE", + "calaveras", + "yolov", + "BASEFEE", + "EIPs", + "Forkhash", + "Forkhashes", + "AUTHCALL", + "Kademlia", + "forkid", + "scure", + "paulmillr", + "Neighbours", + "neighbour", + "ecies", + "RLPX", + "BOOTNODES", + "Kbucket", + "datagram", + "UDPPORT", + "TCPPORT", + "ECIES", + "Neighbour", + "findneighbours", + "pydevp", + "mkcache", + "ethashjs", + "validblock", + "rustbn", + "Preimage", + "verkle", + "Preimages", + "modexp", + "roninjin", + "kchojn", + "ripemd", + "Jochem", + "JUMPDEST", + "MLOAD", + "MSTORE", + "KECCAK", + "TLOAD", + "TSTORE", + "Initcode", + "bnadd", + "bnmul", + "fulltext", 
+ "eventemitter", + "Codesize", + "BLOCKHASH", + "selfdestruct", + "jochem", + "brouwer", + "alcuadrado", + "Checkpointing", + "checkpointing", + "MODEXP", + "RIPEMD", + "Rebalance", + "Sina", + "Promisified", + "ecmul", + "Stateroot", + "hotfixing", + "EXTCODEHASH", + "mattdean", + "digicatapult", + "rmeissner", + "jwasinger", + "Agusx", + "Holger", + "danjm", + "whymarrh", + "seesemichaelj", + "axic", + "totalgas", + "RETURNDATA", + "STATICCALL", + "sstore", + "CALLCODE", + "Mgas", + "wemeetagain", + "Schtroumpf", + "holgerd", + "rbtree", + "sdsl", + "vitalik", + "Vitalik", + "Kaustinen", + "faustbrian", + "bmark", + "samlior", + "checkpointed", + "retwrite", + "maindb", + "memdown", + "Rocheleau", + "Vitalik's", + "LMDB", + "lmdb", + "flamegraph", + "thislog", + "thatlog", + "otherlog", + "sublog", + "myscript", + "behaviours", + "unhashed", + "xghi", + "gregthegreek", + "danksharding", + "ecsign", + "Unpadded", + "talentlessguy", + "Nomic", + "unpad", + "Keccak", + "prebuilds", + "keccakjs", + "retesteth", + "testdata", + "Statetest", + "jsontrace", + "nomemory", + "statetest", + "pyethereum", + "evmlab", + "holiman", + "flamegraphs", + "testpath", + "TESTPATH", + "RETESTETH", + "thirdparty", + "hdkey", + "jackalope", + "Libray", + "explcit", + "ricmoo", + "scryptsy", + "randombytes", + "ICAP", + "cryptocoinjs", + "xpub", + "xpriv", + "Thirdparty", + "icap", + "ethereumhdkey", + "dklen", + "hmac", + "Beregszaszi", + "mfornet", + "blockfill", + "Heydiho", + "bazel", + "tlsv", + "lcli", + "extip", + "unauditability", + "ized", + "`scanf`", + "tomonari", + "dryajov", + "dgram", + "krzkaczor", + "libotony", + "xvfb", + "alextsg", + "Merkling", + "ledgerhq", + "accountprivate", + "accountimport", + "echash", + "pubkey", + "unpublish", + "poap", + "nycrc", + "gitpkg", + "multiaddress", + "triggerable", + "MCLBLS", + "heartedly", + "beaconroot" + ] +} diff --git a/config/cspell-ts.json b/config/cspell-ts.json new file mode 100644 index 0000000000..4b95f2ab9d --- /dev/null +++ b/config/cspell-ts.json @@ -0,0 +1,599 @@ +{ + "language": "en-US", + "ignoreRegExpList": ["/0x[0-9A-Fa-f]+/", "@scure"], + "overrides": [ + { + "filename": "**/packages/devp2p/**", + "ignoreWords": ["pirl", "ubiq", "gwhale", "prichain", "zfill"] + }, + { + "filename": "**/packages/ethash/**", + "ignoreWords": ["epoc", "cmix"] + } + ], + "words": [ + "Hardfork", + "hardfork", + "Chainstart", + "ethash", + "KECCAK", + "keccak", + "Verkle", + "verkle", + "maxblob", + "Randao", + "hardforks", + "ecsign", + "extradata", + "randao", + "Unpadded", + "Ethash", + "blocktime", + "timebomb", + "prestate", + "testdata", + "eips", + "chainstart", + "Besu", + "devnet", + "Kaustinen", + "kaustinen", + "premerge", + "txns", + "blockdata", + "Mockchain", + "HARDFORK", + "BLOCKHASH", + "checkpointed", + "headerchain", + "Merkle", + "cornercase", + "Deauthorizations", + "Deauthorizing", + "deauthorized", + "Deauthorize", + "checkpointing", + "Holesky", + "holesky", + "newblock", + "newheader", + "libp2p", + "multiaddrs", + "bootnode", + "bootnodes", + "Multiaddrs", + "peerpool", + "ethprotocol", + "flowcontrol", + "lesprotocol", + "fullethereumservice", + "lightethereumservice", + "fullsync", + "lightsync", + "loglevel", + "datadir", + "DATADIR", + "syncmode", + "Libp", + "chainsafe", + "Multiaddr", + "multiformats", + "multiaddr", + "Websockets", + "websockets", + "Bootnodes", + "secp256k1", + "Polkadot", + "polkadot", + "helprpc", + "SYNCMODE", + "LIGHTSERV", + "enode", + "discport", + "MAXPERREQUEST", + "MAXFETCHERJOBS", + "MINPEERS", 
+ "MAXPEERS", + "DNSADDR", + "reexecuting", + "DEBUGCODE", + "findneighbour", + "prefunded", + "etherbase", + "merkle", + "Newpayload", + "lightserv", + "txpool", + "Devnet", + "prefund", + "recid", + "prefunding", + "mockserver", + "jwtsecret", + "pkey", + "qheap", + "leveldown", + "devp2p", + "Rlpx", + "backstep", + "subchains", + "subchain", + "Subchain", + "unsynced", + "forkchoice", + "preimages", + "Preimages", + "MAXFETCHERREQUESTS", + "BACKSTEP", + "SUBCHAIN", + "NEWPAYLOAD", + "snapstate", + "vmexecution", + "lightchain", + "leveldb", + "preimage", + "Preimage", + "bitvector", + "MCLBLS", + "statemanager", + "rustbn", + "resetted", + "stateroot", + "backstepping", + "urlnopad", + "bubbleup", + "bubbledown", + "payloadid", + "blobsbundles", + "blockhash", + "rlpxpeer", + "handshaked", + "Devp", + "rlpx", + "Reinitiating", + "tablesize", + "peertablesize", + "snapprotocol", + "typeguard", + "boundprotocol", + "rlpxsender", + "rlpxserver", + "ECIES", + "Neighbours", + "Forkchoice", + "FORKCHOICE", + "errormsg", + "snapsync", + "newpayloadv", + "vmhead", + "sethead", + "statebuild", + "safeblock", + "prio", + "Keccak", + "accountfetcher", + "Snapsync", + "headstate", + "syncer", + "syncedchain", + "lastfilled", + "lastexecuted", + "lastfetched", + "lastvalid", + "lastsynced", + "lastchain", + "inited", + "Initing", + "unfinalized", + "canonicality", + "tailparent", + "Backstepped", + "Unfinalized", + "stepback", + "vmlog", + "snapprogress", + "trienodes", + "minblob", + "basefee", + "sendobject", + "broadcasterrors", + "knownpeers", + "sendobjects", + "handledadds", + "handlederrors", + "handledobject", + "successfuladds", + "failedadds", + "syncable", + "bytecodefetcher", + "storagefetcher", + "trienodefetcher", + "trienode", + "reqs", + "SNAPSYNC", + "rerequest", + "Receivedhash", + "blockfetcherbase", + "blockfetcher", + "CODEHASH", + "bytecodes", + "headerfetcher", + "reverseblockfetcher", + "multiaccount", + "slotset", + "hashset", + "proofset", + "sdsl", + "subtrie", + "beaconsync", + "reinited", + "syncability", + "ethjs", + "matchip", + "withengine", + "withoutengine", + "engineonly", + "enrtree", + "Hardforks", + "restopped", + "keyid", + "mockchain", + "pushable", + "mocksender", + "Pushable", + "mockpeer", + "simutils", + "codehash", + "nodeinfo", + "postmerge", + "newpayload", + "testvectors", + "statediffs", + "feerecipient", + "Testvectors", + "rpctestnet", + "blockopt", + "TXINDEX", + "badhex", + "checksummed", + "invalidlength", + "blockhashes", + "chainid", + "txes", + "DATAFEE", + "kzgs", + "iszero", + "staticcall", + "mload", + "multipeer", + "startnetwork", + "NETWORKID", + "MULTIPEER", + "snapfetcher", + "sidechain", + "wasmecrecover", + "forkhash", + "besu", + "EIPs", + "SWAPN", + "DUPN", + "BASEFEE", + "initcode", + "PREVRANDAO", + "MCOPY", + "JUMPF", + "SELFDESTRUCT", + "triggerable", + "RETURNDATASIZE", + "RETURNDATACOPY", + "STATICCALL", + "BLOBBASEFEE", + "regenesis", + "SSTORE", + "danksharding", + "selfdestruct", + "pectra", + "Checkpointing", + "typesafe", + "epochlength", + "Sstore", + "sstore", + "forkhashes", + "BOOTNODES", + "CLIENTID", + "snappyjs", + "uncompress", + "sscanf", + "scanf", + "unpadded", + "kbucket", + "banlist", + "neighbour", + "findneighbours", + "KBUCKET", + "unstrict", + "neighbours", + "typedata", + "sighash", + "hashdata", + "hashfn", + "ecies", + "ekey", + "Neighbour", + "ethdisco", + "findnode", + "misformatted", + "dpts", + "rlpxs", + "RLPXs", + "RLPX", + "peername", + "Comfortability", + "bitstream", + "Ecies", + "Testdata", 
+ "mkcache", + "validblock", + "mixhashes", + "newdata", + "SDIV", + "SMOD", + "ADDMOD", + "MULMOD", + "SIGNEXTEND", + "ISZERO", + "CALLVALUE", + "CALLDATALOAD", + "CALLDATASIZE", + "CALLDATACOPY", + "CODESIZE", + "CODECOPY", + "EXTCODECOPY", + "EXTCODEHASH", + "CHAINID", + "SELFBALANCE", + "BLOBAHASH", + "MLOAD", + "MSTORE", + "SLOAD", + "JUMPI", + "MSIZE", + "JUMPDEST", + "TLOAD", + "TSTORE", + "DATALOAD", + "DATALOADN", + "DATASIZE", + "DATACOPY", + "RJUMP", + "RJUMPI", + "RJUMPV", + "CALLF", + "RETF", + "EOFCREATE", + "RETURNCONTRACT", + "CALLCODE", + "DELEGATECALL", + "RETURNDATALOAD", + "EXTCALL", + "EXTDELEGATECALL", + "EXTSTATICCALL", + "Initmode", + "subcontainers", + "initmode", + "subcontainer", + "Subcontainers", + "rjump", + "callf", + "jumpf", + "Retf", + "Extcall", + "extcall", + "EOFBYTES", + "EOFHASH", + "RJUMPing", + "delegatecall", + "INITCODE", + "Codestore", + "Codesize", + "COOG", + "Accessfee", + "CODESTORE", + "BEGINSUB", + "RETURNSUB", + "JUMPSUB", + "jumpdest", + "beginsub", + "codechunk", + "jumpsub", + "Selfdestruct", + "eofcreate", + "RIPEMD", + "journaling", + "BLOBHASH", + "coldaccountaccess", + "warmstorageread", + "coldsload", + "sstorenoop", + "Sload", + "jumptable", + "VERKLE", + "Toaddress", + "sload", + "zeroness", + "ripemd", + "sdiv", + "smod", + "addmod", + "mulmod", + "signextend", + "callvalue", + "calldataload", + "calldatasize", + "calldatacopy", + "codesize", + "codecopy", + "extcodecopy", + "mstore", + "jumpi", + "msize", + "callcode", + "prevrandao", + "modexp", + "Gquaddivisor", + "returndatasize", + "returndatacopy", + "extcodehash", + "selfbalance", + "dupn", + "swapn", + "tstore", + "tload", + "rjumpi", + "rjumpv", + "retf", + "blobhash", + "mcopy", + "extdelegatecall", + "extstaticcall", + "returndataload", + "dataload", + "dataloadn", + "datasize", + "datacopy", + "blobbasefee", + "returncontract", + "MODEXP", + "ECADD", + "ecadd", + "ECMUL", + "ecmul", + "ECPAIRING", + "ecpairing", + "BADARGS", + "Jochem", + "MAPFPTOG", + "MAPFP", + "IRTF", + "Nethermind", + "unnormalized", + "Pippenger's", + "Exponentiate", + "Zcash", + "ECPAIR", + "behaviour", + "unpad", + "returndata", + "RETURNDATA", + "multiexp", + "mclbls", + "sstores", + "blobgas", + "rlptest", + "vitalik", + "Vitalik", + "pedersen", + "ZEROVALUE", + "unaccessed", + "poststate", + "downleveled", + "codehashes", + "misbehaviour", + "blocktag", + "downleveling", + "lmdb", + "LMDB", + "trietest", + "dbkey", + "unhashed", + "KEYBYTES", + "Keybytes", + "unmatching", + "keyvals", + "checkroot", + "noderef", + "SECP", + "startgas", + "Vitalik's", + "txbytes", + "chunkify", + "reserialized", + "withdrawalsto", + "leafnode", + "lastblockhash", + "bitvectors", + "nethermind", + "sgas", + "dgas", + "Mgas", + "beaconroot", + "predeploy", + "Predeploy", + "ommers", + "BLOCKHASHes", + "ommer", + "Ommer", + "nibling", + "Nibling", + "totalblob", + "rlpd", + "staticcalls", + "PREBALANCE", + "Blockhash", + "selfdestructs", + "Beaconroot", + "blockroot", + "CALLDATACOPYs", + "BROOT", + "broot", + "Blockroot", + "selfdestructed", + "statelessly", + "Pkey", + "alcuadrado", + "retesteth", + "tomergeatdiff", + "shanghaitocancunattime", + "cancuntopragueattime", + "jsontrace", + "alltests", + "postconditions", + "hdkey", + "thirdparty", + "keysize", + "ciphertext", + "userid", + "cipherparams", + "encseed", + "ethaddr", + "btcaddr", + "icap", + "ICAP", + "fixtureseed", + "fixturehd", + "hdnode", + "testpassword", + "wrongtestpassword", + "keybyte", + "unstub", + "unmock", + "unjustifiedly", + 
"uncompression", + "dedicatedly", + "EVMBLS", + "EVMBN", + "EVMONEs", + "INTURN", + "NOTURN", + "Andras", + "Radics", + "Fedor", + "Indutny", + "Kademlia", + "Slominski", + "patarapolw", + "nickdodson", + "Kintsugi", + "deauthorization" + ] +} diff --git a/config/eslint.cjs b/config/eslint.cjs index ce8c6e484b..d2f97618a8 100644 --- a/config/eslint.cjs +++ b/config/eslint.cjs @@ -19,7 +19,6 @@ module.exports = { 'benchmarks', 'coverage', 'dist', - 'examples', 'node_modules', 'prettier.config.js', 'recipes', @@ -116,11 +115,11 @@ module.exports = { parserOptions: { extraFileExtensions: ['.json'], sourceType: 'module', - project: './tsconfig.json', + project: './tsconfig.lint.json', }, overrides: [ { - files: ['test/**/*.ts', 'tests/**/*.ts'], + files: ['test/**/*.ts', 'tests/**/*.ts', 'examples/**/*.ts'], rules: { 'implicit-dependencies/no-implicit': 'off', 'import/no-extraneous-dependencies': 'off', diff --git a/config/tsconfig.lint.json b/config/tsconfig.lint.json new file mode 100644 index 0000000000..574cc64e1f --- /dev/null +++ b/config/tsconfig.lint.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.json", + "include": [ + "../packages/**/src/**/*.ts", + "../packages/**/test/**/*.ts", + "../packages/**/examples/**/*.ts", + "../packages/**/examples/**/*.cjs", + "../packages/**/examples/**/*.js", + "../packages/**/benchmarks/**/*.ts", + "../packages/**/bin/**/*.ts" + ], + "compilerOptions": { + "noEmit": true + } +} diff --git a/package-lock.json b/package-lock.json index 7d78164924..e0acd5abf0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,21 +18,22 @@ "@vitest/coverage-v8": "^v2.0.0-beta.1", "@vitest/ui": "^v2.0.0-beta.12", "c8": "7.12.0", + "cspell": "^8.13.3", "embedme": "1.22.1", - "eslint": "8.45.0", - "eslint-config-prettier": "8.8.0", - "eslint-config-typestrict": "1.0.5", + "eslint": "8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-config-typestrict": "^1.0.5", "eslint-formatter-codeframe": "7.32.1", "eslint-plugin-ethereumjs": "file:./eslint", "eslint-plugin-github": "4.9.2", "eslint-plugin-implicit-dependencies": "1.1.1", "eslint-plugin-import": "2.26.0", - "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-prettier": "^5.2.1", "eslint-plugin-simple-import-sort": "7.0.0", "eslint-plugin-sonarjs": "0.19.0", "lint-staged": "13.0.3", "lockfile-lint-api": "^5.5.1", - "prettier": "2.7.1", + "prettier": "^3.3.3", "sort-package-json": "1.57.0", "tape": "5.6.0", "tsx": "^4.6.2", @@ -74,15 +75,13 @@ }, "node_modules/@adraffy/ens-normalize": { "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", - "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@ampproject/remapping": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "devOptional": true, + "dev": true, + "license": "Apache-2.0", "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" @@ -93,38 +92,35 @@ }, "node_modules/@babel/code-frame": { "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", - "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", - "devOptional": true, + "dev": true, + "license": 
"MIT", "dependencies": { "@babel/highlight": "^7.10.4" } }, "node_modules/@babel/compat-data": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.7.tgz", - "integrity": "sha512-qJzAIcv03PyaWqxRgO4mSU3lihncDT296vnyuE2O8uA4w3UHWI4S3hgeZd1L8W1Bft40w9JxJ2b412iDUFFRhw==", + "version": "7.25.2", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.7.tgz", - "integrity": "sha512-nykK+LEK86ahTkX/3TgauT0ikKoNCfKHEaZYTUVupJdTLzGNvrblu4u6fa7DhZONAltdf8e662t/abY8idrd/g==", + "version": "7.25.2", "dev": true, + "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.7", - "@babel/helper-compilation-targets": "^7.24.7", - "@babel/helper-module-transforms": "^7.24.7", - "@babel/helpers": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/template": "^7.24.7", - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -141,9 +137,8 @@ }, "node_modules/@babel/core/node_modules/@babel/code-frame": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/highlight": "^7.24.7", "picocolors": "^1.0.0" @@ -154,9 +149,8 @@ }, "node_modules/@babel/core/node_modules/json5": { "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "dev": true, + "license": "MIT", "bin": { "json5": "lib/cli.js" }, @@ -166,20 +160,18 @@ }, "node_modules/@babel/core/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.7.tgz", - "integrity": "sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA==", + "version": "7.25.0", "dev": true, + "license": "MIT", "dependencies": { - "@babel/types": "^7.24.7", + "@babel/types": "^7.25.0", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" @@ -189,14 +181,13 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.7.tgz", - "integrity": "sha512-ctSdRHBi20qWOfy27RUb4Fhp07KSJ3sXcuSvTrXrc4aG8NSYDo1ici3Vhg9bg69y5bj0Mr1lh0aeEgTvc12rMg==", + "version": "7.25.2", "dev": true, + "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.24.7", - "@babel/helper-validator-option": "^7.24.7", - "browserslist": "^4.22.2", + 
"@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -206,64 +197,24 @@ }, "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^3.0.2" } }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz", - "integrity": "sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz", - "integrity": "sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==", - "dev": true, - "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz", - "integrity": "sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-module-imports": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", - "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/traverse": "^7.24.7", "@babel/types": "^7.24.7" @@ -273,16 +224,14 @@ } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.7.tgz", - "integrity": "sha512-1fuJEwIrp+97rM4RWdO+qrRsZlAeL1lQJoPqtCYWv0NL115XM93hIH4CSRln2w52SqvmY5hqdtauB6QFCDiZNQ==", + "version": "7.25.2", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", "@babel/helper-module-imports": "^7.24.7", "@babel/helper-simple-access": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7" + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" }, "engines": { "node": ">=6.9.0" @@ -293,9 +242,8 @@ }, "node_modules/@babel/helper-simple-access": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", - "integrity": 
"sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/traverse": "^7.24.7", "@babel/types": "^7.24.7" @@ -304,53 +252,37 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz", - "integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz", - "integrity": "sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==", + "version": "7.24.8", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.7.tgz", - "integrity": "sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw==", + "version": "7.24.8", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.7.tgz", - "integrity": "sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg==", + "version": "7.25.0", "dev": true, + "license": "MIT", "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" @@ -358,9 +290,8 @@ }, "node_modules/@babel/highlight": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", - "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-validator-identifier": "^7.24.7", "chalk": "^2.4.2", @@ -373,9 +304,8 @@ }, "node_modules/@babel/highlight/node_modules/ansi-styles": { "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^1.9.0" }, @@ -385,9 +315,8 @@ }, "node_modules/@babel/highlight/node_modules/chalk": { "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^3.2.1", 
"escape-string-regexp": "^1.0.5", @@ -399,42 +328,37 @@ }, "node_modules/@babel/highlight/node_modules/color-convert": { "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "color-name": "1.1.3" } }, "node_modules/@babel/highlight/node_modules/color-name": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/@babel/highlight/node_modules/escape-string-regexp": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.0" } }, "node_modules/@babel/highlight/node_modules/has-flag": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/@babel/highlight/node_modules/supports-color": { "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^3.0.0" }, @@ -443,10 +367,12 @@ } }, "node_modules/@babel/parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", - "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", - "devOptional": true, + "version": "7.25.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.25.2" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -454,28 +380,14 @@ "node": ">=6.0.0" } }, - "node_modules/@babel/runtime": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.7.tgz", - "integrity": "sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==", - "optional": true, - "peer": true, - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "version": "7.25.0", "dev": true, + "license": "MIT", "dependencies": { "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" @@ -483,9 +395,8 @@ }, "node_modules/@babel/template/node_modules/@babel/code-frame": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": 
"sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/highlight": "^7.24.7", "picocolors": "^1.0.0" @@ -495,19 +406,15 @@ } }, "node_modules/@babel/traverse": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.7.tgz", - "integrity": "sha512-yb65Ed5S/QAcewNPh0nZczy9JdYXkkAbIsEo+P7BE7yO3txAY30Y/oPa3QkQ5It3xVG2kpKMg9MsdxZaO31uKA==", + "version": "7.25.3", "dev": true, + "license": "MIT", "dependencies": { "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.7", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-hoist-variables": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/parser": "^7.25.3", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.2", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -517,9 +424,8 @@ }, "node_modules/@babel/traverse/node_modules/@babel/code-frame": { "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/highlight": "^7.24.7", "picocolors": "^1.0.0" @@ -530,20 +436,18 @@ }, "node_modules/@babel/traverse/node_modules/globals": { "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/@babel/types": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.7.tgz", - "integrity": "sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==", + "version": "7.25.2", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.24.7", + "@babel/helper-string-parser": "^7.24.8", "@babel/helper-validator-identifier": "^7.24.7", "to-fast-properties": "^2.0.0" }, @@ -553,421 +457,520 @@ }, "node_modules/@bcoe/v8-coverage": { "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", - "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", - "dev": true - }, - "node_modules/@bundled-es-modules/cookie": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.0.tgz", - "integrity": "sha512-Or6YHg/kamKHpxULAdSqhGqnWFneIXu1NKvvfBBzKGwpVsYuFIQ5aBPHDnnoR3ghW1nvSkALd+EF9iMtY7Vjxw==", - "optional": true, - "peer": true, - "dependencies": { - "cookie": "^0.5.0" - } - }, - "node_modules/@bundled-es-modules/statuses": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz", - "integrity": "sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==", - "optional": true, - "peer": true, - "dependencies": { - "statuses": "^2.0.1" - } - }, - "node_modules/@bundled-es-modules/statuses/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": 
"sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } + "dev": true, + "license": "MIT" }, "node_modules/@chainsafe/is-ip": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@chainsafe/is-ip/-/is-ip-2.0.2.tgz", - "integrity": "sha512-ndGqEMG1W5WkGagaqOZHpPU172AGdxr+LD15sv3WIUvT5oCFUrG1Y0CW/v2Egwj4JXEvSibaIIIqImsm98y1nA==" + "license": "MIT" }, "node_modules/@chainsafe/netmask": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@chainsafe/netmask/-/netmask-2.0.0.tgz", - "integrity": "sha512-I3Z+6SWUoaljh3TBzCnCxjlUyN8tA+NAk5L6m9IxvCf1BENQTePzPMis97CoN/iMW1St3WN+AWCCRp+TTBRiDg==", + "license": "MIT", "dependencies": { "@chainsafe/is-ip": "^2.0.1" } }, "node_modules/@colors/colors": { "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", - "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "license": "MIT", "engines": { "node": ">=0.1.90" } }, - "node_modules/@dabh/diagnostics": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", - "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", - "dependencies": { - "colorspace": "1.1.x", - "enabled": "2.0.x", - "kuler": "^2.0.0" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", - "cpu": [ - "ppc64" - ], - "optional": true, - "os": [ - "aix" - ], + "node_modules/@cspell/cspell-bundled-dicts": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/cspell-bundled-dicts/-/cspell-bundled-dicts-8.13.3.tgz", + "integrity": "sha512-OfCxUBMyayxKyeDaUZG3LQpiyH8MFUbg9nbIZCGh2x8U6N0fHaP9uR6R+gPzdi/bJp32Kr+RC/Yebojd+AQCGA==", + "dev": true, + "dependencies": { + "@cspell/dict-ada": "^4.0.2", + "@cspell/dict-aws": "^4.0.3", + "@cspell/dict-bash": "^4.1.3", + "@cspell/dict-companies": "^3.1.4", + "@cspell/dict-cpp": "^5.1.12", + "@cspell/dict-cryptocurrencies": "^5.0.0", + "@cspell/dict-csharp": "^4.0.2", + "@cspell/dict-css": "^4.0.12", + "@cspell/dict-dart": "^2.0.3", + "@cspell/dict-django": "^4.1.0", + "@cspell/dict-docker": "^1.1.7", + "@cspell/dict-dotnet": "^5.0.2", + "@cspell/dict-elixir": "^4.0.3", + "@cspell/dict-en_us": "^4.3.23", + "@cspell/dict-en-common-misspellings": "^2.0.4", + "@cspell/dict-en-gb": "1.1.33", + "@cspell/dict-filetypes": "^3.0.4", + "@cspell/dict-fonts": "^4.0.0", + "@cspell/dict-fsharp": "^1.0.1", + "@cspell/dict-fullstack": "^3.2.0", + "@cspell/dict-gaming-terms": "^1.0.5", + "@cspell/dict-git": "^3.0.0", + "@cspell/dict-golang": "^6.0.9", + "@cspell/dict-google": "^1.0.1", + "@cspell/dict-haskell": "^4.0.1", + "@cspell/dict-html": "^4.0.5", + "@cspell/dict-html-symbol-entities": "^4.0.0", + "@cspell/dict-java": "^5.0.7", + "@cspell/dict-julia": "^1.0.1", + "@cspell/dict-k8s": "^1.0.6", + "@cspell/dict-latex": "^4.0.0", + "@cspell/dict-lorem-ipsum": "^4.0.0", + "@cspell/dict-lua": "^4.0.3", + "@cspell/dict-makefile": "^1.0.0", + "@cspell/dict-monkeyc": "^1.0.6", + "@cspell/dict-node": "^5.0.1", + "@cspell/dict-npm": "^5.0.18", + "@cspell/dict-php": "^4.0.8", + "@cspell/dict-powershell": "^5.0.5", + "@cspell/dict-public-licenses": 
"^2.0.7", + "@cspell/dict-python": "^4.2.4", + "@cspell/dict-r": "^2.0.1", + "@cspell/dict-ruby": "^5.0.2", + "@cspell/dict-rust": "^4.0.5", + "@cspell/dict-scala": "^5.0.3", + "@cspell/dict-software-terms": "^4.0.6", + "@cspell/dict-sql": "^2.1.5", + "@cspell/dict-svelte": "^1.0.2", + "@cspell/dict-swift": "^2.0.1", + "@cspell/dict-terraform": "^1.0.0", + "@cspell/dict-typescript": "^3.1.6", + "@cspell/dict-vue": "^3.0.0" + }, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "android" - ], + "node_modules/@cspell/cspell-json-reporter": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/cspell-json-reporter/-/cspell-json-reporter-8.13.3.tgz", + "integrity": "sha512-QrHxWkm0cfD+rTjFOxm5lpE4+wBANDzMIM8NOeQC6v8Dc1L8PUkm6hF6CsEv2tKmuwvdVr+jy6GilDMkPXalCg==", + "dev": true, + "dependencies": { + "@cspell/cspell-types": "8.13.3" + }, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "android" - ], + "node_modules/@cspell/cspell-pipe": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/cspell-pipe/-/cspell-pipe-8.13.3.tgz", + "integrity": "sha512-6a9Zd+fDltgXoJ0fosWqEMx0UdXBXZ7iakhslMNPRmv7GhVAoHBoIXzMVilOE4kYT2Mh/9NM/QW/NbNEpneZIQ==", + "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "android" - ], + "node_modules/@cspell/cspell-resolver": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/cspell-resolver/-/cspell-resolver-8.13.3.tgz", + "integrity": "sha512-vlwtMTEWsPPtWfktzT75eGQ0n+0M+9kN+89eSvUUYdCfvY9XAS6z+bTmhS2ULJgntgWtX6gUjABQK0PYYVedOg==", + "dev": true, + "dependencies": { + "global-directory": "^4.0.1" + }, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "darwin" - ], + "node_modules/@cspell/cspell-service-bus": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/cspell-service-bus/-/cspell-service-bus-8.13.3.tgz", + "integrity": "sha512-mFkeWXwGQSDxRiN6Kez77GaMNGNgG7T6o9UE42jyXEgf/bLJTpefbUy4fY5pU3p2mA0eoMzmnJX8l+TC5YJpbA==", + "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": 
"sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ], + "node_modules/@cspell/cspell-types": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/cspell-types/-/cspell-types-8.13.3.tgz", + "integrity": "sha512-lA5GbhLOL6FlKCWNMbooRFgNGfTsM6NJnHz60+EEN7XD9OgpFc7w+MBcK4aHsVCxcrIvnejIc8xQDqPnrdmN3w==", + "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-ada": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@cspell/dict-ada/-/dict-ada-4.0.2.tgz", + "integrity": "sha512-0kENOWQeHjUlfyId/aCM/mKXtkEgV0Zu2RhUXCBr4hHo9F9vph+Uu8Ww2b0i5a4ZixoIkudGA+eJvyxrG1jUpA==", + "dev": true }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-aws": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-aws/-/dict-aws-4.0.3.tgz", + "integrity": "sha512-0C0RQ4EM29fH0tIYv+EgDQEum0QI6OrmjENC9u98pB8UcnYxGG/SqinuPxo+TgcEuInj0Q73MsBpJ1l5xUnrsw==", + "dev": true }, - "node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-bash": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-bash/-/dict-bash-4.1.3.tgz", + "integrity": "sha512-tOdI3QVJDbQSwPjUkOiQFhYcu2eedmX/PtEpVWg0aFps/r6AyjUQINtTgpqMYnYuq8O1QUIQqnpx21aovcgZCw==", + "dev": true }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-companies": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-companies/-/dict-companies-3.1.4.tgz", + "integrity": "sha512-y9e0amzEK36EiiKx3VAA+SHQJPpf2Qv5cCt5eTUSggpTkiFkCh6gRKQ97rVlrKh5GJrqinDwYIJtTsxuh2vy2Q==", + "dev": true }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", - "cpu": [ - "ia32" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-cpp": { + "version": "5.1.12", + 
"resolved": "https://registry.npmjs.org/@cspell/dict-cpp/-/dict-cpp-5.1.12.tgz", + "integrity": "sha512-6lXLOFIa+k/qBcu0bjaE/Kc6v3sh9VhsDOXD1Dalm3zgd0QIMjp5XBmkpSdCAK3pWCPV0Se7ysVLDfCea1BuXg==", + "dev": true }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", - "cpu": [ - "loong64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-cryptocurrencies": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-cryptocurrencies/-/dict-cryptocurrencies-5.0.0.tgz", + "integrity": "sha512-Z4ARIw5+bvmShL+4ZrhDzGhnc9znaAGHOEMaB/GURdS/jdoreEDY34wdN0NtdLHDO5KO7GduZnZyqGdRoiSmYA==", + "dev": true }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", - "cpu": [ - "mips64el" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-csharp": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@cspell/dict-csharp/-/dict-csharp-4.0.2.tgz", + "integrity": "sha512-1JMofhLK+4p4KairF75D3A924m5ERMgd1GvzhwK2geuYgd2ZKuGW72gvXpIV7aGf52E3Uu1kDXxxGAiZ5uVG7g==", + "dev": true }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", - "cpu": [ - "ppc64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-css": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/@cspell/dict-css/-/dict-css-4.0.13.tgz", + "integrity": "sha512-WfOQkqlAJTo8eIQeztaH0N0P+iF5hsJVKFuhy4jmARPISy8Efcv8QXk2/IVbmjJH0/ZV7dKRdnY5JFVXuVz37g==", + "dev": true }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", - "cpu": [ - "riscv64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-dart": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-dart/-/dict-dart-2.0.3.tgz", + "integrity": "sha512-cLkwo1KT5CJY5N5RJVHks2genFkNCl/WLfj+0fFjqNR+tk3tBI1LY7ldr9piCtSFSm4x9pO1x6IV3kRUY1lLiw==", + "dev": true }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", - "cpu": [ - "s390x" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-data-science": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-data-science/-/dict-data-science-2.0.1.tgz", + "integrity": "sha512-xeutkzK0eBe+LFXOFU2kJeAYO6IuFUc1g7iRLr7HeCmlC4rsdGclwGHh61KmttL3+YHQytYStxaRBdGAXWC8Lw==", + "dev": true }, 
- "node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } + "node_modules/@cspell/dict-django": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-django/-/dict-django-4.1.0.tgz", + "integrity": "sha512-bKJ4gPyrf+1c78Z0Oc4trEB9MuhcB+Yg+uTTWsvhY6O2ncFYbB/LbEZfqhfmmuK/XJJixXfI1laF2zicyf+l0w==", + "dev": true }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" + "node_modules/@cspell/dict-docker": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@cspell/dict-docker/-/dict-docker-1.1.7.tgz", + "integrity": "sha512-XlXHAr822euV36GGsl2J1CkBIVg3fZ6879ZOg5dxTIssuhUOCiV2BuzKZmt6aIFmcdPmR14+9i9Xq+3zuxeX0A==", + "dev": true + }, + "node_modules/@cspell/dict-dotnet": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@cspell/dict-dotnet/-/dict-dotnet-5.0.2.tgz", + "integrity": "sha512-UD/pO2A2zia/YZJ8Kck/F6YyDSpCMq0YvItpd4YbtDVzPREfTZ48FjZsbYi4Jhzwfvc6o8R56JusAE58P+4sNQ==", + "dev": true + }, + "node_modules/@cspell/dict-elixir": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-elixir/-/dict-elixir-4.0.3.tgz", + "integrity": "sha512-g+uKLWvOp9IEZvrIvBPTr/oaO6619uH/wyqypqvwpmnmpjcfi8+/hqZH8YNKt15oviK8k4CkINIqNhyndG9d9Q==", + "dev": true + }, + "node_modules/@cspell/dict-en_us": { + "version": "4.3.23", + "resolved": "https://registry.npmjs.org/@cspell/dict-en_us/-/dict-en_us-4.3.23.tgz", + "integrity": "sha512-l0SoEQBsi3zDSl3OuL4/apBkxjuj4hLIg/oy6+gZ7LWh03rKdF6VNtSZNXWAmMY+pmb1cGA3ouleTiJIglbsIg==", + "dev": true + }, + "node_modules/@cspell/dict-en-common-misspellings": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-en-common-misspellings/-/dict-en-common-misspellings-2.0.4.tgz", + "integrity": "sha512-lvOiRjV/FG4pAGZL3PN2GCVHSTCE92cwhfLGGkOsQtxSmef6WCHfHwp9auafkBlX0yFQSKDfq6/TlpQbjbJBtQ==", + "dev": true + }, + "node_modules/@cspell/dict-en-gb": { + "version": "1.1.33", + "resolved": "https://registry.npmjs.org/@cspell/dict-en-gb/-/dict-en-gb-1.1.33.tgz", + "integrity": "sha512-tKSSUf9BJEV+GJQAYGw5e+ouhEe2ZXE620S7BLKe3ZmpnjlNG9JqlnaBhkIMxKnNFkLY2BP/EARzw31AZnOv4g==", + "dev": true + }, + "node_modules/@cspell/dict-filetypes": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-filetypes/-/dict-filetypes-3.0.4.tgz", + "integrity": "sha512-IBi8eIVdykoGgIv5wQhOURi5lmCNJq0we6DvqKoPQJHthXbgsuO1qrHSiUVydMiQl/XvcnUWTMeAlVUlUClnVg==", + "dev": true + }, + "node_modules/@cspell/dict-fonts": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-fonts/-/dict-fonts-4.0.0.tgz", + "integrity": "sha512-t9V4GeN/m517UZn63kZPUYP3OQg5f0OBLSd3Md5CU3eH1IFogSvTzHHnz4Wqqbv8NNRiBZ3HfdY/pqREZ6br3Q==", + "dev": true + }, + "node_modules/@cspell/dict-fsharp": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-fsharp/-/dict-fsharp-1.0.1.tgz", + "integrity": 
"sha512-23xyPcD+j+NnqOjRHgW3IU7Li912SX9wmeefcY0QxukbAxJ/vAN4rBpjSwwYZeQPAn3fxdfdNZs03fg+UM+4yQ==", + "dev": true + }, + "node_modules/@cspell/dict-fullstack": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-fullstack/-/dict-fullstack-3.2.0.tgz", + "integrity": "sha512-sIGQwU6G3rLTo+nx0GKyirR5dQSFeTIzFTOrURw51ISf+jKG9a3OmvsVtc2OANfvEAOLOC9Wfd8WYhmsO8KRDQ==", + "dev": true + }, + "node_modules/@cspell/dict-gaming-terms": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-gaming-terms/-/dict-gaming-terms-1.0.5.tgz", + "integrity": "sha512-C3riccZDD3d9caJQQs1+MPfrUrQ+0KHdlj9iUR1QD92FgTOF6UxoBpvHUUZ9YSezslcmpFQK4xQQ5FUGS7uWfw==", + "dev": true + }, + "node_modules/@cspell/dict-git": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-git/-/dict-git-3.0.0.tgz", + "integrity": "sha512-simGS/lIiXbEaqJu9E2VPoYW1OTC2xrwPPXNXFMa2uo/50av56qOuaxDrZ5eH1LidFXwoc8HROCHYeKoNrDLSw==", + "dev": true + }, + "node_modules/@cspell/dict-golang": { + "version": "6.0.9", + "resolved": "https://registry.npmjs.org/@cspell/dict-golang/-/dict-golang-6.0.9.tgz", + "integrity": "sha512-etDt2WQauyEQDA+qPS5QtkYTb2I9l5IfQftAllVoB1aOrT6bxxpHvMEpJ0Hsn/vezxrCqa/BmtUbRxllIxIuSg==", + "dev": true + }, + "node_modules/@cspell/dict-google": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-google/-/dict-google-1.0.1.tgz", + "integrity": "sha512-dQr4M3n95uOhtloNSgB9tYYGXGGEGEykkFyRtfcp5pFuEecYUa0BSgtlGKx9RXVtJtKgR+yFT/a5uQSlt8WjqQ==", + "dev": true + }, + "node_modules/@cspell/dict-haskell": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-haskell/-/dict-haskell-4.0.1.tgz", + "integrity": "sha512-uRrl65mGrOmwT7NxspB4xKXFUenNC7IikmpRZW8Uzqbqcu7ZRCUfstuVH7T1rmjRgRkjcIjE4PC11luDou4wEQ==", + "dev": true + }, + "node_modules/@cspell/dict-html": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-html/-/dict-html-4.0.5.tgz", + "integrity": "sha512-p0brEnRybzSSWi8sGbuVEf7jSTDmXPx7XhQUb5bgG6b54uj+Z0Qf0V2n8b/LWwIPJNd1GygaO9l8k3HTCy1h4w==", + "dev": true + }, + "node_modules/@cspell/dict-html-symbol-entities": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-html-symbol-entities/-/dict-html-symbol-entities-4.0.0.tgz", + "integrity": "sha512-HGRu+48ErJjoweR5IbcixxETRewrBb0uxQBd6xFGcxbEYCX8CnQFTAmKI5xNaIt2PKaZiJH3ijodGSqbKdsxhw==", + "dev": true + }, + "node_modules/@cspell/dict-java": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@cspell/dict-java/-/dict-java-5.0.7.tgz", + "integrity": "sha512-ejQ9iJXYIq7R09BScU2y5OUGrSqwcD+J5mHFOKbduuQ5s/Eh/duz45KOzykeMLI6KHPVxhBKpUPBWIsfewECpQ==", + "dev": true + }, + "node_modules/@cspell/dict-julia": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-julia/-/dict-julia-1.0.1.tgz", + "integrity": "sha512-4JsCLCRhhLMLiaHpmR7zHFjj1qOauzDI5ZzCNQS31TUMfsOo26jAKDfo0jljFAKgw5M2fEG7sKr8IlPpQAYrmQ==", + "dev": true + }, + "node_modules/@cspell/dict-k8s": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-k8s/-/dict-k8s-1.0.6.tgz", + "integrity": "sha512-srhVDtwrd799uxMpsPOQqeDJY+gEocgZpoK06EFrb4GRYGhv7lXo9Fb+xQMyQytzOW9dw4DNOEck++nacDuymg==", + "dev": true + }, + "node_modules/@cspell/dict-latex": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-latex/-/dict-latex-4.0.0.tgz", + "integrity": "sha512-LPY4y6D5oI7D3d+5JMJHK/wxYTQa2lJMSNxps2JtuF8hbAnBQb3igoWEjEbIbRRH1XBM0X8dQqemnjQNCiAtxQ==", + "dev": true + }, + 
"node_modules/@cspell/dict-lorem-ipsum": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-lorem-ipsum/-/dict-lorem-ipsum-4.0.0.tgz", + "integrity": "sha512-1l3yjfNvMzZPibW8A7mQU4kTozwVZVw0AvFEdy+NcqtbxH+TvbSkNMqROOFWrkD2PjnKG0+Ea0tHI2Pi6Gchnw==", + "dev": true + }, + "node_modules/@cspell/dict-lua": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-lua/-/dict-lua-4.0.3.tgz", + "integrity": "sha512-lDHKjsrrbqPaea13+G9s0rtXjMO06gPXPYRjRYawbNmo4E/e3XFfVzeci3OQDQNDmf2cPOwt9Ef5lu2lDmwfJg==", + "dev": true + }, + "node_modules/@cspell/dict-makefile": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-makefile/-/dict-makefile-1.0.0.tgz", + "integrity": "sha512-3W9tHPcSbJa6s0bcqWo6VisEDTSN5zOtDbnPabF7rbyjRpNo0uHXHRJQF8gAbFzoTzBBhgkTmrfSiuyQm7vBUQ==", + "dev": true + }, + "node_modules/@cspell/dict-monkeyc": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-monkeyc/-/dict-monkeyc-1.0.6.tgz", + "integrity": "sha512-oO8ZDu/FtZ55aq9Mb67HtaCnsLn59xvhO/t2mLLTHAp667hJFxpp7bCtr2zOrR1NELzFXmKln/2lw/PvxMSvrA==", + "dev": true + }, + "node_modules/@cspell/dict-node": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-node/-/dict-node-5.0.1.tgz", + "integrity": "sha512-lax/jGz9h3Dv83v8LHa5G0bf6wm8YVRMzbjJPG/9rp7cAGPtdrga+XANFq+B7bY5+jiSA3zvj10LUFCFjnnCCg==", + "dev": true + }, + "node_modules/@cspell/dict-npm": { + "version": "5.0.18", + "resolved": "https://registry.npmjs.org/@cspell/dict-npm/-/dict-npm-5.0.18.tgz", + "integrity": "sha512-weMTyxWpzz19q4wv9n183BtFvdD5fCjtze+bFKpl+4rO/YlPhHL2cXLAeexJz/VDSBecwX4ybTZYoknd1h2J4w==", + "dev": true + }, + "node_modules/@cspell/dict-php": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@cspell/dict-php/-/dict-php-4.0.8.tgz", + "integrity": "sha512-TBw3won4MCBQ2wdu7kvgOCR3dY2Tb+LJHgDUpuquy3WnzGiSDJ4AVelrZdE1xu7mjFJUr4q48aB21YT5uQqPZA==", + "dev": true + }, + "node_modules/@cspell/dict-powershell": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-powershell/-/dict-powershell-5.0.5.tgz", + "integrity": "sha512-3JVyvMoDJesAATYGOxcUWPbQPUvpZmkinV3m8HL1w1RrjeMVXXuK7U1jhopSneBtLhkU+9HKFwgh9l9xL9mY2Q==", + "dev": true + }, + "node_modules/@cspell/dict-public-licenses": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@cspell/dict-public-licenses/-/dict-public-licenses-2.0.7.tgz", + "integrity": "sha512-KlBXuGcN3LE7tQi/GEqKiDewWGGuopiAD0zRK1QilOx5Co8XAvs044gk4MNIQftc8r0nHeUI+irJKLGcR36DIQ==", + "dev": true + }, + "node_modules/@cspell/dict-python": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@cspell/dict-python/-/dict-python-4.2.4.tgz", + "integrity": "sha512-sCtLBqMreb+8zRW2bXvFsfSnRUVU6IFm4mT6Dc4xbz0YajprbaPPh/kOUTw5IJRP8Uh+FFb7Xp2iH03CNWRq/A==", + "dev": true, + "dependencies": { + "@cspell/dict-data-science": "^2.0.1" } }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "openbsd" - ], + "node_modules/@cspell/dict-r": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-r/-/dict-r-2.0.1.tgz", + "integrity": "sha512-KCmKaeYMLm2Ip79mlYPc8p+B2uzwBp4KMkzeLd5E6jUlCL93Y5Nvq68wV5fRLDRTf7N1LvofkVFWfDcednFOgA==", + "dev": true + }, + 
"node_modules/@cspell/dict-ruby": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@cspell/dict-ruby/-/dict-ruby-5.0.2.tgz", + "integrity": "sha512-cIh8KTjpldzFzKGgrqUX4bFyav5lC52hXDKo4LbRuMVncs3zg4hcSf4HtURY+f2AfEZzN6ZKzXafQpThq3dl2g==", + "dev": true + }, + "node_modules/@cspell/dict-rust": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-rust/-/dict-rust-4.0.5.tgz", + "integrity": "sha512-DIvlPRDemjKQy8rCqftAgGNZxY5Bg+Ps7qAIJjxkSjmMETyDgl0KTVuaJPt7EK4jJt6uCZ4ILy96npsHDPwoXA==", + "dev": true + }, + "node_modules/@cspell/dict-scala": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@cspell/dict-scala/-/dict-scala-5.0.3.tgz", + "integrity": "sha512-4yGb4AInT99rqprxVNT9TYb1YSpq58Owzq7zi3ZS5T0u899Y4VsxsBiOgHnQ/4W+ygi+sp+oqef8w8nABR2lkg==", + "dev": true + }, + "node_modules/@cspell/dict-software-terms": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-software-terms/-/dict-software-terms-4.0.6.tgz", + "integrity": "sha512-UDhUzNSf7GN529a0Ip9hlSoGbpscz0YlUYBEJmZBXi8otpkrbCJqs50T74Ppd+SWqNil04De8urv4af2c6SY5Q==", + "dev": true + }, + "node_modules/@cspell/dict-sql": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@cspell/dict-sql/-/dict-sql-2.1.5.tgz", + "integrity": "sha512-FmxanytHXss7GAWAXmgaxl3icTCW7YxlimyOSPNfm+njqeUDjw3kEv4mFNDDObBJv8Ec5AWCbUDkWIpkE3IpKg==", + "dev": true + }, + "node_modules/@cspell/dict-svelte": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@cspell/dict-svelte/-/dict-svelte-1.0.2.tgz", + "integrity": "sha512-rPJmnn/GsDs0btNvrRBciOhngKV98yZ9SHmg8qI6HLS8hZKvcXc0LMsf9LLuMK1TmS2+WQFAan6qeqg6bBxL2Q==", + "dev": true + }, + "node_modules/@cspell/dict-swift": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@cspell/dict-swift/-/dict-swift-2.0.1.tgz", + "integrity": "sha512-gxrCMUOndOk7xZFmXNtkCEeroZRnS2VbeaIPiymGRHj5H+qfTAzAKxtv7jJbVA3YYvEzWcVE2oKDP4wcbhIERw==", + "dev": true + }, + "node_modules/@cspell/dict-terraform": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-terraform/-/dict-terraform-1.0.0.tgz", + "integrity": "sha512-Ak+vy4HP/bOgzf06BAMC30+ZvL9mzv21xLM2XtfnBLTDJGdxlk/nK0U6QT8VfFLqJ0ZZSpyOxGsUebWDCTr/zQ==", + "dev": true + }, + "node_modules/@cspell/dict-typescript": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@cspell/dict-typescript/-/dict-typescript-3.1.6.tgz", + "integrity": "sha512-1beC6O4P/j23VuxX+i0+F7XqPVc3hhiAzGJHEKqnWf5cWAXQtg0xz3xQJ5MvYx2a7iLaSa+lu7+05vG9UHyu9Q==", + "dev": true + }, + "node_modules/@cspell/dict-vue": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@cspell/dict-vue/-/dict-vue-3.0.0.tgz", + "integrity": "sha512-niiEMPWPV9IeRBRzZ0TBZmNnkK3olkOPYxC1Ny2AX4TGlYRajcW0WUtoSHmvvjZNfWLSg2L6ruiBeuPSbjnG6A==", + "dev": true + }, + "node_modules/@cspell/dynamic-import": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/dynamic-import/-/dynamic-import-8.13.3.tgz", + "integrity": "sha512-YN83CFWnMkt9B0q0RBadfEoptUaDRqBikh8b91MOQ0haEnUo6t57j4jAaLnbIEP4ynzMhgruWFKpIC/QaEtCuA==", + "dev": true, + "dependencies": { + "import-meta-resolve": "^4.1.0" + }, "engines": { - "node": ">=12" + "node": ">=18.0" } }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", - "cpu": [ - "x64" - ], - "optional": true, - "os": 
[ - "sunos" - ], + "node_modules/@cspell/strong-weak-map": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/strong-weak-map/-/strong-weak-map-8.13.3.tgz", + "integrity": "sha512-/QYUEthesPuDarOHa6kcWKJmVq0HIotjPrmAWQ5QpH+dDik1Qin4G/9QdnWX75ueR4DC4WFjBNBU14C4TVSwHQ==", + "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "win32" - ], + "node_modules/@cspell/url": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/@cspell/url/-/url-8.13.3.tgz", + "integrity": "sha512-hsxoTnZHwtdR2x9QEE6yfDBB1LUwAj67o1GyKTvI8A2OE/AfzAttirZs+9sxgOGWoBdTOxM9sMLtqB3SxtDB3A==", + "dev": true, "engines": { - "node": ">=12" + "node": ">=18.0" } }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", - "cpu": [ - "ia32" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" + "node_modules/@dabh/diagnostics": { + "version": "2.0.3", + "license": "MIT", + "dependencies": { + "colorspace": "1.1.x", + "enabled": "2.0.x", + "kuler": "^2.0.0" } }, - "node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "node_modules/@esbuild/linux-x64": { + "version": "0.23.0", "cpu": [ "x64" ], + "dev": true, + "license": "MIT", "optional": true, "os": [ - "win32" + "linux" ], "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", - "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^3.3.0" }, @@ -980,18 +983,16 @@ }, "node_modules/@eslint-community/regexpp": { "version": "4.11.0", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz", - "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==", "dev": true, + "license": "MIT", "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, "node_modules/@eslint/eslintrc": { "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", - "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, + "license": "MIT", "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", @@ -1011,10 +1012,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", - "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", + "version": "8.57.0", "dev": true, + "license": "MIT", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } @@ -1085,8 +1085,6 @@ }, 
"node_modules/@ethersproject/abi": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/abi/-/abi-5.7.0.tgz", - "integrity": "sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA==", "dev": true, "funding": [ { @@ -1098,6 +1096,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/address": "^5.7.0", "@ethersproject/bignumber": "^5.7.0", @@ -1112,8 +1111,6 @@ }, "node_modules/@ethersproject/abstract-provider": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz", - "integrity": "sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw==", "dev": true, "funding": [ { @@ -1125,6 +1122,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bignumber": "^5.7.0", "@ethersproject/bytes": "^5.7.0", @@ -1137,8 +1135,6 @@ }, "node_modules/@ethersproject/abstract-signer": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz", - "integrity": "sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ==", "dev": true, "funding": [ { @@ -1150,6 +1146,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/abstract-provider": "^5.7.0", "@ethersproject/bignumber": "^5.7.0", @@ -1160,8 +1157,6 @@ }, "node_modules/@ethersproject/address": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/address/-/address-5.7.0.tgz", - "integrity": "sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA==", "dev": true, "funding": [ { @@ -1173,6 +1168,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bignumber": "^5.7.0", "@ethersproject/bytes": "^5.7.0", @@ -1183,8 +1179,6 @@ }, "node_modules/@ethersproject/base64": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/base64/-/base64-5.7.0.tgz", - "integrity": "sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ==", "dev": true, "funding": [ { @@ -1196,14 +1190,13 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bytes": "^5.7.0" } }, "node_modules/@ethersproject/bignumber": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/bignumber/-/bignumber-5.7.0.tgz", - "integrity": "sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw==", "dev": true, "funding": [ { @@ -1215,6 +1208,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bytes": "^5.7.0", "@ethersproject/logger": "^5.7.0", @@ -1223,8 +1217,6 @@ }, "node_modules/@ethersproject/bytes": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/bytes/-/bytes-5.7.0.tgz", - "integrity": "sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A==", "dev": true, "funding": [ { @@ -1236,14 +1228,13 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/logger": "^5.7.0" } }, "node_modules/@ethersproject/constants": { "version": "5.7.0", - "resolved": 
"https://registry.npmjs.org/@ethersproject/constants/-/constants-5.7.0.tgz", - "integrity": "sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA==", "dev": true, "funding": [ { @@ -1255,14 +1246,13 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bignumber": "^5.7.0" } }, "node_modules/@ethersproject/hash": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/hash/-/hash-5.7.0.tgz", - "integrity": "sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g==", "dev": true, "funding": [ { @@ -1274,6 +1264,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/abstract-signer": "^5.7.0", "@ethersproject/address": "^5.7.0", @@ -1288,8 +1279,6 @@ }, "node_modules/@ethersproject/keccak256": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/keccak256/-/keccak256-5.7.0.tgz", - "integrity": "sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg==", "dev": true, "funding": [ { @@ -1301,6 +1290,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bytes": "^5.7.0", "js-sha3": "0.8.0" @@ -1308,8 +1298,6 @@ }, "node_modules/@ethersproject/logger": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/logger/-/logger-5.7.0.tgz", - "integrity": "sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig==", "dev": true, "funding": [ { @@ -1320,12 +1308,11 @@ "type": "individual", "url": "https://www.buymeacoffee.com/ricmoo" } - ] + ], + "license": "MIT" }, "node_modules/@ethersproject/networks": { "version": "5.7.1", - "resolved": "https://registry.npmjs.org/@ethersproject/networks/-/networks-5.7.1.tgz", - "integrity": "sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ==", "dev": true, "funding": [ { @@ -1337,14 +1324,13 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/logger": "^5.7.0" } }, "node_modules/@ethersproject/properties": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/properties/-/properties-5.7.0.tgz", - "integrity": "sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw==", "dev": true, "funding": [ { @@ -1356,14 +1342,13 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/logger": "^5.7.0" } }, "node_modules/@ethersproject/rlp": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/rlp/-/rlp-5.7.0.tgz", - "integrity": "sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w==", "dev": true, "funding": [ { @@ -1375,6 +1360,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bytes": "^5.7.0", "@ethersproject/logger": "^5.7.0" @@ -1382,8 +1368,6 @@ }, "node_modules/@ethersproject/signing-key": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/signing-key/-/signing-key-5.7.0.tgz", - "integrity": "sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q==", "dev": true, "funding": [ { @@ -1395,6 +1379,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { 
"@ethersproject/bytes": "^5.7.0", "@ethersproject/logger": "^5.7.0", @@ -1404,31 +1389,8 @@ "hash.js": "1.1.7" } }, - "node_modules/@ethersproject/signing-key/node_modules/elliptic": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", - "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", - "dev": true, - "dependencies": { - "bn.js": "^4.11.9", - "brorand": "^1.1.0", - "hash.js": "^1.0.0", - "hmac-drbg": "^1.0.1", - "inherits": "^2.0.4", - "minimalistic-assert": "^1.0.1", - "minimalistic-crypto-utils": "^1.0.1" - } - }, - "node_modules/@ethersproject/signing-key/node_modules/elliptic/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true - }, "node_modules/@ethersproject/strings": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/strings/-/strings-5.7.0.tgz", - "integrity": "sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg==", "dev": true, "funding": [ { @@ -1440,6 +1402,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/bytes": "^5.7.0", "@ethersproject/constants": "^5.7.0", @@ -1448,8 +1411,6 @@ }, "node_modules/@ethersproject/transactions": { "version": "5.7.0", - "resolved": "https://registry.npmjs.org/@ethersproject/transactions/-/transactions-5.7.0.tgz", - "integrity": "sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ==", "dev": true, "funding": [ { @@ -1461,6 +1422,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/address": "^5.7.0", "@ethersproject/bignumber": "^5.7.0", @@ -1475,8 +1437,6 @@ }, "node_modules/@ethersproject/web": { "version": "5.7.1", - "resolved": "https://registry.npmjs.org/@ethersproject/web/-/web-5.7.1.tgz", - "integrity": "sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w==", "dev": true, "funding": [ { @@ -1488,6 +1448,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@ethersproject/base64": "^5.7.0", "@ethersproject/bytes": "^5.7.0", @@ -1498,16 +1459,13 @@ }, "node_modules/@github/browserslist-config": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@github/browserslist-config/-/browserslist-config-1.0.0.tgz", - "integrity": "sha512-gIhjdJp/c2beaIWWIlsXdqXVRUz3r2BxBCpfz/F3JXHvSAQ1paMYjLH+maEATtENg+k5eLV7gA+9yPp762ieuw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.14", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", - "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", - "deprecated": "Use @eslint/config-array instead", "dev": true, + "license": "Apache-2.0", "dependencies": { "@humanwhocodes/object-schema": "^2.0.2", "debug": "^4.3.1", @@ -1519,9 +1477,8 @@ }, "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, + 
"license": "Apache-2.0", "engines": { "node": ">=12.22" }, @@ -1532,148 +1489,13 @@ }, "node_modules/@humanwhocodes/object-schema": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", - "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", - "deprecated": "Use @eslint/object-schema instead", - "dev": true - }, - "node_modules/@inquirer/confirm": { - "version": "3.1.12", - "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-3.1.12.tgz", - "integrity": "sha512-s5Sod79QsBBi5Qm7zxCq9DcAD0i7WRcjd/LzsiIAWqWZKW4+OJTGrCgVSLGIHTulwbZgdxM4AAxpCXe86hv4/Q==", - "optional": true, - "peer": true, - "dependencies": { - "@inquirer/core": "^9.0.0", - "@inquirer/type": "^1.4.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@inquirer/core": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-9.0.0.tgz", - "integrity": "sha512-y3q+fkCTGmvwk9Wf6yZlI3QGlLXbEm5M7Y7Eh8abaUbv+ffvmw2aB4FxSUrWaoaozwvEJSG60raHbCaUorXEzA==", - "optional": true, - "peer": true, - "dependencies": { - "@inquirer/figures": "^1.0.3", - "@inquirer/type": "^1.4.0", - "@types/mute-stream": "^0.0.4", - "@types/node": "^20.14.9", - "@types/wrap-ansi": "^3.0.0", - "ansi-escapes": "^4.3.2", - "cli-spinners": "^2.9.2", - "cli-width": "^4.1.0", - "mute-stream": "^1.0.0", - "signal-exit": "^4.1.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^6.2.0", - "yoctocolors-cjs": "^2.1.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@inquirer/core/node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", - "optional": true, - "peer": true, - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@inquirer/core/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "optional": true, - "peer": true - }, - "node_modules/@inquirer/core/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/@inquirer/core/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@inquirer/core/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "optional": true, - "peer": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/@inquirer/core/node_modules/wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "optional": true, - "peer": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@inquirer/figures": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.3.tgz", - "integrity": "sha512-ErXXzENMH5pJt5/ssXV0DfWUZqly8nGzf0UcBV9xTnP+KyffE2mqyxIMBrZ8ijQck2nU0TQm40EQB53YreyWHw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=18" - } - }, - "node_modules/@inquirer/type": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-1.4.0.tgz", - "integrity": "sha512-AjOqykVyjdJQvtfkNDGUyMYGF8xN50VUxftCQWsOyIo4DFRLr6VQhW0VItGI1JIyQGCGgIpKa7hMMwNhZb4OIw==", - "optional": true, - "peer": true, - "dependencies": { - "mute-stream": "^1.0.0" - }, - "engines": { - "node": ">=18" - } + "dev": true, + "license": "BSD-3-Clause" }, "node_modules/@isaacs/cliui": { "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "devOptional": true, + "dev": true, + "license": "ISC", "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", @@ -1688,9 +1510,8 @@ }, "node_modules/@isaacs/cliui/node_modules/ansi-regex": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -1700,9 +1521,8 @@ }, "node_modules/@isaacs/cliui/node_modules/ansi-styles": { "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -1712,9 +1532,8 @@ }, "node_modules/@isaacs/cliui/node_modules/strip-ansi": { "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -1727,9 +1546,8 @@ }, "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -1744,9 +1562,8 @@ }, "node_modules/@istanbuljs/load-nyc-config": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", - "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", "dev": true, + "license": "ISC", "dependencies": { "camelcase": "^5.3.1", 
"find-up": "^4.1.0", @@ -1760,18 +1577,16 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": { "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, + "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -1782,9 +1597,8 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -1795,9 +1609,8 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^4.1.0" }, @@ -1807,9 +1620,8 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -1822,9 +1634,8 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.2.0" }, @@ -1834,39 +1645,24 @@ }, "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/@jest/schemas": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", - "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", - "devOptional": true, - "dependencies": { - "@sinclair/typebox": "^0.27.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.5", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", - 
"integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", @@ -1878,47 +1674,49 @@ }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "devOptional": true + "version": "1.5.0", + "dev": true, + "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, "node_modules/@leichtgewicht/ip-codec": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz", - "integrity": "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==" + "license": "MIT" }, "node_modules/@libp2p/interface": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@libp2p/interface/-/interface-1.5.0.tgz", - "integrity": "sha512-SivVvZ+7r7Vgnv+Y88nGZTpG449PYZAPgfLhVqTXn6T4NAFt47InzC7UMFnVqSQuT21YmI9DoeaVXH73CsHNrg==", + "version": "1.6.3", + "license": "Apache-2.0 OR MIT", "dependencies": { "@multiformats/multiaddr": "^12.2.3", "it-pushable": "^3.2.3", @@ -1930,137 +1728,18 @@ }, "node_modules/@libp2p/interface/node_modules/it-pushable": { "version": "3.2.3", - "resolved": "https://registry.npmjs.org/it-pushable/-/it-pushable-3.2.3.tgz", - "integrity": "sha512-gzYnXYK8Y5t5b/BnJUr7glfQLO4U5vyb05gPx/TyTw+4Bv1zM9gFk4YsOrnulWefMewlphCjKkakFvj1y99Tcg==", + "license": "Apache-2.0 OR MIT", "dependencies": { "p-defer": "^4.0.0" } }, - "node_modules/@lmdb/lmdb-darwin-arm64": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-2.9.4.tgz", - "integrity": "sha512-38XmbES/wVcvMXdwcM5QzL0cSaZu3VwE7mCd0I89eliHQTMQblgWXsr2HQoP9v0JnH6jVt7+E/TkeGNLmp4wzA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lmdb/lmdb-darwin-x64": { - "version": "2.9.4", - "resolved": 
"https://registry.npmjs.org/@lmdb/lmdb-darwin-x64/-/lmdb-darwin-x64-2.9.4.tgz", - "integrity": "sha512-JkPrV8rEu88FzMcuouZeU2b/NuVC3KwQxKo5vKhNycBtsCn7KCWHalxL4sdTiHQ4xtgMca3mmeDAdxgqQqnDig==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@lmdb/lmdb-linux-arm": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm/-/lmdb-linux-arm-2.9.4.tgz", - "integrity": "sha512-b3JZL5pLuvcGEbcsThUQPFlQdBFaBxImrlNbFUeJmzLwpdgGRi0RSQdZZ2PuIoZvpRj0tfNlhXQwXiYMz+9iTw==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lmdb/lmdb-linux-arm64": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-arm64/-/lmdb-linux-arm64-2.9.4.tgz", - "integrity": "sha512-aIzmw0g4Wdd/w2rDppGfo1JEl4xWpg6HPbf5ZeuWXCjFms8oc8cazm6oBEAimiZEgYYBFPDPdM644xJcwuJbxQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, "node_modules/@lmdb/lmdb-linux-x64": { "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-linux-x64/-/lmdb-linux-x64-2.9.4.tgz", - "integrity": "sha512-Yj6Nb+/j+ZZ65oH/UCE0UfUu/6TO5wWLIeE2izGCpsCxcozZVbzwhzrCs0FUXf6lXv46DJteONosWH9o1XjzqQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@lmdb/lmdb-win32-x64": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/@lmdb/lmdb-win32-x64/-/lmdb-win32-x64-2.9.4.tgz", - "integrity": "sha512-0L6Tyun47/kQb+FzTDIumrfZgU6oEos0RgekKa/3YC7nsUY+ZASZHikzGgEZpMQHSz5YeR+DDUtOMSwqodWHDg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz", - "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz", - "integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==", "cpu": [ "x64" ], "dev": true, - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz", - "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz", - "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==", - "cpu": [ - "arm64" - ], - "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" @@ -2068,62 +1747,19 @@ }, "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": { "version": "3.0.3", - "resolved": 
"https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz", - "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==", "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz", - "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@mswjs/cookies": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@mswjs/cookies/-/cookies-1.1.1.tgz", - "integrity": "sha512-W68qOHEjx1iD+4VjQudlx26CPIoxmIAtK4ZCexU0/UJBG6jYhcuyzKJx+Iw8uhBIGd9eba64XgWVgo20it1qwA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=18" - } - }, - "node_modules/@mswjs/interceptors": { - "version": "0.29.1", - "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.29.1.tgz", - "integrity": "sha512-3rDakgJZ77+RiQUuSK69t1F0m8BQKA8Vh5DCS5V0DWvNY67zob2JhhQrhCO0AKLGINTRSFd1tBaHcJTkhefoSw==", - "optional": true, - "peer": true, - "dependencies": { - "@open-draft/deferred-promise": "^2.2.0", - "@open-draft/logger": "^0.3.0", - "@open-draft/until": "^2.0.0", - "is-node-process": "^1.2.0", - "outvariant": "^1.2.1", - "strict-event-emitter": "^0.5.1" - }, - "engines": { - "node": ">=18" - } + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] }, "node_modules/@multiformats/dns": { "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@multiformats/dns/-/dns-1.0.6.tgz", - "integrity": "sha512-nt/5UqjMPtyvkG9BQYdJ4GfLK3nMqGpFZOzf4hAmIa0sJh2LlS9YKXZ4FgwBDsaHvzZqR/rUFIywIc7pkHNNuw==", + "license": "Apache-2.0 OR MIT", "dependencies": { "@types/dns-packet": "^5.6.5", "buffer": "^6.0.3", @@ -2136,8 +1772,7 @@ }, "node_modules/@multiformats/multiaddr": { "version": "12.3.0", - "resolved": "https://registry.npmjs.org/@multiformats/multiaddr/-/multiaddr-12.3.0.tgz", - "integrity": "sha512-JQ8Gc/jgucqqvEaDTFN/AvxlYDHEE7lgEWLMYW7hKZkWggER+GvG/tVxUgUxIP8M0vFpvEHKKHE0lKzyMsgi8Q==", + "license": "Apache-2.0 OR MIT", "dependencies": { "@chainsafe/is-ip": "^2.0.1", "@chainsafe/netmask": "^2.0.0", @@ -2150,8 +1785,7 @@ }, "node_modules/@noble/curves": { "version": "1.4.2", - "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.4.2.tgz", - "integrity": "sha512-TavHr8qycMChk8UwMld0ZDRvatedkzWfH8IiaeGCfymOP5i0hSCozz9vHOL0nkwk7HRMlFnAiKpS2jrUmSybcw==", + "license": "MIT", "dependencies": { "@noble/hashes": "1.4.0" }, @@ -2161,8 +1795,7 @@ }, "node_modules/@noble/hashes": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz", - "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==", + "license": "MIT", "engines": { "node": ">= 16" }, @@ -2172,9 +1805,8 @@ }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ 
-2185,18 +1817,16 @@ }, "node_modules/@nodelib/fs.stat": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } }, "node_modules/@nodelib/fs.walk": { "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" @@ -2205,43 +1835,17 @@ "node": ">= 8" } }, - "node_modules/@open-draft/deferred-promise": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz", - "integrity": "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==", - "optional": true, - "peer": true - }, - "node_modules/@open-draft/logger": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@open-draft/logger/-/logger-0.3.0.tgz", - "integrity": "sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==", - "optional": true, - "peer": true, - "dependencies": { - "is-node-process": "^1.2.0", - "outvariant": "^1.4.0" - } - }, - "node_modules/@open-draft/until": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz", - "integrity": "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==", - "optional": true, - "peer": true - }, "node_modules/@opentelemetry/api": { "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", - "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", "engines": { "node": ">=8.0.0" } }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "license": "MIT", "optional": true, "engines": { "node": ">=14" @@ -2249,9 +1853,8 @@ }, "node_modules/@pkgr/core": { "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", - "integrity": "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==", "dev": true, + "license": "MIT", "engines": { "node": "^12.20.0 || ^14.18.0 || >=16.0.0" }, @@ -2261,19 +1864,17 @@ }, "node_modules/@polka/url": { "version": "1.0.0-next.25", - "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.25.tgz", - "integrity": "sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/@polkadot/util": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/util/-/util-12.6.2.tgz", - "integrity": "sha512-l8TubR7CLEY47240uki0TQzFvtnxFIO7uI/0GoWzpYD/O62EIAMRsuY01N4DuwgKq2ZWD59WhzsLYmA5K6ksdw==", + "version": "13.0.2", + "license": "Apache-2.0", "dependencies": { - "@polkadot/x-bigint": "12.6.2", - "@polkadot/x-global": "12.6.2", - "@polkadot/x-textdecoder": "12.6.2", - 
"@polkadot/x-textencoder": "12.6.2", + "@polkadot/x-bigint": "13.0.2", + "@polkadot/x-global": "13.0.2", + "@polkadot/x-textdecoder": "13.0.2", + "@polkadot/x-textencoder": "13.0.2", "@types/bn.js": "^5.1.5", "bn.js": "^5.2.1", "tslib": "^2.6.2" @@ -2284,8 +1885,7 @@ }, "node_modules/@polkadot/wasm-bridge": { "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-bridge/-/wasm-bridge-7.3.2.tgz", - "integrity": "sha512-AJEXChcf/nKXd5Q/YLEV5dXQMle3UNT7jcXYmIffZAo/KI394a+/24PaISyQjoNC0fkzS1Q8T5pnGGHmXiVz2g==", + "license": "Apache-2.0", "dependencies": { "@polkadot/wasm-util": "7.3.2", "tslib": "^2.6.2" @@ -2300,8 +1900,7 @@ }, "node_modules/@polkadot/wasm-crypto": { "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto/-/wasm-crypto-7.3.2.tgz", - "integrity": "sha512-+neIDLSJ6jjVXsjyZ5oLSv16oIpwp+PxFqTUaZdZDoA2EyFRQB8pP7+qLsMNk+WJuhuJ4qXil/7XiOnZYZ+wxw==", + "license": "Apache-2.0", "dependencies": { "@polkadot/wasm-bridge": "7.3.2", "@polkadot/wasm-crypto-asmjs": "7.3.2", @@ -2320,8 +1919,7 @@ }, "node_modules/@polkadot/wasm-crypto-asmjs": { "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-7.3.2.tgz", - "integrity": "sha512-QP5eiUqUFur/2UoF2KKKYJcesc71fXhQFLT3D4ZjG28Mfk2ZPI0QNRUfpcxVQmIUpV5USHg4geCBNuCYsMm20Q==", + "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" }, @@ -2334,8 +1932,7 @@ }, "node_modules/@polkadot/wasm-crypto-init": { "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-init/-/wasm-crypto-init-7.3.2.tgz", - "integrity": "sha512-FPq73zGmvZtnuJaFV44brze3Lkrki3b4PebxCy9Fplw8nTmisKo9Xxtfew08r0njyYh+uiJRAxPCXadkC9sc8g==", + "license": "Apache-2.0", "dependencies": { "@polkadot/wasm-bridge": "7.3.2", "@polkadot/wasm-crypto-asmjs": "7.3.2", @@ -2353,8 +1950,7 @@ }, "node_modules/@polkadot/wasm-crypto-wasm": { "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-7.3.2.tgz", - "integrity": "sha512-15wd0EMv9IXs5Abp1ZKpKKAVyZPhATIAHfKsyoWCEFDLSOA0/K0QGOxzrAlsrdUkiKZOq7uzSIgIDgW8okx2Mw==", + "license": "Apache-2.0", "dependencies": { "@polkadot/wasm-util": "7.3.2", "tslib": "^2.6.2" @@ -2368,8 +1964,7 @@ }, "node_modules/@polkadot/wasm-util": { "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-util/-/wasm-util-7.3.2.tgz", - "integrity": "sha512-bmD+Dxo1lTZyZNxbyPE380wd82QsX+43mgCm40boyKrRppXEyQmWT98v/Poc7chLuskYb6X8IQ6lvvK2bGR4Tg==", + "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" }, @@ -2381,11 +1976,10 @@ } }, "node_modules/@polkadot/x-bigint": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-bigint/-/x-bigint-12.6.2.tgz", - "integrity": "sha512-HSIk60uFPX4GOFZSnIF7VYJz7WZA7tpFJsne7SzxOooRwMTWEtw3fUpFy5cYYOeLh17/kHH1Y7SVcuxzVLc74Q==", + "version": "13.0.2", + "license": "Apache-2.0", "dependencies": { - "@polkadot/x-global": "12.6.2", + "@polkadot/x-global": "13.0.2", "tslib": "^2.6.2" }, "engines": { @@ -2393,9 +1987,8 @@ } }, "node_modules/@polkadot/x-global": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-global/-/x-global-12.6.2.tgz", - "integrity": "sha512-a8d6m+PW98jmsYDtAWp88qS4dl8DyqUBsd0S+WgyfSMtpEXu6v9nXDgPZgwF5xdDvXhm+P0ZfVkVTnIGrScb5g==", + "version": "13.0.2", + "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" }, @@ -2404,28 +1997,26 @@ } }, "node_modules/@polkadot/x-randomvalues": { - "version": "12.6.2", - "resolved": 
"https://registry.npmjs.org/@polkadot/x-randomvalues/-/x-randomvalues-12.6.2.tgz", - "integrity": "sha512-Vr8uG7rH2IcNJwtyf5ebdODMcr0XjoCpUbI91Zv6AlKVYOGKZlKLYJHIwpTaKKB+7KPWyQrk4Mlym/rS7v9feg==", + "version": "13.0.2", + "license": "Apache-2.0", "peer": true, "dependencies": { - "@polkadot/x-global": "12.6.2", + "@polkadot/x-global": "13.0.2", "tslib": "^2.6.2" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@polkadot/util": "12.6.2", + "@polkadot/util": "13.0.2", "@polkadot/wasm-util": "*" } }, "node_modules/@polkadot/x-textdecoder": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-textdecoder/-/x-textdecoder-12.6.2.tgz", - "integrity": "sha512-M1Bir7tYvNappfpFWXOJcnxUhBUFWkUFIdJSyH0zs5LmFtFdbKAeiDXxSp2Swp5ddOZdZgPac294/o2TnQKN1w==", + "version": "13.0.2", + "license": "Apache-2.0", "dependencies": { - "@polkadot/x-global": "12.6.2", + "@polkadot/x-global": "13.0.2", "tslib": "^2.6.2" }, "engines": { @@ -2433,158 +2024,20 @@ } }, "node_modules/@polkadot/x-textencoder": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-textencoder/-/x-textencoder-12.6.2.tgz", - "integrity": "sha512-4N+3UVCpI489tUJ6cv3uf0PjOHvgGp9Dl+SZRLgFGt9mvxnvpW/7+XBADRMtlG4xi5gaRK7bgl5bmY6OMDsNdw==", + "version": "13.0.2", + "license": "Apache-2.0", "dependencies": { - "@polkadot/x-global": "12.6.2", + "@polkadot/x-global": "13.0.2", "tslib": "^2.6.2" }, "engines": { "node": ">=18" } }, - "node_modules/@promptbook/utils": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@promptbook/utils/-/utils-0.58.0.tgz", - "integrity": "sha512-TglWndmjikWN+OGg9eNOUaMTM7RHr8uFCtgxfWULT1BUjcohywdijf54vS1U4mZ1tBLdHD4/fIrIHtmHzPUIZQ==", - "funding": [ - { - "type": "individual", - "url": "https://buymeacoffee.com/hejny" - }, - { - "type": "github", - "url": "https://github.com/webgptorg/promptbook/blob/main/README.md#%EF%B8%8F-contributing" - } - ], - "optional": true, - "peer": true, - "dependencies": { - "spacetrim": "0.11.36" - } - }, - "node_modules/@puppeteer/browsers": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.9.1.tgz", - "integrity": "sha512-PuvK6xZzGhKPvlx3fpfdM2kYY3P/hB1URtK8wA7XUJ6prn6pp22zvJHu48th0SGcHL9SutbPHrFuQgfXTFobWA==", - "optional": true, - "peer": true, - "dependencies": { - "debug": "4.3.4", - "extract-zip": "2.0.1", - "progress": "2.0.3", - "proxy-agent": "6.3.1", - "tar-fs": "3.0.4", - "unbzip2-stream": "1.4.3", - "yargs": "17.7.2" - }, - "bin": { - "browsers": "lib/cjs/main-cli.js" - }, - "engines": { - "node": ">=16.3.0" - } - }, - "node_modules/@puppeteer/browsers/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "optional": true, - "peer": true, - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@puppeteer/browsers/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "optional": true, - "peer": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - 
"node_modules/@puppeteer/browsers/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "optional": true, - "peer": true - }, - "node_modules/@puppeteer/browsers/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/@puppeteer/browsers/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "optional": true, - "peer": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@puppeteer/browsers/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "optional": true, - "peer": true, - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@puppeteer/browsers/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" - } - }, "node_modules/@rollup/plugin-inject": { "version": "5.0.5", - "resolved": "https://registry.npmjs.org/@rollup/plugin-inject/-/plugin-inject-5.0.5.tgz", - "integrity": "sha512-2+DEJbNBoPROPkgTDNe8/1YXWcqxbN5DTjASVIOx8HS+pITXushyNiBV56RB08zuptzz8gT3YfkqriTBVycepg==", "dev": true, + "license": "MIT", "dependencies": { "@rollup/pluginutils": "^5.0.1", "estree-walker": "^2.0.2", @@ -2602,261 +2055,87 @@ } } }, - "node_modules/@rollup/plugin-inject/node_modules/estree-walker": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "dev": true - }, - "node_modules/@rollup/plugin-virtual": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@rollup/plugin-virtual/-/plugin-virtual-3.0.2.tgz", - "integrity": "sha512-10monEYsBp3scM4/ND4LNH5Rxvh3e/cVeL3jWTgZ2SrQ+BmUoQcopVQvnaMcOnykb1VkxUFuDAN+0FnpTFRy2A==", - "dev": true, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.0.tgz", - "integrity": 
"sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g==", - "dev": true, - "dependencies": { - "@types/estree": "^1.0.0", - "estree-walker": "^2.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils/node_modules/estree-walker": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "dev": true - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.0.tgz", - "integrity": "sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.0.tgz", - "integrity": "sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.0.tgz", - "integrity": "sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.0.tgz", - "integrity": "sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.0.tgz", - "integrity": "sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.0.tgz", - "integrity": "sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.0.tgz", - "integrity": "sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.0.tgz", - "integrity": 
"sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.0.tgz", - "integrity": "sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==", - "cpu": [ - "ppc64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.0.tgz", - "integrity": "sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==", - "cpu": [ - "riscv64" - ], - "optional": true, - "os": [ - "linux" - ] + "node_modules/@rollup/plugin-inject/node_modules/estree-walker": { + "version": "2.0.2", + "dev": true, + "license": "MIT" }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.0.tgz", - "integrity": "sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==", - "cpu": [ - "s390x" - ], - "optional": true, - "os": [ - "linux" - ] + "node_modules/@rollup/plugin-virtual": { + "version": "3.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils/node_modules/estree-walker": { + "version": "2.0.2", + "dev": true, + "license": "MIT" }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.0.tgz", - "integrity": "sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==", + "version": "4.20.0", "cpu": [ "x64" ], + "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.0.tgz", - "integrity": "sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==", + "version": "4.20.0", "cpu": [ "x64" ], + "dev": true, + "license": "MIT", "optional": true, "os": [ "linux" ] }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.0.tgz", - "integrity": "sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.18.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.0.tgz", - "integrity": "sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==", - "cpu": [ - "ia32" - ], - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.0.tgz", - "integrity": "sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "win32" - ] - }, "node_modules/@scure/base": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@scure/base/-/base-1.1.7.tgz", - "integrity": "sha512-PPNYBslrLNNUQ/Yad37MHYsNQtK67EhWb6WtSvNLLPo7SdVZgkUjD6Dg+5On7zNwmskf8OX7I7Nx5oN+MIWE0g==", + "license": "MIT", "funding": { "url": "https://paulmillr.com/funding/" } }, "node_modules/@scure/bip32": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@scure/bip32/-/bip32-1.4.0.tgz", - "integrity": "sha512-sVUpc0Vq3tXCkDGYVWGIZTRfnvu8LoTDaev7vbwh0omSvVORONr960MQWdKqJDCReIEmTj3PAr73O3aoxz7OPg==", + "license": "MIT", "dependencies": { "@noble/curves": "~1.4.0", "@noble/hashes": "~1.4.0", @@ -2868,8 +2147,7 @@ }, "node_modules/@scure/bip39": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@scure/bip39/-/bip39-1.3.0.tgz", - "integrity": "sha512-disdg7gHuTDZtY+ZdkmLpPCk7fxZSu3gBiEGuoC1XYxv9cGx3Z6cpTggCgW6odSOOIXCiDjuGejW+aJKCY/pIQ==", + "license": "MIT", "dependencies": { "@noble/hashes": "~1.4.0", "@scure/base": "~1.1.6" @@ -2878,34 +2156,14 @@ "url": "https://paulmillr.com/funding/" } }, - "node_modules/@sinclair/typebox": { - "version": "0.27.8", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", - "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", - "devOptional": true - }, - "node_modules/@sindresorhus/is": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", - "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" - } - }, "node_modules/@swc/core": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.6.6.tgz", - "integrity": "sha512-sHfmIUPUXNrQTwFMVCY5V5Ena2GTOeaWjS2GFUpjLhAgVfP90OP67DWow7+cYrfFtqBdILHuWnjkTcd0+uPKlg==", + "version": "1.7.11", "dev": true, "hasInstallScript": true, + "license": "Apache-2.0", "dependencies": { "@swc/counter": "^0.1.3", - "@swc/types": "^0.1.9" + "@swc/types": "^0.1.12" }, "engines": { "node": ">=10" @@ -2915,16 +2173,16 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.6.6", - "@swc/core-darwin-x64": "1.6.6", - "@swc/core-linux-arm-gnueabihf": "1.6.6", - "@swc/core-linux-arm64-gnu": "1.6.6", - "@swc/core-linux-arm64-musl": "1.6.6", - "@swc/core-linux-x64-gnu": "1.6.6", - "@swc/core-linux-x64-musl": "1.6.6", - "@swc/core-win32-arm64-msvc": "1.6.6", - "@swc/core-win32-ia32-msvc": "1.6.6", - "@swc/core-win32-x64-msvc": "1.6.6" + "@swc/core-darwin-arm64": "1.7.11", + "@swc/core-darwin-x64": "1.7.11", + "@swc/core-linux-arm-gnueabihf": "1.7.11", + "@swc/core-linux-arm64-gnu": "1.7.11", + "@swc/core-linux-arm64-musl": "1.7.11", + 
"@swc/core-linux-x64-gnu": "1.7.11", + "@swc/core-linux-x64-musl": "1.7.11", + "@swc/core-win32-arm64-msvc": "1.7.11", + "@swc/core-win32-ia32-msvc": "1.7.11", + "@swc/core-win32-x64-msvc": "1.7.11" }, "peerDependencies": { "@swc/helpers": "*" @@ -2935,76 +2193,74 @@ } } }, - "node_modules/@swc/core-darwin-arm64": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.6.6.tgz", - "integrity": "sha512-5DA8NUGECcbcK1YLKJwNDKqdtTYDVnkfDU1WvQSXq/rU+bjYCLtn5gCe8/yzL7ISXA6rwqPU1RDejhbNt4ARLQ==", + "node_modules/@swc/core-linux-x64-gnu": { + "version": "1.7.11", "cpu": [ - "arm64" + "x64" ], "dev": true, + "license": "Apache-2.0 AND MIT", "optional": true, "os": [ - "darwin" + "linux" ], "engines": { "node": ">=10" } }, - "node_modules/@swc/core-darwin-x64": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.6.6.tgz", - "integrity": "sha512-2nbh/RHpweNRsJiYDFk1KcX7UtaKgzzTNUjwtvK5cp0wWrpbXmPvdlWOx3yzwoiSASDFx78242JHHXCIOlEdsw==", + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.7.11", "cpu": [ "x64" ], "dev": true, + "license": "Apache-2.0 AND MIT", "optional": true, "os": [ - "darwin" + "linux" ], "engines": { "node": ">=10" } }, - "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.6.6.tgz", - "integrity": "sha512-YgytuyUfR7b0z0SRHKV+ylr83HmgnROgeT7xryEkth6JGpAEHooCspQ4RrWTU8+WKJ7aXiZlGXPgybQ4TiS+TA==", + "node_modules/@swc/core/node_modules/@swc/core-darwin-arm64": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.7.11.tgz", + "integrity": "sha512-HRQv4qIeMBPThZ6Y/4yYW52rGsS6yrpusvuxLGyoFo45Y0y12/V2yXkOIA/0HIQyrqoUAxn1k4zQXpPaPNCmnw==", "cpu": [ - "arm" + "arm64" ], "dev": true, "optional": true, "os": [ - "linux" + "darwin" ], "engines": { "node": ">=10" } }, - "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.6.6.tgz", - "integrity": "sha512-yGwx9fddzEE0iURqRVwKBQ4IwRHE6hNhl15WliHpi/PcYhzmYkUIpcbRXjr0dssubXAVPVnx6+jZVDSbutvnfg==", + "node_modules/@swc/core/node_modules/@swc/core-darwin-x64": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.7.11.tgz", + "integrity": "sha512-vtMQj0F3oYwDu5yhO7SKDRg1XekRSi6/TbzHAbBXv+dBhlGGvcZZynT1H90EVFTv+7w7Sh+lOFvRv5Z4ZTcxow==", "cpu": [ - "arm64" + "x64" ], "dev": true, "optional": true, "os": [ - "linux" + "darwin" ], "engines": { "node": ">=10" } }, - "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.6.6.tgz", - "integrity": "sha512-a6fMbqzSAsS5KCxFJyg1mD5kwN3ZFO8qQLyJ75R/htZP/eCt05jrhmOI7h2n+1HjiG332jLnZ9S8lkVE5O8Nqw==", + "node_modules/@swc/core/node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.7.11.tgz", + "integrity": "sha512-mHtzWKxhtyreI4CSxs+3+ENv8t/Qo35WFoYG66qHEgJz/Z2Lh6jv1E+MYgHdYwnpQHgHbdvAco7HsBu/Dt6xXw==", "cpu": [ - "arm64" + "arm" ], "dev": true, "optional": true, @@ -3015,12 +2271,12 @@ "node": ">=10" } }, - "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.6.6", - "resolved": 
"https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.6.6.tgz", - "integrity": "sha512-hRGsUKNzzZle28YF0dYIpN0bt9PceR9LaVBq7x8+l9TAaDLFbgksSxcnU/ubTtsy+WsYSYGn+A83w3xWC0O8CQ==", + "node_modules/@swc/core/node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.7.11.tgz", + "integrity": "sha512-FRwe/x0GfXSQjGP2lIk+NO0pUFS/lI/RorCLBPiK808EVE9JTbh9DKCc/4Bbb4jgScAjNkrFCUVObQYl3YKmpA==", "cpu": [ - "x64" + "arm64" ], "dev": true, "optional": true, @@ -3031,12 +2287,12 @@ "node": ">=10" } }, - "node_modules/@swc/core-linux-x64-musl": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.6.6.tgz", - "integrity": "sha512-NokIUtFxJDVv3LzGeEtYMTV3j2dnGKLac59luTeq36DQLZdJQawQIdTbzzWl2jE7lxxTZme+dhsVOH9LxE3ceg==", + "node_modules/@swc/core/node_modules/@swc/core-linux-arm64-musl": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.7.11.tgz", + "integrity": "sha512-GY/rs0+GUq14Gbnza90KOrQd/9yHd5qQMii5jcSWcUCT5A8QTa8kiicsM2NxZeTJ69xlKmT7sLod5l99lki/2A==", "cpu": [ - "x64" + "arm64" ], "dev": true, "optional": true, @@ -3047,10 +2303,10 @@ "node": ">=10" } }, - "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.6.6.tgz", - "integrity": "sha512-lzYdI4qb4k1dFG26yv+9Jaq/bUMAhgs/2JsrLncGjLof86+uj74wKYCQnbzKAsq2hDtS5DqnHnl+//J+miZfGA==", + "node_modules/@swc/core/node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.7.11.tgz", + "integrity": "sha512-a2Y4xxEsLLYHJN7sMnw9+YQJDi3M1BxEr9hklfopPuGGnYLFNnx5CypH1l9ReijEfWjIAHNi7pq3m023lzW1Hg==", "cpu": [ "arm64" ], @@ -3063,10 +2319,10 @@ "node": ">=10" } }, - "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.6.6.tgz", - "integrity": "sha512-bvl7FMaXIJQ76WZU0ER4+RyfKIMGb6S2MgRkBhJOOp0i7VFx4WLOnrmMzaeoPJaJSkityVKAftfNh7NBzTIydQ==", + "node_modules/@swc/core/node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.7.11.tgz", + "integrity": "sha512-ZbZFMwZO+j8ulhegJ7EhJ/QVZPoQ5qc30ylJQSxizizTJaen71Q7/13lXWc6ksuCKvg6dUKrp/TPgoxOOtSrFA==", "cpu": [ "ia32" ], @@ -3079,10 +2335,10 @@ "node": ">=10" } }, - "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.6.6.tgz", - "integrity": "sha512-WAP0JoCTfgeYKgOeYJoJV4ZS0sQUmU3OwvXa2dYYtMLF7zsNqOiW4niU7QlThBHgUv/qNZm2p6ITEgh3w1cltw==", + "node_modules/@swc/core/node_modules/@swc/core-win32-x64-msvc": { + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.7.11.tgz", + "integrity": "sha512-IUohZedSJyDu/ReEBG/mqX6uG29uA7zZ9z6dIAF+p6eFxjXmh9MuHryyM+H8ebUyoq/Ad3rL+rUCksnuYNnI0w==", "cpu": [ "x64" ], @@ -3097,134 +2353,33 @@ }, "node_modules/@swc/counter": { "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", - "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", - "dev": true + "dev": true, + 
"license": "Apache-2.0" }, "node_modules/@swc/types": { - "version": "0.1.9", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.9.tgz", - "integrity": "sha512-qKnCno++jzcJ4lM4NTfYifm1EFSCeIfKiAHAfkENZAV5Kl9PjJIyd2yeeVv6c/2CckuLyv2NmRC5pv6pm2WQBg==", + "version": "0.1.12", "dev": true, + "license": "Apache-2.0", "dependencies": { "@swc/counter": "^0.1.3" } }, - "node_modules/@szmarczak/http-timer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", - "integrity": "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", - "optional": true, - "peer": true, - "dependencies": { - "defer-to-connect": "^2.0.1" - }, - "engines": { - "node": ">=14.16" - } - }, - "node_modules/@testing-library/dom": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.3.0.tgz", - "integrity": "sha512-pT/TYB2+IyMYkkB6lqpkzD7VFbsR0JBJtflK3cS68sCNWxmOhWwRm1XvVHlseNEorsNcxkYsb4sRDV3aNIpttg==", - "optional": true, - "peer": true, - "dependencies": { - "@babel/code-frame": "^7.10.4", - "@babel/runtime": "^7.12.5", - "@types/aria-query": "^5.0.1", - "aria-query": "5.3.0", - "chalk": "^4.1.0", - "dom-accessibility-api": "^0.5.9", - "lz-string": "^1.5.0", - "pretty-format": "^27.0.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@testing-library/dom/node_modules/ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@testing-library/dom/node_modules/pretty-format": { - "version": "27.5.1", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", - "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", - "optional": true, - "peer": true, - "dependencies": { - "ansi-regex": "^5.0.1", - "ansi-styles": "^5.0.0", - "react-is": "^17.0.1" - }, - "engines": { - "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" - } - }, - "node_modules/@testing-library/dom/node_modules/react-is": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", - "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", - "optional": true, - "peer": true - }, - "node_modules/@testing-library/user-event": { - "version": "14.5.2", - "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.5.2.tgz", - "integrity": "sha512-YAh82Wh4TIrxYLmfGcixwD18oIjyC1pFQC2Y01F2lzV2HTMiYrI0nze0FD0ocB//CKS/7jIUgae+adPqxK5yCQ==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12", - "npm": ">=6" - }, - "peerDependencies": { - "@testing-library/dom": ">=7.21.4" - } - }, - "node_modules/@tootallnate/quickjs-emscripten": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", - "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==", - "optional": true, - "peer": true - }, - "node_modules/@types/aria-query": { - "version": "5.0.4", - "resolved": 
"https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", - "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", - "optional": true, - "peer": true - }, "node_modules/@types/benchmark": { "version": "1.0.33", - "resolved": "https://registry.npmjs.org/@types/benchmark/-/benchmark-1.0.33.tgz", - "integrity": "sha512-rG7Ieasa9UfZJnL72qiFvY9ivhEIYjCGgfcLLb5tJ/EL9+Mcxernj6W3HVCv/cOfJYuwNUwvVVhnrKl8iT8aqA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/bn.js": { "version": "5.1.5", - "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.5.tgz", - "integrity": "sha512-V46N0zwKRF5Q00AZ6hWtN0T8gGmDUaUzLWQvHFo5yThtVwK/VCenFY3wXVbOvNfajEpsTfQM4IN9k/d6gUVX3A==", + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/body-parser": { "version": "1.19.5", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", - "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", "dev": true, + "license": "MIT", "dependencies": { "@types/connect": "*", "@types/node": "*" @@ -3232,61 +2387,47 @@ }, "node_modules/@types/connect": { "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "license": "MIT", "dependencies": { "@types/node": "*" } }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", - "optional": true, - "peer": true - }, "node_modules/@types/core-js": { "version": "2.5.8", - "resolved": "https://registry.npmjs.org/@types/core-js/-/core-js-2.5.8.tgz", - "integrity": "sha512-VgnAj6tIAhJhZdJ8/IpxdatM8G4OD3VWGlp6xIxUGENZlpbob9Ty4VVdC1FIEp0aK6DBscDDjyzy5FB60TuNqg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/cors": { "version": "2.8.17", - "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.17.tgz", - "integrity": "sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/debug": { "version": "4.1.12", - "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", - "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", "dependencies": { "@types/ms": "*" } }, "node_modules/@types/dns-packet": { "version": "5.6.5", - "resolved": "https://registry.npmjs.org/@types/dns-packet/-/dns-packet-5.6.5.tgz", - "integrity": "sha512-qXOC7XLOEe43ehtWJCMnQXvgcIpv6rPmQ1jXT98Ad8A3TB1Ue50jsCbSSSyuazScEuZ/Q026vHbrOTVkmwA+7Q==", + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/estree": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", - "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/@types/fs-extra": { "version": "11.0.4", - "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz", - "integrity": 
"sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==", "dev": true, + "license": "MIT", "dependencies": { "@types/jsonfile": "*", "@types/node": "*" @@ -3294,123 +2435,91 @@ }, "node_modules/@types/glob": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", "dev": true, + "license": "MIT", "dependencies": { "@types/minimatch": "*", "@types/node": "*" } }, - "node_modules/@types/http-cache-semantics": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", - "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", - "optional": true, - "peer": true - }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/js-md5": { "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@types/js-md5/-/js-md5-0.4.3.tgz", - "integrity": "sha512-BIga/WEqTi35ccnGysOuO4RmwVnpajv9oDB/sDQSY2b7/Ac7RyYR30bv7otZwByMvOJV9Vqq6/O1DFAnOzE4Pg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/json-schema": { "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/json5": { "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/jsonfile": { "version": "6.1.4", - "resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz", - "integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/k-bucket": { "version": "5.0.4", - "resolved": "https://registry.npmjs.org/@types/k-bucket/-/k-bucket-5.0.4.tgz", - "integrity": "sha512-tFVMUVXBjPF94/XEB/aShfKnReNpaTthIF0Ut1uDynaEMm6dykyVACaYLd36va8ewZL5jFBclneCppoKdVYKNw==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/lodash": { - "version": "4.17.6", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.6.tgz", - "integrity": "sha512-OpXEVoCKSS3lQqjx9GGGOapBeuW5eUboYHRlHP9urXPX25IKZ6AnP5ZRxtVf63iieUbsHxLn8NQ5Nlftc6yzAA==", - "dev": true + "version": "4.17.7", + "dev": true, + "license": "MIT" }, "node_modules/@types/lodash.zip": { "version": "4.2.9", - "resolved": "https://registry.npmjs.org/@types/lodash.zip/-/lodash.zip-4.2.9.tgz", - "integrity": "sha512-cJvqtEzLgHUPF6H6v7K6Q/yIc1DAYpsUkHD1Q7bUOAcCE0b7drzoUMi/Toj0MjQI3WeM6rI6v295mkenAQ+R7A==", "dev": true, + "license": "MIT", "dependencies": { "@types/lodash": "*" } }, "node_modules/@types/minimatch": { "version": "5.1.2", - "resolved": 
"https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", - "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/minimist": { "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/ms": { "version": "0.7.34", - "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", - "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" - }, - "node_modules/@types/mute-stream": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/@types/mute-stream/-/mute-stream-0.0.4.tgz", - "integrity": "sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==", - "optional": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } + "license": "MIT" }, "node_modules/@types/node": { "version": "18.11.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz", - "integrity": "sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg==" + "license": "MIT" }, "node_modules/@types/node-dir": { "version": "0.0.34", - "resolved": "https://registry.npmjs.org/@types/node-dir/-/node-dir-0.0.34.tgz", - "integrity": "sha512-FwNgAbQyXvMP/kTsi/lH7Cpz+2xny+/ZhpDMophHcZerMxYvM+eqa8an1isNbykSQ9VCZutdbmMx2FLp5ufeMw==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/readable-stream": { "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@types/readable-stream/-/readable-stream-2.3.15.tgz", - "integrity": "sha512-oM5JSKQCcICF1wvGgmecmHldZ48OZamtMxcGGVICOJA8o8cahXC1zEVAif8iwoc5j8etxFaRFnf095+CDsuoFQ==", + "license": "MIT", "dependencies": { "@types/node": "*", "safe-buffer": "~5.1.1" @@ -3418,119 +2527,63 @@ }, "node_modules/@types/readable-stream/node_modules/safe-buffer": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + "license": "MIT" }, "node_modules/@types/rollup-plugin-visualizer": { "version": "4.2.4", - "resolved": "https://registry.npmjs.org/@types/rollup-plugin-visualizer/-/rollup-plugin-visualizer-4.2.4.tgz", - "integrity": "sha512-BW4Q6D1Qy5gno5qHWrnMDC2dOe/TAKXvqCpckOggCCu+XpS+ZZJJ1lq1+K3bvYccoO3Y7f5kglbFAgYGqCgULg==", "dev": true, + "license": "MIT", "dependencies": { "rollup": "^2.42.3" } }, - "node_modules/@types/rollup-plugin-visualizer/node_modules/rollup": { - "version": "2.79.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz", - "integrity": "sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==", - "dev": true, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=10.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, "node_modules/@types/semver": { "version": "7.5.8", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", - "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==", - "dev": true - }, - "node_modules/@types/statuses": { - "version": "2.0.5", - 
"resolved": "https://registry.npmjs.org/@types/statuses/-/statuses-2.0.5.tgz", - "integrity": "sha512-jmIUGWrAiwu3dZpxntxieC+1n/5c3mjrImkmOSQ2NC5uP6cYO4aAZDdSmRcI5C1oiTmqlZGHC+/NmJrKogbP5A==", - "optional": true, - "peer": true + "dev": true, + "license": "MIT" }, "node_modules/@types/tape": { "version": "4.13.2", - "resolved": "https://registry.npmjs.org/@types/tape/-/tape-4.13.2.tgz", - "integrity": "sha512-V1ez/RtYRGN9cNYApw5xf27DpMkTB0033X6a2i3KUmKhSojBfbWN0i3EgZxboUG96WJLHLdOyZ01aiZwVW5aSA==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/triple-beam": { "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz", - "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==" + "license": "MIT" }, "node_modules/@types/uuid": { "version": "9.0.8", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", - "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", - "dev": true - }, - "node_modules/@types/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@types/which/-/which-2.0.2.tgz", - "integrity": "sha512-113D3mDkZDjo+EeUEHCFy0qniNc1ZpecGiAU7WSo7YDoSzolZIQKpYFHrPpjkB2nuyahcKfrmLXeQlh7gqJYdw==", - "optional": true, - "peer": true - }, - "node_modules/@types/wrap-ansi": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz", - "integrity": "sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g==", - "optional": true, - "peer": true + "dev": true, + "license": "MIT" }, "node_modules/@types/ws": { - "version": "8.5.10", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", - "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", - "devOptional": true, + "version": "8.5.12", + "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/@types/yargs": { - "version": "17.0.32", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", - "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "version": "17.0.33", "dev": true, + "license": "MIT", "dependencies": { "@types/yargs-parser": "*" } }, "node_modules/@types/yargs-parser": { "version": "21.0.3", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "dev": true - }, - "node_modules/@types/yauzl": { - "version": "2.10.3", - "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", - "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", - "optional": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } + "dev": true, + "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.33.1.tgz", - "integrity": "sha512-S1iZIxrTvKkU3+m63YUOxYPKaP+yWDQrdhxTglVDVEVBf+aCSw85+BmJnyUaQQsk5TXFG/LpBu9fa+LrAQ91fQ==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/scope-manager": "5.33.1", "@typescript-eslint/type-utils": "5.33.1", @@ -3561,9 +2614,8 @@ }, 
"node_modules/@typescript-eslint/parser": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.33.1.tgz", - "integrity": "sha512-IgLLtW7FOzoDlmaMoXdxG8HOCByTBXrB1V2ZQYSEV1ggMmJfAkMWTwUjjzagS6OkfpySyhKFkBw7A9jYmcHpZA==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@typescript-eslint/scope-manager": "5.33.1", "@typescript-eslint/types": "5.33.1", @@ -3588,9 +2640,8 @@ }, "node_modules/@typescript-eslint/scope-manager": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.33.1.tgz", - "integrity": "sha512-8ibcZSqy4c5m69QpzJn8XQq9NnqAToC8OdH/W6IXPXv83vRyEDPYLdjAlUx8h/rbusq6MkW4YdQzURGOqsn3CA==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/types": "5.33.1", "@typescript-eslint/visitor-keys": "5.33.1" @@ -3605,9 +2656,8 @@ }, "node_modules/@typescript-eslint/type-utils": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.33.1.tgz", - "integrity": "sha512-X3pGsJsD8OiqhNa5fim41YtlnyiWMF/eKsEZGsHID2HcDqeSC5yr/uLOeph8rNF2/utwuI0IQoAK3fpoxcLl2g==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/utils": "5.33.1", "debug": "^4.3.4", @@ -3631,9 +2681,8 @@ }, "node_modules/@typescript-eslint/types": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.33.1.tgz", - "integrity": "sha512-7K6MoQPQh6WVEkMrMW5QOA5FO+BOwzHSNd0j3+BlBwd6vtzfZceJ8xJ7Um2XDi/O3umS8/qDX6jdy2i7CijkwQ==", "dev": true, + "license": "MIT", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -3644,9 +2693,8 @@ }, "node_modules/@typescript-eslint/typescript-estree": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.33.1.tgz", - "integrity": "sha512-JOAzJ4pJ+tHzA2pgsWQi4804XisPHOtbvwUyqsuuq8+y5B5GMZs7lI1xDWs6V2d7gE/Ez5bTGojSK12+IIPtXA==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@typescript-eslint/types": "5.33.1", "@typescript-eslint/visitor-keys": "5.33.1", @@ -3671,9 +2719,8 @@ }, "node_modules/@typescript-eslint/utils": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.33.1.tgz", - "integrity": "sha512-uphZjkMaZ4fE8CR4dU7BquOV6u0doeQAr8n6cQenl/poMaIyJtBu8eys5uk6u5HiDH01Mj5lzbJ5SfeDz7oqMQ==", "dev": true, + "license": "MIT", "dependencies": { "@types/json-schema": "^7.0.9", "@typescript-eslint/scope-manager": "5.33.1", @@ -3695,9 +2742,8 @@ }, "node_modules/@typescript-eslint/visitor-keys": { "version": "5.33.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.33.1.tgz", - "integrity": "sha512-nwIxOK8Z2MPWltLKMLOEZwmfBZReqUdbEoHQXeCpa+sRVARe5twpJGHCB4dk9903Yaf0nMAlGbQfaAH92F60eg==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/types": "5.33.1", "eslint-visitor-keys": "^3.3.0" @@ -3710,575 +2756,135 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@vitest/browser": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/browser/-/browser-2.0.0-beta.12.tgz", - "integrity": "sha512-q8qYJhry+zrJ9I6RPFBlqpCH71PZv0YSTyAgsd90t2OBCafxA793xamuOmJFngHsviG2sY/5HkBW6cHFhxaVtg==", - "optional": true, - "peer": true, - "dependencies": { - "@testing-library/dom": "^10.1.0", - "@testing-library/user-event": "^14.5.2", - "@vitest/utils": "2.0.0-beta.12", - "magic-string": "^0.30.10", - "msw": 
"^2.3.1", - "sirv": "^2.0.4", - "ws": "^8.17.1" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "playwright": "*", - "vitest": "2.0.0-beta.12", - "webdriverio": "*" - }, - "peerDependenciesMeta": { - "playwright": { - "optional": true - }, - "safaridriver": { - "optional": true - }, - "webdriverio": { - "optional": true - } - } + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "dev": true, + "license": "ISC" }, "node_modules/@vitest/coverage-v8": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-2.0.0-beta.12.tgz", - "integrity": "sha512-yH+sU8xceBkoxiAvRT5JROByglgdYV7Q8i1gIQYd7VWce+9cX81u9BJUeXSjYIXgEg7orT0IB9I1zUBH0gEwsQ==", + "version": "2.0.5", "dev": true, + "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.3.0", "@bcoe/v8-coverage": "^0.2.3", - "debug": "^4.3.5", - "istanbul-lib-coverage": "^3.2.2", - "istanbul-lib-report": "^3.0.1", - "istanbul-lib-source-maps": "^5.0.4", - "istanbul-reports": "^3.1.7", - "magic-string": "^0.30.10", - "magicast": "^0.3.4", - "picocolors": "^1.0.1", - "std-env": "^3.7.0", - "strip-literal": "^2.1.0", - "test-exclude": "^7.0.1" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "vitest": "2.0.0-beta.12" - } - }, - "node_modules/@vitest/expect": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.0.0-beta.12.tgz", - "integrity": "sha512-4AoKb3aZRFzFWGVF6iFHuAjsFH0dydQKzEfT8TfCNzx7+iXtVnLJ5nQUC6D4qlvyEmJeGIbbXZcgiSxY4Ry7eA==", - "devOptional": true, - "dependencies": { - "@vitest/spy": "2.0.0-beta.12", - "@vitest/utils": "2.0.0-beta.12", - "chai": "^5.1.1" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/runner": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.0.0-beta.12.tgz", - "integrity": "sha512-nAerpQvAw1/6vO4vRjOy0A+7IwtktSME3thwUoqWZxMKBgmTzIO2/WevbtFsAwYPc3V8NEY/Erv4PjQt9JTlzQ==", - "devOptional": true, - "dependencies": { - "@vitest/utils": "2.0.0-beta.12", - "p-limit": "^5.0.0", - "pathe": "^1.1.2" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/runner/node_modules/p-limit": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", - "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", - "devOptional": true, - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@vitest/runner/node_modules/yocto-queue": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.1.1.tgz", - "integrity": "sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==", - "devOptional": true, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@vitest/snapshot": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.0.0-beta.12.tgz", - "integrity": "sha512-NBqn1rTNQ/e3Dsw8LnniHgeZslgIxg8UvSfje/QV3hJLSoLMLbKLopHmK9T2FQA0hcibAaq/TZVyVrBoX+6aig==", - "devOptional": true, - "dependencies": { - "magic-string": "^0.30.10", - "pathe": "^1.1.2", - "pretty-format": "^29.7.0" - }, - 
"funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/spy": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.0.0-beta.12.tgz", - "integrity": "sha512-o9Di4HtCMY/81YZr13ozhvkEdF2cI/4VmkOO0rC5s4v1kTcM4PpvkkSut/Cwj5LfeENRQI6JINvDaKNgBPSXhA==", - "devOptional": true, - "dependencies": { - "tinyspy": "^3.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vitest/ui": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-2.0.0-beta.12.tgz", - "integrity": "sha512-eQ1LemJRmZqMAQkIUE152/7oJ4Pj6zrCfOE7hVAq1a5zMKNHyMoSOfYjZgVqzS+5fL1i+/XA805ykFp+g5N4RA==", - "devOptional": true, - "dependencies": { - "@vitest/utils": "2.0.0-beta.12", - "fast-glob": "^3.3.2", - "fflate": "^0.8.2", - "flatted": "^3.3.1", - "pathe": "^1.1.2", - "picocolors": "^1.0.1", - "sirv": "^2.0.4", - "vue-virtual-scroller": "2.0.0-beta.8" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "vitest": "2.0.0-beta.12" - } - }, - "node_modules/@vitest/utils": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.0.0-beta.12.tgz", - "integrity": "sha512-qjVhzdcGnZeWMOoEDk/wgfuO4f/jcz7MIOpSAr2apxeJoWOZ+4GrV77/3EZ9qBodU4MbXBeHdR5KHdMPfl3kAQ==", - "devOptional": true, - "dependencies": { - "diff-sequences": "^29.6.3", - "estree-walker": "^3.0.3", - "loupe": "^3.1.1", - "pretty-format": "^29.7.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/@vue/compiler-core": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.31.tgz", - "integrity": "sha512-skOiodXWTV3DxfDhB4rOf3OGalpITLlgCeOwb+Y9GJpfQ8ErigdBUHomBzvG78JoVE8MJoQsb+qhZiHfKeNeEg==", - "devOptional": true, - "peer": true, - "dependencies": { - "@babel/parser": "^7.24.7", - "@vue/shared": "3.4.31", - "entities": "^4.5.0", - "estree-walker": "^2.0.2", - "source-map-js": "^1.2.0" - } - }, - "node_modules/@vue/compiler-core/node_modules/estree-walker": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "devOptional": true, - "peer": true - }, - "node_modules/@vue/compiler-dom": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.31.tgz", - "integrity": "sha512-wK424WMXsG1IGMyDGyLqB+TbmEBFM78hIsOJ9QwUVLGrcSk0ak6zYty7Pj8ftm7nEtdU/DGQxAXp0/lM/2cEpQ==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/compiler-core": "3.4.31", - "@vue/shared": "3.4.31" - } - }, - "node_modules/@vue/compiler-sfc": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.31.tgz", - "integrity": "sha512-einJxqEw8IIJxzmnxmJBuK2usI+lJonl53foq+9etB2HAzlPjAS/wa7r0uUpXw5ByX3/0uswVSrjNb17vJm1kQ==", - "devOptional": true, - "peer": true, - "dependencies": { - "@babel/parser": "^7.24.7", - "@vue/compiler-core": "3.4.31", - "@vue/compiler-dom": "3.4.31", - "@vue/compiler-ssr": "3.4.31", - "@vue/shared": "3.4.31", - "estree-walker": "^2.0.2", - "magic-string": "^0.30.10", - "postcss": "^8.4.38", - "source-map-js": "^1.2.0" - } - }, - "node_modules/@vue/compiler-sfc/node_modules/estree-walker": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", 
- "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "devOptional": true, - "peer": true - }, - "node_modules/@vue/compiler-ssr": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.31.tgz", - "integrity": "sha512-RtefmITAje3fJ8FSg1gwgDhdKhZVntIVbwupdyZDSifZTRMiWxWehAOTCc8/KZDnBOcYQ4/9VWxsTbd3wT0hAA==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/compiler-dom": "3.4.31", - "@vue/shared": "3.4.31" - } - }, - "node_modules/@vue/reactivity": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.31.tgz", - "integrity": "sha512-VGkTani8SOoVkZNds1PfJ/T1SlAIOf8E58PGAhIOUDYPC4GAmFA2u/E14TDAFcf3vVDKunc4QqCe/SHr8xC65Q==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/shared": "3.4.31" - } - }, - "node_modules/@vue/runtime-core": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.31.tgz", - "integrity": "sha512-LDkztxeUPazxG/p8c5JDDKPfkCDBkkiNLVNf7XZIUnJ+66GVGkP+TIh34+8LtPisZ+HMWl2zqhIw0xN5MwU1cw==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/reactivity": "3.4.31", - "@vue/shared": "3.4.31" - } - }, - "node_modules/@vue/runtime-dom": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.31.tgz", - "integrity": "sha512-2Auws3mB7+lHhTFCg8E9ZWopA6Q6L455EcU7bzcQ4x6Dn4cCPuqj6S2oBZgN2a8vJRS/LSYYxwFFq2Hlx3Fsaw==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/reactivity": "3.4.31", - "@vue/runtime-core": "3.4.31", - "@vue/shared": "3.4.31", - "csstype": "^3.1.3" - } - }, - "node_modules/@vue/server-renderer": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.31.tgz", - "integrity": "sha512-D5BLbdvrlR9PE3by9GaUp1gQXlCNadIZytMIb8H2h3FMWJd4oUfkUTEH2wAr3qxoRz25uxbTcbqd3WKlm9EHQA==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/compiler-ssr": "3.4.31", - "@vue/shared": "3.4.31" - }, - "peerDependencies": { - "vue": "3.4.31" - } - }, - "node_modules/@vue/shared": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.31.tgz", - "integrity": "sha512-Yp3wtJk//8cO4NItOPpi3QkLExAr/aLBGZMmTtW9WpdwBCJpRM6zj9WgWktXAl8IDIozwNMByT45JP3tO3ACWA==", - "devOptional": true, - "peer": true - }, - "node_modules/@wdio/config": { - "version": "8.39.0", - "resolved": "https://registry.npmjs.org/@wdio/config/-/config-8.39.0.tgz", - "integrity": "sha512-yNuGPMPibY91s936gnJCHWlStvIyDrwLwGfLC/NCdTin4F7HL4Gp5iJnHWkJFty1/DfFi8jjoIUBNLM8HEez+A==", - "optional": true, - "peer": true, - "dependencies": { - "@wdio/logger": "8.38.0", - "@wdio/types": "8.39.0", - "@wdio/utils": "8.39.0", - "decamelize": "^6.0.0", - "deepmerge-ts": "^5.0.0", - "glob": "^10.2.2", - "import-meta-resolve": "^4.0.0" - }, - "engines": { - "node": "^16.13 || >=18" - } - }, - "node_modules/@wdio/config/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "optional": true, - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@wdio/config/node_modules/decamelize": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-6.0.0.tgz", - 
"integrity": "sha512-Fv96DCsdOgB6mdGl67MT5JaTNKRzrzill5OH5s8bjYJXVlcXyPYGyPsUkWyGV5p1TXI5esYIYMMeDJL0hEIwaA==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@wdio/config/node_modules/foreground-child": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.1.tgz", - "integrity": "sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA==", - "optional": true, - "peer": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@wdio/config/node_modules/glob": { - "version": "10.4.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz", - "integrity": "sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==", - "optional": true, - "peer": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@wdio/config/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "optional": true, - "peer": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@wdio/config/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@wdio/logger": { - "version": "8.38.0", - "resolved": "https://registry.npmjs.org/@wdio/logger/-/logger-8.38.0.tgz", - "integrity": "sha512-kcHL86RmNbcQP+Gq/vQUGlArfU6IIcbbnNp32rRIraitomZow+iEoc519rdQmSVusDozMS5DZthkgDdxK+vz6Q==", - "optional": true, - "peer": true, - "dependencies": { - "chalk": "^5.1.2", - "loglevel": "^1.6.0", - "loglevel-plugin-prefix": "^0.8.4", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": "^16.13 || >=18" - } - }, - "node_modules/@wdio/logger/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" + "debug": "^4.3.5", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.6", + "istanbul-reports": "^3.1.7", + "magic-string": "^0.30.10", + "magicast": "^0.3.4", + "std-env": "^3.7.0", + "test-exclude": "^7.0.1", + "tinyrainbow": "^1.2.0" }, "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - 
"node_modules/@wdio/logger/node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" + "url": "https://opencollective.com/vitest" }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "peerDependencies": { + "vitest": "2.0.5" } }, - "node_modules/@wdio/logger/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "optional": true, - "peer": true, + "node_modules/@vitest/expect": { + "version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" + "@vitest/spy": "2.0.5", + "@vitest/utils": "2.0.5", + "chai": "^5.1.1", + "tinyrainbow": "^1.2.0" }, "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" + "url": "https://opencollective.com/vitest" } }, - "node_modules/@wdio/protocols": { - "version": "8.38.0", - "resolved": "https://registry.npmjs.org/@wdio/protocols/-/protocols-8.38.0.tgz", - "integrity": "sha512-7BPi7aXwUtnXZPeWJRmnCNFjyDvGrXlBmN9D4Pi58nILkyjVRQKEY9/qv/pcdyB0cvmIvw++Kl/1Lg+RxG++UA==", - "optional": true, - "peer": true - }, - "node_modules/@wdio/repl": { - "version": "8.24.12", - "resolved": "https://registry.npmjs.org/@wdio/repl/-/repl-8.24.12.tgz", - "integrity": "sha512-321F3sWafnlw93uRTSjEBVuvWCxTkWNDs7ektQS15drrroL3TMeFOynu4rDrIz0jXD9Vas0HCD2Tq/P0uxFLdw==", - "optional": true, - "peer": true, + "node_modules/@vitest/pretty-format": { + "version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { - "@types/node": "^20.1.0" + "tinyrainbow": "^1.2.0" }, - "engines": { - "node": "^16.13 || >=18" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@wdio/repl/node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", - "optional": true, - "peer": true, + "node_modules/@vitest/runner": { + "version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { - "undici-types": "~5.26.4" + "@vitest/utils": "2.0.5", + "pathe": "^1.1.2" + }, + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@wdio/types": { - "version": "8.39.0", - "resolved": "https://registry.npmjs.org/@wdio/types/-/types-8.39.0.tgz", - "integrity": "sha512-86lcYROTapOJuFd9ouomFDfzDnv3Kn+jE0RmqfvN9frZAeLVJ5IKjX9M6HjplsyTZhjGO1uCaehmzx+HJus33Q==", - "optional": true, - "peer": true, + "node_modules/@vitest/snapshot": { + "version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { - "@types/node": "^20.1.0" + "@vitest/pretty-format": "2.0.5", + "magic-string": "^0.30.10", + "pathe": "^1.1.2" }, - "engines": { - "node": "^16.13 || >=18" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@wdio/types/node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", - "optional": true, - "peer": true, + 
"node_modules/@vitest/spy": { + "version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { - "undici-types": "~5.26.4" + "tinyspy": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/@wdio/utils": { - "version": "8.39.0", - "resolved": "https://registry.npmjs.org/@wdio/utils/-/utils-8.39.0.tgz", - "integrity": "sha512-jY+n6jlGeK+9Tx8T659PKLwMQTGpLW5H78CSEWgZLbjbVSr2LfGR8Lx0CRktNXxAtqEVZPj16Pi74OtAhvhE6Q==", - "optional": true, - "peer": true, + "node_modules/@vitest/ui": { + "version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { - "@puppeteer/browsers": "^1.6.0", - "@wdio/logger": "8.38.0", - "@wdio/types": "8.39.0", - "decamelize": "^6.0.0", - "deepmerge-ts": "^5.1.0", - "edgedriver": "^5.5.0", - "geckodriver": "^4.3.1", - "get-port": "^7.0.0", - "import-meta-resolve": "^4.0.0", - "locate-app": "^2.1.0", - "safaridriver": "^0.1.0", - "split2": "^4.2.0", - "wait-port": "^1.0.4" + "@vitest/utils": "2.0.5", + "fast-glob": "^3.3.2", + "fflate": "^0.8.2", + "flatted": "^3.3.1", + "pathe": "^1.1.2", + "sirv": "^2.0.4", + "tinyrainbow": "^1.2.0" }, - "engines": { - "node": "^16.13 || >=18" + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "vitest": "2.0.5" } }, - "node_modules/@wdio/utils/node_modules/decamelize": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-6.0.0.tgz", - "integrity": "sha512-Fv96DCsdOgB6mdGl67MT5JaTNKRzrzill5OH5s8bjYJXVlcXyPYGyPsUkWyGV5p1TXI5esYIYMMeDJL0hEIwaA==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node_modules/@vitest/utils": { + "version": "2.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "2.0.5", + "estree-walker": "^3.0.3", + "loupe": "^3.1.1", + "tinyrainbow": "^1.2.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://opencollective.com/vitest" } }, "node_modules/@yarnpkg/parsers": { "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@yarnpkg/parsers/-/parsers-3.0.2.tgz", - "integrity": "sha512-/HcYgtUSiJiot/XWGLOlGxPYUG65+/31V8oqk17vZLW1xlCoR4PampyePljOxY2n8/3jz9+tIFzICsyGujJZoA==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "js-yaml": "^3.10.0", "tslib": "^2.4.0" @@ -4289,18 +2895,16 @@ }, "node_modules/@yarnpkg/parsers/node_modules/argparse": { "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "dev": true, + "license": "MIT", "dependencies": { "sprintf-js": "~1.0.2" } }, "node_modules/@yarnpkg/parsers/node_modules/js-yaml": { "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -4309,35 +2913,9 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/@zip.js/zip.js": { - "version": "2.7.45", - "resolved": "https://registry.npmjs.org/@zip.js/zip.js/-/zip.js-2.7.45.tgz", - "integrity": "sha512-Mm2EXF33DJQ/3GWWEWeP1UCqzpQ5+fiMvT3QWspsXY05DyqqxWu7a9awSzU4/spHMHVFrTjani1PR0vprgZpow==", - "optional": true, - "peer": true, - "engines": { - "bun": ">=0.7.0", - "deno": ">=1.0.0", - "node": ">=16.5.0" - } - }, - "node_modules/abort-controller": { - 
"version": "3.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "optional": true, - "peer": true, - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "engines": { - "node": ">=6.5" - } - }, "node_modules/abstract-level": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/abstract-level/-/abstract-level-1.0.4.tgz", - "integrity": "sha512-eUP/6pbXBkMbXFdx4IH2fVgvB7M0JvR7/lIL33zcs0IBcwjdzSSl31TOJsaCzmKSSDF9h8QYSOJux4Nd4YJqFg==", + "license": "MIT", "dependencies": { "buffer": "^6.0.3", "catering": "^2.1.0", @@ -4353,9 +2931,8 @@ }, "node_modules/abstract-leveldown": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz", - "integrity": "sha512-DnhQwcFEaYsvYDnACLZhMmCWd3rkOeEvglpa4q5i/5Jlm3UIsWaxVzuXvDLFCSCWRO3yy2/+V/G7FusFgejnfQ==", "dev": true, + "license": "MIT", "dependencies": { "buffer": "^6.0.3", "catering": "^2.0.0", @@ -4370,18 +2947,16 @@ }, "node_modules/abstract-leveldown/node_modules/level-supports": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.1.0.tgz", - "integrity": "sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/acorn": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.0.tgz", - "integrity": "sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==", + "version": "8.12.1", "dev": true, + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -4391,37 +2966,21 @@ }, "node_modules/acorn-jsx": { "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, + "license": "MIT", "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "node_modules/aes-js": { "version": "4.0.0-beta.5", - "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz", - "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q==", - "dev": true - }, - "node_modules/agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", - "optional": true, - "peer": true, - "dependencies": { - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } + "dev": true, + "license": "MIT" }, "node_modules/aggregate-error": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "dev": true, + "license": "MIT", "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -4432,9 +2991,8 @@ }, "node_modules/ajv": { "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -4448,9 +3006,8 @@ }, 
"node_modules/ansi-escapes": { "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "type-fest": "^0.21.3" }, @@ -4463,9 +3020,8 @@ }, "node_modules/ansi-escapes/node_modules/type-fest": { "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "devOptional": true, + "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -4475,229 +3031,57 @@ }, "node_modules/ansi-regex": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/ansi-styles": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/append-transform": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", - "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", - "dev": true, - "dependencies": { - "default-require-extensions": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/archiver": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", - "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", - "optional": true, - "peer": true, - "dependencies": { - "archiver-utils": "^5.0.2", - "async": "^3.2.4", - "buffer-crc32": "^1.0.0", - "readable-stream": "^4.0.0", - "readdir-glob": "^1.1.2", - "tar-stream": "^3.0.0", - "zip-stream": "^6.0.1" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/archiver-utils": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", - "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", - "optional": true, - "peer": true, - "dependencies": { - "glob": "^10.0.0", - "graceful-fs": "^4.2.0", - "is-stream": "^2.0.1", - "lazystream": "^1.0.0", - "lodash": "^4.17.15", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/archiver-utils/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "optional": true, - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/archiver-utils/node_modules/foreground-child": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.1.tgz", - "integrity": 
"sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA==", - "optional": true, - "peer": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/archiver-utils/node_modules/glob": { - "version": "10.4.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz", - "integrity": "sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==", - "optional": true, - "peer": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/archiver-utils/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/archiver-utils/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "optional": true, - "peer": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/archiver-utils/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "optional": true, - "peer": true, + "license": "MIT", "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" + "color-convert": "^2.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/archiver-utils/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14" + "node": ">=8" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/archiver/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "optional": true, - "peer": true, + "node_modules/append-transform": { + "version": "2.0.0", + "dev": true, + "license": "MIT", "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" + "default-require-extensions": "^3.0.0" }, 
"engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": ">=8" } }, "node_modules/archy": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", - "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/argparse": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true + "dev": true, + "license": "Python-2.0" }, "node_modules/aria-query": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", - "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", - "devOptional": true, + "dev": true, + "license": "Apache-2.0", "dependencies": { "dequal": "^2.0.3" } }, "node_modules/array-buffer-byte-length": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", - "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.5", "is-array-buffer": "^3.0.4" @@ -4711,9 +3095,8 @@ }, "node_modules/array-includes": { "version": "3.1.8", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", - "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -4729,20 +3112,24 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/array-timsort": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/array-timsort/-/array-timsort-1.0.3.tgz", + "integrity": "sha512-/+3GRL7dDAGEfM6TseQk/U+mi18TU2Ms9I3UlLdUMhz2hbvGNTKdj9xniwXfUqgYhHxRx0+8UnKkvlNwVU+cWQ==", + "dev": true + }, "node_modules/array-union": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/array.prototype.every": { "version": "1.1.6", - "resolved": "https://registry.npmjs.org/array.prototype.every/-/array.prototype.every-1.1.6.tgz", - "integrity": "sha512-gNEqZD97w6bfQRNmHkFv7rNnGM+VWyHZT+h/rf9C+22owcXuENr66Lfo0phItpU5KoXW6Owb34q2+8MnSIZ57w==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -4759,9 +3146,8 @@ }, "node_modules/array.prototype.flat": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", - "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -4777,9 +3163,8 @@ }, "node_modules/array.prototype.flatmap": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", - "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", "dev": true, + "license": "MIT", "dependencies": 
{ "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -4795,9 +3180,8 @@ }, "node_modules/arraybuffer.prototype.slice": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", - "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dev": true, + "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.1", "call-bind": "^1.0.5", @@ -4817,9 +3201,8 @@ }, "node_modules/asn1.js": { "version": "4.10.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", - "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", "dev": true, + "license": "MIT", "dependencies": { "bn.js": "^4.0.0", "inherits": "^2.0.1", @@ -4828,15 +3211,13 @@ }, "node_modules/asn1.js/node_modules/bn.js": { "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/assert": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/assert/-/assert-2.1.0.tgz", - "integrity": "sha512-eLHpSK/Y4nhMJ07gDaAzoX/XAKS8PSaojml3M0DM4JpV1LAi5JOJ/p6H/XWrl8L+DzVEvVCW1z3vWAaB9oTsQw==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "is-nan": "^1.3.2", @@ -4847,51 +3228,33 @@ }, "node_modules/assertion-error": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", - "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=12" } }, - "node_modules/ast-types": { - "version": "0.13.4", - "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", - "integrity": "sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w==", - "optional": true, - "peer": true, - "dependencies": { - "tslib": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/ast-types-flow": { "version": "0.0.8", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.8.tgz", - "integrity": "sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/astral-regex": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/async": { "version": "3.2.5", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", - "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + "license": "MIT" }, "node_modules/available-typed-arrays": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", - "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "license": "MIT", "dependencies": { "possible-typed-array-names": "^1.0.0" }, @@ -4903,86 +3266,28 @@ } }, "node_modules/axe-core": { - "version": "4.9.1", - "resolved": 
"https://registry.npmjs.org/axe-core/-/axe-core-4.9.1.tgz", - "integrity": "sha512-QbUdXJVTpvUTHU7871ppZkdOLBeGUKBQWHkHrvN2V9IQWGMt61zf3B45BtzjxEJzYuj0JBjBZP/hmYS/R9pmAw==", + "version": "4.10.0", "dev": true, + "license": "MPL-2.0", "engines": { "node": ">=4" } }, "node_modules/axobject-query": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.1.1.tgz", - "integrity": "sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "deep-equal": "^2.0.5" } }, - "node_modules/b4a": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==", - "optional": true, - "peer": true - }, "node_modules/balanced-match": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "devOptional": true - }, - "node_modules/bare-events": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.4.2.tgz", - "integrity": "sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==", - "optional": true, - "peer": true - }, - "node_modules/bare-fs": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-2.3.1.tgz", - "integrity": "sha512-W/Hfxc/6VehXlsgFtbB5B4xFcsCl+pAh30cYhoFyXErf6oGrwjh8SwiPAdHgpmWonKuYpZgGywN0SXt7dgsADA==", - "optional": true, - "peer": true, - "dependencies": { - "bare-events": "^2.0.0", - "bare-path": "^2.0.0", - "bare-stream": "^2.0.0" - } - }, - "node_modules/bare-os": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-2.4.0.tgz", - "integrity": "sha512-v8DTT08AS/G0F9xrhyLtepoo9EJBJ85FRSMbu1pQUlAf6A8T0tEEQGMVObWeqpjhSPXsE0VGlluFBJu2fdoTNg==", - "optional": true, - "peer": true - }, - "node_modules/bare-path": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-2.1.3.tgz", - "integrity": "sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA==", - "optional": true, - "peer": true, - "dependencies": { - "bare-os": "^2.1.0" - } - }, - "node_modules/bare-stream": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.1.3.tgz", - "integrity": "sha512-tiDAH9H/kP+tvNO5sczyn9ZAA7utrSMobyDchsnyyXBuUe2FSQWbxhtuHB8jwpHYYevVo2UJpcmvvjrbHboUUQ==", - "optional": true, - "peer": true, - "dependencies": { - "streamx": "^2.18.0" - } + "dev": true, + "license": "MIT" }, "node_modules/base64-js": { "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "funding": [ { "type": "github", @@ -4996,23 +3301,13 @@ "type": "consulting", "url": "https://feross.org/support" } - ] - }, - "node_modules/basic-ftp": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz", - "integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=10.0.0" - } + ], + "license": "MIT" }, "node_modules/benchmark": { "version": "2.1.4", - 
"resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz", - "integrity": "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ==", "dev": true, + "license": "MIT", "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" @@ -5020,26 +3315,22 @@ }, "node_modules/bigint-crypto-utils": { "version": "3.3.0", - "resolved": "https://registry.npmjs.org/bigint-crypto-utils/-/bigint-crypto-utils-3.3.0.tgz", - "integrity": "sha512-jOTSb+drvEDxEq6OuUybOAv/xxoh3cuYRUIPyu8sSHQNKM303UQ2R1DAo45o1AkcIXw6fzbaFI1+xGGdaXs2lg==", + "license": "MIT", "engines": { "node": ">=14.0.0" } }, "node_modules/bintrees": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.2.tgz", - "integrity": "sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==" + "license": "MIT" }, "node_modules/bn.js": { "version": "5.2.1", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", - "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" + "license": "MIT" }, "node_modules/body-parser": { "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "license": "MIT", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", @@ -5061,22 +3352,19 @@ }, "node_modules/body-parser/node_modules/debug": { "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", "dependencies": { "ms": "2.0.0" } }, "node_modules/body-parser/node_modules/ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "license": "MIT" }, "node_modules/brace-expansion": { "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -5084,9 +3372,8 @@ }, "node_modules/braces": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "fill-range": "^7.1.1" }, @@ -5096,14 +3383,12 @@ }, "node_modules/brorand": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", - "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/browser-level": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/browser-level/-/browser-level-1.0.1.tgz", - "integrity": "sha512-XECYKJ+Dbzw0lbydyQuJzwNXtOpbMSq737qxJN11sIRTErOMShvDpbzTlgju7orJKvx4epULolZAuJGLzCmWRQ==", + "license": "MIT", "dependencies": { "abstract-level": "^1.0.2", "catering": "^2.1.1", @@ -5113,18 +3398,16 @@ }, "node_modules/browser-resolve": { "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/browser-resolve/-/browser-resolve-2.0.0.tgz", - "integrity": "sha512-7sWsQlYL2rGLy2IWm8WL8DCTJvYLc/qlOnsakDac87SOoCd16WLsaAMdCiAqsTNHIe+SXfaqyxyo6THoWqs8WQ==", "dev": true, + "license": "MIT", "dependencies": { "resolve": "^1.17.0" } }, "node_modules/browserify-aes": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", - "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", "dev": true, + "license": "MIT", "dependencies": { "buffer-xor": "^1.0.3", "cipher-base": "^1.0.0", @@ -5136,9 +3419,8 @@ }, "node_modules/browserify-cipher": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", - "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", "dev": true, + "license": "MIT", "dependencies": { "browserify-aes": "^1.0.4", "browserify-des": "^1.0.0", @@ -5147,9 +3429,8 @@ }, "node_modules/browserify-des": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", - "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", "dev": true, + "license": "MIT", "dependencies": { "cipher-base": "^1.0.1", "des.js": "^1.0.0", @@ -5159,9 +3440,8 @@ }, "node_modules/browserify-rsa": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", - "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", "dev": true, + "license": "MIT", "dependencies": { "bn.js": "^5.0.0", "randombytes": "^2.0.1" @@ -5169,9 +3449,8 @@ }, "node_modules/browserify-sign": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.3.tgz", - "integrity": "sha512-JWCZW6SKhfhjJxO8Tyiiy+XYB7cqd2S5/+WeYHsKdNKFlCBhKbblba1A/HN/90YwtxKc8tCErjffZl++UNmGiw==", "dev": true, + "license": "ISC", "dependencies": { "bn.js": "^5.2.1", "browserify-rsa": "^4.1.0", @@ -5188,17 +3467,34 @@ "node": ">= 0.12" } }, + "node_modules/browserify-sign/node_modules/elliptic": { + "version": "6.5.7", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/browserify-sign/node_modules/elliptic/node_modules/bn.js": { + "version": "4.12.0", + "dev": true, + "license": "MIT" + }, "node_modules/browserify-sign/node_modules/isarray": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/browserify-sign/node_modules/readable-stream": { "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dev": true, + "license": "MIT", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -5211,38 +3507,32 @@ }, "node_modules/browserify-sign/node_modules/readable-stream/node_modules/safe-buffer": { "version": "5.1.2", - "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/browserify-sign/node_modules/string_decoder": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dev": true, + "license": "MIT", "dependencies": { "safe-buffer": "~5.1.0" } }, "node_modules/browserify-sign/node_modules/string_decoder/node_modules/safe-buffer": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/browserify-zlib": { "version": "0.2.0", - "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", - "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", "dev": true, + "license": "MIT", "dependencies": { "pako": "~1.0.5" } }, "node_modules/browserslist": { - "version": "4.23.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.1.tgz", - "integrity": "sha512-TUfofFo/KsK/bWZ9TWQ5O26tsWW4Uhmt8IYklbnUa70udB6P2wA7w7o4PY4muaEPBQaAX+CEnmmIA41NVHtPVw==", + "version": "4.23.3", "dev": true, "funding": [ { @@ -5258,11 +3548,12 @@ "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001629", - "electron-to-chromium": "^1.4.796", - "node-releases": "^2.0.14", - "update-browserslist-db": "^1.0.16" + "caniuse-lite": "^1.0.30001646", + "electron-to-chromium": "^1.5.4", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -5273,8 +3564,6 @@ }, "node_modules/buffer": { "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", "funding": [ { "type": "github", @@ -5289,55 +3578,41 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, - "node_modules/buffer-crc32": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", - "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", - "optional": true, - "peer": true, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/buffer-xor": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", - "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/builtin-modules": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", - "integrity": "sha512-wxXCdllwGhI2kCC0MnvTGYTMvnVZTvqgypkiTI8Pa5tcz2i6VqsqwYGgqwXji+4RgCzms6EajE4IxiUH6HH8nQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/builtin-status-codes": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", - 
"integrity": "sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/bytes": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, "node_modules/c8": { "version": "7.12.0", - "resolved": "https://registry.npmjs.org/c8/-/c8-7.12.0.tgz", - "integrity": "sha512-CtgQrHOkyxr5koX1wEUmN/5cfDa2ckbHRA4Gy5LAL0zaCFtVWJS5++n+w4/sr2GWGerBxgTjpKeDclk/Qk6W/A==", "dev": true, + "license": "ISC", "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@istanbuljs/schema": "^0.1.3", @@ -5361,9 +3636,8 @@ }, "node_modules/c8/node_modules/test-exclude": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", - "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, + "license": "ISC", "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", @@ -5375,47 +3649,16 @@ }, "node_modules/cac": { "version": "6.7.14", - "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", - "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/cacheable-lookup": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", - "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14.16" - } - }, - "node_modules/cacheable-request": { - "version": "10.2.14", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.14.tgz", - "integrity": "sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==", - "optional": true, - "peer": true, - "dependencies": { - "@types/http-cache-semantics": "^4.0.2", - "get-stream": "^6.0.1", - "http-cache-semantics": "^4.1.1", - "keyv": "^4.5.3", - "mimic-response": "^4.0.0", - "normalize-url": "^8.0.0", - "responselike": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - } - }, "node_modules/caching-transform": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", - "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", "dev": true, + "license": "MIT", "dependencies": { "hasha": "^5.0.0", "make-dir": "^3.0.0", @@ -5428,9 +3671,8 @@ }, "node_modules/caching-transform/node_modules/make-dir": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^6.0.0" }, @@ -5443,17 +3685,15 @@ }, "node_modules/caching-transform/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/call-bind": { "version": 
"1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", @@ -5470,26 +3710,22 @@ }, "node_modules/callsites": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/camelcase": { "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/caniuse-lite": { - "version": "1.0.30001639", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001639.tgz", - "integrity": "sha512-eFHflNTBIlFwP2AIKaYuBQN/apnUoKNhBdza8ZnW/h2di4LCZ4xFqYlxUxo+LQ76KFI1PGcC1QDxMbxTZpSCAg==", + "version": "1.0.30001651", "dev": true, "funding": [ { @@ -5504,21 +3740,20 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" }, "node_modules/catering": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/catering/-/catering-2.1.1.tgz", - "integrity": "sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w==", + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/chai": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.1.tgz", - "integrity": "sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", @@ -5532,8 +3767,7 @@ }, "node_modules/chalk": { "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -5545,40 +3779,45 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/check-error": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", - "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", - "devOptional": true, + "node_modules/chalk-template": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/chalk-template/-/chalk-template-1.1.0.tgz", + "integrity": "sha512-T2VJbcDuZQ0Tb2EWwSotMPJjgpy1/tGee1BTpUNsGZ/qgNjV2t7Mvu+d4600U564nbLesN1x2dPL+xii174Ekg==", + "dev": true, + "dependencies": { + "chalk": "^5.2.0" + }, "engines": { - "node": ">= 16" + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/chalk/chalk-template?sponsor=1" } }, - "node_modules/chromium-bidi": { - "version": "0.4.16", - "resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.4.16.tgz", - "integrity": "sha512-7ZbXdWERxRxSwo3txsBjjmc/NLxqb1Bk30mRb0BMS4YIaiV6zvKZqL/UAH+DdqcDYayDWk2n/y8klkBDODrPvA==", - "optional": true, - "peer": true, - "dependencies": { - "mitt": "3.0.0" + "node_modules/chalk-template/node_modules/chalk": { + "version": 
"5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" }, - "peerDependencies": { - "devtools-protocol": "*" + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/chromium-bidi/node_modules/mitt": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.0.tgz", - "integrity": "sha512-7dX2/10ITVyqh4aOSVI9gdape+t9l2/8QxHrFmUXu4EEUpdlxl6RudZUPZoc+zuY2hk1j7XxVroIVIan/pD/SQ==", - "optional": true, - "peer": true + "node_modules/check-error": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } }, "node_modules/cipher-base": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", "dev": true, + "license": "MIT", "dependencies": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" @@ -5586,9 +3825,8 @@ }, "node_modules/classic-level": { "version": "1.4.1", - "resolved": "https://registry.npmjs.org/classic-level/-/classic-level-1.4.1.tgz", - "integrity": "sha512-qGx/KJl3bvtOHrGau2WklEZuXhS3zme+jf+fsu6Ej7W7IP/C49v7KNlWIsT1jZu0YnfzSIYDGcEWpCa1wKGWXQ==", "hasInstallScript": true, + "license": "MIT", "dependencies": { "abstract-level": "^1.0.2", "catering": "^2.1.0", @@ -5602,69 +3840,79 @@ }, "node_modules/clean-stack": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "node_modules/clear-module": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/clear-module/-/clear-module-4.1.2.tgz", + "integrity": "sha512-LWAxzHqdHsAZlPlEyJ2Poz6AIs384mPeqLVCru2p0BrP9G/kVGuhNyZYClLO6cXlnuJjzC8xtsJIuMjKqLXoAw==", "dev": true, "dependencies": { - "restore-cursor": "^3.1.0" + "parent-module": "^2.0.0", + "resolve-from": "^5.0.0" }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "optional": true, - "peer": true, + "node_modules/clear-module/node_modules/parent-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-2.0.0.tgz", + "integrity": "sha512-uo0Z9JJeWzv8BG+tRcapBKNJ0dro9cLyczGzulS6EfeyAdeC9sbojtW6XwvYxJkEne9En+J2XEl4zyglVeIwFg==", + "dev": true, + "dependencies": { + "callsites": "^3.1.0" + }, "engines": { - "node": ">=6" + "node": ">=8" + } + }, + "node_modules/clear-module/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", 
+ "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^3.1.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": ">=8" } }, "node_modules/cli-truncate": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", - "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", "dev": true, + "license": "MIT", "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^5.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-width": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", - "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 12" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/cliui": { "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", @@ -5673,24 +3921,21 @@ }, "node_modules/cliui/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/cliui/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/cliui/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -5702,8 +3947,7 @@ }, "node_modules/color": { "version": "3.2.1", - "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", - "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", + "license": "MIT", "dependencies": { "color-convert": "^1.9.3", "color-string": "^1.6.0" @@ -5711,8 +3955,7 @@ }, "node_modules/color-convert": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -5722,13 +3965,11 @@ }, "node_modules/color-name": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "license": "MIT" }, "node_modules/color-string": { "version": "1.9.1", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", - "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "license": "MIT", "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" @@ -5736,27 +3977,22 @@ }, "node_modules/color/node_modules/color-convert": { "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "license": "MIT", "dependencies": { "color-name": "1.1.3" } }, "node_modules/color/node_modules/color-name": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + "license": "MIT" }, "node_modules/colorette": { "version": "2.0.20", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", - "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/colors": { "version": "0.6.2", - "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", - "integrity": "sha512-OsSVtHK8Ir8r3+Fxw/b4jS1ZLPXkV6ZxDRJQzeD7qo0SqMXWrHDM71DgYzPMHY8SFJ0Ao+nNU2p1MmwdzKqPrw==", "dev": true, "engines": { "node": ">=0.1.90" @@ -5764,8 +4000,7 @@ }, "node_modules/colorspace": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", - "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", + "license": "MIT", "dependencies": { "color": "^3.1.3", "text-hex": "1.0.x" @@ -5773,82 +4008,46 @@ }, "node_modules/command-exists": { "version": "1.2.9", - "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.9.tgz", - "integrity": "sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/commander": { "version": "5.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", - "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 6" } }, - "node_modules/commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", - "dev": true - }, - "node_modules/compress-commons": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", - "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", - "optional": true, - "peer": true, + "node_modules/comment-json": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/comment-json/-/comment-json-4.2.5.tgz", + "integrity": "sha512-bKw/r35jR3HGt5PEPm1ljsQQGyCrR8sFGNiN5L+ykDHdpO8Smxkrkla9Yi6NkQyUrb8V54PGhfMs6NrIwtxtdw==", + "dev": true, "dependencies": { - "crc-32": "^1.2.0", - "crc32-stream": "^6.0.0", - "is-stream": 
"^2.0.1", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" + "array-timsort": "^1.0.3", + "core-util-is": "^1.0.3", + "esprima": "^4.0.1", + "has-own-prop": "^2.0.0", + "repeat-string": "^1.6.1" }, "engines": { - "node": ">= 14" - } - }, - "node_modules/compress-commons/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">= 6" } }, - "node_modules/compress-commons/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "optional": true, - "peer": true, - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } + "node_modules/commondir": { + "version": "1.0.1", + "dev": true, + "license": "MIT" }, "node_modules/concat-map": { "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/connect": { "version": "3.7.0", - "resolved": "https://registry.npmjs.org/connect/-/connect-3.7.0.tgz", - "integrity": "sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ==", + "license": "MIT", "dependencies": { "debug": "2.6.9", "finalhandler": "1.1.2", @@ -5861,63 +4060,44 @@ }, "node_modules/connect/node_modules/debug": { "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", "dependencies": { "ms": "2.0.0" } }, "node_modules/connect/node_modules/ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "license": "MIT" }, "node_modules/console-browserify": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", - "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==", "dev": true }, "node_modules/constants-browserify": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", - "integrity": "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/content-type": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/convert-source-map": { "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true - }, - "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } + "dev": true, + "license": "MIT" }, "node_modules/core-util-is": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/cors": { "version": "2.8.5", - "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", - "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", "dependencies": { "object-assign": "^4", "vary": "^1" @@ -5926,55 +4106,10 @@ "node": ">= 0.10" } }, - "node_modules/crc-32": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", - "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", - "optional": true, - "peer": true, - "bin": { - "crc32": "bin/crc32.njs" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/crc32-stream": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", - "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", - "optional": true, - "peer": true, - "dependencies": { - "crc-32": "^1.2.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/crc32-stream/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "optional": true, - "peer": true, - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, "node_modules/create-ecdh": { "version": "4.0.4", - "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", - "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", "dev": true, + "license": "MIT", "dependencies": { "bn.js": "^4.1.0", "elliptic": "^6.5.3" @@ -5982,15 +4117,13 @@ }, "node_modules/create-ecdh/node_modules/bn.js": { "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/create-hash": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", - "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", "dev": true, + "license": "MIT", "dependencies": { "cipher-base": "^1.0.1", "inherits": "^2.0.1", @@ -6001,9 +4134,8 @@ }, 
"node_modules/create-hmac": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", - "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", "dev": true, + "license": "MIT", "dependencies": { "cipher-base": "^1.0.3", "create-hash": "^1.1.0", @@ -6015,46 +4147,13 @@ }, "node_modules/create-require": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", - "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "dev": true - }, - "node_modules/cross-fetch": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.0.0.tgz", - "integrity": "sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==", - "optional": true, - "peer": true, - "dependencies": { - "node-fetch": "^2.6.12" - } - }, - "node_modules/cross-fetch/node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "optional": true, - "peer": true, - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } + "dev": true, + "license": "MIT" }, "node_modules/cross-spawn": { "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -6066,9 +4165,8 @@ }, "node_modules/crypto-browserify": { "version": "3.12.0", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", - "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", "dev": true, + "license": "MIT", "dependencies": { "browserify-cipher": "^1.0.0", "browserify-sign": "^4.0.0", @@ -6083,51 +4181,274 @@ "randomfill": "^1.0.3" }, "engines": { - "node": "*" + "node": "*" + } + }, + "node_modules/cspell": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell/-/cspell-8.13.3.tgz", + "integrity": "sha512-2wv4Eby7g8wDB553fI8IoZjyitoKrD2kmtdeoYUN2EjVs3RMpIOver3fL+0VaFAaN0uLfAoeAAIB5xJEakvZYQ==", + "dev": true, + "dependencies": { + "@cspell/cspell-json-reporter": "8.13.3", + "@cspell/cspell-pipe": "8.13.3", + "@cspell/cspell-types": "8.13.3", + "@cspell/dynamic-import": "8.13.3", + "@cspell/url": "8.13.3", + "chalk": "^5.3.0", + "chalk-template": "^1.1.0", + "commander": "^12.1.0", + "cspell-dictionary": "8.13.3", + "cspell-gitignore": "8.13.3", + "cspell-glob": "8.13.3", + "cspell-io": "8.13.3", + "cspell-lib": "8.13.3", + "fast-glob": "^3.3.2", + "fast-json-stable-stringify": "^2.1.0", + "file-entry-cache": "^9.0.0", + "get-stdin": "^9.0.0", + "semver": "^7.6.3", + "strip-ansi": "^7.1.0" + }, + "bin": { + "cspell": "bin.mjs", + "cspell-esm": "bin.mjs" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/streetsidesoftware/cspell?sponsor=1" + } + }, + "node_modules/cspell-config-lib": { + "version": "8.13.3", + "resolved": 
"https://registry.npmjs.org/cspell-config-lib/-/cspell-config-lib-8.13.3.tgz", + "integrity": "sha512-dzVdar8Kenwxho0PnUxOxwjUvyFYn6Q9mQAMHcQNXQrvo32bdpoF+oNtWC/5FfrQgUgyl19CVQ607bRigYWoOQ==", + "dev": true, + "dependencies": { + "@cspell/cspell-types": "8.13.3", + "comment-json": "^4.2.5", + "yaml": "^2.5.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-dictionary": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-dictionary/-/cspell-dictionary-8.13.3.tgz", + "integrity": "sha512-DQ3Tee7LIoy+9Mu52ht32O/MNBZ6i4iUeSTY2sMDDwogno3361BLRyfEjyiYNo3Fqf0Pcnt5MqY2DqIhrF/H/Q==", + "dev": true, + "dependencies": { + "@cspell/cspell-pipe": "8.13.3", + "@cspell/cspell-types": "8.13.3", + "cspell-trie-lib": "8.13.3", + "fast-equals": "^5.0.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-gitignore": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-gitignore/-/cspell-gitignore-8.13.3.tgz", + "integrity": "sha512-0OZXuP33CXV4P95ySHGNqhq3VR5RaLwpyo0nGvLHOjPm3mCsQSjURLBKHvyQ3r2M7LWsGV1Xc81FfTx30FBZLg==", + "dev": true, + "dependencies": { + "@cspell/url": "8.13.3", + "cspell-glob": "8.13.3", + "cspell-io": "8.13.3", + "find-up-simple": "^1.0.0" + }, + "bin": { + "cspell-gitignore": "bin.mjs" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-glob": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-glob/-/cspell-glob-8.13.3.tgz", + "integrity": "sha512-+jGIMYyKDLmoOJIxNPXRdI7utcvw+9FMSmj1ApIdEff5dCkehi0gtzK4H7orXGYEvRdKQvfaXiyduVi79rXsZQ==", + "dev": true, + "dependencies": { + "@cspell/url": "8.13.3", + "micromatch": "^4.0.7" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-grammar": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-grammar/-/cspell-grammar-8.13.3.tgz", + "integrity": "sha512-xPSgKk9HY5EsI8lkMPC9hiZCeAUs+RY/IVliUBW1xEicAJhP4RZIGRdIwtDNNJGwKfNXazjqYhcS4LS0q7xPAQ==", + "dev": true, + "dependencies": { + "@cspell/cspell-pipe": "8.13.3", + "@cspell/cspell-types": "8.13.3" + }, + "bin": { + "cspell-grammar": "bin.mjs" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-io": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-io/-/cspell-io-8.13.3.tgz", + "integrity": "sha512-AeMIkz7+4VuJaPKO/v1pUpyUSOOTyLOAfzeTRRAXEt+KRKOUe36MyUmBMza6gzNcX2yD04VgJukRL408TY9ntw==", + "dev": true, + "dependencies": { + "@cspell/cspell-service-bus": "8.13.3", + "@cspell/url": "8.13.3" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-lib": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-lib/-/cspell-lib-8.13.3.tgz", + "integrity": "sha512-aEqxIILeqDtNoCa47/oSl5c926b50ue3PobYs4usn0Ymf0434RopCP+DCGsF7BPtog4j4XWnEmvkcJs57DYWDg==", + "dev": true, + "dependencies": { + "@cspell/cspell-bundled-dicts": "8.13.3", + "@cspell/cspell-pipe": "8.13.3", + "@cspell/cspell-resolver": "8.13.3", + "@cspell/cspell-types": "8.13.3", + "@cspell/dynamic-import": "8.13.3", + "@cspell/strong-weak-map": "8.13.3", + "@cspell/url": "8.13.3", + "clear-module": "^4.1.2", + "comment-json": "^4.2.5", + "cspell-config-lib": "8.13.3", + "cspell-dictionary": "8.13.3", + "cspell-glob": "8.13.3", + "cspell-grammar": "8.13.3", + "cspell-io": "8.13.3", + "cspell-trie-lib": "8.13.3", + "env-paths": "^3.0.0", + "fast-equals": "^5.0.1", + "gensequence": "^7.0.0", + "import-fresh": "^3.3.0", + "resolve-from": "^5.0.0", + "vscode-languageserver-textdocument": "^1.0.12", + "vscode-uri": "^3.0.8", 
+ "xdg-basedir": "^5.1.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell-lib/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cspell-trie-lib": { + "version": "8.13.3", + "resolved": "https://registry.npmjs.org/cspell-trie-lib/-/cspell-trie-lib-8.13.3.tgz", + "integrity": "sha512-Z0iLGi9HI+Vf+WhVVeru6dYgQdtaYCKWRlc1SayLfAZhw9BcjrXL8KTXDfAfv/lUgnRu6xwP1isLlDNZECsKVQ==", + "dev": true, + "dependencies": { + "@cspell/cspell-pipe": "8.13.3", + "@cspell/cspell-types": "8.13.3", + "gensequence": "^7.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/cspell/node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/cspell/node_modules/commander": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell/node_modules/file-entry-cache": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-9.0.0.tgz", + "integrity": "sha512-6MgEugi8p2tiUhqO7GnPsmbCCzj0YRCwwaTbpGRyKZesjRSzkqkAE9fPp7V2yMs5hwfgbQLgdvSSkGNg1s5Uvw==", + "dev": true, + "dependencies": { + "flat-cache": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell/node_modules/flat-cache": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-5.0.0.tgz", + "integrity": "sha512-JrqFmyUl2PnPi1OvLyTVHnQvwQ0S+e6lGSwu8OkAZlSaNIZciTY2H/cOOROxsBA1m/LZNHDsqAgDZt6akWcjsQ==", + "dev": true, + "dependencies": { + "flatted": "^3.3.1", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cspell/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/css-shorthand-properties": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/css-shorthand-properties/-/css-shorthand-properties-1.1.1.tgz", - "integrity": "sha512-Md+Juc7M3uOdbAFwOYlTrccIZ7oCFuzrhKYQjdeUEW/sE1hv17Jp/Bws+ReOPpGVBTYCBoYo+G17V5Qo8QQ75A==", - "optional": true, - "peer": true - }, - "node_modules/css-value": { - "version": 
"0.0.1", - "resolved": "https://registry.npmjs.org/css-value/-/css-value-0.0.1.tgz", - "integrity": "sha512-FUV3xaJ63buRLgHrLQVlVgQnQdR4yqdLGaDu7g8CQcWjInDfM9plBTPI9FRfpahju1UBSaMckeb2/46ApS/V1Q==", - "optional": true, - "peer": true - }, - "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "devOptional": true, - "peer": true - }, "node_modules/damerau-levenshtein": { "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", - "dev": true - }, - "node_modules/data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 12" - } + "dev": true, + "license": "BSD-2-Clause" }, "node_modules/data-view-buffer": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", - "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -6142,9 +4463,8 @@ }, "node_modules/data-view-byte-length": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", - "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "es-errors": "^1.3.0", @@ -6159,9 +4479,8 @@ }, "node_modules/data-view-byte-offset": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", - "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -6175,9 +4494,8 @@ } }, "node_modules/debug": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", - "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "version": "4.3.6", + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -6192,56 +4510,24 @@ }, "node_modules/decamelize": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "optional": true, - "peer": true, - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decompress-response/node_modules/mimic-response": { 
- "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "optional": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/deep-eql": { "version": "5.0.2", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", - "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/deep-equal": { "version": "2.2.3", - "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.3.tgz", - "integrity": "sha512-ZIwpnevOurS8bpT4192sqAowWM76JDKSHYzMLty3BZGSswgq6pBaH3DhCSW5xVAZICZyKdOBPjwww5wfgT/6PA==", "dev": true, + "license": "MIT", "dependencies": { "array-buffer-byte-length": "^1.0.0", "call-bind": "^1.0.5", @@ -6271,25 +4557,13 @@ }, "node_modules/deep-is": { "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "node_modules/deepmerge-ts": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/deepmerge-ts/-/deepmerge-ts-5.1.0.tgz", - "integrity": "sha512-eS8dRJOckyo9maw9Tu5O5RUi/4inFLrnoLkBe3cPfDMx3WZioXtmOew4TXQaxq7Rhl4xjDtR7c6x8nNTxOvbFw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=16.0.0" - } + "dev": true, + "license": "MIT" }, "node_modules/default-require-extensions": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz", - "integrity": "sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==", "dev": true, + "license": "MIT", "dependencies": { "strip-bom": "^4.0.0" }, @@ -6302,28 +4576,16 @@ }, "node_modules/default-require-extensions/node_modules/strip-bom": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", - "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, "node_modules/deferred-leveldown": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-7.0.0.tgz", - "integrity": "sha512-QKN8NtuS3BC6m0B8vAnBls44tX1WXAFATUsJlruyAYbZpysWV3siH6o/i3g9DCHauzodksO60bdj5NazNbjCmg==", "dev": true, + "license": "MIT", "dependencies": { "abstract-leveldown": "^7.2.0", "inherits": "^2.0.3" @@ -6334,8 +4596,7 @@ }, "node_modules/define-data-property": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", @@ -6350,18 +4611,16 @@ }, 
"node_modules/define-lazy-prop": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", - "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/define-properties": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "dev": true, + "license": "MIT", "dependencies": { "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", @@ -6370,717 +4629,887 @@ "engines": { "node": ">= 0.4" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/defined": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delay": { + "version": "5.0.0", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/des.js": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-indent": { + "version": "6.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-libc": { + "version": "2.0.3", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diffie-hellman": { + "version": "5.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.1.0", + "miller-rabin": "^4.0.0", + "randombytes": "^2.0.0" + } + }, + "node_modules/diffie-hellman/node_modules/bn.js": { + "version": "4.12.0", + "dev": true, + "license": "MIT" + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dns-packet": { + "version": "5.6.1", + "license": "MIT", + "dependencies": { + "@leichtgewicht/ip-codec": "^2.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/domain-browser": { + "version": "4.23.0", + "dev": true, + "license": "Artistic-2.0", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, + "node_modules/dotignore": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "minimatch": "^3.0.4" + }, + "bin": { + "ignored": "bin/ignored" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ee-first": { + "version": 
"1.1.1", + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.8", + "dev": true, + "license": "ISC" + }, + "node_modules/elliptic": { + "version": "6.5.4", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/elliptic/node_modules/bn.js": { + "version": "4.12.0", + "dev": true, + "license": "MIT" + }, + "node_modules/embedme": { + "version": "1.22.1", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "3.0.0", + "commander": "5.1.0", + "gitignore-parser": "~0.0.2", + "glob": "~7.1.4" + }, + "bin": { + "embedme": "dist/embedme.js" } }, - "node_modules/defined": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.1.tgz", - "integrity": "sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q==", + "node_modules/embedme/node_modules/chalk": { + "version": "3.0.0", "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/degenerator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/degenerator/-/degenerator-5.0.1.tgz", - "integrity": "sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ==", - "optional": true, - "peer": true, + "license": "MIT", "dependencies": { - "ast-types": "^0.13.4", - "escodegen": "^2.1.0", - "esprima": "^4.0.1" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">= 14" + "node": ">=8" } }, - "node_modules/delay": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", - "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } + "node_modules/emoji-regex": { + "version": "9.2.2", + "dev": true, + "license": "MIT" }, - "node_modules/depd": { + "node_modules/enabled": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "engines": { - "node": ">= 0.8" - } + "license": "MIT" }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "devOptional": true, + "node_modules/encodeurl": { + "version": "1.0.2", + "license": "MIT", "engines": { - "node": ">=6" + "node": ">= 0.8" } }, - "node_modules/des.js": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz", - "integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==", + "node_modules/encoding-down": { + "version": "7.1.0", "dev": true, + "license": "MIT", "dependencies": { - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "abstract-leveldown": "^7.2.0", + "inherits": "^2.0.3", + "level-codec": "^10.0.0", + 
"level-errors": "^3.0.0" + }, "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" + "node": ">=10" } }, - "node_modules/detect-indent": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", - "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "node_modules/env-paths": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", + "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", "dev": true, "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/detect-libc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", - "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "node_modules/es-abstract": { + "version": "1.23.3", "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.3", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.13", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.1", + "object-keys": "^1.1.1", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.15" + }, "engines": { - "node": ">=8" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/detect-newline": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", - "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", - "dev": true, + "node_modules/es-define-property": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, - "node_modules/devtools-protocol": { - "version": "0.0.1302984", - "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1302984.tgz", - "integrity": "sha512-Rgh2Sk5fUSCtEx4QGH9iwTyECdFPySG2nlz5J8guGh2Wlha6uzSOCq/DCEC8faHlLaMPZJMuZ4ovgcX4LvOkKA==", - "optional": true, - "peer": true - }, - "node_modules/diff-sequences": { - 
"version": "29.6.3", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", - "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", - "devOptional": true, + "node_modules/es-errors": { + "version": "1.3.0", + "license": "MIT", "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": ">= 0.4" } }, - "node_modules/diffie-hellman": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", - "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", + "node_modules/es-get-iterator": { + "version": "1.1.3", "dev": true, + "license": "MIT", "dependencies": { - "bn.js": "^4.1.0", - "miller-rabin": "^4.0.0", - "randombytes": "^2.0.0" + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "is-arguments": "^1.1.1", + "is-map": "^2.0.2", + "is-set": "^2.0.2", + "is-string": "^1.0.7", + "isarray": "^2.0.5", + "stop-iteration-iterator": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/diffie-hellman/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "node_modules/es-iterator-helpers": { + "version": "1.0.19", "dev": true, + "license": "MIT", "dependencies": { - "path-type": "^4.0.0" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "globalthis": "^1.0.3", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.7", + "iterator.prototype": "^1.1.2", + "safe-array-concat": "^1.1.2" }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, - "node_modules/dns-packet": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", - "integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==", + "node_modules/es-object-atoms": { + "version": "1.0.0", + "dev": true, + "license": "MIT", "dependencies": { - "@leichtgewicht/ip-codec": "^2.0.1" + "es-errors": "^1.3.0" }, "engines": { - "node": ">=6" + "node": ">= 0.4" } }, - "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "node_modules/es-set-tostringtag": { + "version": "2.0.3", "dev": true, + "license": "MIT", "dependencies": { - "esutils": "^2.0.2" + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" }, "engines": { - "node": ">=6.0.0" + "node": ">= 0.4" } }, - "node_modules/dom-accessibility-api": { - "version": "0.5.16", - "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", - "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", - 
"optional": true, - "peer": true + "node_modules/es-shim-unscopables": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.0" + } }, - "node_modules/domain-browser": { - "version": "4.23.0", - "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-4.23.0.tgz", - "integrity": "sha512-ArzcM/II1wCCujdCNyQjXrAFwS4mrLh4C7DZWlaI8mdh7h3BfKdNd3bKXITfl2PT9FtfQqaGvhi1vPRQPimjGA==", + "node_modules/es-to-primitive": { + "version": "1.2.1", "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, "engines": { - "node": ">=10" + "node": ">= 0.4" }, "funding": { - "url": "https://bevry.me/fund" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/dotignore": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/dotignore/-/dotignore-0.1.2.tgz", - "integrity": "sha512-UGGGWfSauusaVJC+8fgV+NVvBXkCTmVv7sk6nojDZZvuOUNGUy0Zk4UpHQD6EDjS0jpBwcACvH4eofvyzBcRDw==", + "node_modules/es6-error": { + "version": "4.1.1", "dev": true, - "dependencies": { - "minimatch": "^3.0.4" - }, - "bin": { - "ignored": "bin/ignored" - } + "license": "MIT" }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "devOptional": true - }, - "node_modules/edge-paths": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/edge-paths/-/edge-paths-3.0.5.tgz", - "integrity": "sha512-sB7vSrDnFa4ezWQk9nZ/n0FdpdUuC6R1EOrlU3DL+bovcNFK28rqu2emmAUjujYEJTWIgQGqgVVWUZXMnc8iWg==", - "optional": true, - "peer": true, + "node_modules/es6-promise": { + "version": "4.2.8", + "license": "MIT" + }, + "node_modules/es6-promisify": { + "version": "5.0.0", + "license": "MIT", "dependencies": { - "@types/which": "^2.0.1", - "which": "^2.0.2" - }, - "engines": { - "node": ">=14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/shirshak55" + "es6-promise": "^4.0.3" } }, - "node_modules/edgedriver": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/edgedriver/-/edgedriver-5.6.0.tgz", - "integrity": "sha512-IeJXEczG+DNYBIa9gFgVYTqrawlxmc9SUqUsWU2E98jOsO/amA7wzabKOS8Bwgr/3xWoyXCJ6yGFrbFKrilyyQ==", + "node_modules/esbuild": { + "version": "0.23.0", + "dev": true, "hasInstallScript": true, - "optional": true, - "peer": true, - "dependencies": { - "@wdio/logger": "^8.28.0", - "@zip.js/zip.js": "^2.7.44", - "decamelize": "^6.0.0", - "edge-paths": "^3.0.5", - "node-fetch": "^3.3.2", - "which": "^4.0.0" - }, + "license": "MIT", "bin": { - "edgedriver": "bin/edgedriver.js" - } - }, - "node_modules/edgedriver/node_modules/decamelize": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-6.0.0.tgz", - "integrity": "sha512-Fv96DCsdOgB6mdGl67MT5JaTNKRzrzill5OH5s8bjYJXVlcXyPYGyPsUkWyGV5p1TXI5esYIYMMeDJL0hEIwaA==", - "optional": true, - "peer": true, + "esbuild": "bin/esbuild" + }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.23.0", + "@esbuild/android-arm": "0.23.0", + "@esbuild/android-arm64": "0.23.0", + "@esbuild/android-x64": "0.23.0", + "@esbuild/darwin-arm64": "0.23.0", + "@esbuild/darwin-x64": "0.23.0", + "@esbuild/freebsd-arm64": "0.23.0", + "@esbuild/freebsd-x64": 
"0.23.0", + "@esbuild/linux-arm": "0.23.0", + "@esbuild/linux-arm64": "0.23.0", + "@esbuild/linux-ia32": "0.23.0", + "@esbuild/linux-loong64": "0.23.0", + "@esbuild/linux-mips64el": "0.23.0", + "@esbuild/linux-ppc64": "0.23.0", + "@esbuild/linux-riscv64": "0.23.0", + "@esbuild/linux-s390x": "0.23.0", + "@esbuild/linux-x64": "0.23.0", + "@esbuild/netbsd-x64": "0.23.0", + "@esbuild/openbsd-arm64": "0.23.0", + "@esbuild/openbsd-x64": "0.23.0", + "@esbuild/sunos-x64": "0.23.0", + "@esbuild/win32-arm64": "0.23.0", + "@esbuild/win32-ia32": "0.23.0", + "@esbuild/win32-x64": "0.23.0" + } + }, + "node_modules/esbuild/node_modules/@esbuild/aix-ppc64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.0.tgz", + "integrity": "sha512-3sG8Zwa5fMcA9bgqB8AfWPQ+HFke6uD3h1s3RIwUNK8EG7a4buxvuFTs3j1IMs2NXAk9F30C/FF4vxRgQCcmoQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" } }, - "node_modules/edgedriver/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "node_modules/esbuild/node_modules/@esbuild/android-arm": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.0.tgz", + "integrity": "sha512-+KuOHTKKyIKgEEqKbGTK8W7mPp+hKinbMBeEnNzjJGyFcWsfrXjSTNluJHCY1RqhxFurdD8uNXQDei7qDlR6+g==", + "cpu": [ + "arm" + ], + "dev": true, "optional": true, - "peer": true, + "os": [ + "android" + ], "engines": { - "node": ">=16" + "node": ">=18" } }, - "node_modules/edgedriver/node_modules/which": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", - "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", + "node_modules/esbuild/node_modules/@esbuild/android-arm64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.0.tgz", + "integrity": "sha512-EuHFUYkAVfU4qBdyivULuu03FhJO4IJN9PGuABGrFy4vUuzk91P2d+npxHcFdpUnfYKy0PuV+n6bKIpHOB3prQ==", + "cpu": [ + "arm64" + ], + "dev": true, "optional": true, - "peer": true, - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, + "os": [ + "android" + ], "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": ">=18" } }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "node_modules/electron-to-chromium": { - "version": "1.4.816", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.816.tgz", - "integrity": "sha512-EKH5X5oqC6hLmiS7/vYtZHZFTNdhsYG5NVPRN6Yn0kQHNBlT59+xSM8HBy66P5fxWpKgZbPqb+diC64ng295Jw==", - "dev": true - }, - "node_modules/elliptic": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.5.tgz", - "integrity": "sha512-7EjbcmUm17NQFu4Pmgmq2olYMj8nwMnpcddByChSUjArp8F5DQWcIcpriwO4ZToLNAJig0yiyjswfyGNje/ixw==", + "node_modules/esbuild/node_modules/@esbuild/android-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.0.tgz", + "integrity": 
"sha512-WRrmKidLoKDl56LsbBMhzTTBxrsVwTKdNbKDalbEZr0tcsBgCLbEtoNthOW6PX942YiYq8HzEnb4yWQMLQuipQ==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "bn.js": "^4.11.9", - "brorand": "^1.1.0", - "hash.js": "^1.0.0", - "hmac-drbg": "^1.0.1", - "inherits": "^2.0.4", - "minimalistic-assert": "^1.0.1", - "minimalistic-crypto-utils": "^1.0.1" + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" } }, - "node_modules/elliptic/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true - }, - "node_modules/embedme": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/embedme/-/embedme-1.22.1.tgz", - "integrity": "sha512-wHLuAOI9XoCAQ322mbslIR7PQNgPGYCWrDlYw5C6fesakuhCzi6ce0BrLTZ/EEKgiHEUqcG9V3s7MGO0x1Zgig==", + "node_modules/esbuild/node_modules/@esbuild/darwin-arm64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.0.tgz", + "integrity": "sha512-YLntie/IdS31H54Ogdn+v50NuoWF5BDkEUFpiOChVa9UnKpftgwzZRrI4J132ETIi+D8n6xh9IviFV3eXdxfow==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "chalk": "3.0.0", - "commander": "5.1.0", - "gitignore-parser": "~0.0.2", - "glob": "~7.1.4" - }, - "bin": { - "embedme": "dist/embedme.js" + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" } }, - "node_modules/embedme/node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "node_modules/esbuild/node_modules/@esbuild/darwin-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.0.tgz", + "integrity": "sha512-IMQ6eme4AfznElesHUPDZ+teuGwoRmVuuixu7sv92ZkdQcPbsNHzutd+rAfaBKo8YK3IrBEi9SLLKWJdEvJniQ==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">=8" + "node": ">=18" } }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "devOptional": true - }, - "node_modules/enabled": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", - "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" - }, - "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "node_modules/esbuild/node_modules/@esbuild/freebsd-arm64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.0.tgz", + "integrity": "sha512-0muYWCng5vqaxobq6LB3YNtevDFSAZGlgtLoAc81PjUfiFz36n4KMpwhtAd4he8ToSI3TGyuhyx5xmiWNYZFyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], "engines": { - "node": ">= 0.8" + "node": ">=18" } }, - "node_modules/encoding-down": { - "version": "7.1.0", - "resolved": 
"https://registry.npmjs.org/encoding-down/-/encoding-down-7.1.0.tgz", - "integrity": "sha512-ky47X5jP84ryk5EQmvedQzELwVJPjCgXDQZGeb9F6r4PdChByCGHTBrVcF3h8ynKVJ1wVbkxTsDC8zBROPypgQ==", + "node_modules/esbuild/node_modules/@esbuild/freebsd-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.0.tgz", + "integrity": "sha512-XKDVu8IsD0/q3foBzsXGt/KjD/yTKBCIwOHE1XwiXmrRwrX6Hbnd5Eqn/WvDekddK21tfszBSrE/WMaZh+1buQ==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "abstract-leveldown": "^7.2.0", - "inherits": "^2.0.3", - "level-codec": "^10.0.0", - "level-errors": "^3.0.0" - }, + "optional": true, + "os": [ + "freebsd" + ], "engines": { - "node": ">=10" + "node": ">=18" } }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "node_modules/esbuild/node_modules/@esbuild/linux-arm": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.0.tgz", + "integrity": "sha512-SEELSTEtOFu5LPykzA395Mc+54RMg1EUgXP+iw2SJ72+ooMwVsgfuwXo5Fn0wXNgWZsTVHwY2cg4Vi/bOD88qw==", + "cpu": [ + "arm" + ], + "dev": true, "optional": true, - "peer": true, - "dependencies": { - "once": "^1.4.0" + "os": [ + "linux" + ], + "engines": { + "node": ">=18" } }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "devOptional": true, - "peer": true, + "node_modules/esbuild/node_modules/@esbuild/linux-arm64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.0.tgz", + "integrity": "sha512-j1t5iG8jE7BhonbsEg5d9qOYcVZv/Rv6tghaXM/Ug9xahM0nX/H2gfu6X6z11QRTMT6+aywOMA8TDkhPo8aCGw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" + "node": ">=18" } }, - "node_modules/es-abstract": { - "version": "1.23.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", - "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "node_modules/esbuild/node_modules/@esbuild/linux-ia32": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.0.tgz", + "integrity": "sha512-P7O5Tkh2NbgIm2R6x1zGJJsnacDzTFcRWZyTTMgFdVit6E98LTxO+v8LCCLWRvPrjdzXHx9FEOA8oAZPyApWUA==", + "cpu": [ + "ia32" + ], "dev": true, - "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "arraybuffer.prototype.slice": "^1.0.3", - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "data-view-buffer": "^1.0.1", - "data-view-byte-length": "^1.0.1", - "data-view-byte-offset": "^1.0.0", - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "es-set-tostringtag": "^2.0.3", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.4", - "get-symbol-description": "^1.0.2", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.0.3", - "has-symbols": "^1.0.3", - "hasown": "^2.0.2", - "internal-slot": "^1.0.7", - 
"is-array-buffer": "^3.0.4", - "is-callable": "^1.2.7", - "is-data-view": "^1.0.1", - "is-negative-zero": "^2.0.3", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.3", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.13", - "is-weakref": "^1.0.2", - "object-inspect": "^1.13.1", - "object-keys": "^1.1.1", - "object.assign": "^4.1.5", - "regexp.prototype.flags": "^1.5.2", - "safe-array-concat": "^1.1.2", - "safe-regex-test": "^1.0.3", - "string.prototype.trim": "^1.2.9", - "string.prototype.trimend": "^1.0.8", - "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.2", - "typed-array-byte-length": "^1.0.1", - "typed-array-byte-offset": "^1.0.2", - "typed-array-length": "^1.0.6", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.15" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" } }, - "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "node_modules/esbuild/node_modules/@esbuild/linux-loong64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.0.tgz", + "integrity": "sha512-InQwepswq6urikQiIC/kkx412fqUZudBO4SYKu0N+tGhXRWUqAx+Q+341tFV6QdBifpjYgUndV1hhMq3WeJi7A==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.4" + "node": ">=18" } }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "node_modules/esbuild/node_modules/@esbuild/linux-mips64el": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.0.tgz", + "integrity": "sha512-J9rflLtqdYrxHv2FqXE2i1ELgNjT+JFURt/uDMoPQLcjWQA5wDKgQA4t/dTqGa88ZVECKaD0TctwsUfHbVoi4w==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.4" + "node": ">=18" } }, - "node_modules/es-get-iterator": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", - "integrity": "sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==", + "node_modules/esbuild/node_modules/@esbuild/linux-ppc64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.0.tgz", + "integrity": "sha512-cShCXtEOVc5GxU0fM+dsFD10qZ5UpcQ8AM22bYj0u/yaAykWnqXJDpd77ublcX6vdDsWLuweeuSNZk4yUxZwtw==", + "cpu": [ + "ppc64" + ], "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "has-symbols": "^1.0.3", - "is-arguments": "^1.1.1", - "is-map": "^2.0.2", - "is-set": "^2.0.2", - "is-string": "^1.0.7", - "isarray": "^2.0.5", - "stop-iteration-iterator": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" } }, - "node_modules/es-iterator-helpers": { - "version": "1.0.19", - "resolved": 
"https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz", - "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==", + "node_modules/esbuild/node_modules/@esbuild/linux-riscv64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.0.tgz", + "integrity": "sha512-HEtaN7Y5UB4tZPeQmgz/UhzoEyYftbMXrBCUjINGjh3uil+rB/QzzpMshz3cNUxqXN7Vr93zzVtpIDL99t9aRw==", + "cpu": [ + "riscv64" + ], "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.3", - "es-errors": "^1.3.0", - "es-set-tostringtag": "^2.0.3", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "globalthis": "^1.0.3", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.0.3", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.7", - "iterator.prototype": "^1.1.2", - "safe-array-concat": "^1.1.2" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.4" + "node": ">=18" } }, - "node_modules/es-object-atoms": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", - "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "node_modules/esbuild/node_modules/@esbuild/linux-s390x": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.0.tgz", + "integrity": "sha512-WDi3+NVAuyjg/Wxi+o5KPqRbZY0QhI9TjrEEm+8dmpY9Xir8+HE/HNx2JoLckhKbFopW0RdO2D72w8trZOV+Wg==", + "cpu": [ + "s390x" + ], "dev": true, - "dependencies": { - "es-errors": "^1.3.0" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.4" + "node": ">=18" } }, - "node_modules/es-set-tostringtag": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", - "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "node_modules/esbuild/node_modules/@esbuild/netbsd-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.0.tgz", + "integrity": "sha512-cRK+YDem7lFTs2Q5nEv/HHc4LnrfBCbH5+JHu6wm2eP+d8OZNoSMYgPZJq78vqQ9g+9+nMuIsAO7skzphRXHyw==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "get-intrinsic": "^1.2.4", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.1" - }, + "optional": true, + "os": [ + "netbsd" + ], "engines": { - "node": ">= 0.4" + "node": ">=18" } }, - "node_modules/es-shim-unscopables": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", - "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "node_modules/esbuild/node_modules/@esbuild/openbsd-arm64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.0.tgz", + "integrity": "sha512-suXjq53gERueVWu0OKxzWqk7NxiUWSUlrxoZK7usiF50C6ipColGR5qie2496iKGYNLhDZkPxBI3erbnYkU0rQ==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "hasown": "^2.0.0" + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" } }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": 
"sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "node_modules/esbuild/node_modules/@esbuild/openbsd-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.0.tgz", + "integrity": "sha512-6p3nHpby0DM/v15IFKMjAaayFhqnXV52aEmv1whZHX56pdkK+MEaLoQWj+H42ssFarP1PcomVhbsR4pkz09qBg==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=18" } }, - "node_modules/es6-error": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", - "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", - "dev": true - }, - "node_modules/es6-promise": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", - "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" - }, - "node_modules/es6-promisify": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha512-C+d6UdsYDk0lMebHNR4S2NybQMMngAOnOwYBQjTOiv0MkoJMP0Myw2mgpDLBcpfCmRLxyFqYhS/CfOENq4SJhQ==", - "dependencies": { - "es6-promise": "^4.0.3" + "node_modules/esbuild/node_modules/@esbuild/sunos-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.0.tgz", + "integrity": "sha512-BFelBGfrBwk6LVrmFzCq1u1dZbG4zy/Kp93w2+y83Q5UGYF1d8sCzeLI9NXjKyujjBBniQa8R8PzLFAUrSM9OA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" } }, - "node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "devOptional": true, - "hasInstallScript": true, - "bin": { - "esbuild": "bin/esbuild" - }, + "node_modules/esbuild/node_modules/@esbuild/win32-arm64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.0.tgz", + "integrity": "sha512-lY6AC8p4Cnb7xYHuIxQ6iYPe6MfO2CC43XXKo9nBXDb35krYt7KGhQnOkRGar5psxYkircpCqfbNDB4uJbS2jQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": "0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" + "node": ">=18" } }, - 
"node_modules/escalade": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", - "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "node_modules/esbuild/node_modules/@esbuild/win32-ia32": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.0.tgz", + "integrity": "sha512-7L1bHlOTcO4ByvI7OXVI5pNN6HSu6pUQq9yodga8izeuB1KcT2UkHaH6118QJwopExPn0rMHIseCTx1CRo/uNA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=6" + "node": ">=18" } }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "node_modules/esbuild/node_modules/@esbuild/win32-x64": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.0.tgz", + "integrity": "sha512-Arm+WgUFLUATuoxCJcahGuk6Yj9Pzxd6l11Zb/2aAuv5kWWvvfhLFo2fni4uSK5vzlUdCGZ/BdV5tH8klj8p8g==", + "cpu": [ + "x64" + ], "dev": true, + "optional": true, + "os": [ + "win32" + ], "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=18" } }, - "node_modules/escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "optional": true, - "peer": true, - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2" - }, - "bin": { - "escodegen": "bin/escodegen.js", - "esgenerate": "bin/esgenerate.js" - }, + "node_modules/escalade": { + "version": "3.1.2", + "license": "MIT", "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" + "node": ">=6" } }, - "node_modules/escodegen/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "optional": true, - "peer": true, + "node_modules/escape-html": { + "version": "1.0.3", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "dev": true, + "license": "MIT", "engines": { - "node": ">=4.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/eslint": { - "version": "8.45.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.45.0.tgz", - "integrity": "sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw==", + "version": "8.57.0", "dev": true, + "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.1.0", - "@eslint/js": "8.44.0", - "@humanwhocodes/config-array": "^0.11.10", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": 
"^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.0", - "eslint-visitor-keys": "^3.4.1", - "espree": "^9.6.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -7114,10 +5543,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz", - "integrity": "sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==", + "version": "9.1.0", "dev": true, + "license": "MIT", "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -7127,9 +5555,8 @@ }, "node_modules/eslint-config-typestrict": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/eslint-config-typestrict/-/eslint-config-typestrict-1.0.5.tgz", - "integrity": "sha512-6W48TD8kXMpj9lUTBoDWFKI+qRpgPQPKy9NPIf2cP56HiT6RBO9g7uvApvvl0DtfmAKP1kXbbI+Mg6xVROrXZA==", "dev": true, + "license": "MIT", "peerDependencies": { "@typescript-eslint/eslint-plugin": "^5", "eslint-plugin-sonarjs": "*" @@ -7137,9 +5564,8 @@ }, "node_modules/eslint-formatter-codeframe": { "version": "7.32.1", - "resolved": "https://registry.npmjs.org/eslint-formatter-codeframe/-/eslint-formatter-codeframe-7.32.1.tgz", - "integrity": "sha512-DK/3Q3+zVKq/7PdSYiCxPrsDF8H/TRMK5n8Hziwr4IMkMy+XiKSwbpj25AdajS63I/B61Snetq4uVvX9fOLyAg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/code-frame": "7.12.11", "chalk": "^4.0.0" @@ -7150,9 +5576,8 @@ }, "node_modules/eslint-import-resolver-node": { "version": "0.3.9", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", - "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^3.2.7", "is-core-module": "^2.13.0", @@ -7161,18 +5586,16 @@ }, "node_modules/eslint-import-resolver-node/node_modules/debug": { "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/eslint-module-utils": { "version": "2.8.1", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", - "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^3.2.7" }, @@ -7187,20 +5610,18 @@ }, "node_modules/eslint-module-utils/node_modules/debug": { "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "license": "MIT", "dependencies": { "ms": "^2.1.1" } }, "node_modules/eslint-plugin-escompat": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-escompat/-/eslint-plugin-escompat-3.4.0.tgz", - "integrity": "sha512-ufTPv8cwCxTNoLnTZBFTQ5SxU2w7E7wiMIS7PSxsgP1eAxFjtSaoZ80LRn64hI8iYziE6kJG6gX/ZCJVxh48Bg==", + 
"version": "3.11.1", "dev": true, + "license": "MIT", "dependencies": { - "browserslist": "^4.21.0" + "browserslist": "^4.23.1" }, "peerDependencies": { "eslint": ">=5.14.1" @@ -7208,9 +5629,8 @@ }, "node_modules/eslint-plugin-eslint-comments": { "version": "3.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-eslint-comments/-/eslint-plugin-eslint-comments-3.2.0.tgz", - "integrity": "sha512-0jkOl0hfojIHHmEHgmNdqv4fmh7300NdpA9FFpF7zaoLvB/QeXOGNLIo86oAveJFrfB1p05kC8hpEMHM8DwWVQ==", "dev": true, + "license": "MIT", "dependencies": { "escape-string-regexp": "^1.0.5", "ignore": "^5.0.5" @@ -7227,9 +5647,8 @@ }, "node_modules/eslint-plugin-eslint-comments/node_modules/escape-string-regexp": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.0" } @@ -7240,9 +5659,8 @@ }, "node_modules/eslint-plugin-filenames": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-filenames/-/eslint-plugin-filenames-1.3.2.tgz", - "integrity": "sha512-tqxJTiEM5a0JmRCUYQmxw23vtTxrb2+a3Q2mMOPhFxvt7ZQQJmdiuMby9B/vUAuVMghyP7oET+nIf6EO6CBd/w==", "dev": true, + "license": "MIT", "dependencies": { "lodash.camelcase": "4.3.0", "lodash.kebabcase": "4.1.1", @@ -7255,9 +5673,8 @@ }, "node_modules/eslint-plugin-github": { "version": "4.9.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.9.2.tgz", - "integrity": "sha512-osez6Sio/fLr/3QkW5HE1wbCOcmYG5030/6QIa9IcKyyfchewlecdnYcsbeUMUtdIiU9lWqhroQp2H/O7auxBA==", "dev": true, + "license": "MIT", "dependencies": { "@github/browserslist-config": "^1.0.0", "@typescript-eslint/eslint-plugin": "^6.0.0", @@ -7286,9 +5703,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/eslint-plugin": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.21.0.tgz", - "integrity": "sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==", "dev": true, + "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.5.1", "@typescript-eslint/scope-manager": "6.21.0", @@ -7321,9 +5737,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/parser": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.21.0.tgz", - "integrity": "sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/types": "6.21.0", @@ -7349,9 +5764,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/scope-manager": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz", - "integrity": "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/types": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0" @@ -7366,9 +5780,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/type-utils": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.21.0.tgz", - "integrity": 
"sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/typescript-estree": "6.21.0", "@typescript-eslint/utils": "6.21.0", @@ -7393,9 +5806,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/types": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.21.0.tgz", - "integrity": "sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==", "dev": true, + "license": "MIT", "engines": { "node": "^16.0.0 || >=18.0.0" }, @@ -7406,9 +5818,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/typescript-estree": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz", - "integrity": "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@typescript-eslint/types": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", @@ -7434,9 +5845,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/utils": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.21.0.tgz", - "integrity": "sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ==", "dev": true, + "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", @@ -7459,9 +5869,8 @@ }, "node_modules/eslint-plugin-github/node_modules/@typescript-eslint/visitor-keys": { "version": "6.21.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz", - "integrity": "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/types": "6.21.0", "eslint-visitor-keys": "^3.4.1" @@ -7476,48 +5885,16 @@ }, "node_modules/eslint-plugin-github/node_modules/brace-expansion": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } }, - "node_modules/eslint-plugin-github/node_modules/eslint-plugin-prettier": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.1.3.tgz", - "integrity": "sha512-C9GCVAs4Eq7ZC/XFQHITLiHJxQngdtraXaM+LoUFoFp/lHNl2Zn8f3WQbe9HvTBBQ9YnKFB0/2Ajdqwo5D1EAw==", - "dev": true, - "dependencies": { - "prettier-linter-helpers": "^1.0.0", - "synckit": "^0.8.6" - }, - "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint-plugin-prettier" - }, - "peerDependencies": { - "@types/eslint": ">=8.0.0", - "eslint": ">=8.0.0", - "eslint-config-prettier": "*", - "prettier": ">=3.0.0" - }, - "peerDependenciesMeta": { - "@types/eslint": { - "optional": true - }, - "eslint-config-prettier": { - "optional": true - } - } - }, "node_modules/eslint-plugin-github/node_modules/minimatch": { "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": 
"sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -7528,35 +5905,18 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/eslint-plugin-github/node_modules/prettier": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz", - "integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==", - "dev": true, - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, "node_modules/eslint-plugin-i18n-text": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-i18n-text/-/eslint-plugin-i18n-text-1.0.1.tgz", - "integrity": "sha512-3G3UetST6rdqhqW9SfcfzNYMpQXS7wNkJvp6dsXnjzGiku6Iu5hl3B0kmk6lIcFPwYjhQIY+tXVRtK9TlGT7RA==", "dev": true, + "license": "MIT", "peerDependencies": { "eslint": ">=5.0.0" } }, "node_modules/eslint-plugin-implicit-dependencies": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-implicit-dependencies/-/eslint-plugin-implicit-dependencies-1.1.1.tgz", - "integrity": "sha512-/EbKwaWTASieQR+hWeSIYaNRUCwIn/wAuPNFsiZZTMKYLXegVJyHvepDnJIIpfYWpqtYcrukNLQDit1yfmGD/A==", "dev": true, + "license": "MIT", "dependencies": { "builtin-modules": "^1.1.1", "findup": "^0.1.5" @@ -7564,9 +5924,8 @@ }, "node_modules/eslint-plugin-import": { "version": "2.26.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", - "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", "dev": true, + "license": "MIT", "dependencies": { "array-includes": "^3.1.4", "array.prototype.flat": "^1.2.5", @@ -7591,18 +5950,16 @@ }, "node_modules/eslint-plugin-import/node_modules/debug": { "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dev": true, + "license": "MIT", "dependencies": { "ms": "2.0.0" } }, "node_modules/eslint-plugin-import/node_modules/doctrine": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, @@ -7612,15 +5969,13 @@ }, "node_modules/eslint-plugin-import/node_modules/ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/eslint-plugin-jsx-a11y": { "version": "6.9.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.9.0.tgz", - "integrity": "sha512-nOFOCaJG2pYqORjK19lqPqxMO/JpvdCZdPtNdxY3kvom3jTvkAbOvQvD8wuD0G8BYR0IGAGYDlzqWJOh/ybn2g==", "dev": true, + "license": "MIT", "dependencies": { "aria-query": "~5.1.3", "array-includes": "^3.1.8", @@ -7648,38 +6003,44 @@ }, "node_modules/eslint-plugin-jsx-a11y/node_modules/aria-query": { "version": "5.1.3", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", - "integrity": 
"sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "deep-equal": "^2.0.5" } }, "node_modules/eslint-plugin-no-only-tests": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-no-only-tests/-/eslint-plugin-no-only-tests-3.1.0.tgz", - "integrity": "sha512-Lf4YW/bL6Un1R6A76pRZyE1dl1vr31G/ev8UzIc/geCgFWyrKil8hVjYqWVKGB/UIGmb6Slzs9T0wNezdSVegw==", + "version": "3.3.0", "dev": true, + "license": "MIT", "engines": { "node": ">=5.0.0" } }, "node_modules/eslint-plugin-prettier": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", - "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "version": "5.2.1", "dev": true, + "license": "MIT", "dependencies": { - "prettier-linter-helpers": "^1.0.0" + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.9.1" }, "engines": { - "node": ">=12.0.0" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" }, "peerDependencies": { - "eslint": ">=7.28.0", - "prettier": ">=2.0.0" + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": "*", + "prettier": ">=3.0.0" }, "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, "eslint-config-prettier": { "optional": true } @@ -7687,18 +6048,16 @@ }, "node_modules/eslint-plugin-simple-import-sort": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-simple-import-sort/-/eslint-plugin-simple-import-sort-7.0.0.tgz", - "integrity": "sha512-U3vEDB5zhYPNfxT5TYR7u01dboFZp+HNpnGhkDB2g/2E4wZ/g1Q9Ton8UwCLfRV9yAKyYqDh62oHOamvkFxsvw==", "dev": true, + "license": "MIT", "peerDependencies": { "eslint": ">=5.0.0" } }, "node_modules/eslint-plugin-sonarjs": { "version": "0.19.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-sonarjs/-/eslint-plugin-sonarjs-0.19.0.tgz", - "integrity": "sha512-6+s5oNk5TFtVlbRxqZN7FIGmjdPCYQKaTzFPmqieCmsU1kBYDzndTeQav0xtQNwZJWu5awWfTGe8Srq9xFOGnw==", "dev": true, + "license": "LGPL-3.0", "engines": { "node": ">=14" }, @@ -7708,18 +6067,16 @@ }, "node_modules/eslint-rule-documentation": { "version": "1.0.23", - "resolved": "https://registry.npmjs.org/eslint-rule-documentation/-/eslint-rule-documentation-1.0.23.tgz", - "integrity": "sha512-pWReu3fkohwyvztx/oQWWgld2iad25TfUdi6wvhhaDPIQjHU/pyvlKgXFw1kX31SQK2Nq9MH+vRDWB0ZLy8fYw==", "dev": true, + "license": "MIT", "engines": { "node": ">=4.0.0" } }, "node_modules/eslint-scope": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" @@ -7730,9 +6087,8 @@ }, "node_modules/eslint-utils": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^2.0.0" }, @@ -7748,18 +6104,16 @@ }, "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": 
"sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10" } }, "node_modules/eslint-visitor-keys": { "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -7769,9 +6123,8 @@ }, "node_modules/eslint/node_modules/eslint-scope": { "version": "7.2.2", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", - "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" @@ -7785,18 +6138,16 @@ }, "node_modules/eslint/node_modules/estraverse": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } }, "node_modules/espree": { "version": "9.6.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", - "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", @@ -7811,9 +6162,8 @@ }, "node_modules/esprima": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "devOptional": true, + "dev": true, + "license": "BSD-2-Clause", "bin": { "esparse": "bin/esparse.js", "esvalidate": "bin/esvalidate.js" @@ -7823,10 +6173,9 @@ } }, "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "version": "1.6.0", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "estraverse": "^5.1.0" }, @@ -7836,18 +6185,16 @@ }, "node_modules/esquery/node_modules/estraverse": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } }, "node_modules/esrecurse": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "estraverse": "^5.2.0" }, @@ -7857,44 +6204,39 @@ }, "node_modules/esrecurse/node_modules/estraverse": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } }, "node_modules/estraverse": { "version": "4.3.0", - "resolved": 
"https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } }, "node_modules/estree-walker": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@types/estree": "^1.0.0" } }, "node_modules/esutils": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "devOptional": true, + "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=0.10.0" } }, "node_modules/ethereum-cryptography": { "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ethereum-cryptography/-/ethereum-cryptography-2.2.1.tgz", - "integrity": "sha512-r/W8lkHSiTLxUxW8Rf3u4HGB0xQweG2RyETjywylKZSzLWoWAijRz8WCuOtJ6wah+avllXBqZuk29HCCvhEIRg==", + "license": "MIT", "dependencies": { "@noble/curves": "1.4.2", "@noble/hashes": "1.4.0", @@ -7903,9 +6245,7 @@ } }, "node_modules/ethers": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.1.tgz", - "integrity": "sha512-hdJ2HOxg/xx97Lm9HdCWk949BfYqYWpyw4//78SiwOLgASyfrNszfMUNB2joKjvGUdwhHfaiMMFFwacVVoLR9A==", + "version": "6.13.2", "dev": true, "funding": [ { @@ -7917,6 +6257,7 @@ "url": "https://www.buymeacoffee.com/ricmoo" } ], + "license": "MIT", "dependencies": { "@adraffy/ens-normalize": "1.10.1", "@noble/curves": "1.2.0", @@ -7932,9 +6273,8 @@ }, "node_modules/ethers/node_modules/@noble/curves": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.2.0.tgz", - "integrity": "sha512-oYclrNgRaM9SsBUBVbb8M6DTV7ZHRTKugureoYEncY5c65HOmRzvSiTE3y5CYaPYJA/GVkrhXEoF0M3Ya9PMnw==", "dev": true, + "license": "MIT", "dependencies": { "@noble/hashes": "1.3.2" }, @@ -7944,9 +6284,8 @@ }, "node_modules/ethers/node_modules/@noble/hashes": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.2.tgz", - "integrity": "sha512-MVC8EAQp7MvEcm30KWENFjgR+Mkmf+D189XJTkFIlwohU5hcBbn1ZkKq7KVTi2Hme3PMGF390DaL52beVrIihQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 16" }, @@ -7956,54 +6295,58 @@ }, "node_modules/ethers/node_modules/@types/node": { "version": "18.15.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", - "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/ethers/node_modules/tslib": { "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", - "dev": true + "dev": true, + "license": "0BSD" }, - "node_modules/event-target-shim": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "optional": true, - "peer": true, + "node_modules/ethers/node_modules/ws": { + "version": "8.17.1", + "dev": true, + "license": 
"MIT", "engines": { - "node": ">=6" + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } } }, "node_modules/eventemitter3": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==" + "license": "MIT" }, "node_modules/events": { "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.x" } }, "node_modules/eventsource": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", "dev": true, + "license": "MIT", "engines": { "node": ">=12.0.0" } }, "node_modules/evp_bytestokey": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", - "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", "dev": true, + "license": "MIT", "dependencies": { "md5.js": "^1.3.4", "safe-buffer": "^5.1.1" @@ -8011,9 +6354,8 @@ }, "node_modules/execa": { "version": "6.1.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-6.1.0.tgz", - "integrity": "sha512-QVWlX2e50heYJcCPG0iWtf8r0xjEYfz/OYLGDYH+IyjWezzPNxz63qNFOu0l4YftGWuizFVZHHs8PrLU5p2IDA==", "dev": true, + "license": "MIT", "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.1", @@ -8032,74 +6374,40 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/extract-zip": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", - "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", - "optional": true, - "peer": true, - "dependencies": { - "debug": "^4.1.1", - "get-stream": "^5.1.0", - "yauzl": "^2.10.0" - }, - "bin": { - "extract-zip": "cli.js" - }, - "engines": { - "node": ">= 10.17.0" - }, - "optionalDependencies": { - "@types/yauzl": "^2.9.1" - } - }, - "node_modules/extract-zip/node_modules/get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "optional": true, - "peer": true, - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/eyes": { "version": "0.1.8", - "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", - "integrity": "sha512-GipyPsXO1anza0AOZdy69Im7hGFCNB7Y/NGjDlZGJ3GJJLtwNSb2vrzYrTYJRrRloVx7pl+bhUaTB8yiccPvFQ==", "engines": { "node": "> 0.1.90" } }, "node_modules/fast-deep-equal": { "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fast-diff": { "version": 
"1.3.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", - "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", - "dev": true + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/fast-equals": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz", + "integrity": "sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } }, "node_modules/fast-fifo": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", - "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/fast-glob": { "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -8113,9 +6421,8 @@ }, "node_modules/fast-glob/node_modules/glob-parent": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "devOptional": true, + "dev": true, + "license": "ISC", "dependencies": { "is-glob": "^4.0.1" }, @@ -8125,75 +6432,35 @@ }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fast-levenshtein": { "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/fastq": { "version": "1.17.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", - "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", - "devOptional": true, + "dev": true, + "license": "ISC", "dependencies": { "reusify": "^1.0.4" } }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "optional": true, - "peer": true, - "dependencies": { - "pend": "~1.2.0" - } - }, "node_modules/fecha": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz", - "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==" - }, - "node_modules/fetch-blob": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "paypal", - 
"url": "https://paypal.me/jimmywarting" - } - ], - "optional": true, - "peer": true, - "dependencies": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - }, - "engines": { - "node": "^12.20 || >= 14.13" - } + "license": "MIT" }, "node_modules/fflate": { "version": "0.8.2", - "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", - "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/file-entry-cache": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, + "license": "MIT", "dependencies": { "flat-cache": "^3.0.4" }, @@ -8203,17 +6470,15 @@ }, "node_modules/file-stream-rotator": { "version": "0.6.1", - "resolved": "https://registry.npmjs.org/file-stream-rotator/-/file-stream-rotator-0.6.1.tgz", - "integrity": "sha512-u+dBid4PvZw17PmDeRcNOtCP9CCK/9lRN2w+r1xIS7yOL9JFrIBKTvrYsxT4P0pGtThYTn++QS5ChHaUov3+zQ==", + "license": "MIT", "dependencies": { "moment": "^2.29.1" } }, "node_modules/fill-range": { "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -8223,8 +6488,7 @@ }, "node_modules/finalhandler": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", - "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", + "license": "MIT", "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", @@ -8240,21 +6504,18 @@ }, "node_modules/finalhandler/node_modules/debug": { "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", "dependencies": { "ms": "2.0.0" } }, "node_modules/finalhandler/node_modules/ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "license": "MIT" }, "node_modules/finalhandler/node_modules/on-finished": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -8264,9 +6525,8 @@ }, "node_modules/find-cache-dir": { "version": "3.3.2", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", - "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", "dev": true, + "license": "MIT", "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", @@ -8281,9 +6541,8 @@ }, "node_modules/find-cache-dir/node_modules/find-up": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, + "license": "MIT", 
"dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -8294,9 +6553,8 @@ }, "node_modules/find-cache-dir/node_modules/locate-path": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^4.1.0" }, @@ -8306,9 +6564,8 @@ }, "node_modules/find-cache-dir/node_modules/make-dir": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^6.0.0" }, @@ -8321,9 +6578,8 @@ }, "node_modules/find-cache-dir/node_modules/p-limit": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -8336,9 +6592,8 @@ }, "node_modules/find-cache-dir/node_modules/p-locate": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.2.0" }, @@ -8348,9 +6603,8 @@ }, "node_modules/find-cache-dir/node_modules/pkg-dir": { "version": "4.2.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", - "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dev": true, + "license": "MIT", "dependencies": { "find-up": "^4.0.0" }, @@ -8360,18 +6614,16 @@ }, "node_modules/find-cache-dir/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/find-up": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -8383,10 +6635,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/find-up-simple": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/find-up-simple/-/find-up-simple-1.0.0.tgz", + "integrity": "sha512-q7Us7kcjj2VMePAa02hDAF6d+MzsdsAWEwYyOpwUtlerRBkOEPBCRZrAV4XfcSN8fHAgaD0hP7miwoay6DCprw==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/findup": { "version": "0.1.5", - "resolved": "https://registry.npmjs.org/findup/-/findup-0.1.5.tgz", - "integrity": "sha512-Udxo3C9A6alt2GZ2MNsgnIvX7De0V3VGxeP/x98NSVgSlizcDHdmJza61LI7zJy4OEtSiJyE72s0/+tBl5/ZxA==", "dev": true, "dependencies": { "colors": "~0.6.0-1", @@ -8401,8 +6663,6 @@ }, "node_modules/findup/node_modules/commander": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.1.0.tgz", - "integrity": 
"sha512-J2wnb6TKniXNOtoHS8TSrG9IOQluPrsmyAJ8oCUJOBmv+uLBCyPYAZkD2jFvw2DCzIXNnISIM01NIvr35TkBMQ==", "dev": true, "engines": { "node": ">= 0.6.x" @@ -8410,9 +6670,8 @@ }, "node_modules/flat-cache": { "version": "3.2.0", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", - "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", "dev": true, + "license": "MIT", "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", @@ -8424,19 +6683,15 @@ }, "node_modules/flatted": { "version": "3.3.1", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", - "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", - "devOptional": true + "dev": true, + "license": "ISC" }, "node_modules/fn.name": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", - "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" + "license": "MIT" }, "node_modules/follow-redirects": { "version": "1.15.6", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", - "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "dev": true, "funding": [ { @@ -8444,6 +6699,7 @@ "url": "https://github.com/sponsors/RubenVerborgh" } ], + "license": "MIT", "engines": { "node": ">=4.0" }, @@ -8455,53 +6711,26 @@ }, "node_modules/for-each": { "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", "dev": true, + "license": "MIT", "dependencies": { "is-callable": "^1.1.3" } }, "node_modules/foreground-child": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", - "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", "dev": true, + "license": "ISC", "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^3.0.2" }, "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/form-data-encoder": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", - "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 14.17" - } - }, - "node_modules/formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", - "optional": true, - "peer": true, - "dependencies": { - "fetch-blob": "^3.1.2" - }, - "engines": { - "node": ">=12.20.0" + "node": ">=8.0.0" } }, "node_modules/fromentries": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", - "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==", "dev": true, "funding": [ { @@ -8516,55 +6745,25 @@ "type": "consulting", "url": "https://feross.org/support" } - ] - }, - "node_modules/fs-extra": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", - "integrity": 
"sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", - "optional": true, - "peer": true, - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=14.14" - } + ], + "license": "MIT" }, "node_modules/fs.realpath": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } + "dev": true, + "license": "ISC" }, "node_modules/function-bind": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/function.prototype.name": { "version": "1.1.6", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", - "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -8580,126 +6779,51 @@ }, "node_modules/functional-red-black-tree": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==" + "license": "MIT" }, "node_modules/functions-have-names": { "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/geckodriver": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/geckodriver/-/geckodriver-4.4.1.tgz", - "integrity": "sha512-nnAdIrwLkMcDu4BitWXF23pEMeZZ0Cj7HaWWFdSpeedBP9z6ft150JYiGO2mwzw6UiR823Znk1JeIf07RyzloA==", - "hasInstallScript": true, - "optional": true, - "peer": true, - "dependencies": { - "@wdio/logger": "^8.28.0", - "@zip.js/zip.js": "^2.7.44", - "decamelize": "^6.0.0", - "http-proxy-agent": "^7.0.2", - "https-proxy-agent": "^7.0.4", - "node-fetch": "^3.3.2", - "tar-fs": "^3.0.6", - "which": "^4.0.0" - }, - "bin": { - "geckodriver": "bin/geckodriver.js" - }, - "engines": { - "node": "^16.13 || >=18 || >=20" - } - }, - "node_modules/geckodriver/node_modules/decamelize": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-6.0.0.tgz", - "integrity": "sha512-Fv96DCsdOgB6mdGl67MT5JaTNKRzrzill5OH5s8bjYJXVlcXyPYGyPsUkWyGV5p1TXI5esYIYMMeDJL0hEIwaA==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/geckodriver/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "optional": true, - "peer": true, - "engines": { - "node": ">=16" - } - }, - "node_modules/geckodriver/node_modules/tar-fs": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.6.tgz", - "integrity": "sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w==", - "optional": true, - "peer": true, - "dependencies": { - "pump": "^3.0.0", - "tar-stream": "^3.1.5" - }, - "optionalDependencies": { - "bare-fs": "^2.1.1", - "bare-path": "^2.1.0" - } - }, - "node_modules/geckodriver/node_modules/which": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", - "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", - "optional": true, - "peer": true, - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, + "node_modules/gensequence": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/gensequence/-/gensequence-7.0.0.tgz", + "integrity": "sha512-47Frx13aZh01afHJTB3zTtKIlFI6vWY+MYCN9Qpew6i52rfKjnhCF/l1YlC8UmEMvvntZZ6z4PiCcmyuedR2aQ==", + "dev": true, "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": ">=18" } }, "node_modules/gensync": { "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/get-caller-file": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" } }, "node_modules/get-func-name": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", - "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": "*" } }, "node_modules/get-intrinsic": { "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2", @@ -8716,27 +6840,24 @@ }, "node_modules/get-iterator": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/get-iterator/-/get-iterator-1.0.2.tgz", - "integrity": "sha512-v+dm9bNVfOYsY1OrhaCrmyOcYoSeVvbt+hHZ0Au+T+p1y+0Uyj9aMaGIeUTT6xdpRbWzDeYKvfOslPhggQMcsg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/get-package-type": { "version": "0.1.0", - "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", - "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=8.0.0" } }, - "node_modules/get-port": { - "version": "7.1.0", - "resolved": 
"https://registry.npmjs.org/get-port/-/get-port-7.1.0.tgz", - "integrity": "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==", - "optional": true, - "peer": true, + "node_modules/get-stdin": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz", + "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", + "dev": true, "engines": { - "node": ">=16" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -8744,9 +6865,8 @@ }, "node_modules/get-stream": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -8756,9 +6876,8 @@ }, "node_modules/get-symbol-description": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", - "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.5", "es-errors": "^1.3.0", @@ -8772,10 +6891,9 @@ } }, "node_modules/get-tsconfig": { - "version": "4.7.5", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz", - "integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==", + "version": "4.7.6", "dev": true, + "license": "MIT", "dependencies": { "resolve-pkg-maps": "^1.0.0" }, @@ -8783,45 +6901,16 @@ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, - "node_modules/get-uri": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.3.tgz", - "integrity": "sha512-BzUrJBS9EcUb4cFol8r4W3v1cPsSyajLSthNkz5BxbpDcHN5tIrM10E2eNvfnvBn3DaT3DUgx0OpsBKkaOpanw==", - "optional": true, - "peer": true, - "dependencies": { - "basic-ftp": "^5.0.2", - "data-uri-to-buffer": "^6.0.2", - "debug": "^4.3.4", - "fs-extra": "^11.2.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/get-uri/node_modules/data-uri-to-buffer": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz", - "integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 14" - } - }, "node_modules/git-hooks-list": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/git-hooks-list/-/git-hooks-list-1.0.3.tgz", - "integrity": "sha512-Y7wLWcrLUXwk2noSka166byGCvhMtDRpgHdzCno1UQv/n/Hegp++a2xBWJL1lJarnKD3SWaljD+0z1ztqxuKyQ==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/fisker/git-hooks-list?sponsor=1" } }, "node_modules/gitignore-parser": { "version": "0.0.2", - "resolved": "https://registry.npmjs.org/gitignore-parser/-/gitignore-parser-0.0.2.tgz", - "integrity": "sha512-X6mpqUv59uWLGD4n3hZ8Cu8KbF2PMWPSFYmxZjdkpm3yOU7hSUYnzTkZI1mcWqchphvqyuz3/BhgBR4E/JtkCg==", "dev": true, "engines": { "node": ">=0.10.0" @@ -8829,10 +6918,8 @@ }, "node_modules/glob": { "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "deprecated": 
"Glob versions prior to v9 are no longer supported", "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -8850,9 +6937,8 @@ }, "node_modules/glob-parent": { "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, + "license": "ISC", "dependencies": { "is-glob": "^4.0.3" }, @@ -8860,11 +6946,25 @@ "node": ">=10.13.0" } }, + "node_modules/global-directory": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz", + "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==", + "dev": true, + "dependencies": { + "ini": "4.1.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/globals": { "version": "13.24.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", - "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, + "license": "MIT", "dependencies": { "type-fest": "^0.20.2" }, @@ -8877,9 +6977,8 @@ }, "node_modules/globalthis": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", - "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", "dev": true, + "license": "MIT", "dependencies": { "define-properties": "^1.2.1", "gopd": "^1.0.1" @@ -8893,9 +6992,8 @@ }, "node_modules/globby": { "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "dev": true, + "license": "MIT", "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", @@ -8913,8 +7011,7 @@ }, "node_modules/gopd": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "license": "MIT", "dependencies": { "get-intrinsic": "^1.1.3" }, @@ -8922,66 +7019,20 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/got": { - "version": "12.6.1", - "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", - "integrity": "sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", - "optional": true, - "peer": true, - "dependencies": { - "@sindresorhus/is": "^5.2.0", - "@szmarczak/http-timer": "^5.0.1", - "cacheable-lookup": "^7.0.0", - "cacheable-request": "^10.2.8", - "decompress-response": "^6.0.0", - "form-data-encoder": "^2.1.2", - "get-stream": "^6.0.1", - "http2-wrapper": "^2.1.10", - "lowercase-keys": "^3.0.0", - "p-cancelable": "^3.0.0", - "responselike": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/got?sponsor=1" - } - }, "node_modules/graceful-fs": { "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "devOptional": true - }, - "node_modules/grapheme-splitter": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", - "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", - "optional": true, - "peer": true + "dev": true, + "license": "ISC" }, "node_modules/graphemer": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true - }, - "node_modules/graphql": { - "version": "16.9.0", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz", - "integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" - } + "dev": true, + "license": "MIT" }, "node_modules/handlebars": { "version": "4.7.8", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", - "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", "dev": true, + "license": "MIT", "dependencies": { "minimist": "^1.2.5", "neo-async": "^2.6.2", @@ -8998,29 +7049,34 @@ "uglify-js": "^3.1.4" } }, + "node_modules/handlebars/node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/has": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.4.tgz", - "integrity": "sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4.0" } }, "node_modules/has-bigints": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-dynamic-import": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/has-dynamic-import/-/has-dynamic-import-2.1.0.tgz", - "integrity": "sha512-su0anMkNEnJKZ/rB99jn3y6lV/J8Ro96hBJ28YAeVzj5rWxH+YL/AdCyiYYA1HDLV9YhmvqpWSJJj2KLo1MX6g==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.5", "get-intrinsic": "^1.2.2" @@ -9034,16 +7090,23 @@ }, "node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-own-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-own-prop/-/has-own-prop-2.0.0.tgz", + "integrity": "sha512-Pq0h+hvsVm6dDEa8x82GnLSYHOzNDt7f0ddFa3FqcQlgzEiptPqL+XrOJNavjOzSYiYWIrgeVYYgGlLmnxwilQ==", + "dev": true, "engines": { "node": ">=8" } }, "node_modules/has-property-descriptors": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0" }, @@ -9053,8 +7116,7 @@ }, "node_modules/has-proto": { "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", - "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9064,8 +7126,7 @@ }, "node_modules/has-symbols": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9075,9 +7136,8 @@ }, "node_modules/has-tostringtag": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, + "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" }, @@ -9090,9 +7150,8 @@ }, "node_modules/hash-base": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", - "integrity": "sha512-EeeoJKjTyt868liAlVmcv2ZsUfGHlE3Q+BICOXcZiwN3osr5Q/zFGYmTJpoIzuaSTAwndFy+GqhEwlU4L3j4Ow==", "dev": true, + "license": "MIT", "dependencies": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" @@ -9103,9 +7162,8 @@ }, "node_modules/hash.js": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", - "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", "dev": true, + "license": "MIT", "dependencies": { "inherits": "^2.0.3", "minimalistic-assert": "^1.0.1" @@ -9113,9 +7171,8 @@ }, "node_modules/hasha": { "version": "5.2.2", - "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", - "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", "dev": true, + "license": "MIT", "dependencies": { "is-stream": "^2.0.0", "type-fest": "^0.8.0" @@ -9129,9 +7186,8 @@ }, "node_modules/hasha/node_modules/is-stream": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -9141,22 +7197,19 @@ }, "node_modules/hasha/node_modules/type-fest": { "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=8" } }, "node_modules/hashlru": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/hashlru/-/hashlru-2.3.0.tgz", - "integrity": "sha512-0cMsjjIC8I+D3M44pOQdsy0OHXGLVz6Z0beRuufhKa0KfaD2wGwAev6jILzXsd3/vpnNQJmWyZtIILqM1N+n5A==" + "license": "MIT" }, "node_modules/hasown": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", "dependencies": { "function-bind": "^1.1.2" }, @@ -9164,18 +7217,10 @@ "node": ">= 0.4" } }, - "node_modules/headers-polyfill": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.3.tgz", - "integrity": "sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==", - 
"optional": true, - "peer": true - }, "node_modules/hmac-drbg": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", - "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", "dev": true, + "license": "MIT", "dependencies": { "hash.js": "^1.0.3", "minimalistic-assert": "^1.0.0", @@ -9184,21 +7229,12 @@ }, "node_modules/html-escaper": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true - }, - "node_modules/http-cache-semantics": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "optional": true, - "peer": true + "dev": true, + "license": "MIT" }, "node_modules/http-errors": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", @@ -9212,73 +7248,27 @@ }, "node_modules/http-errors/node_modules/statuses": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, - "node_modules/http-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", - "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", - "optional": true, - "peer": true, - "dependencies": { - "agent-base": "^7.1.0", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/http2-wrapper": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.1.tgz", - "integrity": "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==", - "optional": true, - "peer": true, - "dependencies": { - "quick-lru": "^5.1.1", - "resolve-alpn": "^1.2.0" - }, - "engines": { - "node": ">=10.19.0" - } - }, "node_modules/https-browserify": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", - "integrity": "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==", - "dev": true - }, - "node_modules/https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", - "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", - "optional": true, - "peer": true, - "dependencies": { - "agent-base": "^7.0.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } + "dev": true, + "license": "MIT" }, "node_modules/human-signals": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-3.0.1.tgz", - "integrity": "sha512-rQLskxnM/5OCldHo+wNXbpVgDn5A17CUoKX+7Sokwaknlq7CdSnphy0W39GU8dw59XiCXmFXDg4fRuckQRKewQ==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=12.20.0" } 
}, "node_modules/iconv-lite": { "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" }, @@ -9306,26 +7296,17 @@ ] }, "node_modules/ignore": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", - "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "version": "5.3.2", "dev": true, + "license": "MIT", "engines": { "node": ">= 4" } }, - "node_modules/immediate": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", - "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", - "optional": true, - "peer": true - }, "node_modules/import-fresh": { "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dev": true, + "license": "MIT", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -9341,8 +7322,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz", "integrity": "sha512-I6fiaX09Xivtk+THaMfAwnA3MVA5Big1WHF1Dfx9hFuvNIWpXnorlkzhcQf6ehrqQiiZECRt1poOAkPmer3ruw==", - "optional": true, - "peer": true, + "dev": true, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -9350,28 +7330,24 @@ }, "node_modules/imurmurhash": { "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.19" } }, "node_modules/indent-string": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/inflight": { "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dev": true, + "license": "ISC", "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -9379,14 +7355,21 @@ }, "node_modules/inherits": { "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "license": "ISC" + }, + "node_modules/ini": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz", + "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } }, "node_modules/internal-slot": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", - "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "hasown": "^2.0.0", @@ -9396,32 +7379,10 @@ "node": ">= 0.4" } }, - "node_modules/ip-address": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", - "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", - "optional": true, - "peer": true, - "dependencies": { - "jsbn": "1.1.0", - "sprintf-js": "^1.1.3" - }, - "engines": { - "node": ">= 12" - } - }, - "node_modules/ip-address/node_modules/sprintf-js": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", - "optional": true, - "peer": true - }, "node_modules/is-arguments": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", - "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -9435,9 +7396,8 @@ }, "node_modules/is-array-buffer": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", - "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "get-intrinsic": "^1.2.1" @@ -9451,14 +7411,12 @@ }, "node_modules/is-arrayish": { "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + "license": "MIT" }, "node_modules/is-async-function": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz", - "integrity": "sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -9471,9 +7429,8 @@ }, "node_modules/is-bigint": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", - "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "dev": true, + 
"license": "MIT", "dependencies": { "has-bigints": "^1.0.1" }, @@ -9483,9 +7440,8 @@ }, "node_modules/is-boolean-object": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", - "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -9499,8 +7455,6 @@ }, "node_modules/is-buffer": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", - "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", "funding": [ { "type": "github", @@ -9515,15 +7469,15 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/is-callable": { "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9532,10 +7486,9 @@ } }, "node_modules/is-core-module": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.14.0.tgz", - "integrity": "sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==", + "version": "2.15.0", "dev": true, + "license": "MIT", "dependencies": { "hasown": "^2.0.2" }, @@ -9548,9 +7501,8 @@ }, "node_modules/is-data-view": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", - "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", "dev": true, + "license": "MIT", "dependencies": { "is-typed-array": "^1.1.13" }, @@ -9563,9 +7515,8 @@ }, "node_modules/is-date-object": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", - "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -9578,9 +7529,8 @@ }, "node_modules/is-docker": { "version": "2.2.1", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", - "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", "dev": true, + "license": "MIT", "bin": { "is-docker": "cli.js" }, @@ -9593,18 +7543,16 @@ }, "node_modules/is-extglob": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/is-finalizationregistry": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz", - "integrity": "sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2" }, @@ -9614,9 +7562,8 @@ }, "node_modules/is-fullwidth-code-point": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", - "integrity": 
"sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -9626,9 +7573,8 @@ }, "node_modules/is-generator-function": { "version": "1.0.10", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", - "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -9641,9 +7587,8 @@ }, "node_modules/is-glob": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" }, @@ -9653,9 +7598,8 @@ }, "node_modules/is-map": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", - "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9665,9 +7609,8 @@ }, "node_modules/is-nan": { "version": "1.3.2", - "resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz", - "integrity": "sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.0", "define-properties": "^1.1.3" @@ -9681,9 +7624,8 @@ }, "node_modules/is-negative-zero": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", - "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9691,27 +7633,18 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-node-process": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-node-process/-/is-node-process-1.2.0.tgz", - "integrity": "sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==", - "optional": true, - "peer": true - }, "node_modules/is-number": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=0.12.0" } }, "node_modules/is-number-object": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", - "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -9724,27 +7657,24 @@ }, "node_modules/is-path-inside": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/is-plain-obj": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": 
"sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/is-regex": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -9758,18 +7688,16 @@ }, "node_modules/is-regexp": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", - "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/is-set": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", - "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -9779,9 +7707,8 @@ }, "node_modules/is-shared-array-buffer": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", - "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7" }, @@ -9794,9 +7721,8 @@ }, "node_modules/is-stream": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, @@ -9806,9 +7732,8 @@ }, "node_modules/is-string": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "dev": true, + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -9821,9 +7746,8 @@ }, "node_modules/is-symbol": { "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", - "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, + "license": "MIT", "dependencies": { "has-symbols": "^1.0.2" }, @@ -9836,9 +7760,8 @@ }, "node_modules/is-typed-array": { "version": "1.1.13", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", - "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, + "license": "MIT", "dependencies": { "which-typed-array": "^1.1.14" }, @@ -9851,15 +7774,13 @@ }, "node_modules/is-typedarray": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/is-weakmap": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", - "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", "dev": true, + "license": "MIT", "engines": { 
"node": ">= 0.4" }, @@ -9869,9 +7790,8 @@ }, "node_modules/is-weakref": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", - "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.2" }, @@ -9881,9 +7801,8 @@ }, "node_modules/is-weakset": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz", - "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "get-intrinsic": "^1.2.4" @@ -9897,18 +7816,16 @@ }, "node_modules/is-windows": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/is-wsl": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", - "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "dev": true, + "license": "MIT", "dependencies": { "is-docker": "^2.0.0" }, @@ -9918,48 +7835,42 @@ }, "node_modules/isarray": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/isexe": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "devOptional": true + "dev": true, + "license": "ISC" }, "node_modules/isomorphic-timers-promises": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/isomorphic-timers-promises/-/isomorphic-timers-promises-1.0.1.tgz", - "integrity": "sha512-u4sej9B1LPSxTGKB/HiuzvEQnXH0ECYkSVQU39koSwmFAxhlEAFl9RdTvLv4TOTQUgBS5O3O5fwUxk6byBZ+IQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/isomorphic-ws": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", - "integrity": "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==", "dev": true, + "license": "MIT", "peerDependencies": { "ws": "*" } }, "node_modules/istanbul-lib-coverage": { "version": "3.2.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", - "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">=8" } }, "node_modules/istanbul-lib-hook": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", - "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "append-transform": "^2.0.0" }, @@ -9969,9 +7880,8 @@ }, "node_modules/istanbul-lib-instrument": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", - "integrity": 
"sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "@babel/core": "^7.7.5", "@istanbuljs/schema": "^0.1.2", @@ -9984,18 +7894,16 @@ }, "node_modules/istanbul-lib-instrument/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/istanbul-lib-processinfo": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz", - "integrity": "sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==", "dev": true, + "license": "ISC", "dependencies": { "archy": "^1.0.0", "cross-spawn": "^7.0.3", @@ -10010,9 +7918,8 @@ }, "node_modules/istanbul-lib-processinfo/node_modules/p-map": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -10022,9 +7929,8 @@ }, "node_modules/istanbul-lib-report": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", - "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "istanbul-lib-coverage": "^3.0.0", "make-dir": "^4.0.0", @@ -10035,10 +7941,9 @@ } }, "node_modules/istanbul-lib-source-maps": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.5.tgz", - "integrity": "sha512-gKf4eJ8bHmSX/ljiOCpnd8vtmHTwG71uugm0kXYd5aqFCl6z8cj8k7QduXSwU6QOst6LCdSXTlaoc8W4554crQ==", + "version": "5.0.6", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "@jridgewell/trace-mapping": "^0.3.23", "debug": "^4.1.1", @@ -10050,9 +7955,8 @@ }, "node_modules/istanbul-reports": { "version": "3.1.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", - "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" @@ -10063,31 +7967,27 @@ }, "node_modules/it-pair": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/it-pair/-/it-pair-1.0.0.tgz", - "integrity": "sha512-9raOiDu5OAuDOahtMtapKQDrQTxBfzlzrNcB6o7JARHkt+7Bb1dMkW/TpYdAjBJE77KH3e2zGzwpGUP9tXbLww==", "dev": true, + "license": "MIT", "dependencies": { "get-iterator": "^1.0.2" } }, "node_modules/it-pipe": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/it-pipe/-/it-pipe-1.1.0.tgz", - "integrity": "sha512-lF0/3qTVeth13TOnHVs0BTFaziwQF7m5Gg+E6JV0BXcLKutC92YjSi7bASgkPOXaLEb+YvNZrPorGMBIJvZfxg==" + "license": "MIT" }, "node_modules/it-pushable": { "version": "1.4.2", - "resolved": "https://registry.npmjs.org/it-pushable/-/it-pushable-1.4.2.tgz", - "integrity": "sha512-vVPu0CGRsTI8eCfhMknA7KIBqqGFolbRx+1mbQ6XuZ7YCz995Qj7L4XUviwClFunisDq96FdxzF5FnAbw15afg==", "dev": true, + "license": "MIT", "dependencies": { "fast-fifo": "^1.0.0" } }, "node_modules/it-stream-types": { 
"version": "2.0.1", - "resolved": "https://registry.npmjs.org/it-stream-types/-/it-stream-types-2.0.1.tgz", - "integrity": "sha512-6DmOs5r7ERDbvS4q8yLKENcj6Yecr7QQTqWApbZdfAUTEC947d+PEha7PCqhm//9oxaLYL7TWRekwhoXl2s6fg==", + "license": "Apache-2.0 OR MIT", "engines": { "node": ">=16.0.0", "npm": ">=7.0.0" @@ -10095,9 +7995,8 @@ }, "node_modules/iterator.prototype": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz", - "integrity": "sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==", "dev": true, + "license": "MIT", "dependencies": { "define-properties": "^1.2.1", "get-intrinsic": "^1.2.1", @@ -10107,16 +8006,12 @@ } }, "node_modules/jackspeak": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", - "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", - "devOptional": true, + "version": "3.4.3", + "dev": true, + "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/cliui": "^8.0.2" }, - "engines": { - "node": ">=14" - }, "funding": { "url": "https://github.com/sponsors/isaacs" }, @@ -10125,9 +8020,8 @@ } }, "node_modules/jayson": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/jayson/-/jayson-4.1.0.tgz", - "integrity": "sha512-R6JlbyLN53Mjku329XoRT2zJAE6ZgOQ8f91ucYdMCD4nkGCF9kZSrcGXpHIU4jeKj58zUZke2p+cdQchU7Ly7A==", + "version": "4.1.1", + "license": "MIT", "dependencies": { "@types/connect": "^3.4.33", "@types/node": "^12.12.54", @@ -10140,7 +8034,7 @@ "json-stringify-safe": "^5.0.1", "JSONStream": "^1.3.5", "uuid": "^8.3.2", - "ws": "^7.4.5" + "ws": "^7.5.10" }, "bin": { "jayson": "bin/jayson.js" @@ -10151,34 +8045,29 @@ }, "node_modules/jayson/node_modules/@types/node": { "version": "12.20.55", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.55.tgz", - "integrity": "sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==" + "license": "MIT" }, "node_modules/jayson/node_modules/@types/ws": { "version": "7.4.7", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-7.4.7.tgz", - "integrity": "sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==", + "license": "MIT", "dependencies": { "@types/node": "*" } }, "node_modules/jayson/node_modules/commander": { "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + "license": "MIT" }, "node_modules/jayson/node_modules/isomorphic-ws": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", - "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", + "license": "MIT", "peerDependencies": { "ws": "*" } }, "node_modules/jayson/node_modules/ws": { "version": "7.5.10", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", - "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", + "license": "MIT", "engines": { "node": ">=8.3.0" }, @@ -10197,35 +8086,22 @@ }, "node_modules/js-md5": { "version": "0.8.3", - "resolved": "https://registry.npmjs.org/js-md5/-/js-md5-0.8.3.tgz", - "integrity": "sha512-qR0HB5uP6wCuRMrWPTrkMaev7MJZwJuuw4fnwAzRgP4J4/F8RwtodOKpGp4XpqsLBFzzgqIO42efFAyz2Et6KQ==" - }, - 
"node_modules/js-sdsl": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.4.2.tgz", - "integrity": "sha512-dwXFwByc/ajSV6m5bcKAPwe4yDDF6D614pxmIi5odytzxRlwqF6nwoiCek80Ixc7Cvma5awClxrzFtxCQvcM8w==", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/js-sdsl" - } + "license": "MIT" }, "node_modules/js-sha3": { "version": "0.8.0", - "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", - "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/js-tokens": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/js-yaml": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^2.0.1" }, @@ -10233,18 +8109,10 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/jsbn": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", - "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", - "optional": true, - "peer": true - }, "node_modules/jsesc": { "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true, + "license": "MIT", "bin": { "jsesc": "bin/jsesc" }, @@ -10254,32 +8122,27 @@ }, "node_modules/json-buffer": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/json-schema-traverse": { "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/json-stringify-safe": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + "license": "ISC" }, "node_modules/json5": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", "dev": true, + "license": "MIT", "dependencies": { "minimist": "^1.2.0" }, @@ -10289,35 +8152,19 @@ }, "node_modules/jsonc-parser": { "version": "3.3.1", - 
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz", - "integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==", - "dev": true - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "optional": true, - "peer": true, - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } + "dev": true, + "license": "MIT" }, "node_modules/jsonparse": { "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", "engines": [ "node >= 0.2.0" - ] + ], + "license": "MIT" }, "node_modules/JSONStream": { "version": "1.3.5", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", - "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", + "license": "(MIT OR Apache-2.0)", "dependencies": { "jsonparse": "^1.2.0", "through": ">=2.2.7 <3" @@ -10331,9 +8178,8 @@ }, "node_modules/jsx-ast-utils": { "version": "3.3.5", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", - "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", "dev": true, + "license": "MIT", "dependencies": { "array-includes": "^3.1.6", "array.prototype.flat": "^1.3.1", @@ -10344,102 +8190,31 @@ "node": ">=4.0" } }, - "node_modules/jszip": { - "version": "3.10.1", - "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", - "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", - "optional": true, - "peer": true, - "dependencies": { - "lie": "~3.3.0", - "pako": "~1.0.2", - "readable-stream": "~2.3.6", - "setimmediate": "^1.0.5" - } - }, - "node_modules/jszip/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "optional": true, - "peer": true - }, - "node_modules/jszip/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "optional": true, - "peer": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/jszip/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "optional": true, - "peer": true - }, - "node_modules/jszip/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "optional": true, - "peer": 
true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/keyv": { "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "json-buffer": "3.0.1" } }, "node_modules/kuler": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", - "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" - }, - "node_modules/ky": { - "version": "0.33.3", - "resolved": "https://registry.npmjs.org/ky/-/ky-0.33.3.tgz", - "integrity": "sha512-CasD9OCEQSFIam2U8efFK81Yeg8vNMTBUqtMOHlrcWQHqUX3HeCl9Dr31u4toV7emlH8Mymk5+9p0lL6mKb/Xw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/ky?sponsor=1" - } + "license": "MIT" }, "node_modules/kzg-wasm": { "version": "0.4.0", - "resolved": "https://registry.npmjs.org/kzg-wasm/-/kzg-wasm-0.4.0.tgz", - "integrity": "sha512-hKEwFbKrY1LZnAH5gY8+PlVWfkGnj2wd2tc83eIgzuC4NoshXqplW9OzGlBDqpAmXxwhiN8fgPG2+NcvUIBSwg==" + "license": "MIT" }, "node_modules/language-subtag-registry": { "version": "0.3.23", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz", - "integrity": "sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==", - "dev": true + "dev": true, + "license": "CC0-1.0" }, "node_modules/language-tags": { "version": "1.0.9", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.9.tgz", - "integrity": "sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==", "dev": true, + "license": "MIT", "dependencies": { "language-subtag-registry": "^0.3.20" }, @@ -10447,63 +8222,9 @@ "node": ">=0.10" } }, - "node_modules/lazystream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", - "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", - "optional": true, - "peer": true, - "dependencies": { - "readable-stream": "^2.0.5" - }, - "engines": { - "node": ">= 0.6.3" - } - }, - "node_modules/lazystream/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "optional": true, - "peer": true - }, - "node_modules/lazystream/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "optional": true, - "peer": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/lazystream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "optional": true, - "peer": true - }, - 
"node_modules/lazystream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "optional": true, - "peer": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, "node_modules/level": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/level/-/level-8.0.1.tgz", - "integrity": "sha512-oPBGkheysuw7DmzFQYyFe8NAia5jFLAgEnkgWnK3OXAuJr8qFT+xBQIwokAZPME2bhPFzS8hlYcL16m8UZrtwQ==", + "license": "MIT", "dependencies": { "abstract-level": "^1.0.4", "browser-level": "^1.0.1", @@ -10519,9 +8240,8 @@ }, "node_modules/level-codec": { "version": "10.0.0", - "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-10.0.0.tgz", - "integrity": "sha512-QW3VteVNAp6c/LuV6nDjg7XDXx9XHK4abmQarxZmlRSDyXYk20UdaJTSX6yzVvQ4i0JyWSB7jert0DsyD/kk6g==", "dev": true, + "license": "MIT", "dependencies": { "buffer": "^6.0.3" }, @@ -10531,9 +8251,8 @@ }, "node_modules/level-concat-iterator": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-3.1.0.tgz", - "integrity": "sha512-BWRCMHBxbIqPxJ8vHOvKUsaO0v1sLYZtjN3K2iZJsRBYtp+ONsY6Jfi6hy9K3+zolgQRryhIn2NRZjZnWJ9NmQ==", "dev": true, + "license": "MIT", "dependencies": { "catering": "^2.1.0" }, @@ -10543,18 +8262,16 @@ }, "node_modules/level-errors": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-3.0.1.tgz", - "integrity": "sha512-tqTL2DxzPDzpwl0iV5+rBCv65HWbHp6eutluHNcVIftKZlQN//b6GEnZDM2CvGZvzGYMwyPtYppYnydBQd2SMQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/level-iterator-stream": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-5.0.0.tgz", - "integrity": "sha512-wnb1+o+CVFUDdiSMR/ZymE2prPs3cjVLlXuDeSq9Zb8o032XrabGEXcTCsBxprAtseO3qvFeGzh6406z9sOTRA==", "dev": true, + "license": "MIT", "dependencies": { "inherits": "^2.0.4", "readable-stream": "^3.4.0" @@ -10565,9 +8282,8 @@ }, "node_modules/level-js": { "version": "6.1.0", - "resolved": "https://registry.npmjs.org/level-js/-/level-js-6.1.0.tgz", - "integrity": "sha512-i7mPtkZm68aewfv0FnIUWvFUFfoyzIvVKnUmuQGrelEkP72vSPTaA1SGneWWoCV5KZJG4wlzbJLp1WxVNGuc6A==", "dev": true, + "license": "MIT", "dependencies": { "abstract-leveldown": "^7.2.0", "buffer": "^6.0.3", @@ -10579,9 +8295,8 @@ "node_modules/level-legacy": { "name": "level", "version": "7.0.1", - "resolved": "https://registry.npmjs.org/level/-/level-7.0.1.tgz", - "integrity": "sha512-w3E64+ALx2eZf8RV5JL4kIcE0BFAvQscRYd1yU4YVqZN9RGTQxXSvH202xvK15yZwFFxRXe60f13LJjcJ//I4Q==", "dev": true, + "license": "MIT", "dependencies": { "level-js": "^6.1.0", "level-packager": "^6.0.1", @@ -10597,9 +8312,8 @@ }, "node_modules/level-mem": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/level-mem/-/level-mem-6.0.1.tgz", - "integrity": "sha512-IBliILfS59qDUibuGs/jCD0loih0oI0+5pmvsZ0Yxa/SWBEEgVT70dKnArEo7UdOciUHEcyD07LEx5Ox5QHIMg==", "dev": true, + "license": "MIT", "dependencies": { "level-packager": "^6.0.1", "memdown": "^6.1.1" @@ -10610,9 +8324,8 @@ }, "node_modules/level-packager": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-6.0.1.tgz", - "integrity": "sha512-8Ezr0XM6hmAwqX9uu8IGzGNkWz/9doyPA8Oo9/D7qcMI6meJC+XhIbNYHukJhIn8OGdlzQs/JPcL9B8lA2F6EQ==", "dev": true, + "license": "MIT", "dependencies": 
{ "encoding-down": "^7.1.0", "levelup": "^5.1.1" @@ -10623,16 +8336,14 @@ }, "node_modules/level-supports": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-4.0.1.tgz", - "integrity": "sha512-PbXpve8rKeNcZ9C1mUicC9auIYFyGpkV9/i6g76tLgANwWhtG2v7I4xNBUlkn3lE2/dZF3Pi0ygYGtLc4RXXdA==", + "license": "MIT", "engines": { "node": ">=12" } }, "node_modules/level-transcoder": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-transcoder/-/level-transcoder-1.0.1.tgz", - "integrity": "sha512-t7bFwFtsQeD8cl8NIoQ2iwxA0CL/9IFw7/9gAjOonH0PWTTiRfY7Hq+Ejbsxh86tXobDQ6IOiddjNYIfOBs06w==", + "license": "MIT", "dependencies": { "buffer": "^6.0.3", "module-error": "^1.0.1" @@ -10643,10 +8354,9 @@ }, "node_modules/leveldown": { "version": "6.1.1", - "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-6.1.1.tgz", - "integrity": "sha512-88c+E+Eizn4CkQOBHwqlCJaTNEjGpaEIikn1S+cINc5E9HEvJ77bqY4JY/HxT5u0caWqsc3P3DcFIKBI1vHt+A==", "dev": true, "hasInstallScript": true, + "license": "MIT", "dependencies": { "abstract-leveldown": "^7.2.0", "napi-macros": "~2.0.0", @@ -10658,15 +8368,13 @@ }, "node_modules/leveldown/node_modules/napi-macros": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/napi-macros/-/napi-macros-2.0.0.tgz", - "integrity": "sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/levelup": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/levelup/-/levelup-5.1.1.tgz", - "integrity": "sha512-0mFCcHcEebOwsQuk00WJwjLI6oCjbBuEYdh/RaRqhjnyVlzqf41T1NnDtCedumZ56qyIh8euLFDqV1KfzTAVhg==", "dev": true, + "license": "MIT", "dependencies": { "catering": "^2.0.0", "deferred-leveldown": "^7.0.0", @@ -10681,18 +8389,16 @@ }, "node_modules/levelup/node_modules/level-supports": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.1.0.tgz", - "integrity": "sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/levn": { "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, + "license": "MIT", "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -10701,30 +8407,18 @@ "node": ">= 0.8.0" } }, - "node_modules/lie": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", - "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", - "optional": true, - "peer": true, - "dependencies": { - "immediate": "~3.0.5" - } - }, "node_modules/lilconfig": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.5.tgz", - "integrity": "sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/lint-staged": { "version": "13.0.3", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.0.3.tgz", - "integrity": "sha512-9hmrwSCFroTSYLjflGI8Uk+GWAwMB4OlpU4bMJEAT5d/llQwtYKoim4bLOyLCuWFAhWEupE0vkIFqtw/WIsPug==", "dev": true, + "license": "MIT", "dependencies": { "cli-truncate": "^3.1.0", "colorette": "^2.0.17", @@ -10752,18 +8446,16 @@ }, 
"node_modules/lint-staged/node_modules/commander": { "version": "9.5.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", - "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", "dev": true, + "license": "MIT", "engines": { "node": "^12.20.0 || >=14" } }, "node_modules/listr2": { "version": "4.0.5", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-4.0.5.tgz", - "integrity": "sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA==", "dev": true, + "license": "MIT", "dependencies": { "cli-truncate": "^2.1.0", "colorette": "^2.0.16", @@ -10788,9 +8480,8 @@ }, "node_modules/listr2/node_modules/cli-truncate": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", - "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", "dev": true, + "license": "MIT", "dependencies": { "slice-ansi": "^3.0.0", "string-width": "^4.2.0" @@ -10804,24 +8495,21 @@ }, "node_modules/listr2/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/listr2/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/listr2/node_modules/slice-ansi": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", - "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "astral-regex": "^2.0.0", @@ -10833,9 +8521,8 @@ }, "node_modules/listr2/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -10847,10 +8534,9 @@ }, "node_modules/lmdb": { "version": "2.9.4", - "resolved": "https://registry.npmjs.org/lmdb/-/lmdb-2.9.4.tgz", - "integrity": "sha512-Kri5TSKgpLk5q1VO7vYCcqAMyXTxmis6Et+6UARkU7ygvg3ZxUX2oEu/UwBkBskaS1d73effiBrTiHYyDppcBg==", "dev": true, "hasInstallScript": true, + "license": "MIT", "dependencies": { "msgpackr": "^1.9.9", "node-addon-api": "^6.1.0", @@ -10870,46 +8556,10 @@ "@lmdb/lmdb-win32-x64": "2.9.4" } }, - "node_modules/locate-app": { - "version": "2.4.21", - "resolved": "https://registry.npmjs.org/locate-app/-/locate-app-2.4.21.tgz", - "integrity": "sha512-ySSBwlUnVKoLgw39q8YaNtvklhaTMoVqBf2+CuY3hkOFuWubHAJ6NJuTjv+jfTV1FuOgKsigRdsYUIeVgKHvNA==", - "funding": [ - { - "type": "individual", - "url": "https://buymeacoffee.com/hejny" - }, - { - "type": "github", - "url": "https://github.com/hejny/locate-app/blob/main/README.md#%EF%B8%8F-contributing" - } - ], - "optional": true, - "peer": true, - "dependencies": { - "@promptbook/utils": "0.58.0", - "type-fest": "2.13.0", 
- "userhome": "1.0.0" - } - }, - "node_modules/locate-app/node_modules/type-fest": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.13.0.tgz", - "integrity": "sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/locate-path": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^5.0.0" }, @@ -10922,9 +8572,8 @@ }, "node_modules/lockfile-lint-api": { "version": "5.9.1", - "resolved": "https://registry.npmjs.org/lockfile-lint-api/-/lockfile-lint-api-5.9.1.tgz", - "integrity": "sha512-us5IT1bGA6KXbq1WrhrSzk9mtPgHKz5nhvv3S4hwcYnhcVOKW2uK0W8+PN9oIgv4pI49WsD5wBdTQFTpNChF/Q==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@yarnpkg/parsers": "^3.0.0-rc.48.1", "debug": "^4.3.4", @@ -10936,64 +8585,48 @@ }, "node_modules/lodash": { "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.camelcase": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", - "dev": true - }, - "node_modules/lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==", - "optional": true, - "peer": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.flattendeep": { "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", - "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.kebabcase": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", - "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.merge": { "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.snakecase": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz", - "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.upperfirst": { "version": "4.3.1", - "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz", - "integrity": 
"sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.zip": { "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.zip/-/lodash.zip-4.2.0.tgz", - "integrity": "sha512-C7IOaBBK/0gMORRBd8OETNx3kmOkgIWIPvyDpZSCTwUrpYmgZwJkjZeOD8ww4xbOUOs4/attY+pciKvadNfFbg==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/log-update": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", - "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", "dev": true, + "license": "MIT", "dependencies": { "ansi-escapes": "^4.3.0", "cli-cursor": "^3.1.0", @@ -11009,24 +8642,21 @@ }, "node_modules/log-update/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/log-update/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/log-update/node_modules/slice-ansi": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "astral-regex": "^2.0.0", @@ -11041,9 +8671,8 @@ }, "node_modules/log-update/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -11055,9 +8684,8 @@ }, "node_modules/log-update/node_modules/wrap-ansi": { "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -11068,114 +8696,60 @@ } }, "node_modules/logform": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.0.tgz", - "integrity": "sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ==", + "version": "2.6.1", + "license": "MIT", "dependencies": { "@colors/colors": "1.6.0", "@types/triple-beam": "^1.3.2", - "fecha": "^4.2.0", - "ms": "^2.1.1", - "safe-stable-stringify": "^2.3.1", - "triple-beam": "^1.3.0" - }, - "engines": { - "node": ">= 12.0.0" - } - }, - "node_modules/loglevel": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.1.tgz", - "integrity": "sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 0.6.0" + "fecha": "^4.2.0", + "ms": 
"^2.1.1", + "safe-stable-stringify": "^2.3.1", + "triple-beam": "^1.3.0" }, - "funding": { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/loglevel" + "engines": { + "node": ">= 12.0.0" } }, - "node_modules/loglevel-plugin-prefix": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/loglevel-plugin-prefix/-/loglevel-plugin-prefix-0.8.4.tgz", - "integrity": "sha512-WpG9CcFAOjz/FtNht+QJeGpvVl/cdR6P0z6OcXSkr8wFJOsV2GRj2j10JLfjuA4aYkcKCNIEqRGCyTife9R8/g==", - "optional": true, - "peer": true - }, "node_modules/lolex": { "version": "1.6.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.6.0.tgz", - "integrity": "sha512-/bpxDL56TG5LS5zoXxKqA6Ro5tkOS5M8cm/7yQcwLIKIcM2HR5fjjNCaIhJNv96SEk4hNGSafYMZK42Xv5fihQ==", - "dev": true + "dev": true, + "license": "BSD-3-Clause" }, "node_modules/loupe": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.1.tgz", - "integrity": "sha512-edNu/8D5MKVfGVFRhFf8aAxiTM6Wumfz5XsaatSxlD3w4R1d/WEKUTydCdPGbl9K7QG/Ca3GnDV2sIKIpXRQcw==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "get-func-name": "^2.0.1" } }, - "node_modules/lowercase-keys": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", - "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/lru-cache": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.1.0.tgz", - "integrity": "sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag==", - "engines": { - "node": "14 || >=16.14" - } + "version": "10.4.3", + "dev": true, + "license": "ISC" }, "node_modules/ltgt": { "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz", - "integrity": "sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lunr": { "version": "2.3.9", - "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", - "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==", - "dev": true - }, - "node_modules/lz-string": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", - "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", - "optional": true, - "peer": true, - "bin": { - "lz-string": "bin/bin.js" - } + "dev": true, + "license": "MIT" }, "node_modules/magic-string": { - "version": "0.30.10", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.10.tgz", - "integrity": "sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==", - "devOptional": true, + "version": "0.30.11", + "dev": true, + "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" + "@jridgewell/sourcemap-codec": "^1.5.0" } }, "node_modules/magicast": { "version": "0.3.4", - "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.4.tgz", - "integrity": "sha512-TyDF/Pn36bBji9rWKHlZe+PZb6Mx5V8IHCSxk7X4aljM4e/vyDvZZYwHewdVaqiA0nb3ghfHU/6AUpDxWoER2Q==", "dev": true, + "license": "MIT", "dependencies": { 
"@babel/parser": "^7.24.4", "@babel/types": "^7.24.0", @@ -11184,9 +8758,8 @@ }, "node_modules/make-dir": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", - "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^7.5.3" }, @@ -11199,9 +8772,8 @@ }, "node_modules/marked": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", - "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", "dev": true, + "license": "MIT", "bin": { "marked": "bin/marked.js" }, @@ -11211,8 +8783,7 @@ }, "node_modules/mcl-wasm": { "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mcl-wasm/-/mcl-wasm-1.5.0.tgz", - "integrity": "sha512-+Bnefweg0PWhQ//pVAawNkZAC+TH/mMZVsxmEyHvw8Ujhwu3cxUe9WITFK74dfgPRB09Zkmf6aUFXnW23OnVUw==", + "license": "BSD-3-Clause", "dependencies": { "@types/node": "^20.2.5" }, @@ -11221,18 +8792,16 @@ } }, "node_modules/mcl-wasm/node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", + "version": "20.14.15", + "license": "MIT", "dependencies": { "undici-types": "~5.26.4" } }, "node_modules/md5.js": { "version": "1.3.5", - "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", - "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", "dev": true, + "license": "MIT", "dependencies": { "hash-base": "^3.0.0", "inherits": "^2.0.1", @@ -11241,17 +8810,15 @@ }, "node_modules/media-typer": { "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/memdown": { "version": "6.1.1", - "resolved": "https://registry.npmjs.org/memdown/-/memdown-6.1.1.tgz", - "integrity": "sha512-vh2RiuVrn6Vv73088C1KzLwy9+hhRwoZsgddYqIoVuFFrcoc2Rt+lq/KrmkFn6ulko7AtQ0AvqtYid35exb38A==", "dev": true, + "license": "MIT", "dependencies": { "abstract-leveldown": "^7.2.0", "buffer": "^6.0.3", @@ -11265,8 +8832,7 @@ }, "node_modules/memory-level": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/memory-level/-/memory-level-1.0.0.tgz", - "integrity": "sha512-UXzwewuWeHBz5krr7EvehKcmLFNoXxGcvuYhC41tRnkrTbJohtS7kVn9akmgirtRygg+f7Yjsfi8Uu5SGSQ4Og==", + "license": "MIT", "dependencies": { "abstract-level": "^1.0.0", "functional-red-black-tree": "^1.0.1", @@ -11278,8 +8844,6 @@ }, "node_modules/memorystream": { "version": "0.3.1", - "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", - "integrity": "sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==", "dev": true, "engines": { "node": ">= 0.10.0" @@ -11287,30 +8851,26 @@ }, "node_modules/merge-stream": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/merge2": { "version": "1.4.1", - "resolved": 
"https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } }, "node_modules/micro-bmark": { "version": "0.2.0", - "resolved": "https://registry.npmjs.org/micro-bmark/-/micro-bmark-0.2.0.tgz", - "integrity": "sha512-snLV+mDYMZjZ/4TZEockpW5kh888HmnV/bFsb0C5uTwgZi3Kfrl0O28eu/Kc+746GyW1alrMI2L+L1ubVCacPg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/micromatch": { "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -11321,9 +8881,8 @@ }, "node_modules/miller-rabin": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", - "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", "dev": true, + "license": "MIT", "dependencies": { "bn.js": "^4.0.0", "brorand": "^1.0.1" @@ -11334,22 +8893,19 @@ }, "node_modules/miller-rabin/node_modules/bn.js": { "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/mime-db": { "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", "dependencies": { "mime-db": "1.52.0" }, @@ -11359,9 +8915,8 @@ }, "node_modules/mimic-fn": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -11369,36 +8924,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/mimic-response": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", - "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/minimalistic-assert": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", - "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/minimalistic-crypto-utils": { "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", - "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/minimatch": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -11408,80 +8947,59 @@ }, "node_modules/minimist": { "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/minipass": { "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "devOptional": true, + "dev": true, + "license": "ISC", "engines": { "node": ">=16 || 14 >=14.17" } }, - "node_modules/mitt": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mitt/-/mitt-2.1.0.tgz", - "integrity": "sha512-ILj2TpLiysu2wkBbWjAmww7TkZb65aiQO+DkVdUTBpBXq+MHYiETENkKFMtsJZX1Lf4pe4QOrTSjIfUwN5lRdg==", - "devOptional": true - }, - "node_modules/mkdirp-classic": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "optional": true, - "peer": true - }, "node_modules/module-error": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/module-error/-/module-error-1.0.2.tgz", - "integrity": "sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA==", + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/moment": { "version": "2.30.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", - "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==", + "license": "MIT", "engines": { "node": "*" } }, "node_modules/mrmime": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", - "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/ms": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "license": "MIT" }, "node_modules/msgpackr": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.10.2.tgz", - "integrity": "sha512-L60rsPynBvNE+8BWipKKZ9jHcSGbtyJYIwjRq0VrIvQ08cRjntGXJYW/tmciZ2IHWIY8WEW32Qa2xbh5+SKBZA==", + "version": "1.11.0", "dev": true, + "license": "MIT", "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "node_modules/msgpackr-extract": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz", - "integrity": 
"sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==", "dev": true, "hasInstallScript": true, + "license": "MIT", "optional": true, "dependencies": { "node-gyp-build-optional-packages": "5.2.2" @@ -11500,9 +9018,8 @@ }, "node_modules/msgpackr-extract/node_modules/node-gyp-build-optional-packages": { "version": "5.2.2", - "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz", - "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==", "dev": true, + "license": "MIT", "optional": true, "dependencies": { "detect-libc": "^2.0.1" @@ -11513,165 +9030,20 @@ "node-gyp-build-optional-packages-test": "build-test.js" } }, - "node_modules/msw": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/msw/-/msw-2.3.1.tgz", - "integrity": "sha512-ocgvBCLn/5l3jpl1lssIb3cniuACJLoOfZu01e3n5dbJrpA5PeeWn28jCLgQDNt6d7QT8tF2fYRzm9JoEHtiig==", - "hasInstallScript": true, - "optional": true, - "peer": true, - "dependencies": { - "@bundled-es-modules/cookie": "^2.0.0", - "@bundled-es-modules/statuses": "^1.0.1", - "@inquirer/confirm": "^3.0.0", - "@mswjs/cookies": "^1.1.0", - "@mswjs/interceptors": "^0.29.0", - "@open-draft/until": "^2.1.0", - "@types/cookie": "^0.6.0", - "@types/statuses": "^2.0.4", - "chalk": "^4.1.2", - "graphql": "^16.8.1", - "headers-polyfill": "^4.0.2", - "is-node-process": "^1.2.0", - "outvariant": "^1.4.2", - "path-to-regexp": "^6.2.0", - "strict-event-emitter": "^0.5.1", - "type-fest": "^4.9.0", - "yargs": "^17.7.2" - }, - "bin": { - "msw": "cli/index.js" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/mswjs" - }, - "peerDependencies": { - "typescript": ">= 4.7.x" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/msw/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "optional": true, - "peer": true, - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/msw/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "optional": true, - "peer": true - }, - "node_modules/msw/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/msw/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "optional": true, - "peer": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/msw/node_modules/type-fest": { - "version": "4.21.0", - "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-4.21.0.tgz", - "integrity": "sha512-ADn2w7hVPcK6w1I0uWnM//y1rLXZhzB9mr0a3OirzclKF1Wp6VzevUmzz/NRAWunOT6E8HrnpGY7xOfc6K57fA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/msw/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "optional": true, - "peer": true, - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/msw/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" - } - }, "node_modules/multiformats": { - "version": "13.1.1", - "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-13.1.1.tgz", - "integrity": "sha512-JiptvwMmlxlzIlLLwhCi/srf/nk409UL0eUBr0kioRJq15hqqKyg68iftrBvhCRjR6Rw4fkNnSc4ZJXJDuta/Q==" - }, - "node_modules/mute-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz", - "integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==", - "optional": true, - "peer": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } + "version": "13.2.2", + "license": "Apache-2.0 OR MIT" }, "node_modules/nanoid": { "version": "3.3.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", - "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", - "devOptional": true, + "dev": true, "funding": [ { "type": "github", "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -11681,42 +9053,27 @@ }, "node_modules/napi-macros": { "version": "2.2.2", - "resolved": "https://registry.npmjs.org/napi-macros/-/napi-macros-2.2.2.tgz", - "integrity": "sha512-hmEVtAGYzVQpCKdbQea4skABsdXW4RUh5t5mJ2zzqowJS2OyXZTU1KhDVFhx+NlWZ4ap9mqR9TcDO3LTTttd+g==" + "license": "MIT" }, "node_modules/natural-compare": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/neo-async": { "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "dev": true - }, - "node_modules/netmask": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", - "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 0.4.0" - } + "dev": true, + "license": "MIT" }, "node_modules/node-addon-api": { "version": "6.1.0", - "resolved": 
"https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz", - "integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/node-dir": { "version": "0.1.17", - "resolved": "https://registry.npmjs.org/node-dir/-/node-dir-0.1.17.tgz", - "integrity": "sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg==", "dev": true, + "license": "MIT", "dependencies": { "minimatch": "^3.0.2" }, @@ -11724,49 +9081,9 @@ "node": ">= 0.10.5" } }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "optional": true, - "peer": true, - "engines": { - "node": ">=10.5.0" - } - }, - "node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "optional": true, - "peer": true, - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, "node_modules/node-gyp-build": { "version": "4.8.1", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.1.tgz", - "integrity": "sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw==", + "license": "MIT", "bin": { "node-gyp-build": "bin.js", "node-gyp-build-optional": "optional.js", @@ -11775,9 +9092,8 @@ }, "node_modules/node-gyp-build-optional-packages": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.1.1.tgz", - "integrity": "sha512-+P72GAjVAbTxjjwUmwjVrqrdZROD4nf8KgpBoDxqXXTiYZZt/ud60dE5yvCSr9lRO8e8yv6kgJIC0K0PfZFVQw==", "dev": true, + "license": "MIT", "dependencies": { "detect-libc": "^2.0.1" }, @@ -11789,9 +9105,8 @@ }, "node_modules/node-preload": { "version": "0.2.1", - "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", - "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", "dev": true, + "license": "MIT", "dependencies": { "process-on-spawn": "^1.0.0" }, @@ -11800,16 +9115,14 @@ } }, "node_modules/node-releases": { - "version": "2.0.14", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", - "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", - "dev": true + "version": "2.0.18", + "dev": true, + "license": "MIT" }, "node_modules/node-stdlib-browser": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/node-stdlib-browser/-/node-stdlib-browser-1.2.0.tgz", - "integrity": "sha512-VSjFxUhRhkyed8AtLwSCkMrJRfQ3e2lGtG3sP6FEgaLKBBbxM/dLfjRe1+iLhjvyLFW3tBQ8+c0pcOtXGbAZJg==", "dev": true, + "license": "MIT", "dependencies": { "assert": "^2.0.0", "browser-resolve": "^2.0.0", @@ -11845,8 +9158,6 @@ }, 
"node_modules/node-stdlib-browser/node_modules/buffer": { "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "dev": true, "funding": [ { @@ -11862,6 +9173,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" @@ -11869,37 +9181,21 @@ }, "node_modules/node-stdlib-browser/node_modules/punycode": { "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/normalize-path": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/normalize-url": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.1.tgz", - "integrity": "sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==", - "optional": true, - "peer": true, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/npm-run-path": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "path-key": "^4.0.0" }, @@ -11912,9 +9208,8 @@ }, "node_modules/npm-run-path/node_modules/path-key": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", - "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -11924,9 +9219,8 @@ }, "node_modules/nyc": { "version": "15.1.0", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", - "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", "dev": true, + "license": "ISC", "dependencies": { "@istanbuljs/load-nyc-config": "^1.0.0", "@istanbuljs/schema": "^0.1.2", @@ -11965,9 +9259,8 @@ }, "node_modules/nyc/node_modules/cliui": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", - "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", @@ -11976,21 +9269,18 @@ }, "node_modules/nyc/node_modules/convert-source-map": { "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/nyc/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/nyc/node_modules/find-up": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, + "license": "MIT", "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" @@ -12001,18 +9291,16 @@ }, "node_modules/nyc/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/nyc/node_modules/istanbul-lib-source-maps": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", - "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", @@ -12024,9 +9312,8 @@ }, "node_modules/nyc/node_modules/locate-path": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^4.1.0" }, @@ -12036,9 +9323,8 @@ }, "node_modules/nyc/node_modules/make-dir": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^6.0.0" }, @@ -12051,9 +9337,8 @@ }, "node_modules/nyc/node_modules/p-limit": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "license": "MIT", "dependencies": { "p-try": "^2.0.0" }, @@ -12066,9 +9351,8 @@ }, "node_modules/nyc/node_modules/p-locate": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^2.2.0" }, @@ -12078,9 +9362,8 @@ }, "node_modules/nyc/node_modules/p-map": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", "dev": true, + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -12090,27 +9373,32 @@ }, "node_modules/nyc/node_modules/resolve-from": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/nyc/node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", 
- "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } }, + "node_modules/nyc/node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/nyc/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -12122,9 +9410,8 @@ }, "node_modules/nyc/node_modules/test-exclude": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", - "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", "dev": true, + "license": "ISC", "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", @@ -12136,9 +9423,8 @@ }, "node_modules/nyc/node_modules/wrap-ansi": { "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -12150,15 +9436,13 @@ }, "node_modules/nyc/node_modules/y18n": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", - "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/nyc/node_modules/yargs": { "version": "15.4.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", - "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", "dev": true, + "license": "MIT", "dependencies": { "cliui": "^6.0.0", "decamelize": "^1.2.0", @@ -12178,9 +9462,8 @@ }, "node_modules/nyc/node_modules/yargs-parser": { "version": "18.1.3", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", - "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", "dev": true, + "license": "ISC", "dependencies": { "camelcase": "^5.0.0", "decamelize": "^1.2.0" @@ -12191,25 +9474,22 @@ }, "node_modules/object-assign": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/object-hash": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", - "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 6" } }, "node_modules/object-inspect": { "version": "1.13.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", - "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -12219,9 +9499,8 @@ }, "node_modules/object-is": { 
"version": "1.1.6", - "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.6.tgz", - "integrity": "sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1" @@ -12235,18 +9514,16 @@ }, "node_modules/object-keys": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" } }, "node_modules/object.assign": { "version": "4.1.5", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", - "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.5", "define-properties": "^1.2.1", @@ -12262,9 +9539,8 @@ }, "node_modules/object.fromentries": { "version": "2.0.8", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", - "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -12280,9 +9556,8 @@ }, "node_modules/object.values": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", - "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -12297,8 +9572,7 @@ }, "node_modules/on-finished": { "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -12308,26 +9582,23 @@ }, "node_modules/once": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "devOptional": true, + "dev": true, + "license": "ISC", "dependencies": { "wrappy": "1" } }, "node_modules/one-time": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", - "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", + "license": "MIT", "dependencies": { "fn.name": "1.x.x" } }, "node_modules/onetime": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "mimic-fn": "^4.0.0" }, @@ -12340,9 +9611,8 @@ }, "node_modules/open": { "version": "8.4.2", - "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", - "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", "dev": true, + "license": "MIT", "dependencies": { "define-lazy-prop": "^2.0.0", "is-docker": "^2.1.1", @@ -12357,9 +9627,8 @@ }, "node_modules/optionator": { "version": "0.9.4", - "resolved": 
"https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", - "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, + "license": "MIT", "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", @@ -12374,46 +9643,25 @@ }, "node_modules/ordered-binary": { "version": "1.5.1", - "resolved": "https://registry.npmjs.org/ordered-binary/-/ordered-binary-1.5.1.tgz", - "integrity": "sha512-5VyHfHY3cd0iza71JepYG50My+YUbrFtGoUz2ooEydPyPM7Aai/JW098juLr+RG6+rDJuzNNTsEQu2DZa1A41A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/os-browserify": { "version": "0.3.0", - "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", - "integrity": "sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/os-tmpdir": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/outvariant": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/outvariant/-/outvariant-1.4.2.tgz", - "integrity": "sha512-Ou3dJ6bA/UJ5GVHxah4LnqDwZRwAmWxrG3wtrHrbGnP4RnLCtA64A4F+ae7Y8ww660JaddSoArUR5HjipWSHAQ==", - "optional": true, - "peer": true - }, - "node_modules/p-cancelable": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", - "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12.20" - } - }, "node_modules/p-defer": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-4.0.1.tgz", - "integrity": "sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==", + "license": "MIT", "engines": { "node": ">=12" }, @@ -12423,9 +9671,8 @@ }, "node_modules/p-limit": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, + "license": "MIT", "dependencies": { "yocto-queue": "^0.1.0" }, @@ -12438,9 +9685,8 @@ }, "node_modules/p-locate": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^3.0.2" }, @@ -12453,9 +9699,8 @@ }, "node_modules/p-map": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "dev": true, + "license": "MIT", "dependencies": { "aggregate-error": "^3.0.0" }, @@ -12468,8 +9713,7 @@ }, "node_modules/p-queue": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-8.0.1.tgz", - "integrity": "sha512-NXzu9aQJTAzbBqOt2hwsR63ea7yvxJc0PwN/zobNAudYfb1B7R08SzB4TsLeSbUCuG467NhnoT0oO6w1qRO+BA==", + "license": "MIT", "dependencies": { "eventemitter3": "^5.0.1", "p-timeout": "^6.1.2" @@ -12483,8 +9727,7 @@ }, "node_modules/p-timeout": { "version": "6.1.2", - 
"resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-6.1.2.tgz", - "integrity": "sha512-UbD77BuZ9Bc9aABo74gfXhNvzC9Tx7SxtHSh1fxvx3jTLLYvmVhiQZZrJzqqU0jKbN32kb5VOKiLEQI/3bIjgQ==", + "license": "MIT", "engines": { "node": ">=14.16" }, @@ -12494,52 +9737,16 @@ }, "node_modules/p-try": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/pac-proxy-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.0.2.tgz", - "integrity": "sha512-BFi3vZnO9X5Qt6NRz7ZOaPja3ic0PhlsmCRYLOpN11+mWBCR6XJDqW5RF3j8jm4WGGQZtBA+bTfxYzeKW73eHg==", - "optional": true, - "peer": true, - "dependencies": { - "@tootallnate/quickjs-emscripten": "^0.23.0", - "agent-base": "^7.0.2", - "debug": "^4.3.4", - "get-uri": "^6.0.1", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.5", - "pac-resolver": "^7.0.1", - "socks-proxy-agent": "^8.0.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/pac-resolver": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz", - "integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==", - "optional": true, - "peer": true, - "dependencies": { - "degenerator": "^5.0.0", - "netmask": "^2.0.2" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/package-hash": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", - "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", "dev": true, + "license": "ISC", "dependencies": { "graceful-fs": "^4.1.15", "hasha": "^5.0.0", @@ -12552,21 +9759,18 @@ }, "node_modules/package-json-from-dist": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", - "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", - "devOptional": true + "dev": true, + "license": "BlueOak-1.0.0" }, "node_modules/pako": { "version": "1.0.11", - "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", - "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", - "devOptional": true + "dev": true, + "license": "(MIT AND Zlib)" }, "node_modules/parent-module": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, + "license": "MIT", "dependencies": { "callsites": "^3.0.0" }, @@ -12576,9 +9780,8 @@ }, "node_modules/parse-asn1": { "version": "5.1.7", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.7.tgz", - "integrity": "sha512-CTM5kuWR3sx9IFamcl5ErfPl6ea/N8IYwiJ+vpeB2g+1iknv7zBl5uPwbMbRVznRVbrNY6lGuDoE5b30grmbqg==", "dev": true, + "license": "ISC", "dependencies": { "asn1.js": "^4.10.1", "browserify-aes": "^1.2.0", @@ -12593,56 +9796,49 @@ }, "node_modules/parseurl": { "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": 
"MIT", "engines": { "node": ">= 0.8" } }, "node_modules/path-browserify": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", - "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/path-exists": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/path-is-absolute": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/path-key": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/path-parse": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/path-scurry": { "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "devOptional": true, + "dev": true, + "license": "BlueOak-1.0.0", "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" @@ -12654,51 +9850,31 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.3.0.tgz", - "integrity": "sha512-CQl19J/g+Hbjbv4Y3mFNNXFEL/5t/KCg8POCuUqd4rMKjGG+j1ybER83hxV58zL+dFI1PTkt3GNFSHRt+d8qEQ==", - "devOptional": true, - "engines": { - "node": "14 || >=16.14" - } - }, - "node_modules/path-to-regexp": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz", - "integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==", - "optional": true, - "peer": true - }, "node_modules/path-type": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/pathe": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", - "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/pathval": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", - "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", - "devOptional": 
true, + "dev": true, + "license": "MIT", "engines": { "node": ">= 14.16" } }, "node_modules/pbkdf2": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", - "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", "dev": true, + "license": "MIT", "dependencies": { "create-hash": "^1.1.2", "create-hmac": "^1.1.4", @@ -12710,357 +9886,508 @@ "node": ">=0.12" } }, - "node_modules/pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", - "optional": true, - "peer": true + "node_modules/picocolors": { + "version": "1.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pidtree": { + "version": "0.6.0", + "dev": true, + "license": "MIT", + "bin": { + "pidtree": "bin/pidtree.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/pkg-dir": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^5.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/platform": { + "version": "1.3.6", + "dev": true, + "license": "MIT" + }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.4.41", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.1", + "source-map-js": "^1.2.0" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.3.3", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/process": { + "version": "0.11.10", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/process-on-spawn": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "fromentries": "^1.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/progress-events": { + "version": "1.0.1", + "license": "Apache-2.0 OR MIT" + }, + "node_modules/prom-client": { + "version": "15.1.3", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.4.0", + "tdigest": "^0.1.1" + }, + "engines": { + "node": "^16 || ^18 || >=20" + } + }, + "node_modules/public-encrypt": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "bn.js": "^4.1.0", + 
"browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "parse-asn1": "^5.0.0", + "randombytes": "^2.0.1", + "safe-buffer": "^5.1.2" + } }, - "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", - "devOptional": true + "node_modules/public-encrypt/node_modules/bn.js": { + "version": "4.12.0", + "dev": true, + "license": "MIT" }, - "node_modules/picomatch": { + "node_modules/punycode": { "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "devOptional": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pidtree": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.6.0.tgz", - "integrity": "sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==", "dev": true, - "bin": { - "pidtree": "bin/pidtree.js" - }, + "license": "MIT", "engines": { - "node": ">=0.10" + "node": ">=6" } }, - "node_modules/pkg-dir": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-5.0.0.tgz", - "integrity": "sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA==", - "dev": true, + "node_modules/qs": { + "version": "6.11.0", + "license": "BSD-3-Clause", "dependencies": { - "find-up": "^5.0.0" + "side-channel": "^1.0.4" }, "engines": { - "node": ">=10" + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/platform": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", - "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==", - "dev": true - }, - "node_modules/possible-typed-array-names": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", - "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "node_modules/querystring-es3": { + "version": "0.2.1", "dev": true, "engines": { - "node": ">= 0.4" + "node": ">=0.4.x" } }, - "node_modules/postcss": { - "version": "8.4.39", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz", - "integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==", - "devOptional": true, + "node_modules/queue-microtask": { + "version": "1.2.3", "funding": [ { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" + "type": "github", + "url": "https://github.com/sponsors/feross" }, { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" + "type": "patreon", + "url": "https://www.patreon.com/feross" }, { - "type": "github", - "url": "https://github.com/sponsors/ai" + "type": "consulting", + "url": "https://feross.org/support" } ], + "license": "MIT" + }, + "node_modules/quibble": { + "version": "0.9.2", + "dev": true, + "license": "MIT", "dependencies": { - "nanoid": "^3.3.7", - "picocolors": "^1.0.1", - "source-map-js": "^1.2.0" + "lodash": "^4.17.21", + "resolve": "^1.22.8" }, "engines": { - "node": "^10 || ^12 || >=14" + 
"node": ">= 0.14.0" } }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "node_modules/randombytes": { + "version": "2.1.0", "dev": true, - "engines": { - "node": ">= 0.8.0" + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" } }, - "node_modules/prettier": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", - "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "node_modules/randomfill": { + "version": "1.0.4", "dev": true, - "bin": { - "prettier": "bin-prettier.js" + "license": "MIT", + "dependencies": { + "randombytes": "^2.0.5", + "safe-buffer": "^5.1.0" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" }, "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" + "node": ">= 0.8" } }, - "node_modules/prettier-linter-helpers": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", - "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", - "dev": true, + "node_modules/readable-stream": { + "version": "3.6.2", + "license": "MIT", "dependencies": { - "fast-diff": "^1.1.2" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" }, "engines": { - "node": ">=6.0.0" + "node": ">= 6" } }, - "node_modules/pretty-format": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", - "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", - "devOptional": true, + "node_modules/reflect.getprototypeof": { + "version": "1.0.6", + "dev": true, + "license": "MIT", "dependencies": { - "@jest/schemas": "^29.6.3", - "ansi-styles": "^5.0.0", - "react-is": "^18.0.0" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.1", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "globalthis": "^1.0.3", + "which-builtin-type": "^1.1.3" }, "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/pretty-format/node_modules/ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "devOptional": true, + "node_modules/regexp.prototype.flags": { + "version": "1.5.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" + }, "engines": { - "node": ">=10" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": 
"sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "devOptional": true, + "node_modules/regexpp": { + "version": "3.2.0", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 0.6.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" } }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "devOptional": true - }, - "node_modules/process-on-spawn": { + "node_modules/release-zalgo": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", - "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", "dev": true, + "license": "ISC", "dependencies": { - "fromentries": "^1.2.0" + "es6-error": "^4.0.1" }, "engines": { - "node": ">=8" + "node": ">=4" } }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "optional": true, - "peer": true, + "node_modules/repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", + "dev": true, "engines": { - "node": ">=0.4.0" + "node": ">=0.10" } }, - "node_modules/progress-events": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/progress-events/-/progress-events-1.0.0.tgz", - "integrity": "sha512-zIB6QDrSbPfRg+33FZalluFIowkbV5Xh1xSuetjG+rlC5he6u2dc6VQJ0TbMdlN3R1RHdpOqxEFMKTnQ+itUwA==", + "node_modules/require-directory": { + "version": "2.1.1", + "license": "MIT", "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" + "node": ">=0.10.0" } }, - "node_modules/prom-client": { - "version": "15.1.3", - "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-15.1.3.tgz", - "integrity": "sha512-6ZiOBfCywsD4k1BN9IX0uZhF+tJkV8q8llP64G5Hajs4JOeVLPCwpPVcpXy3BwYiUGgyJzsJJQeOIv7+hDSq8g==", + "node_modules/require-main-filename": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/resolve": { + "version": "1.22.8", + "dev": true, + "license": "MIT", "dependencies": { - "@opentelemetry/api": "^1.4.0", - "tdigest": "^0.1.1" + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "dev": true, + "license": "MIT", "engines": { - "node": "^16 || ^18 || >=20" + "node": ">=4" } }, - "node_modules/proxy-agent": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.3.1.tgz", - "integrity": "sha512-Rb5RVBy1iyqOtNl15Cw/llpeLH8bsb37gM1FUfKQ+Wck6xHlbAhWGUFiTRHtkjqGTA5pSHz6+0hrPW/oECihPQ==", - "optional": true, - "peer": true, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "dev": true, + "license": 
"MIT", "dependencies": { - "agent-base": "^7.0.2", - "debug": "^4.3.4", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.2", - "lru-cache": "^7.14.1", - "pac-proxy-agent": "^7.0.1", - "proxy-from-env": "^1.1.0", - "socks-proxy-agent": "^8.0.2" + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" }, "engines": { - "node": ">= 14" + "node": ">=8" } }, - "node_modules/proxy-agent/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "optional": true, - "peer": true, + "node_modules/restore-cursor/node_modules/mimic-fn": { + "version": "2.1.0", + "dev": true, + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=6" + } + }, + "node_modules/restore-cursor/node_modules/onetime": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/resumer": { + "version": "0.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "through": "~2.3.4" } }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "optional": true, - "peer": true - }, - "node_modules/public-encrypt": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", - "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", + "node_modules/reusify": { + "version": "1.0.4", "dev": true, - "dependencies": { - "bn.js": "^4.1.0", - "browserify-rsa": "^4.0.0", - "create-hash": "^1.1.0", - "parse-asn1": "^5.0.0", - "randombytes": "^2.0.1", - "safe-buffer": "^5.1.2" + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" } }, - "node_modules/public-encrypt/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==", - "dev": true + "node_modules/rfdc": { + "version": "1.4.1", + "dev": true, + "license": "MIT" }, - "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "optional": true, - "peer": true, + "node_modules/rimraf": { + "version": "3.0.2", + "dev": true, + "license": "ISC", "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "node_modules/ripemd160": { + "version": "2.0.2", "dev": true, - "engines": { - "node": ">=6" + "license": "MIT", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" } }, - "node_modules/puppeteer-core": { - "version": "20.9.0", - "resolved": 
"https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-20.9.0.tgz", - "integrity": "sha512-H9fYZQzMTRrkboEfPmf7m3CLDN6JvbxXA3qTtS+dFt27tR+CsFHzPsT6pzp6lYL6bJbAPaR0HaPO6uSi+F94Pg==", - "optional": true, - "peer": true, - "dependencies": { - "@puppeteer/browsers": "1.4.6", - "chromium-bidi": "0.4.16", - "cross-fetch": "4.0.0", - "debug": "4.3.4", - "devtools-protocol": "0.0.1147663", - "ws": "8.13.0" + "node_modules/rollup": { + "version": "2.79.1", + "dev": true, + "license": "MIT", + "bin": { + "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=16.3.0" - }, - "peerDependencies": { - "typescript": ">= 4.7.4" + "node": ">=10.0.0" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "optionalDependencies": { + "fsevents": "~2.3.2" } }, - "node_modules/puppeteer-core/node_modules/@puppeteer/browsers": { - "version": "1.4.6", - "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.4.6.tgz", - "integrity": "sha512-x4BEjr2SjOPowNeiguzjozQbsc6h437ovD/wu+JpaenxVLm3jkgzHY2xOslMTp50HoTvQreMjiexiGQw1sqZlQ==", - "optional": true, - "peer": true, + "node_modules/rollup-plugin-visualizer": { + "version": "5.12.0", + "dev": true, + "license": "MIT", "dependencies": { - "debug": "4.3.4", - "extract-zip": "2.0.1", - "progress": "2.0.3", - "proxy-agent": "6.3.0", - "tar-fs": "3.0.4", - "unbzip2-stream": "1.4.3", - "yargs": "17.7.1" + "open": "^8.4.0", + "picomatch": "^2.3.1", + "source-map": "^0.7.4", + "yargs": "^17.5.1" }, "bin": { - "browsers": "lib/cjs/main-cli.js" + "rollup-plugin-visualizer": "dist/bin/cli.js" }, "engines": { - "node": ">=16.3.0" + "node": ">=14" }, "peerDependencies": { - "typescript": ">= 4.7.4" + "rollup": "2.x || 3.x || 4.x" }, "peerDependenciesMeta": { - "typescript": { + "rollup": { "optional": true } } }, - "node_modules/puppeteer-core/node_modules/cliui": { + "node_modules/rollup-plugin-visualizer/node_modules/cliui": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "optional": true, - "peer": true, + "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", @@ -13070,84 +10397,23 @@ "node": ">=12" } }, - "node_modules/puppeteer-core/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "optional": true, - "peer": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/puppeteer-core/node_modules/devtools-protocol": { - "version": "0.0.1147663", - "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1147663.tgz", - "integrity": "sha512-hyWmRrexdhbZ1tcJUGpO95ivbRhWXz++F4Ko+n21AY5PNln2ovoJw+8ZMNDTtip+CNFQfrtLVh/w4009dXO/eQ==", - "optional": true, - "peer": true - }, - "node_modules/puppeteer-core/node_modules/emoji-regex": { + "node_modules/rollup-plugin-visualizer/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "optional": true, - "peer": true + "dev": true, + "license": "MIT" }, - 
"node_modules/puppeteer-core/node_modules/is-fullwidth-code-point": { + "node_modules/rollup-plugin-visualizer/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "optional": true, - "peer": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, - "node_modules/puppeteer-core/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/puppeteer-core/node_modules/proxy-agent": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.3.0.tgz", - "integrity": "sha512-0LdR757eTj/JfuU7TL2YCuAZnxWXu3tkJbg4Oq3geW/qFNT/32T0sp2HnZ9O0lMR4q3vwAt0+xCA8SR0WAD0og==", - "optional": true, - "peer": true, - "dependencies": { - "agent-base": "^7.0.2", - "debug": "^4.3.4", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.0", - "lru-cache": "^7.14.1", - "pac-proxy-agent": "^7.0.0", - "proxy-from-env": "^1.1.0", - "socks-proxy-agent": "^8.0.1" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/puppeteer-core/node_modules/string-width": { + "node_modules/rollup-plugin-visualizer/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "optional": true, - "peer": true, + "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -13157,34 +10423,10 @@ "node": ">=8" } }, - "node_modules/puppeteer-core/node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", - "optional": true, - "peer": true, - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/puppeteer-core/node_modules/yargs": { - "version": "17.7.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", - "integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", - "optional": true, - "peer": true, + "node_modules/rollup-plugin-visualizer/node_modules/yargs": { + "version": "17.7.2", + "dev": true, + "license": "MIT", "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -13198,50 +10440,106 @@ "node": ">=12" } }, - "node_modules/puppeteer-core/node_modules/yargs-parser": { + "node_modules/rollup-plugin-visualizer/node_modules/yargs-parser": { "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/rollup/node_modules/fsevents": { + "version": "2.3.3", 
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, "optional": true, + "os": [ + "darwin" + ], "peer": true, "engines": { - "node": ">=12" + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "node_modules/run-parallel": { + "version": "1.2.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "queue-microtask": "^1.2.2" } }, - "node_modules/query-selector-shadow-dom": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/query-selector-shadow-dom/-/query-selector-shadow-dom-1.0.1.tgz", - "integrity": "sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw==", - "optional": true, - "peer": true + "node_modules/run-parallel-limit": { + "version": "1.1.0", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } }, - "node_modules/querystring-es3": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", - "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", + "node_modules/rustbn-wasm": { + "version": "0.4.0", + "license": "(MIT OR Apache-2.0)", + "dependencies": { + "@scure/base": "^1.1.5" + } + }, + "node_modules/rxjs": { + "version": "7.8.1", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.2", "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, "engines": { - "node": ">=0.4.x" + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "node_modules/safe-buffer": { + "version": "5.2.1", "funding": [ { "type": "github", @@ -13255,139 +10553,148 @@ "type": "consulting", "url": "https://feross.org/support" } - ] - }, - "node_modules/queue-tick": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz", - "integrity": "sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==", - "optional": true, - "peer": true + ], + "license": "MIT" }, - "node_modules/quibble": { - "version": "0.9.2", - "resolved": 
"https://registry.npmjs.org/quibble/-/quibble-0.9.2.tgz", - "integrity": "sha512-BrL7hrZcbyyt5ZDfePkGFDc3m82uUtxCPOnpRUrkOdtBnmV9ldQKxXORkKL8eIzToRNaCpIPyKyfdfq/tBlFAA==", + "node_modules/safe-regex-test": { + "version": "1.0.3", "dev": true, + "license": "MIT", "dependencies": { - "lodash": "^4.17.21", - "resolve": "^1.22.8" + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-regex": "^1.1.4" }, "engines": { - "node": ">= 0.14.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "optional": true, - "peer": true, + "node_modules/safe-stable-stringify": { + "version": "2.4.3", + "license": "MIT", "engines": { "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/randombytes": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", - "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "node_modules/safer-buffer": { + "version": "2.1.2", + "license": "MIT" + }, + "node_modules/scanf": { + "version": "1.1.2", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, - "dependencies": { - "safe-buffer": "^5.1.0" + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, - "node_modules/randomfill": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", - "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", + "node_modules/set-blocking": { + "version": "2.0.0", "dev": true, + "license": "ISC" + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "license": "MIT", "dependencies": { - "randombytes": "^2.0.5", - "safe-buffer": "^5.1.0" + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" } }, - "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "node_modules/set-function-name": { + "version": "2.0.2", + "dev": true, + "license": "MIT", "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" }, "engines": { - "node": ">= 0.8" + "node": ">= 0.4" } }, - "node_modules/react-is": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", - "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", - "devOptional": true + "node_modules/setimmediate": { + "version": "1.0.5", + "dev": true, + "license": "MIT" }, - 
"node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "node_modules/setprototypeof": { + "version": "1.2.0", + "license": "ISC" + }, + "node_modules/sha.js": { + "version": "2.4.11", + "dev": true, + "license": "(MIT AND BSD-3-Clause)", "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" }, - "engines": { - "node": ">= 6" + "bin": { + "sha.js": "bin.js" } }, - "node_modules/readdir-glob": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", - "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", - "optional": true, - "peer": true, + "node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "license": "MIT", "dependencies": { - "minimatch": "^5.1.0" + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" } }, - "node_modules/readdir-glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "optional": true, - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0" + "node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" } }, - "node_modules/readdir-glob/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "optional": true, - "peer": true, + "node_modules/shiki": { + "version": "0.10.1", + "dev": true, + "license": "MIT", "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" + "jsonc-parser": "^3.0.0", + "vscode-oniguruma": "^1.6.1", + "vscode-textmate": "5.2.0" } }, - "node_modules/reflect.getprototypeof": { + "node_modules/side-channel": { "version": "1.0.6", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", - "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==", - "dev": true, + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.1", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.4", - "globalthis": "^1.0.3", - "which-builtin-type": "^1.1.3" + "object-inspect": "^1.13.1" }, "engines": { "node": ">= 0.4" @@ -13396,666 +10703,404 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "optional": true, - "peer": true + "node_modules/siginfo": { + "version": "2.0.0", + "dev": true, + "license": "ISC" }, - "node_modules/regexp.prototype.flags": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", - "integrity": 
"sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "node_modules/signal-exit": { + "version": "3.0.7", "dev": true, + "license": "ISC" + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", - "define-properties": "^1.2.1", - "es-errors": "^1.3.0", - "set-function-name": "^2.0.1" + "is-arrayish": "^0.3.1" + } + }, + "node_modules/sirv": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">= 10" } }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "node_modules/slash": { + "version": "3.0.0", "dev": true, + "license": "MIT", "engines": { "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" } }, - "node_modules/release-zalgo": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", - "integrity": "sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==", + "node_modules/slice-ansi": { + "version": "5.0.0", "dev": true, + "license": "MIT", "dependencies": { - "es6-error": "^4.0.1" + "ansi-styles": "^6.0.0", + "is-fullwidth-code-point": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "dev": true, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true + "node_modules/snappyjs": { + "version": "0.6.1", + "license": "MIT" }, - "node_modules/resolve": { - "version": "1.22.8", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", - "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "node_modules/solc": { + "version": "0.8.26", "dev": true, + "license": "MIT", "dependencies": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" + "command-exists": "^1.2.8", + "commander": "^8.1.0", + "follow-redirects": "^1.12.1", + "js-sha3": "0.8.0", + "memorystream": "^0.3.1", + "semver": "^5.5.0", + "tmp": "0.0.33" }, "bin": { - "resolve": "bin/resolve" + "solcjs": "solc.js" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-alpn": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", - "integrity": 
"sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", - "optional": true, - "peer": true - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, "engines": { - "node": ">=4" - } - }, - "node_modules/resolve-pkg-maps": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", - "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", - "dev": true, - "funding": { - "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + "node": ">=10.0.0" } }, - "node_modules/responselike": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", - "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", - "optional": true, - "peer": true, - "dependencies": { - "lowercase-keys": "^3.0.0" - }, + "node_modules/solc/node_modules/commander": { + "version": "8.3.0", + "dev": true, + "license": "MIT", "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">= 12" } }, - "node_modules/resq": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/resq/-/resq-1.11.0.tgz", - "integrity": "sha512-G10EBz+zAAy3zUd/CDoBbXRL6ia9kOo3xRHrMDsHljI0GDkhYlyjwoCx5+3eCC4swi1uCoZQhskuJkj7Gp57Bw==", - "optional": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^2.0.1" + "node_modules/solc/node_modules/semver": { + "version": "5.7.2", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" } }, - "node_modules/resq/node_modules/fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==", - "optional": true, - "peer": true + "node_modules/sort-object-keys": { + "version": "1.1.3", + "dev": true, + "license": "MIT" }, - "node_modules/restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "node_modules/sort-package-json": { + "version": "1.57.0", "dev": true, + "license": "MIT", "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" + "detect-indent": "^6.0.0", + "detect-newline": "3.1.0", + "git-hooks-list": "1.0.3", + "globby": "10.0.0", + "is-plain-obj": "2.1.0", + "sort-object-keys": "^1.1.3" }, - "engines": { - "node": ">=8" - } - }, - "node_modules/restore-cursor/node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" + "bin": { + "sort-package-json": "cli.js" } }, - "node_modules/restore-cursor/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + 
"node_modules/sort-package-json/node_modules/globby": { + "version": "10.0.0", "dev": true, + "license": "MIT", "dependencies": { - "mimic-fn": "^2.1.0" + "@types/glob": "^7.1.1", + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.0.3", + "glob": "^7.1.3", + "ignore": "^5.1.1", + "merge2": "^1.2.3", + "slash": "^3.0.0" }, "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8" } }, - "node_modules/resumer": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/resumer/-/resumer-0.0.0.tgz", - "integrity": "sha512-Fn9X8rX8yYF4m81rZCK/5VmrmsSbqS/i3rDLl6ZZHAXgC2nTAx3dhwG8q8odP/RmdLa2YrybDJaAMg+X1ajY3w==", + "node_modules/source-map": { + "version": "0.7.4", "dev": true, - "dependencies": { - "through": "~2.3.4" + "license": "BSD-3-Clause", + "engines": { + "node": ">= 8" } }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "devOptional": true, + "node_modules/source-map-js": { + "version": "1.2.0", + "dev": true, + "license": "BSD-3-Clause", "engines": { - "iojs": ">=1.0.0", "node": ">=0.10.0" } }, - "node_modules/rfdc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", - "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", - "dev": true - }, - "node_modules/rgb2hex": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/rgb2hex/-/rgb2hex-0.2.5.tgz", - "integrity": "sha512-22MOP1Rh7sAo1BZpDG6R5RFYzR2lYEgwq7HEmyW2qcsOqR2lQKmn+O//xV3YG/0rrhMC6KVX2hU+ZXuaw9a5bw==", - "optional": true, - "peer": true - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", + "node_modules/spawn-wrap": { + "version": "2.0.0", "dev": true, + "license": "ISC", "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "which": "^2.0.1" }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "engines": { + "node": ">=8" } }, - "node_modules/ripemd160": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", - "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "node_modules/spawn-wrap/node_modules/make-dir": { + "version": "3.1.0", "dev": true, + "license": "MIT", "dependencies": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1" - } - }, - "node_modules/rollup": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.18.0.tgz", - "integrity": "sha512-QmJz14PX3rzbJCN1SG4Xe/bAAX2a6NpCP8ab2vfu2GiUr8AQcr2nCV/oEO3yneFarB67zk8ShlIyWb2LGTb3Sg==", - "devOptional": true, - "dependencies": { - "@types/estree": "1.0.5" - }, - "bin": { - "rollup": "dist/bin/rollup" + "semver": "^6.0.0" }, "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" + "node": ">=8" }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.18.0", - "@rollup/rollup-android-arm64": "4.18.0", - "@rollup/rollup-darwin-arm64": 
"4.18.0", - "@rollup/rollup-darwin-x64": "4.18.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.18.0", - "@rollup/rollup-linux-arm-musleabihf": "4.18.0", - "@rollup/rollup-linux-arm64-gnu": "4.18.0", - "@rollup/rollup-linux-arm64-musl": "4.18.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.18.0", - "@rollup/rollup-linux-riscv64-gnu": "4.18.0", - "@rollup/rollup-linux-s390x-gnu": "4.18.0", - "@rollup/rollup-linux-x64-gnu": "4.18.0", - "@rollup/rollup-linux-x64-musl": "4.18.0", - "@rollup/rollup-win32-arm64-msvc": "4.18.0", - "@rollup/rollup-win32-ia32-msvc": "4.18.0", - "@rollup/rollup-win32-x64-msvc": "4.18.0", - "fsevents": "~2.3.2" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/rollup-plugin-visualizer": { - "version": "5.12.0", - "resolved": "https://registry.npmjs.org/rollup-plugin-visualizer/-/rollup-plugin-visualizer-5.12.0.tgz", - "integrity": "sha512-8/NU9jXcHRs7Nnj07PF2o4gjxmm9lXIrZ8r175bT9dK8qoLlvKTwRMArRCMgpMGlq8CTLugRvEmyMeMXIU2pNQ==", + "node_modules/spawn-wrap/node_modules/semver": { + "version": "6.3.1", "dev": true, - "dependencies": { - "open": "^8.4.0", - "picomatch": "^2.3.1", - "source-map": "^0.7.4", - "yargs": "^17.5.1" - }, + "license": "ISC", "bin": { - "rollup-plugin-visualizer": "dist/bin/cli.js" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "rollup": "2.x || 3.x || 4.x" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } + "semver": "bin/semver.js" } }, - "node_modules/rollup-plugin-visualizer/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "node_modules/split": { + "version": "1.0.1", "dev": true, + "license": "MIT", "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" + "through": "2" }, "engines": { - "node": ">=12" + "node": "*" } }, - "node_modules/rollup-plugin-visualizer/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "node_modules/rollup-plugin-visualizer/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "node_modules/sprintf-js": { + "version": "1.0.3", "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-trace": { + "version": "0.0.10", + "license": "MIT", "engines": { - "node": ">=8" + "node": "*" } }, - "node_modules/rollup-plugin-visualizer/node_modules/source-map": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", - "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "node_modules/stackback": { + "version": "0.0.2", "dev": true, + "license": "MIT" + }, + "node_modules/statuses": { + "version": "1.5.0", + "license": "MIT", "engines": { - "node": ">= 8" + "node": ">= 0.6" } }, - "node_modules/rollup-plugin-visualizer/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "node_modules/std-env": { + "version": "3.7.0", "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } + "license": "MIT" }, - "node_modules/rollup-plugin-visualizer/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "node_modules/stop-iteration-iterator": { + "version": "1.0.0", "dev": true, + "license": "MIT", "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" + "internal-slot": "^1.0.4" }, "engines": { - "node": ">=12" + "node": ">= 0.4" } }, - "node_modules/rollup-plugin-visualizer/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "node_modules/stream-browserify": { + "version": "3.0.0", "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "devOptional": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], + "license": "MIT", "dependencies": { - "queue-microtask": "^1.2.2" + "inherits": "~2.0.4", + "readable-stream": "^3.5.0" } }, - "node_modules/run-parallel-limit": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz", - "integrity": "sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], + "node_modules/stream-http": { + "version": "3.2.0", + "dev": true, + "license": "MIT", "dependencies": { - "queue-microtask": "^1.2.2" + "builtin-status-codes": "^3.0.0", + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "xtend": "^4.0.2" } }, - "node_modules/rustbn-wasm": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/rustbn-wasm/-/rustbn-wasm-0.4.0.tgz", - "integrity": "sha512-C2ujvPv05hXC69MD7YwSsoUEsT/X/dKHkkgwN9B0ZTgb0OXDC9yaHhE6Pq+uaRAzMyW0Y97bwc4JO4cqPDzVuQ==", + "node_modules/string_decoder": { + "version": "1.3.0", + "license": "MIT", "dependencies": { - "@scure/base": "^1.1.5" + "safe-buffer": "~5.2.0" } }, - "node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "node_modules/string-argv": { + "version": "0.3.2", "dev": true, - "dependencies": { - "tslib": "^2.1.0" + "license": "MIT", + 
"engines": { + "node": ">=0.6.19" } }, - "node_modules/safaridriver": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/safaridriver/-/safaridriver-0.1.2.tgz", - "integrity": "sha512-4R309+gWflJktzPXBQCobbWEHlzC4aK3a+Ov3tz2Ib2aBxiwd11phkdIBH1l0EO22x24CJMUQkpKFumRriCSRg==", - "optional": true, - "peer": true - }, - "node_modules/safe-array-concat": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", - "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "node_modules/string-width": { + "version": "5.1.2", "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", - "get-intrinsic": "^1.2.4", - "has-symbols": "^1.0.3", - "isarray": "^2.0.5" + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" }, "engines": { - "node": ">=0.4" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/ljharb" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/safe-regex-test": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", - "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", - "es-errors": "^1.3.0", - "is-regex": "^1.1.4" - }, - "engines": { - "node": ">= 0.4" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safe-stable-stringify": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz", - "integrity": "sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==", "engines": { - "node": ">=10" + "node": ">=8" } }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "node_modules/scanf": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/scanf/-/scanf-1.1.2.tgz", - "integrity": "sha512-AjyDCF9jrLcGl+wbH2OO0vfpMUNmv6skJuuLL/vgDUmG/0YXCT6SVBTOvZXOPAD5raJLtDtUU7v0yF79JDuAqA==", - "engines": { - "node": ">= 4" - } + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "dev": true, + "license": "MIT" }, - "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", "dev": true, - "bin": { - "semver": 
"bin/semver.js" - }, + "license": "MIT", "engines": { - "node": ">=10" + "node": ">=8" } }, - "node_modules/serialize-error": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-11.0.3.tgz", - "integrity": "sha512-2G2y++21dhj2R7iHAdd0FIzjGwuKZld+7Pl/bTU6YIkrC2ZMbVUjm+luj6A6V34Rv9XfKJDKpTWu9W4Gse1D9g==", - "optional": true, - "peer": true, - "dependencies": { - "type-fest": "^2.12.2" - }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.0.1", + "dev": true, + "license": "MIT", "engines": { - "node": ">=14.16" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, - "node_modules/serialize-error/node_modules/type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "optional": true, - "peer": true, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, "engines": { - "node": ">=12.20" + "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/set-blocking": { + "node_modules/string.prototype.includes": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", - "dev": true - }, - "node_modules/set-function-length": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", - "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "license": "MIT", "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" } }, - "node_modules/set-function-name": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", - "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "node_modules/string.prototype.trim": { + "version": "1.2.9", "dev": true, + "license": "MIT", "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.2" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" - } - }, - "node_modules/setimmediate": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", - "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", - "devOptional": true - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - }, - "node_modules/sha.js": { - 
"version": "2.4.11", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", - "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", - "dev": true, - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" }, - "bin": { - "sha.js": "bin.js" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "devOptional": true, + "node_modules/string.prototype.trimend": { + "version": "1.0.8", + "dev": true, + "license": "MIT", "dependencies": { - "shebang-regex": "^3.0.0" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "devOptional": true, - "engines": { - "node": ">=8" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/shiki": { - "version": "0.10.1", - "resolved": "https://registry.npmjs.org/shiki/-/shiki-0.10.1.tgz", - "integrity": "sha512-VsY7QJVzU51j5o1+DguUd+6vmCmZ5v/6gYu4vyYAhzjuNQU6P/vmSy4uQaOhvje031qQMiW0d2BwgMH52vqMng==", + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", "dev": true, - "dependencies": { - "jsonc-parser": "^3.0.0", - "vscode-oniguruma": "^1.6.1", - "vscode-textmate": "5.2.0" - } - }, - "node_modules/side-channel": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", - "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.7", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -14064,643 +11109,588 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/siginfo": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", - "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", - "devOptional": true + "node_modules/stringify-object-es5": { + "version": "2.5.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "is-plain-obj": "^1.0.0", + "is-regexp": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true + "node_modules/stringify-object-es5/node_modules/is-plain-obj": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "node_modules/strip-ansi": { + "version": "6.0.1", + 
"license": "MIT", "dependencies": { - "is-arrayish": "^0.3.1" + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" } }, - "node_modules/sirv": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", - "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", - "devOptional": true, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "dev": true, + "license": "MIT", "dependencies": { - "@polka/url": "^1.0.0-next.24", - "mrmime": "^2.0.0", - "totalist": "^3.0.0" + "ansi-regex": "^5.0.1" }, "engines": { - "node": ">= 10" + "node": ">=8" } }, - "node_modules/slash": { + "node_modules/strip-bom": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=4" } }, - "node_modules/slice-ansi": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", - "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", + "node_modules/strip-final-newline": { + "version": "3.0.0", "dev": true, - "dependencies": { - "ansi-styles": "^6.0.0", - "is-fullwidth-code-point": "^4.0.0" - }, + "license": "MIT", "engines": { "node": ">=12" }, "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "node_modules/strip-json-comments": { + "version": "3.1.1", "dev": true, + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=8" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "optional": true, - "peer": true, + "node_modules/supports-color": { + "version": "7.2.0", + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" + "node": ">=8" } }, - "node_modules/snappyjs": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/snappyjs/-/snappyjs-0.6.1.tgz", - "integrity": "sha512-YIK6I2lsH072UE0aOFxxY1dPDCS43I5ktqHpeAsuLNYWkE5pGxRGWfDM4/vSUfNzXjC1Ivzt3qx31PCLmc9yqg==" + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/socks": { - "version": "2.8.3", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", - "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", - "optional": true, - "peer": true, + "node_modules/svg-element-attributes": { + "version": "1.3.1", + "dev": true, + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + 
"node_modules/synckit": { + "version": "0.9.1", + "dev": true, + "license": "MIT", "dependencies": { - "ip-address": "^9.0.5", - "smart-buffer": "^4.2.0" + "@pkgr/core": "^0.1.0", + "tslib": "^2.6.2" }, "engines": { - "node": ">= 10.0.0", - "npm": ">= 3.0.0" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" } }, - "node_modules/socks-proxy-agent": { - "version": "8.0.4", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.4.tgz", - "integrity": "sha512-GNAq/eg8Udq2x0eNiFkr9gRg5bA7PXEWagQdeRX4cPSG+X/8V38v637gim9bjFptMk1QWsCTr0ttrJEiXbNnRw==", - "optional": true, - "peer": true, + "node_modules/tape": { + "version": "5.6.0", + "dev": true, + "license": "MIT", "dependencies": { - "agent-base": "^7.1.1", - "debug": "^4.3.4", - "socks": "^2.8.3" + "array.prototype.every": "^1.1.3", + "call-bind": "^1.0.2", + "deep-equal": "^2.0.5", + "defined": "^1.0.0", + "dotignore": "^0.1.2", + "for-each": "^0.3.3", + "get-package-type": "^0.1.0", + "glob": "^7.2.3", + "has": "^1.0.3", + "has-dynamic-import": "^2.0.1", + "inherits": "^2.0.4", + "is-regex": "^1.1.4", + "minimist": "^1.2.6", + "object-inspect": "^1.12.2", + "object-is": "^1.1.5", + "object-keys": "^1.1.1", + "object.assign": "^4.1.3", + "resolve": "^2.0.0-next.3", + "resumer": "^0.0.0", + "string.prototype.trim": "^1.2.6", + "through": "^2.3.8" + }, + "bin": { + "tape": "bin/tape" + } + }, + "node_modules/tape/node_modules/glob": { + "version": "7.2.3", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" }, "engines": { - "node": ">= 14" + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/solc": { - "version": "0.8.26", - "resolved": "https://registry.npmjs.org/solc/-/solc-0.8.26.tgz", - "integrity": "sha512-yiPQNVf5rBFHwN6SIf3TUUvVAFKcQqmSUFeq+fb6pNRCo0ZCgpYOZDi3BVoezCPIAcKrVYd/qXlBLUP9wVrZ9g==", + "node_modules/tape/node_modules/resolve": { + "version": "2.0.0-next.5", "dev": true, + "license": "MIT", "dependencies": { - "command-exists": "^1.2.8", - "commander": "^8.1.0", - "follow-redirects": "^1.12.1", - "js-sha3": "0.8.0", - "memorystream": "^0.3.1", - "semver": "^5.5.0", - "tmp": "0.0.33" + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { - "solcjs": "solc.js" + "resolve": "bin/resolve" }, - "engines": { - "node": ">=10.0.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/solc/node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "node_modules/tdigest": { + "version": "0.1.2", + "license": "MIT", + "dependencies": { + "bintrees": "1.0.2" + } + }, + "node_modules/test-exclude": { + "version": "7.0.1", "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^10.4.1", + "minimatch": "^9.0.4" + }, "engines": { - "node": ">= 12" + "node": ">=18" } }, - "node_modules/solc/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + 
"node_modules/test-exclude/node_modules/brace-expansion": { + "version": "2.0.1", "dev": true, - "bin": { - "semver": "bin/semver" + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" } }, - "node_modules/sort-object-keys": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/sort-object-keys/-/sort-object-keys-1.1.3.tgz", - "integrity": "sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg==", - "dev": true - }, - "node_modules/sort-package-json": { - "version": "1.57.0", - "resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-1.57.0.tgz", - "integrity": "sha512-FYsjYn2dHTRb41wqnv+uEqCUvBpK3jZcTp9rbz2qDTmel7Pmdtf+i2rLaaPMRZeSVM60V3Se31GyWFpmKs4Q5Q==", + "node_modules/test-exclude/node_modules/foreground-child": { + "version": "3.3.0", "dev": true, + "license": "ISC", "dependencies": { - "detect-indent": "^6.0.0", - "detect-newline": "3.1.0", - "git-hooks-list": "1.0.3", - "globby": "10.0.0", - "is-plain-obj": "2.1.0", - "sort-object-keys": "^1.1.3" + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" }, - "bin": { - "sort-package-json": "cli.js" + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/sort-package-json/node_modules/globby": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.0.tgz", - "integrity": "sha512-3LifW9M4joGZasyYPz2A1U74zbC/45fvpXUvO/9KbSa+VV0aGZarWkfdgKyR9sExNP0t0x0ss/UMJpNpcaTspw==", + "node_modules/test-exclude/node_modules/glob": { + "version": "10.4.5", "dev": true, + "license": "ISC", "dependencies": { - "@types/glob": "^7.1.1", - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.0.3", - "glob": "^7.1.3", - "ignore": "^5.1.1", - "merge2": "^1.2.3", - "slash": "^3.0.0" + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" }, - "engines": { - "node": ">=8" + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "devOptional": true, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "9.0.5", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, "engines": { - "node": ">=0.10.0" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", - "devOptional": true, + "node_modules/test-exclude/node_modules/signal-exit": { + "version": "4.1.0", + "dev": true, + "license": "ISC", "engines": { - "node": ">=0.10.0" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/spacetrim": { - "version": "0.11.36", - "resolved": "https://registry.npmjs.org/spacetrim/-/spacetrim-0.11.36.tgz", - "integrity": "sha512-jqv5aAfMLkBnFK+38QUtEGgU7x1KrfpDnCdjX4+W1IEVgA8Kf3tk8K9je8j2nkCSXdIngjda53fuXERr4/61kw==", - "funding": [ - { - "type": "individual", - "url": 
"https://buymeacoffee.com/hejny" - }, - { - "type": "github", - "url": "https://github.com/hejny/spacetrim/blob/main/README.md#%EF%B8%8F-contributing" - } - ], - "optional": true, - "peer": true - }, - "node_modules/spawn-wrap": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", - "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", + "node_modules/testdouble": { + "version": "3.20.2", "dev": true, - "dependencies": { - "foreground-child": "^2.0.0", - "is-windows": "^1.0.2", - "make-dir": "^3.0.0", - "rimraf": "^3.0.0", - "signal-exit": "^3.0.2", - "which": "^2.0.1" + "license": "MIT", + "dependencies": { + "lodash": "^4.17.21", + "quibble": "^0.9.2", + "stringify-object-es5": "^2.5.0", + "theredoc": "^1.0.0" }, "engines": { - "node": ">=8" + "node": ">= 16" } }, - "node_modules/spawn-wrap/node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "node_modules/testdouble-timers": { + "version": "0.1.1", "dev": true, + "license": "MIT", "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" + "lolex": "^1.4.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "peerDependencies": { + "testdouble": "^3.0.0" } }, - "node_modules/spawn-wrap/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "node_modules/text-hex": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/text-table": { + "version": "0.2.0", "dev": true, - "bin": { - "semver": "bin/semver.js" - } + "license": "MIT" }, - "node_modules/split2": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", - "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 10.x" - } + "node_modules/theredoc": { + "version": "1.0.0", + "dev": true, + "license": "MIT" }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true + "node_modules/through": { + "version": "2.3.8", + "license": "MIT" }, - "node_modules/stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==", + "node_modules/timers-browserify": { + "version": "2.0.12", + "dev": true, + "license": "MIT", + "dependencies": { + "setimmediate": "^1.0.4" + }, "engines": { - "node": "*" + "node": ">=0.6.0" } }, - "node_modules/stackback": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", - "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", - "devOptional": true + "node_modules/tinybench": { + "version": "2.9.0", + "dev": true, + "license": "MIT" }, - "node_modules/statuses": { - "version": "1.5.0", - "resolved": 
"https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", + "node_modules/tinypool": { + "version": "1.0.0", + "dev": true, + "license": "MIT", "engines": { - "node": ">= 0.6" + "node": "^18.0.0 || >=20.0.0" } }, - "node_modules/std-env": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.7.0.tgz", - "integrity": "sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==", - "devOptional": true - }, - "node_modules/stop-iteration-iterator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", - "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "node_modules/tinyrainbow": { + "version": "1.2.0", "dev": true, - "dependencies": { - "internal-slot": "^1.0.4" - }, + "license": "MIT", "engines": { - "node": ">= 0.4" + "node": ">=14.0.0" } }, - "node_modules/stream-browserify": { + "node_modules/tinyspy": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz", - "integrity": "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==", "dev": true, - "dependencies": { - "inherits": "~2.0.4", - "readable-stream": "^3.5.0" + "license": "MIT", + "engines": { + "node": ">=14.0.0" } }, - "node_modules/stream-http": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-3.2.0.tgz", - "integrity": "sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A==", + "node_modules/tmp": { + "version": "0.0.33", "dev": true, + "license": "MIT", "dependencies": { - "builtin-status-codes": "^3.0.0", - "inherits": "^2.0.4", - "readable-stream": "^3.6.0", - "xtend": "^4.0.2" - } - }, - "node_modules/streamx": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", - "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", - "optional": true, - "peer": true, - "dependencies": { - "fast-fifo": "^1.3.2", - "queue-tick": "^1.0.1", - "text-decoder": "^1.1.0" + "os-tmpdir": "~1.0.2" }, - "optionalDependencies": { - "bare-events": "^2.2.0" - } - }, - "node_modules/strict-event-emitter": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz", - "integrity": "sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==", - "optional": true, - "peer": true - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" + "engines": { + "node": ">=0.6.0" } }, - "node_modules/string-argv": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz", - "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==", + "node_modules/to-fast-properties": { + "version": "2.0.0", "dev": true, + "license": "MIT", "engines": { - "node": ">=0.6.19" + "node": ">=4" } }, - "node_modules/string-width": { - "version": "5.1.2", - 
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "devOptional": true, + "node_modules/to-regex-range": { + "version": "5.0.1", + "dev": true, + "license": "MIT", "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "is-number": "^7.0.0" }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8.0" } }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "devOptional": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, + "node_modules/toidentifier": { + "version": "1.0.1", + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=0.6" } }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "devOptional": true - }, - "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "devOptional": true, + "node_modules/totalist": { + "version": "3.0.1", + "dev": true, + "license": "MIT", "engines": { - "node": ">=8" + "node": ">=6" } }, - "node_modules/string-width/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "devOptional": true, + "node_modules/triple-beam": { + "version": "1.4.1", + "license": "MIT", "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" + "node": ">= 14.0.0" } }, - "node_modules/string-width/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "devOptional": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "dev": true, + "license": "MIT", "engines": { - "node": ">=12" + "node": ">=16" }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" + "peerDependencies": { + "typescript": ">=4.2.0" } }, - "node_modules/string.prototype.includes": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.includes/-/string.prototype.includes-2.0.0.tgz", - "integrity": "sha512-E34CkBgyeqNDcrbU76cDjL5JLcVrtSdYq0MEh/B10r17pRP4ciHLwTgnuLV8Ay6cgEMLkcBkFCKyFZ43YldYzg==", + "node_modules/tsconfig-paths": { + "version": "3.15.0", "dev": true, + "license": "MIT", "dependencies": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": 
"^3.0.0" } }, - "node_modules/string.prototype.trim": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", - "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "node_modules/tslib": { + "version": "2.6.3", + "license": "0BSD" + }, + "node_modules/tsutils": { + "version": "3.21.0", "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", - "es-object-atoms": "^1.0.0" + "tslib": "^1.8.1" }, "engines": { - "node": ">= 0.4" + "node": ">= 6" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" } }, - "node_modules/string.prototype.trimend": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", - "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "node_modules/tsutils/node_modules/tslib": { + "version": "1.14.1", "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "license": "0BSD" }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", - "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "node_modules/tsx": { + "version": "4.17.0", "dev": true, + "license": "MIT", "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" + "esbuild": "~0.23.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" }, "engines": { - "node": ">= 0.4" + "node": ">=18.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/tsx/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/stringify-object-es5": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/stringify-object-es5/-/stringify-object-es5-2.5.0.tgz", - "integrity": "sha512-vE7Xdx9ylG4JI16zy7/ObKUB+MtxuMcWlj/WHHr3+yAlQoN6sst2stU9E+2Qs3OrlJw/Pf3loWxL1GauEHf6MA==", + "node_modules/tty-browserify": { + "version": "0.0.1", "dev": true, + "license": "MIT" + }, + "node_modules/type-check": { + "version": "0.4.0", + "dev": true, + "license": "MIT", "dependencies": { - "is-plain-obj": "^1.0.0", - "is-regexp": "^1.0.0" + "prelude-ls": "^1.2.1" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.8.0" } }, - "node_modules/stringify-object-es5/node_modules/is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + 
"node_modules/type-fest": { + "version": "0.20.2", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { - "node": ">=0.10.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "node_modules/type-is": { + "version": "1.6.18", + "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.1" + "media-typer": "0.3.0", + "mime-types": "~2.1.24" }, "engines": { - "node": ">=8" + "node": ">= 0.6" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, + "node_modules/typed-array-buffer": { + "version": "1.0.2", + "dev": true, + "license": "MIT", "dependencies": { - "ansi-regex": "^5.0.1" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" }, "engines": { - "node": ">=8" + "node": ">= 0.4" } }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "node_modules/typed-array-byte-length": { + "version": "1.0.1", "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "devOptional": true, - "engines": { - "node": ">=12" + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, "engines": { - "node": ">=8" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/strip-literal": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-2.1.0.tgz", - "integrity": "sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==", + "node_modules/typed-array-byte-offset": { + "version": "1.0.2", "dev": true, + "license": "MIT", "dependencies": { - "js-tokens": "^9.0.0" + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/antfu" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/strip-literal/node_modules/js-tokens": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.0.tgz", - "integrity": 
"sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==", - "dev": true - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "node_modules/typed-array-length": { + "version": "1.0.6", + "dev": true, + "license": "MIT", "dependencies": { - "has-flag": "^4.0.0" + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, "engines": { "node": ">= 0.4" }, @@ -14708,1056 +11698,1049 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/svg-element-attributes": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/svg-element-attributes/-/svg-element-attributes-1.3.1.tgz", - "integrity": "sha512-Bh05dSOnJBf3miNMqpsormfNtfidA/GxQVakhtn0T4DECWKeXQRQUceYjJ+OxYiiLdGe4Jo9iFV8wICFapFeIA==", + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", "dev": true, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "license": "MIT", + "dependencies": { + "is-typedarray": "^1.0.0" } }, - "node_modules/synckit": { - "version": "0.8.8", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.8.tgz", - "integrity": "sha512-HwOKAP7Wc5aRGYdKH+dw0PRRpbO841v2DENBtjnR5HFWoiNByAl7vrx3p0G/rCyYXQsrxqtX48TImFtPcIHSpQ==", + "node_modules/typedoc": { + "version": "0.23.10", "dev": true, + "license": "Apache-2.0", "dependencies": { - "@pkgr/core": "^0.1.0", - "tslib": "^2.6.2" + "lunr": "^2.3.9", + "marked": "^4.0.18", + "minimatch": "^5.1.0", + "shiki": "^0.10.1" + }, + "bin": { + "typedoc": "bin/typedoc" }, "engines": { - "node": "^14.18.0 || >=16.0.0" + "node": ">= 14.14" }, - "funding": { - "url": "https://opencollective.com/unts" + "peerDependencies": { + "typescript": "4.6.x || 4.7.x" } }, - "node_modules/tape": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/tape/-/tape-5.6.0.tgz", - "integrity": "sha512-LyM4uqbiTAqDgsHTY0r1LH66yE24P3SZaz5TL3mPUds0XCTFl/0AMUBrjgBjUclvbPTFB4IalXg0wFfbTuuu/Q==", + "node_modules/typedoc-plugin-markdown": { + "version": "3.13.4", "dev": true, + "license": "MIT", "dependencies": { - "array.prototype.every": "^1.1.3", - "call-bind": "^1.0.2", - "deep-equal": "^2.0.5", - "defined": "^1.0.0", - "dotignore": "^0.1.2", - "for-each": "^0.3.3", - "get-package-type": "^0.1.0", - "glob": "^7.2.3", - "has": "^1.0.3", - "has-dynamic-import": "^2.0.1", - "inherits": "^2.0.4", - "is-regex": "^1.1.4", - "minimist": "^1.2.6", - "object-inspect": "^1.12.2", - "object-is": "^1.1.5", - "object-keys": "^1.1.1", - "object.assign": "^4.1.3", - "resolve": "^2.0.0-next.3", - "resumer": "^0.0.0", - "string.prototype.trim": "^1.2.6", - "through": "^2.3.8" + "handlebars": "^4.7.7" }, - "bin": { - "tape": "bin/tape" + "peerDependencies": { + "typedoc": ">=0.23.0" } }, - "node_modules/tape/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": 
"sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", + "node_modules/typedoc/node_modules/brace-expansion": { + "version": "2.0.1", "dev": true, + "license": "MIT", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "balanced-match": "^1.0.0" } }, - "node_modules/tape/node_modules/resolve": { - "version": "2.0.0-next.5", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", - "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "node_modules/typedoc/node_modules/minimatch": { + "version": "5.1.6", "dev": true, + "license": "ISC", "dependencies": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" + "brace-expansion": "^2.0.1" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">=10" } }, - "node_modules/tar-fs": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.4.tgz", - "integrity": "sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w==", - "optional": true, - "peer": true, - "dependencies": { - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^3.1.5" + "node_modules/typescript": { + "version": "4.7.4", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" } }, - "node_modules/tar-stream": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", - "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "node_modules/uglify-js": { + "version": "3.19.2", + "dev": true, + "license": "BSD-2-Clause", "optional": true, - "peer": true, - "dependencies": { - "b4a": "^1.6.4", - "fast-fifo": "^1.2.0", - "streamx": "^2.15.0" + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" } }, - "node_modules/tdigest": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.2.tgz", - "integrity": "sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==", + "node_modules/uint8-varint": { + "version": "2.0.4", + "license": "Apache-2.0 OR MIT", "dependencies": { - "bintrees": "1.0.2" + "uint8arraylist": "^2.0.0", + "uint8arrays": "^5.0.0" } }, - "node_modules/test-exclude": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz", - "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==", - "dev": true, + "node_modules/uint8arraylist": { + "version": "2.4.8", + "license": "Apache-2.0 OR MIT", "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^10.4.1", - "minimatch": "^9.0.4" - }, - "engines": { - "node": ">=18" + "uint8arrays": "^5.0.1" } }, - "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": 
"sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, + "node_modules/uint8arrays": { + "version": "5.1.0", + "license": "Apache-2.0 OR MIT", "dependencies": { - "balanced-match": "^1.0.0" + "multiformats": "^13.0.0" } }, - "node_modules/test-exclude/node_modules/foreground-child": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.2.1.tgz", - "integrity": "sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA==", + "node_modules/unbox-primitive": { + "version": "1.0.2", "dev": true, + "license": "MIT", "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" }, "funding": { - "url": "https://github.com/sponsors/isaacs" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/test-exclude/node_modules/glob": { - "version": "10.4.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz", - "integrity": "sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, + "node_modules/undici-types": { + "version": "5.26.5", + "license": "MIT" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "license": "MIT", "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "node": ">= 0.8" } }, - "node_modules/test-exclude/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "node_modules/update-browserslist-db": { + "version": "1.1.0", "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", "dependencies": { - "brace-expansion": "^2.0.1" + "escalade": "^3.1.2", + "picocolors": "^1.0.1" }, - "engines": { - "node": ">=16 || 14 >=14.17" + "bin": { + "update-browserslist-db": "cli.js" }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "peerDependencies": { + "browserslist": ">= 4.21.0" } }, - "node_modules/test-exclude/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "node_modules/uri-js": { + "version": "4.4.1", "dev": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" } }, - "node_modules/testdouble": { - "version": "3.20.2", - "resolved": "https://registry.npmjs.org/testdouble/-/testdouble-3.20.2.tgz", - "integrity": "sha512-790e9vJKdfddWNOaxW1/V9FcMk48cPEl3eJSj2i8Hh1fX89qArEJ6cp3DBnaECpGXc3xKJVWbc1jeNlWYWgiMg==", + "node_modules/url": { + "version": "0.11.4", 
"dev": true, + "license": "MIT", "dependencies": { - "lodash": "^4.17.21", - "quibble": "^0.9.2", - "stringify-object-es5": "^2.5.0", - "theredoc": "^1.0.0" + "punycode": "^1.4.1", + "qs": "^6.12.3" }, "engines": { - "node": ">= 16" + "node": ">= 0.4" } }, - "node_modules/testdouble-timers": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/testdouble-timers/-/testdouble-timers-0.1.1.tgz", - "integrity": "sha512-xh41OtK2XYkKrvfqGWWlYJ16+ce3NqHw1hJVNzao2A28jaheqJbpG3qg14rIcCvRhj1itA0DahHVVqoM4XDYNA==", + "node_modules/url/node_modules/punycode": { + "version": "1.4.1", "dev": true, - "dependencies": { - "lolex": "^1.4.0" - }, - "peerDependencies": { - "testdouble": "^3.0.0" - } + "license": "MIT" }, - "node_modules/text-decoder": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.0.tgz", - "integrity": "sha512-TmLJNj6UgX8xcUZo4UDStGQtDiTzF7BzWlzn9g7UWrjkpHr5uJTK1ld16wZ3LXb2vb6jH8qU89dW5whuMdXYdw==", - "optional": true, - "peer": true, + "node_modules/url/node_modules/qs": { + "version": "6.13.0", + "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "b4a": "^1.6.4" + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/text-hex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", - "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" - }, - "node_modules/text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true - }, - "node_modules/theredoc": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/theredoc/-/theredoc-1.0.0.tgz", - "integrity": "sha512-KU3SA3TjRRM932jpNfD3u4Ec3bSvedyo5ITPI7zgWYnKep7BwQQaxlhI9qbO+lKJoRnoAbEVfMcAHRuKVYikDA==", - "dev": true - }, - "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" - }, - "node_modules/timers-browserify": { - "version": "2.0.12", - "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", - "integrity": "sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==", + "node_modules/util": { + "version": "0.12.5", "dev": true, + "license": "MIT", "dependencies": { - "setimmediate": "^1.0.4" - }, - "engines": { - "node": ">=0.6.0" + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" } }, - "node_modules/tinybench": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.8.0.tgz", - "integrity": "sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==", - "devOptional": true + "node_modules/util-deprecate": { + "version": "1.0.2", + "license": "MIT" }, - "node_modules/tinypool": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.0.tgz", - "integrity": "sha512-KIKExllK7jp3uvrNtvRBYBWBOAXSX8ZvoaD8T+7KB/QHIuoJW3Pmr60zucywjAlMb5TeXUkcs/MWeWLu0qvuAQ==", - "devOptional": true, + "node_modules/utils-merge": { + "version": "1.0.1", + 
"license": "MIT", "engines": { - "node": "^18.0.0 || >=20.0.0" + "node": ">= 0.4.0" } }, - "node_modules/tinyspy": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.0.tgz", - "integrity": "sha512-q5nmENpTHgiPVd1cJDDc9cVoYN5x4vCvwT3FMilvKPKneCBZAxn2YWQjDF0UMcE9k0Cay1gBiDfTMU0g+mPMQA==", - "devOptional": true, - "engines": { - "node": ">=14.0.0" + "node_modules/uuid": { + "version": "8.3.2", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" } }, - "node_modules/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "node_modules/v8-to-istanbul": { + "version": "9.3.0", "dev": true, + "license": "ISC", "dependencies": { - "os-tmpdir": "~1.0.2" + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" }, "engines": { - "node": ">=0.6.0" + "node": ">=10.12.0" } }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, + "node_modules/vary": { + "version": "1.1.2", + "license": "MIT", "engines": { - "node": ">=4" + "node": ">= 0.8" } }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "devOptional": true, + "node_modules/verkle-cryptography-wasm": { + "version": "0.4.5", + "license": "MIT/Apache", "dependencies": { - "is-number": "^7.0.0" + "@scure/base": "^1.1.5" }, "engines": { - "node": ">=8.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/totalist": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", - "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", - "devOptional": true, - "engines": { - "node": ">=6" + "node": ">=18", + "npm": ">=7" } }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "optional": true, - "peer": true - }, - "node_modules/triple-beam": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz", - "integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==", + "node_modules/vite": { + "version": "5.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.40", + "rollup": "^4.13.0" + }, + "bin": { + "vite": "bin/vite.js" + }, "engines": { - "node": ">= 14.0.0" + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": 
"*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } } }, - "node_modules/ts-api-utils": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", - "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "node_modules/vite-node": { + "version": "2.0.5", "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.3.5", + "pathe": "^1.1.2", + "tinyrainbow": "^1.2.0", + "vite": "^5.0.0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, "engines": { - "node": ">=16" + "node": "^18.0.0 || >=20.0.0" }, - "peerDependencies": { - "typescript": ">=4.2.0" + "funding": { + "url": "https://opencollective.com/vitest" } }, - "node_modules/tsconfig-paths": { - "version": "3.15.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", - "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "node_modules/vite-plugin-node-polyfills": { + "version": "0.21.0", "dev": true, + "license": "MIT", "dependencies": { - "@types/json5": "^0.0.29", - "json5": "^1.0.2", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" + "@rollup/plugin-inject": "^5.0.5", + "node-stdlib-browser": "^1.2.0" + }, + "funding": { + "url": "https://github.com/sponsors/davidmyersdev" + }, + "peerDependencies": { + "vite": "^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0" } }, - "node_modules/tslib": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", - "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" - }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "node_modules/vite-plugin-top-level-await": { + "version": "1.4.4", "dev": true, + "license": "MIT", "dependencies": { - "tslib": "^1.8.1" - }, - "engines": { - "node": ">= 6" + "@rollup/plugin-virtual": "^3.0.2", + "@swc/core": "^1.7.0", + "uuid": "^10.0.0" }, "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + "vite": ">=2.8" } }, - "node_modules/tsutils/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - }, - "node_modules/tsx": { - "version": "4.16.0", - "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.16.0.tgz", - "integrity": "sha512-MPgN+CuY+4iKxGoJNPv+1pyo5YWZAQ5XfsyobUG+zoKG7IkvCPLZDEyoIb8yLS2FcWci1nlxAqmvPlFWD5AFiQ==", + "node_modules/vite-plugin-top-level-await/node_modules/uuid": { + "version": "10.0.0", "dev": true, - "dependencies": { - "esbuild": "~0.21.5", - "get-tsconfig": "^4.7.5" - }, + "funding": [ + "https://github.com/sponsors/broofa", 
+ "https://github.com/sponsors/ctavan" + ], + "license": "MIT", "bin": { - "tsx": "dist/cli.mjs" - }, - "engines": { - "node": ">=18.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" + "uuid": "dist/bin/uuid" } }, - "node_modules/tty-browserify": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", - "integrity": "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw==", - "dev": true - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "node_modules/vite-plugin-wasm": { + "version": "3.3.0", "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" + "license": "MIT", + "peerDependencies": { + "vite": "^2 || ^3 || ^4 || ^5" } }, - "node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "cpu": [ + "x64" + ], "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=12" } }, - "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } + "node_modules/vite/node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.20.0.tgz", + "integrity": "sha512-TSpWzflCc4VGAUJZlPpgAJE1+V60MePDQnBd7PPkpuEmOy8i87aL6tinFGKBFKuEDikYpig72QzdT3QPYIi+oA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] }, - "node_modules/typed-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", - "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "node_modules/vite/node_modules/@rollup/rollup-android-arm64": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.20.0.tgz", + "integrity": "sha512-u00Ro/nok7oGzVuh/FMYfNoGqxU5CPWz1mxV85S2w9LxHR8OoMQBuSk+3BKVIDYgkpeOET5yXkx90OYFc+ytpQ==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "es-errors": "^1.3.0", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - } + "optional": true, + "os": [ + "android" + ] }, - "node_modules/typed-array-byte-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", - "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "node_modules/vite/node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.20.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.20.0.tgz", + "integrity": "sha512-uFVfvzvsdGtlSLuL0ZlvPJvl6ZmrH4CBwLGEFPe7hUmf7htGAN+aXo43R/V6LATyxlKVC/m6UsLb7jbG+LG39Q==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "optional": true, + "os": [ + "darwin" + ] }, - "node_modules/typed-array-byte-offset": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", - "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "node_modules/vite/node_modules/@rollup/rollup-darwin-x64": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.20.0.tgz", + "integrity": "sha512-xbrMDdlev53vNXexEa6l0LffojxhqDTBeL+VUxuuIXys4x6xyvbKq5XqTXBCEUA8ty8iEJblHvFaWRJTk/icAQ==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "optional": true, + "os": [ + "darwin" + ] }, - "node_modules/typed-array-length": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", - "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "node_modules/vite/node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.20.0.tgz", + "integrity": "sha512-jMYvxZwGmoHFBTbr12Xc6wOdc2xA5tF5F2q6t7Rcfab68TT0n+r7dgawD4qhPEvasDsVpQi+MgDzj2faOLsZjA==", + "cpu": [ + "arm" + ], "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-proto": "^1.0.3", - "is-typed-array": "^1.1.13", - "possible-typed-array-names": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.20.0.tgz", + "integrity": "sha512-1asSTl4HKuIHIB1GcdFHNNZhxAYEdqML/MW4QmPS4G0ivbEcBr1JKlFLKsIRqjSwOBkdItn3/ZDlyvZ/N6KPlw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.20.0.tgz", + "integrity": "sha512-COBb8Bkx56KldOYJfMf6wKeYJrtJ9vEgBRAOkfw6Ens0tnmzPqvlpjZiLgkhg6cA3DGzCmLmmd319pmHvKWWlQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.20.0.tgz", + "integrity": 
"sha512-+it+mBSyMslVQa8wSPvBx53fYuZK/oLTu5RJoXogjk6x7Q7sz1GNRsXWjn6SwyJm8E/oMjNVwPhmNdIjwP135Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.20.0.tgz", + "integrity": "sha512-yAMvqhPfGKsAxHN8I4+jE0CpLWD8cv4z7CK7BMmhjDuz606Q2tFKkWRY8bHR9JQXYcoLfopo5TTqzxgPUjUMfw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/vite/node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.20.0.tgz", + "integrity": "sha512-qmuxFpfmi/2SUkAw95TtNq/w/I7Gpjurx609OOOV7U4vhvUhBcftcmXwl3rqAek+ADBwSjIC4IVNLiszoj3dPA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "node_modules/vite/node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.20.0.tgz", + "integrity": "sha512-I0BtGXddHSHjV1mqTNkgUZLnS3WtsqebAXv11D5BZE/gfw5KoyXSAXVqyJximQXNvNzUo4GKlCK/dIwXlz+jlg==", + "cpu": [ + "s390x" + ], "dev": true, - "dependencies": { - "is-typedarray": "^1.0.0" - } + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/typedoc": { - "version": "0.23.10", - "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.23.10.tgz", - "integrity": "sha512-03EUiu/ZuScUBMnY6p0lY+HTH8SwhzvRE3gImoemdPDWXPXlks83UGTx++lyquWeB1MTwm9D9Ca8RIjkK3AFfQ==", + "node_modules/vite/node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.20.0.tgz", + "integrity": "sha512-psegMvP+Ik/Bg7QRJbv8w8PAytPA7Uo8fpFjXyCRHWm6Nt42L+JtoqH8eDQ5hRP7/XW2UiIriy1Z46jf0Oa1kA==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "lunr": "^2.3.9", - "marked": "^4.0.18", - "minimatch": "^5.1.0", - "shiki": "^0.10.1" - }, - "bin": { - "typedoc": "bin/typedoc" - }, - "engines": { - "node": ">= 14.14" - }, - "peerDependencies": { - "typescript": "4.6.x || 4.7.x" - } + "optional": true, + "os": [ + "win32" + ] }, - "node_modules/typedoc-plugin-markdown": { - "version": "3.13.4", - "resolved": "https://registry.npmjs.org/typedoc-plugin-markdown/-/typedoc-plugin-markdown-3.13.4.tgz", - "integrity": "sha512-E/EBBmu6ARtnbswZGtBVBB/BfukZiGMOlqPc0RXCI/NFitONBahFqbCAF5fKQlijlcfipJj5pw5AMFH3NytrAw==", + "node_modules/vite/node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.20.0.tgz", + "integrity": "sha512-GabekH3w4lgAJpVxkk7hUzUf2hICSQO0a/BLFA11/RMxQT92MabKAqyubzDZmMOC/hcJNlc+rrypzNzYl4Dx7A==", + "cpu": [ + "ia32" + ], "dev": true, - "dependencies": { - "handlebars": "^4.7.7" - }, - "peerDependencies": { - "typedoc": ">=0.23.0" - } + "optional": true, + "os": [ + "win32" + ] }, - "node_modules/typedoc/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "node_modules/vite/node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.20.0.tgz", + "integrity": "sha512-aJ1EJSuTdGnM6qbVC4B5DSmozPTqIag9fSzXRNNo+humQLG89XpPgdt16Ia56ORD7s+H8Pmyx44uczDQ0yDzpg==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } + "optional": true, + "os": [ + "win32" + ] }, - "node_modules/typedoc/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "node_modules/vite/node_modules/esbuild": { + "version": "0.21.5", "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/typescript": { - "version": "4.7.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", - "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", - "devOptional": true, + "hasInstallScript": true, + "license": "MIT", "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" + "esbuild": "bin/esbuild" }, "engines": { - "node": ">=4.2.0" + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" } }, - "node_modules/uglify-js": { - "version": "3.18.0", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.18.0.tgz", - "integrity": "sha512-SyVVbcNBCk0dzr9XL/R/ySrmYf0s372K6/hFklzgcp2lBFyXtw4I7BOdDjlLhE1aVqaI/SHWXWmYdlZxuyF38A==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], "dev": true, "optional": true, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, + "os": [ + "aix" + ], "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/uint8-varint": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/uint8-varint/-/uint8-varint-2.0.4.tgz", - "integrity": "sha512-FwpTa7ZGA/f/EssWAb5/YV6pHgVF1fViKdW8cWaEarjB8t7NyofSWBdOTyFPaGuUG4gx3v1O3PQ8etsiOs3lcw==", - "dependencies": { - "uint8arraylist": "^2.0.0", - "uint8arrays": "^5.0.0" - } - }, - "node_modules/uint8arraylist": { - "version": "2.4.8", - "resolved": 
"https://registry.npmjs.org/uint8arraylist/-/uint8arraylist-2.4.8.tgz", - "integrity": "sha512-vc1PlGOzglLF0eae1M8mLRTBivsvrGsdmJ5RbK3e+QRvRLOZfZhQROTwH/OfyF3+ZVUg9/8hE8bmKP2CvP9quQ==", - "dependencies": { - "uint8arrays": "^5.0.1" - } - }, - "node_modules/uint8arrays": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/uint8arrays/-/uint8arrays-5.1.0.tgz", - "integrity": "sha512-vA6nFepEmlSKkMBnLBaUMVvAC4G3CTmO58C12y4sq6WPDOR7mOFYOi7GlrQ4djeSbP6JG9Pv9tJDM97PedRSww==", - "dependencies": { - "multiformats": "^13.0.0" + "node": ">=12" } }, - "node_modules/unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/unbzip2-stream": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", - "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", "optional": true, - "peer": true, - "dependencies": { - "buffer": "^5.2.1", - "through": "^2.3.8" + "os": [ + "android" + ], + "engines": { + "node": ">=12" } }, - "node_modules/unbzip2-stream/node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" ], + "dev": true, "optional": true, - "peer": true, - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" + "os": [ + "android" + ], + "engines": { + "node": ">=12" } }, - "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" - }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": 
"sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, "optional": true, - "peer": true, + "os": [ + "android" + ], "engines": { - "node": ">= 10.0.0" + "node": ">=12" } }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">= 0.8" + "node": ">=12" } }, - "node_modules/update-browserslist-db": { - "version": "1.0.16", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz", - "integrity": "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" ], - "dependencies": { - "escalade": "^3.1.2", - "picocolors": "^1.0.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" } }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "punycode": "^2.1.0" + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" } }, - "node_modules/url": { - "version": "0.11.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.3.tgz", - "integrity": "sha512-6hxOLGfZASQK/cijlZnZJTq8OXAkt/3YGfQX45vvMYXpZoo8NdWZcY73K108Jf759lS1Bv/8wXnHDTSz17dSRw==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], "dev": true, - "dependencies": { - "punycode": "^1.4.1", - "qs": "^6.11.2" + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" } }, - 
"node_modules/url/node_modules/punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "dev": true - }, - "node_modules/url/node_modules/qs": { - "version": "6.12.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.2.tgz", - "integrity": "sha512-x+NLUpx9SYrcwXtX7ob1gnkSems4i/mGZX5SlYxwIau6RrUSODO89TR/XDGGpn5RPWSYIB+aSfuSlV5+CmbTBg==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], "dev": true, - "dependencies": { - "side-channel": "^1.0.6" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=12" } }, - "node_modules/userhome": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/userhome/-/userhome-1.0.0.tgz", - "integrity": "sha512-ayFKY3H+Pwfy4W98yPdtH1VqH4psDeyW8lYYFzfecR9d6hqLpqhecktvYR3SEEXt7vG0S1JEpciI3g94pMErig==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, "optional": true, - "peer": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.8.0" + "node": ">=12" } }, - "node_modules/util": { - "version": "0.12.5", - "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", - "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "is-arguments": "^1.0.4", - "is-generator-function": "^1.0.7", - "is-typed-array": "^1.1.3", - "which-typed-array": "^1.1.2" + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" } }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + 
"node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.4.0" + "node": ">=12" } }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" } }, - "node_modules/v8-to-istanbul": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", - "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], "dev": true, - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.12", - "@types/istanbul-lib-coverage": "^2.0.1", - "convert-source-map": "^2.0.0" - }, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=10.12.0" + "node": ">=12" } }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">= 0.8" + "node": ">=12" } }, - "node_modules/verkle-cryptography-wasm": { - "version": "0.4.5", - "resolved": "https://registry.npmjs.org/verkle-cryptography-wasm/-/verkle-cryptography-wasm-0.4.5.tgz", - "integrity": "sha512-CG0hRG0QuVoLnmwPiBpgWWzgJCoF81/w6S3ASeoew6tQp52HsN+e8yrachGh2bFIZLRDUQA2cX+0xr2VM60ywA==", - "dependencies": { - "@scure/base": "^1.1.5" - }, + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], "engines": { - "node": ">=18", - "npm": ">=7" + "node": ">=12" } }, - "node_modules/vite": { - "version": "5.3.2", - "resolved": 
"https://registry.npmjs.org/vite/-/vite-5.3.2.tgz", - "integrity": "sha512-6lA7OBHBlXUxiJxbO5aAY2fsHHzDr1q7DvXYnyZycRs2Dz+dXBWuhpWHvmljTRTpQC2uvGmUFFkSHF2vGo90MA==", - "devOptional": true, - "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.38", - "rollup": "^4.13.0" - }, - "bin": { - "vite": "bin/vite.js" - }, + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - } + "node": ">=12" } }, - "node_modules/vite-node": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.0.0-beta.12.tgz", - "integrity": "sha512-aS07DFW00yJNteJ44bPOSz/Zs25ppIqMElzcydBQv7nKiImnb8N6Rrlg9GQYLJByHLbdJAdxXvDsdruwkPA+kw==", - "devOptional": true, - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.3.5", - "pathe": "^1.1.2", - "picocolors": "^1.0.1", - "vite": "^5.0.0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" + "node": ">=12" } }, - "node_modules/vite-plugin-node-polyfills": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/vite-plugin-node-polyfills/-/vite-plugin-node-polyfills-0.21.0.tgz", - "integrity": "sha512-Sk4DiKnmxN8E0vhgEhzLudfJQfaT8k4/gJ25xvUPG54KjLJ6HAmDKbr4rzDD/QWEY+Lwg80KE85fGYBQihEPQA==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], "dev": true, - "dependencies": { - "@rollup/plugin-inject": "^5.0.5", - "node-stdlib-browser": "^1.2.0" - }, - "funding": { - "url": "https://github.com/sponsors/davidmyersdev" - }, - "peerDependencies": { - "vite": "^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0" + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vite-plugin-top-level-await": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/vite-plugin-top-level-await/-/vite-plugin-top-level-await-1.4.1.tgz", - "integrity": 
"sha512-hogbZ6yT7+AqBaV6lK9JRNvJDn4/IJvHLu6ET06arNfo0t2IsyCaon7el9Xa8OumH+ESuq//SDf8xscZFE0rWw==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], "dev": true, - "dependencies": { - "@rollup/plugin-virtual": "^3.0.2", - "@swc/core": "^1.3.100", - "uuid": "^9.0.1" - }, - "peerDependencies": { - "vite": ">=2.8" + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" } }, - "node_modules/vite-plugin-top-level-await/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "node_modules/vite/node_modules/esbuild/node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], "dev": true, - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" + "optional": true, + "os": [ + "win32" ], - "bin": { - "uuid": "dist/bin/uuid" + "engines": { + "node": ">=12" } }, - "node_modules/vite-plugin-wasm": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/vite-plugin-wasm/-/vite-plugin-wasm-3.3.0.tgz", - "integrity": "sha512-tVhz6w+W9MVsOCHzxo6SSMSswCeIw4HTrXEi6qL3IRzATl83jl09JVO1djBqPSwfjgnpVHNLYcaMbaDX5WB/pg==", + "node_modules/vite/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, - "peerDependencies": { - "vite": "^2 || ^3 || ^4 || ^5" + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/vite/node_modules/rollup": { + "version": "4.20.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.20.0", + "@rollup/rollup-android-arm64": "4.20.0", + "@rollup/rollup-darwin-arm64": "4.20.0", + "@rollup/rollup-darwin-x64": "4.20.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.20.0", + "@rollup/rollup-linux-arm-musleabihf": "4.20.0", + "@rollup/rollup-linux-arm64-gnu": "4.20.0", + "@rollup/rollup-linux-arm64-musl": "4.20.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.20.0", + "@rollup/rollup-linux-riscv64-gnu": "4.20.0", + "@rollup/rollup-linux-s390x-gnu": "4.20.0", + "@rollup/rollup-linux-x64-gnu": "4.20.0", + "@rollup/rollup-linux-x64-musl": "4.20.0", + "@rollup/rollup-win32-arm64-msvc": "4.20.0", + "@rollup/rollup-win32-ia32-msvc": "4.20.0", + "@rollup/rollup-win32-x64-msvc": "4.20.0", + "fsevents": "~2.3.2" } }, "node_modules/vitest": { - "version": "2.0.0-beta.12", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.0.0-beta.12.tgz", - "integrity": "sha512-nqputSJprBdVHgQDg7xUVQigEdC8JOva889jbP0LoHQNA8kN+YzAEdAnYqyUk7ZRMlbtCHO16Ys/cfTBIqDm9A==", - "devOptional": true, + 
"version": "2.0.5", + "dev": true, + "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.3.0", - "@vitest/expect": "2.0.0-beta.12", - "@vitest/runner": "2.0.0-beta.12", - "@vitest/snapshot": "2.0.0-beta.12", - "@vitest/spy": "2.0.0-beta.12", - "@vitest/utils": "2.0.0-beta.12", + "@vitest/expect": "2.0.5", + "@vitest/pretty-format": "^2.0.5", + "@vitest/runner": "2.0.5", + "@vitest/snapshot": "2.0.5", + "@vitest/spy": "2.0.5", + "@vitest/utils": "2.0.5", "chai": "^5.1.1", "debug": "^4.3.5", "execa": "^8.0.1", "magic-string": "^0.30.10", "pathe": "^1.1.2", - "picocolors": "^1.0.1", "std-env": "^3.7.0", "tinybench": "^2.8.0", "tinypool": "^1.0.0", + "tinyrainbow": "^1.2.0", "vite": "^5.0.0", - "vite-node": "2.0.0-beta.12", - "why-is-node-running": "^2.2.2" + "vite-node": "2.0.5", + "why-is-node-running": "^2.3.0" }, "bin": { "vitest": "vitest.mjs" @@ -15771,8 +12754,8 @@ "peerDependencies": { "@edge-runtime/vm": "*", "@types/node": "^18.0.0 || >=20.0.0", - "@vitest/browser": "2.0.0-beta.12", - "@vitest/ui": "2.0.0-beta.12", + "@vitest/browser": "2.0.5", + "@vitest/ui": "2.0.5", "happy-dom": "*", "jsdom": "*" }, @@ -15799,9 +12782,8 @@ }, "node_modules/vitest/node_modules/execa": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", @@ -15822,9 +12804,8 @@ }, "node_modules/vitest/node_modules/get-stream": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=16" }, @@ -15834,18 +12815,16 @@ }, "node_modules/vitest/node_modules/human-signals": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", - "devOptional": true, + "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=16.17.0" } }, "node_modules/vitest/node_modules/signal-exit": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "devOptional": true, + "dev": true, + "license": "ISC", "engines": { "node": ">=14" }, @@ -15853,272 +12832,43 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/vm-browserify": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", - "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==", - "dev": true - }, - "node_modules/vscode-oniguruma": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/vscode-oniguruma/-/vscode-oniguruma-1.7.0.tgz", - "integrity": "sha512-L9WMGRfrjOhgHSdOYgCt/yRMsXzLDJSL7BPrOZt73gU0iWO4mpqzqQzOz5srxqTvMBaR0XZTSrVWo4j55Rc6cA==", - "dev": true - }, - "node_modules/vscode-textmate": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-5.2.0.tgz", - "integrity": 
"sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==", - "dev": true - }, - "node_modules/vue": { - "version": "3.4.31", - "resolved": "https://registry.npmjs.org/vue/-/vue-3.4.31.tgz", - "integrity": "sha512-njqRrOy7W3YLAlVqSKpBebtZpDVg21FPoaq1I7f/+qqBThK9ChAIjkRWgeP6Eat+8C+iia4P3OYqpATP21BCoQ==", - "devOptional": true, - "peer": true, - "dependencies": { - "@vue/compiler-dom": "3.4.31", - "@vue/compiler-sfc": "3.4.31", - "@vue/runtime-dom": "3.4.31", - "@vue/server-renderer": "3.4.31", - "@vue/shared": "3.4.31" - }, - "peerDependencies": { - "typescript": "*" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/vue-observe-visibility": { - "version": "2.0.0-alpha.1", - "resolved": "https://registry.npmjs.org/vue-observe-visibility/-/vue-observe-visibility-2.0.0-alpha.1.tgz", - "integrity": "sha512-flFbp/gs9pZniXR6fans8smv1kDScJ8RS7rEpMjhVabiKeq7Qz3D9+eGsypncjfIyyU84saU88XZ0zjbD6Gq/g==", - "devOptional": true, - "peerDependencies": { - "vue": "^3.0.0" - } - }, - "node_modules/vue-resize": { - "version": "2.0.0-alpha.1", - "resolved": "https://registry.npmjs.org/vue-resize/-/vue-resize-2.0.0-alpha.1.tgz", - "integrity": "sha512-7+iqOueLU7uc9NrMfrzbG8hwMqchfVfSzpVlCMeJQe4pyibqyoifDNbKTZvwxZKDvGkB+PdFeKvnGZMoEb8esg==", - "devOptional": true, - "peerDependencies": { - "vue": "^3.0.0" - } - }, - "node_modules/vue-virtual-scroller": { - "version": "2.0.0-beta.8", - "resolved": "https://registry.npmjs.org/vue-virtual-scroller/-/vue-virtual-scroller-2.0.0-beta.8.tgz", - "integrity": "sha512-b8/f5NQ5nIEBRTNi6GcPItE4s7kxNHw2AIHLtDp+2QvqdTjVN0FgONwX9cr53jWRgnu+HRLPaWDOR2JPI5MTfQ==", - "devOptional": true, - "dependencies": { - "mitt": "^2.1.0", - "vue-observe-visibility": "^2.0.0-alpha.1", - "vue-resize": "^2.0.0-alpha.1" - }, - "peerDependencies": { - "vue": "^3.2.0" - } - }, - "node_modules/wait-port": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/wait-port/-/wait-port-1.1.0.tgz", - "integrity": "sha512-3e04qkoN3LxTMLakdqeWth8nih8usyg+sf1Bgdf9wwUkp05iuK1eSY/QpLvscT/+F/gA89+LpUmmgBtesbqI2Q==", - "optional": true, - "peer": true, - "dependencies": { - "chalk": "^4.1.2", - "commander": "^9.3.0", - "debug": "^4.3.4" - }, - "bin": { - "wait-port": "bin/wait-port.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/wait-port/node_modules/commander": { - "version": "9.5.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", - "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", - "optional": true, - "peer": true, - "engines": { - "node": "^12.20.0 || >=14" - } - }, - "node_modules/weak-lru-cache": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/weak-lru-cache/-/weak-lru-cache-1.2.2.tgz", - "integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==", - "dev": true - }, - "node_modules/web-streams-polyfill": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", - "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", - "optional": true, - "peer": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/webdriver": { - "version": "8.39.0", - "resolved": "https://registry.npmjs.org/webdriver/-/webdriver-8.39.0.tgz", - "integrity": 
"sha512-Kc3+SfiH4ufyrIht683VT2vnJocx0pfH8rYdyPvEh1b2OYewtFTHK36k9rBDHZiBmk6jcSXs4M2xeFgOuon9Lg==", - "optional": true, - "peer": true, - "dependencies": { - "@types/node": "^20.1.0", - "@types/ws": "^8.5.3", - "@wdio/config": "8.39.0", - "@wdio/logger": "8.38.0", - "@wdio/protocols": "8.38.0", - "@wdio/types": "8.39.0", - "@wdio/utils": "8.39.0", - "deepmerge-ts": "^5.1.0", - "got": "^12.6.1", - "ky": "^0.33.0", - "ws": "^8.8.0" - }, - "engines": { - "node": "^16.13 || >=18" - } - }, - "node_modules/webdriver/node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", - "optional": true, - "peer": true, - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/webdriverio": { - "version": "8.39.0", - "resolved": "https://registry.npmjs.org/webdriverio/-/webdriverio-8.39.0.tgz", - "integrity": "sha512-pDpGu0V+TL1LkXPode67m3s+IPto4TcmcOzMpzFgu2oeLMBornoLN3yQSFR1fjZd1gK4UfnG3lJ4poTGOfbWfw==", - "optional": true, - "peer": true, - "dependencies": { - "@types/node": "^20.1.0", - "@wdio/config": "8.39.0", - "@wdio/logger": "8.38.0", - "@wdio/protocols": "8.38.0", - "@wdio/repl": "8.24.12", - "@wdio/types": "8.39.0", - "@wdio/utils": "8.39.0", - "archiver": "^7.0.0", - "aria-query": "^5.0.0", - "css-shorthand-properties": "^1.1.1", - "css-value": "^0.0.1", - "devtools-protocol": "^0.0.1302984", - "grapheme-splitter": "^1.0.2", - "import-meta-resolve": "^4.0.0", - "is-plain-obj": "^4.1.0", - "jszip": "^3.10.1", - "lodash.clonedeep": "^4.5.0", - "lodash.zip": "^4.2.0", - "minimatch": "^9.0.0", - "puppeteer-core": "^20.9.0", - "query-selector-shadow-dom": "^1.0.0", - "resq": "^1.9.1", - "rgb2hex": "0.2.5", - "serialize-error": "^11.0.1", - "webdriver": "8.39.0" - }, - "engines": { - "node": "^16.13 || >=18" - }, - "peerDependencies": { - "devtools": "^8.14.0" - }, - "peerDependenciesMeta": { - "devtools": { - "optional": true - } - } - }, - "node_modules/webdriverio/node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", - "optional": true, - "peer": true, - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/webdriverio/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "optional": true, - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/webdriverio/node_modules/is-plain-obj": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", - "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", - "optional": true, - "peer": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webdriverio/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "optional": true, - "peer": true, - "dependencies": { - 
"brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "optional": true, - "peer": true + "node_modules/vm-browserify": { + "version": "1.1.2", + "dev": true, + "license": "MIT" }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "optional": true, - "peer": true, - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", + "dev": true + }, + "node_modules/vscode-oniguruma": { + "version": "1.7.0", + "dev": true, + "license": "MIT" + }, + "node_modules/vscode-textmate": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-5.2.0.tgz", + "integrity": "sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==", + "dev": true + }, + "node_modules/vscode-uri": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz", + "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", + "dev": true + }, + "node_modules/weak-lru-cache": { + "version": "1.2.2", + "dev": true, + "license": "MIT" }, "node_modules/which": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "devOptional": true, + "dev": true, + "license": "ISC", "dependencies": { "isexe": "^2.0.0" }, @@ -16131,9 +12881,8 @@ }, "node_modules/which-boxed-primitive": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", - "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "dev": true, + "license": "MIT", "dependencies": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", @@ -16146,13 +12895,12 @@ } }, "node_modules/which-builtin-type": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz", - "integrity": "sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==", + "version": "1.1.4", "dev": true, + "license": "MIT", "dependencies": { - "function.prototype.name": "^1.1.5", - "has-tostringtag": "^1.0.0", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", "is-async-function": "^2.0.0", "is-date-object": "^1.0.5", "is-finalizationregistry": "^1.0.2", @@ -16161,8 +12909,8 @@ "is-weakref": "^1.0.2", "isarray": "^2.0.5", "which-boxed-primitive": "^1.0.2", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.9" + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.15" }, 
"engines": { "node": ">= 0.4" @@ -16173,9 +12921,8 @@ }, "node_modules/which-collection": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", - "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", "dev": true, + "license": "MIT", "dependencies": { "is-map": "^2.0.3", "is-set": "^2.0.3", @@ -16191,15 +12938,13 @@ }, "node_modules/which-module": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", - "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/which-typed-array": { "version": "1.1.15", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", - "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, + "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.7", @@ -16215,10 +12960,9 @@ } }, "node_modules/why-is-node-running": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.2.2.tgz", - "integrity": "sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==", - "devOptional": true, + "version": "2.3.0", + "dev": true, + "license": "MIT", "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" @@ -16231,15 +12975,14 @@ } }, "node_modules/winston": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.0.tgz", - "integrity": "sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==", + "version": "3.14.2", + "license": "MIT", "dependencies": { "@colors/colors": "^1.6.0", "@dabh/diagnostics": "^2.0.2", "async": "^3.2.3", "is-stream": "^2.0.0", - "logform": "^2.4.0", + "logform": "^2.6.0", "one-time": "^1.0.0", "readable-stream": "^3.4.0", "safe-stable-stringify": "^2.3.1", @@ -16253,8 +12996,7 @@ }, "node_modules/winston-daily-rotate-file": { "version": "4.7.1", - "resolved": "https://registry.npmjs.org/winston-daily-rotate-file/-/winston-daily-rotate-file-4.7.1.tgz", - "integrity": "sha512-7LGPiYGBPNyGHLn9z33i96zx/bd71pjBn9tqQzO3I4Tayv94WPmBNwKC7CO1wPHdP9uvu+Md/1nr6VSH9h0iaA==", + "license": "MIT", "dependencies": { "file-stream-rotator": "^0.6.1", "object-hash": "^2.0.1", @@ -16270,19 +13012,17 @@ }, "node_modules/winston-daily-rotate-file/node_modules/object-hash": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", - "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==", + "license": "MIT", "engines": { "node": ">= 6" } }, "node_modules/winston-transport": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.7.0.tgz", - "integrity": "sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg==", + "version": "4.7.1", + "license": "MIT", "dependencies": { - "logform": "^2.3.2", - "readable-stream": "^3.6.0", + "logform": "^2.6.1", + "readable-stream": "^3.6.2", "triple-beam": "^1.3.0" }, "engines": { @@ -16291,8 +13031,7 @@ }, "node_modules/winston/node_modules/is-stream": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": 
"sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", "engines": { "node": ">=8" }, @@ -16302,23 +13041,20 @@ }, "node_modules/word-wrap": { "version": "1.2.5", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", - "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/wordwrap": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/wrap-ansi": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -16334,9 +13070,8 @@ "node_modules/wrap-ansi-cjs": { "name": "wrap-ansi", "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -16351,24 +13086,21 @@ }, "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "devOptional": true + "dev": true, + "license": "MIT" }, "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "devOptional": true, + "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/wrap-ansi-cjs/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "devOptional": true, + "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -16380,21 +13112,18 @@ }, "node_modules/wrap-ansi/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "license": "MIT" }, "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/wrap-ansi/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -16406,15 +13135,13 @@ }, "node_modules/wrappy": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "devOptional": true + "dev": true, + "license": "ISC" }, "node_modules/write-file-atomic": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", - "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", "dev": true, + "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4", "is-typedarray": "^1.0.0", @@ -16423,10 +13150,9 @@ } }, "node_modules/ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "devOptional": true, + "version": "8.18.0", + "dev": true, + "license": "MIT", "engines": { "node": ">=10.0.0" }, @@ -16443,34 +13169,44 @@ } } }, + "node_modules/xdg-basedir": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", + "integrity": "sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/xtend": { "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.4" } }, "node_modules/y18n": { "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "license": "ISC", "engines": { "node": ">=10" } }, "node_modules/yallist": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/yaml": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz", - "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.0.tgz", + "integrity": "sha512-2wWLbGbYDiSqqIKoPjar3MPgB94ErzCtrNE1FdqGuaO0pi2JGjmE8aW8TDZwzU7vuxcGRdL/4gPQwQ7hD5AMSw==", "dev": true, + "license": "ISC", "bin": { "yaml": "bin.mjs" }, @@ -16480,9 +13216,8 @@ }, "node_modules/yargs": { "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, + "license": "MIT", "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", @@ -16498,33 +13233,29 @@ }, "node_modules/yargs-parser": { "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": 
"sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } }, "node_modules/yargs/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/yargs/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/yargs/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -16534,32 +13265,10 @@ "node": ">=8" } }, - "node_modules/yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "optional": true, - "peer": true, - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, - "node_modules/yauzl/node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "optional": true, - "peer": true, - "engines": { - "node": "*" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -16567,61 +13276,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/yoctocolors-cjs": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.1.tgz", - "integrity": "sha512-c6T13b6qYcJZvck7QbEFXrFX/Mu2KOjvAGiKHmYMUg96jxNpfP6i+psGW72BOPxOIDUJrORG+Kyu7quMX9CQBQ==", - "optional": true, - "peer": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/zip-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", - "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", - "optional": true, - "peer": true, - "dependencies": { - "archiver-utils": "^5.0.0", - "compress-commons": "^6.0.2", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/zip-stream/node_modules/readable-stream": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz", - "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==", - "optional": true, - "peer": true, - "dependencies": { - 
"abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, "packages/block": { "name": "@ethereumjs/block", - "version": "5.2.0", + "version": "5.3.0", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { @@ -16633,47 +13297,56 @@ }, "packages/blockchain": { "name": "@ethereumjs/blockchain", - "version": "7.2.0", + "version": "7.3.0", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/ethash": "^3.0.3", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1", "lru-cache": "10.1.0" }, - "devDependencies": {}, + "devDependencies": { + "@ethereumjs/ethash": "^3.0.3" + }, "engines": { "node": ">=18" } }, + "packages/blockchain/node_modules/lru-cache": { + "version": "10.1.0", + "license": "ISC", + "engines": { + "node": "14 || >=16.14" + } + }, "packages/client": { "name": "@ethereumjs/client", - "version": "0.10.1", + "version": "0.10.2", "hasInstallScript": true, "license": "MPL-2.0", "dependencies": { - "@ethereumjs/block": "5.2.0", - "@ethereumjs/blockchain": "7.2.0", - "@ethereumjs/common": "4.3.0", - "@ethereumjs/devp2p": "6.1.2", - "@ethereumjs/ethash": "3.0.3", - "@ethereumjs/evm": "3.0.0", - "@ethereumjs/genesis": "0.2.2", + "@ethereumjs/block": "5.3.0", + "@ethereumjs/blockchain": "7.3.0", + "@ethereumjs/common": "4.4.0", + "@ethereumjs/devp2p": "6.1.3", + "@ethereumjs/ethash": "3.0.4", + "@ethereumjs/evm": "3.1.0", + "@ethereumjs/genesis": "0.2.3", "@ethereumjs/rlp": "5.0.2", - "@ethereumjs/statemanager": "2.3.0", - "@ethereumjs/trie": "6.2.0", - "@ethereumjs/tx": "5.3.0", - "@ethereumjs/util": "9.0.3", - "@ethereumjs/verkle": "^0.0.2", - "@ethereumjs/vm": "8.0.0", + "@ethereumjs/statemanager": "2.4.0", + "@ethereumjs/trie": "6.2.1", + "@ethereumjs/tx": "5.4.0", + "@ethereumjs/util": "9.1.0", + "@ethereumjs/verkle": "^0.1.0", + "@ethereumjs/vm": "8.1.0", + "@js-sdsl/ordered-map": "^4.4.2", "@multiformats/multiaddr": "^12.2.1", - "@polkadot/util": "^12.6.2", + "@polkadot/util": "^13.0.2", "@polkadot/wasm-crypto": "^7.3.2", "@scure/base": "^1.1.7", "abstract-level": "^1.0.3", @@ -16685,12 +13358,12 @@ "ethereum-cryptography": "^2.2.1", "it-pipe": "^1.1.0", "jayson": "^4.0.0", - "js-sdsl": "^4.4.0", "kzg-wasm": "^0.4.0", "level": "^8.0.0", "mcl-wasm": "^1.5.0", "memory-level": "^1.0.0", "prom-client": "^15.1.0", + "rustbn-wasm": "^0.4.0", "verkle-cryptography-wasm": "^0.4.5", "winston": "^3.3.3", "winston-daily-rotate-file": "^4.5.5", @@ -16720,8 +13393,7 @@ }, "packages/client/node_modules/cliui": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": 
"^6.0.1", @@ -16733,21 +13405,18 @@ }, "packages/client/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "license": "MIT" }, "packages/client/node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", "engines": { "node": ">=8" } }, "packages/client/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -16759,8 +13428,7 @@ }, "packages/client/node_modules/yargs": { "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "license": "MIT", "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -16776,18 +13444,17 @@ }, "packages/client/node_modules/yargs-parser": { "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "license": "ISC", "engines": { "node": ">=12" } }, "packages/common": { "name": "@ethereumjs/common", - "version": "4.3.0", + "version": "4.4.0", "license": "MIT", "dependencies": { - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { @@ -16795,14 +13462,78 @@ "@polkadot/wasm-crypto": "^7.3.2" } }, + "packages/common/node_modules/@polkadot/util": { + "version": "12.6.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@polkadot/x-bigint": "12.6.2", + "@polkadot/x-global": "12.6.2", + "@polkadot/x-textdecoder": "12.6.2", + "@polkadot/x-textencoder": "12.6.2", + "@types/bn.js": "^5.1.5", + "bn.js": "^5.2.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "packages/common/node_modules/@polkadot/x-bigint": { + "version": "12.6.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "packages/common/node_modules/@polkadot/x-global": { + "version": "12.6.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "packages/common/node_modules/@polkadot/x-textdecoder": { + "version": "12.6.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "packages/common/node_modules/@polkadot/x-textencoder": { + "version": "12.6.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, "packages/devp2p": { "name": "@ethereumjs/devp2p", - "version": "6.1.2", + "version": "6.1.3", "license": "MIT", "dependencies": { - "@ethereumjs/common": "^4.3.0", + 
"@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "@scure/base": "^1.1.7", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1", @@ -16811,8 +13542,8 @@ "snappyjs": "^0.6.1" }, "devDependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/tx": "^5.3.0", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/tx": "^5.4.0", "@types/debug": "^4.1.9", "@types/k-bucket": "^5.0.0", "chalk": "^4.1.2", @@ -16822,19 +13553,26 @@ "node": ">=18" } }, + "packages/devp2p/node_modules/lru-cache": { + "version": "10.1.0", + "license": "ISC", + "engines": { + "node": "14 || >=16.14" + } + }, "packages/ethash": { "name": "@ethereumjs/ethash", - "version": "3.0.3", + "version": "3.0.4", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/block": "^5.2.0", + "@ethereumjs/block": "^5.3.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "bigint-crypto-utils": "^3.2.2", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { - "@ethereumjs/common": "^4.3.0" + "@ethereumjs/common": "^4.4.0" }, "engines": { "node": ">=18" @@ -16842,18 +13580,17 @@ }, "packages/evm": { "name": "@ethereumjs/evm", - "version": "3.0.0", + "version": "3.1.0", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/statemanager": "^2.3.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", - "@noble/curves": "^1.4.2", + "@ethereumjs/common": "^4.4.0", + "@ethereumjs/statemanager": "^2.4.0", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", + "@noble/curves": "^1.5.0", "@types/debug": "^4.1.9", "debug": "^4.3.3", - "ethereum-cryptography": "^2.2.1", - "rustbn-wasm": "^0.4.0" + "ethereum-cryptography": "^2.2.1" }, "devDependencies": { "@ethersproject/abi": "^5.0.12", @@ -16870,22 +13607,34 @@ "minimist": "^1.2.5", "node-dir": "^0.1.17", "rollup-plugin-visualizer": "^5.12.0", - "solc": "^0.8.1" + "rustbn-wasm": "^0.4.0", + "solc": "^0.8.1", + "split": "^1.0.1" }, "engines": { "node": ">=18" } }, + "packages/evm/node_modules/@noble/curves": { + "version": "1.5.0", + "license": "MIT", + "dependencies": { + "@noble/hashes": "1.4.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "packages/genesis": { "name": "@ethereumjs/genesis", - "version": "0.2.2", + "version": "0.2.3", "license": "MIT", "dependencies": { - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/util": "^9.0.3" + "@ethereumjs/common": "^4.4.0", + "@ethereumjs/util": "^9.1.0" }, "devDependencies": { - "@ethereumjs/trie": "^6.2.0" + "@ethereumjs/trie": "^6.2.1" }, "engines": { "node": ">=18" @@ -16907,41 +13656,47 @@ }, "packages/statemanager": { "name": "@ethereumjs/statemanager", - "version": "2.3.0", + "version": "2.4.0", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/util": "^9.1.0", + "@js-sdsl/ordered-map": "^4.4.2", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1", - "js-sdsl": "^4.1.4", "lru-cache": "10.1.0" }, "devDependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/genesis": "^0.2.2", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/genesis": "^0.2.3", "@types/debug": "^4.1.9", "rustbn-wasm": "^0.4.0", "verkle-cryptography-wasm": "^0.4.5" } }, + "packages/statemanager/node_modules/lru-cache": { + "version": "10.1.0", + "license": "ISC", + "engines": { + 
"node": "14 || >=16.14" + } + }, "packages/trie": { "name": "@ethereumjs/trie", - "version": "6.2.0", + "version": "6.2.1", "license": "MPL-2.0", "dependencies": { "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "@types/readable-stream": "^2.3.13", "debug": "^4.3.4", "ethereum-cryptography": "^2.2.1", - "lru-cache": "10.1.0", - "readable-stream": "^3.6.0" + "lru-cache": "10.1.0" }, "devDependencies": { - "@ethereumjs/genesis": "^0.2.2", + "@ethereumjs/genesis": "^0.2.3", "@types/benchmark": "^1.0.33", "abstract-level": "^1.0.3", "level": "^8.0.0", @@ -16956,14 +13711,21 @@ "node": ">=18" } }, + "packages/trie/node_modules/lru-cache": { + "version": "10.1.0", + "license": "ISC", + "engines": { + "node": "14 || >=16.14" + } + }, "packages/tx": { "name": "@ethereumjs/tx", - "version": "5.3.0", + "version": "5.4.0", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { @@ -16979,7 +13741,7 @@ }, "packages/util": { "name": "@ethereumjs/util", - "version": "9.0.3", + "version": "9.1.0", "license": "MPL-2.0", "dependencies": { "@ethereumjs/rlp": "^5.0.2", @@ -16994,12 +13756,12 @@ }, "packages/verkle": { "name": "@ethereumjs/verkle", - "version": "0.0.2", + "version": "0.1.0", "license": "MIT", "dependencies": { - "@ethereumjs/block": "^5.2.0", + "@ethereumjs/block": "^5.3.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "debug": "^4.3.4", "lru-cache": "10.1.0", "verkle-cryptography-wasm": "^0.4.5" @@ -17008,24 +13770,32 @@ "node": ">=18" } }, + "packages/verkle/node_modules/lru-cache": { + "version": "10.1.0", + "license": "ISC", + "engines": { + "node": "14 || >=16.14" + } + }, "packages/vm": { "name": "@ethereumjs/vm", - "version": "8.0.0", + "version": "8.1.0", "license": "MPL-2.0", "dependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/blockchain": "^7.2.0", - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/evm": "^3.0.0", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/blockchain": "^7.3.0", + "@ethereumjs/common": "^4.4.0", + "@ethereumjs/evm": "^3.1.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/statemanager": "^2.3.0", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/statemanager": "^2.4.0", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { + "@ethereumjs/ethash": "^3.0.3", "@ethersproject/abi": "^5.0.12", "@types/benchmark": "^1.0.33", "@types/core-js": "^2.5.0", @@ -17045,10 +13815,10 @@ }, "packages/wallet": { "name": "@ethereumjs/wallet", - "version": "2.0.3", + "version": "2.0.4", "license": "MIT", "dependencies": { - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "@scure/base": "^1.1.7", "ethereum-cryptography": "^2.2.1", "js-md5": "^0.8.3", @@ -17067,12 +13837,11 @@ }, "packages/wallet/node_modules/uuid": { "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", "funding": [ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" ], + "license": "MIT", "bin": { "uuid": "dist/bin/uuid" } diff --git a/package.json 
b/package.json index 3ce2829e1f..284355ab03 100644 --- a/package.json +++ b/package.json @@ -7,11 +7,20 @@ "scripts": { "checkNpmVersion": "./scripts/check-npm-version.sh", "clean": "./config/cli/clean-root.sh", + "cspell": "npm run cspell:ts && npm run cspell:md", + "cspell:ts": "npx cspell --gitignore -e \"./packages/ethereum-tests\" -e \"./packages/wallet/test\" -e \"./packages/client/archive\" -c ./config/cspell-ts.json \"./packages/**/*.ts\" --cache --show-suggestions --show-context", + "cspell:md": "npx cspell --gitignore -e \"./packages/ethereum-tests\" -e \"./packages/client/withdrawals-testnet/**\" -e \"./packages/**/docs\" -c ./config/cspell-md.json \"**.md\" --cache --show-suggestions --show-context", "docs:build": "npm run docs:build --workspaces --if-present", "e2e:inject": "node ./scripts/e2e-inject-resolutions.js", "e2e:publish": "./scripts/e2e-publish.sh", "e2e:resolutions": "node ./scripts/e2e-resolutions.js", "examples": "npm run examples --workspaces --if-present", + "examples:build": "npm run examples:build --workspaces --if-present", + "lint": "npm run lint --workspaces --if-present", + "lint:fix": "npm run lint:fix --workspaces --if-present", + "test": "npm run test --workspaces --if-present", + "test:node": "npm run test:node --workspaces --if-present", + "test:browser": "npm run test:browser --workspaces --if-present", "preinstall": "npm run checkNpmVersion", "postinstall": "npm run build --workspaces", "prepare": "git config --local core.hooksPath .githooks", @@ -28,21 +37,22 @@ "@vitest/coverage-v8": "^v2.0.0-beta.1", "@vitest/ui": "^v2.0.0-beta.12", "c8": "7.12.0", + "cspell": "^8.13.3", "embedme": "1.22.1", - "eslint": "8.45.0", - "eslint-config-prettier": "8.8.0", - "eslint-config-typestrict": "1.0.5", + "eslint": "8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-config-typestrict": "^1.0.5", "eslint-formatter-codeframe": "7.32.1", "eslint-plugin-ethereumjs": "file:./eslint", "eslint-plugin-github": "4.9.2", "eslint-plugin-implicit-dependencies": "1.1.1", "eslint-plugin-import": "2.26.0", - "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-prettier": "^5.2.1", "eslint-plugin-simple-import-sort": "7.0.0", "eslint-plugin-sonarjs": "0.19.0", "lint-staged": "13.0.3", "lockfile-lint-api": "^5.5.1", - "prettier": "2.7.1", + "prettier": "^3.3.3", "sort-package-json": "1.57.0", "tape": "5.6.0", "tsx": "^4.6.2", diff --git a/packages/block/.eslintrc.cjs b/packages/block/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/block/.eslintrc.cjs +++ b/packages/block/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/block/CHANGELOG.md b/packages/block/CHANGELOG.md index 24392469be..6424716c49 100644 --- a/packages/block/CHANGELOG.md +++ b/packages/block/CHANGELOG.md @@ -6,7 +6,188 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 5.2.0 - 2024-03-05 +## 5.3.0 - 2024-08-15 + +### Blocks with EIP-7685 Consensus Layer Requests + +Starting with this release this library supports requests to the consensus layer (see PRs [#3372](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3372) and [#3393](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3393)), which have been introduced with [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) and will come into play for deposit and withdrawal requests along the upcoming [Prague](https://eips.ethereum.org/EIPS/eip-7600) hardfork. + +#### EIP-6110 Deposit Requests + +[EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) introduces deposit requests allowing beacon chain deposits to be triggered from the execution layer, see PRs [#3390](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3390) and [#3397](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3397). Starting with this release this library supports deposit requests and a containing block can be instantiated as follows: + +```ts +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block } from '@ethereumjs/block' +import { + bytesToBigInt, + DepositRequest, + randomBytes, + type CLRequest, + type CLRequestType, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Chain.Mainnet, + hardfork: Hardfork.Prague, + }) + + const depositRequestData = { + pubkey: randomBytes(48), + withdrawalCredentials: randomBytes(32), + amount: bytesToBigInt(randomBytes(8)), + signature: randomBytes(96), + index: bytesToBigInt(randomBytes(8)), + } + const request = DepositRequest.fromRequestData(depositRequestData) as CLRequest<CLRequestType> + const requests = [request] + const requestsRoot = await Block.genRequestsTrieRoot(requests) + + const block = Block.fromBlockData( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +main() +``` + +Have a look at the EIP for some guidance on how to use and fill in the various deposit request parameters. + +#### EIP-7002 Withdrawal Requests + +[EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) introduces the possibility for validators to trigger exits and partial withdrawals via the execution layer, see PR [#3385](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3385).
Starting with this release, this library supports withdrawal requests, and a block containing them can be instantiated as follows: + +```ts +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block } from '@ethereumjs/block' +import { + bytesToBigInt, + randomBytes, + WithdrawalRequest, + type CLRequest, + type CLRequestType, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Chain.Mainnet, + hardfork: Hardfork.Prague, + }) + + const withdrawalRequestData = { + sourceAddress: randomBytes(20), + validatorPubkey: randomBytes(48), + amount: bytesToBigInt(randomBytes(8)), + } + const request = WithdrawalRequest.fromRequestData( + withdrawalRequestData, + ) as CLRequest<CLRequestType> + const requests = [request] + const requestsRoot = await Block.genRequestsTrieRoot(requests) + + const block = Block.fromBlockData( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } withdrawal request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +main() +``` + +Have a look at the EIP for some guidance on how to use and fill in the various withdrawal request parameters. + +#### EIP-7251 Consolidation Requests + +[EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) introduces consolidation requests, allowing staked ETH from more than one validator on the beacon chain to be consolidated into one validator, triggered from the execution layer, see PR [#3477](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3477). Starting with this release, this library supports consolidation requests, and a block containing them can be instantiated as follows: + +```ts +// ./examples/7251Requests.ts + +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block } from '@ethereumjs/block' +import { + bytesToBigInt, + ConsolidationRequest, + randomBytes, + type CLRequest, + type CLRequestType, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Chain.Mainnet, + hardfork: Hardfork.Prague, + }) + + const consolidationRequestData = { + sourceAddress: randomBytes(20), + sourcePubkey: randomBytes(48), + targetPubkey: randomBytes(48), + } + const request = ConsolidationRequest.fromRequestData( + consolidationRequestData, + ) as CLRequest<CLRequestType> + const requests = [request] + const requestsRoot = await Block.genRequestsTrieRoot(requests) + + const block = Block.fromBlockData( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } consolidation request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +main() +``` + +Have a look at the EIP for some guidance on how to use and fill in the various consolidation request parameters.
+ +### Verkle Updates + +- Fixes for Kaustinen4 support, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Update `kzg-wasm` to `0.4.0`, PR [#3358](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3358) +- Shift Verkle to `osaka` hardfork, PR [#3371](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3371) +- Fix the block body parsing as well as save/load from blockchain, PR [#3392](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3392) +- Verkle type/interface refactoring (moved to Common package), PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) + +### Other Features + +- New `Block.toExecutionPayload()` method to map to the execution payload structure from the beacon chain, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Other Changes + +- Make EIP-4895 withdrawals trie check consistent with tx trie, PR [#3338](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3338) +- Rename deposit receipt to deposit request, PR [#3408](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3408) +- Enhances typing of CL requests, PR [#3398](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3398) +- Rename withdrawal request's `validatorPublicKey` to `validatorPubkey`, PR [#3474](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3474) + +## 5.2.0 - 2024-03-18 ### Full 4844 Browser Readiness @@ -131,7 +312,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. 
@@ -417,14 +598,14 @@ const block = Block.fromBlockData( header: { withdrawalsRoot: Buffer.from( '69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357', - 'hex' + 'hex', ), }, withdrawals: [withdrawal], }, { common, - } + }, ) ``` @@ -455,11 +636,11 @@ common.setForkHashes(genesisHash) ### New RPC and Ethers Static Constructors -Two new static constructos have been added to the library, see PR [#2315](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2315) `Block.fromEthersProvider()` allows for an easy instantiation of a `Block` object using an [Ethers](https://ethers.io) provider connecting e.g. to a local node or a service provider like Infura. The `Block.fromRPC()` static constructor can be used for a straight-forward block instantiation if the block data is coming from an RPC request. This static constructor replaces the old standalong `blockFromRPC()` method which is now marked as `deprecated`. +Two new static constructors have been added to the library, see PR [#2315](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2315) `Block.fromEthersProvider()` allows for an easy instantiation of a `Block` object using an [Ethers](https://ethers.io) provider connecting e.g. to a local node or a service provider like Infura. The `Block.fromRPC()` static constructor can be used for a straight-forward block instantiation if the block data is coming from an RPC request. This static constructor replaces the old standalone `blockFromRPC()` method which is now marked as `deprecated`. ### Other Changes and Fixes -- Adressed several typing issues in the `blockFromRPC()` method, PR [#2302](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2302) +- Addressed several typing issues in the `blockFromRPC()` method, PR [#2302](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2302) ## 4.0.0 - 2022-09-06 @@ -511,7 +692,7 @@ const block = Block.fromBlockData( { // Provide your block data here or use default values }, - { common } + { common }, ) ``` @@ -526,7 +707,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely dissalows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -534,7 +715,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. 
+Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -590,7 +771,7 @@ const block = Block.fromBlockData( { // Provide your block data here or use default values }, - { common } + { common }, ) ``` @@ -720,7 +901,7 @@ const block = Block.fromBlockData( { // Provide your block data here or use default values }, - { common } + { common }, ) ``` @@ -742,7 +923,7 @@ invalid transaction trie (block number=1 hash=0xe074b7b8d725c4000f278ae55cedbc76 The extended errors give substantial more object and chain context and should ease debugging. -**Potentially breaking**: Attention! If you do react on errors in your code and do exact errror matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisions and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). +**Potentially breaking**: Attention! If you do react on errors in your code and do exact error matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisons and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). ### Other Changes @@ -763,7 +944,7 @@ This release comes with experimental support for the Merge HF as defined in [EIP #### PoS Block Instantiation -Proof-of-Stake compatible execution blocks come with its own set of header field simplifications and associated validation rules. The difficuly is set to `0` since not relevant any more, just to name an example. For a full list of changes see `EIP-3675`. +Proof-of-Stake compatible execution blocks come with its own set of header field simplifications and associated validation rules. The difficulty is set to `0` since not relevant any more, just to name an example. For a full list of changes see `EIP-3675`. 
You can instantiate a Merge/PoS block like this: @@ -775,7 +956,7 @@ const block = Block.fromBlockData( { // Provide your block data here or use default values }, - { common } + { common }, ) ``` @@ -826,7 +1007,7 @@ const block = Block.fromBlockData( gasUsed: new BN(60), }, }, - { common } + { common }, ) // Base fee will increase for next block since the @@ -983,7 +1164,7 @@ const header = BlockHeader.fromHeaderData(headerData) ```ts const serialized = Buffer.from( 'f901f7a06bfee7294bf44572b7266358e627f3c35105e1c3851f3de09e6d646f955725a7a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000f837a120080845d20ab8080a00000000000000000000000000000000000000000000000000000000000000000880000000000000000', - 'hex' + 'hex', ) const header = BlockHeader.fromRLPSerializedHeader(serialized) ``` @@ -995,7 +1176,7 @@ const valuesArray = header.raw() BlockHeader.fromValuesArray(valuesArray) ``` -Generally internal types representing block header values are now closer to their domain representation (number, difficulty, gasLimit) instead of having everthing represented as a `Buffer`. +Generally internal types representing block header values are now closer to their domain representation (number, difficulty, gasLimit) instead of having everything represented as a `Buffer`. **Block Class** @@ -1009,9 +1190,9 @@ Learn more about the full API in the [docs](./docs/README.md). #### Immutability -The returned block is now frozen and immutable. To work with a maliable block, copy it with `const fakeBlock = Object.create(block)`. +The returned block is now frozen and immutable. To work with a mutable block, copy it with `const fakeBlock = Object.create(block)`. -If you need `Block` mutability - e.g. because you want to subclass `Block` and modifiy its behavior - there is a `freeze` option to prevent the `Object.freeze()` call on initialization, see PR [#941](https://github.com/ethereumjs/ethereumjs-monorepo/pull/941). +If you need `Block` mutability - e.g. because you want to subclass `Block` and modify its behavior - there is a `freeze` option to prevent the `Object.freeze()` call on initialization, see PR [#941](https://github.com/ethereumjs/ethereumjs-monorepo/pull/941). #### Promise-based API @@ -1058,13 +1239,13 @@ On the `Block` library new corresponding methods have been added which both oper **Breaking:** The default HF on the library has been updated from `petersburg` to `istanbul`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). -The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863). 
+The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863). ### Dual ES5 and ES2017 Builds We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1191,7 +1372,7 @@ const header = BlockHeader.fromHeaderData(headerData) ```ts const serialized = Buffer.from( 'f901f7a06bfee7294bf44572b7266358e627f3c35105e1c3851f3de09e6d646f955725a7a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000f837a120080845d20ab8080a00000000000000000000000000000000000000000000000000000000000000000880000000000000000', - 'hex' + 'hex', ) const header = BlockHeader.fromRLPSerializedHeader(serialized) ``` @@ -1204,7 +1385,7 @@ BlockHeader.fromValuesArray(valuesArray) ``` Generally internal types representing block header values are now closer to their domain representation -(number, difficulty, gasLimit) instead of having everthing represented as a `Buffer`. +(number, difficulty, gasLimit) instead of having everything represented as a `Buffer`. **Block Class** @@ -1218,7 +1399,7 @@ Learn more about the full API in the [docs](./docs/README.md). #### Immutability -The returned block is now frozen and immutable. To work with a maliable block, copy it with `const fakeBlock = Object.create(block)`. +The returned block is now frozen and immutable. To work with a mutable block, copy it with `const fakeBlock = Object.create(block)`. #### Promise-based API @@ -1264,7 +1445,7 @@ as an input parameter. ### New Default Hardfork **Breaking:** The default HF on the library has been updated from `petersburg` to `istanbul`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). 
-The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, +The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863). ### Dual ES5 and ES2017 Builds @@ -1276,7 +1457,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes diff --git a/packages/block/README.md b/packages/block/README.md index 9d05978685..c78a8be8ac 100644 --- a/packages/block/README.md +++ b/packages/block/README.md @@ -89,7 +89,7 @@ const block = Block.fromBlockData( gasUsed: BigInt(60), }, }, - { common } + { common }, ) // Base fee will increase for next block since the @@ -106,7 +106,7 @@ const blockWithMatchingBaseFee = Block.fromBlockData( gasUsed: BigInt(60), }, }, - { common } + { common }, ) console.log(Number(blockWithMatchingBaseFee.header.baseFeePerGas)) // 11 @@ -139,14 +139,14 @@ const block = Block.fromBlockData( { header: { withdrawalsRoot: hexToBytes( - '0x69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357' + '0x69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357', ), }, withdrawals: [withdrawal], }, { common, - } + }, ) console.log(`Block with ${block.withdrawals!.length} withdrawal(s) created`) @@ -184,7 +184,7 @@ const main = async () => { }) const blobTx = BlobEIP4844Transaction.fromTxData( { blobsData: ['myFirstBlob'], to: Address.fromPrivateKey(randomBytes(32)) }, - { common } + { common }, ) const block = Block.fromBlockData( @@ -197,13 +197,13 @@ const main = async () => { { common, skipConsensusFormatValidation: true, - } + }, ) console.log( `4844 block header with excessBlobGas=${block.header.excessBlobGas} created and ${ block.transactions.filter((tx) => tx.type === 3).length - } blob transactions` + } blob transactions`, ) } @@ -212,6 +212,168 @@ main() **Note:** Working with blob transactions needs a manual KZG library installation and global initialization, see [KZG Setup](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/tx/README.md#kzg-setup) for instructions. +### Blocks with EIP-7685 Consensus Layer Requests + +Starting with v5.3.0, this library supports requests to the consensus layer which have been introduced with [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) and will come into play for deposit and withdrawal requests along the upcoming [Prague](https://eips.ethereum.org/EIPS/eip-7600) hardfork. + +#### EIP-6110 Deposit Requests + +[EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) introduces deposit requests, allowing beacon chain deposits to be triggered from the execution layer.
Starting with v5.3.0, this library supports deposit requests, and a block containing them can be instantiated as follows: + +```ts +// ./examples/6110Requests.ts + +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block } from '@ethereumjs/block' +import { + bytesToBigInt, + DepositRequest, + randomBytes, + type CLRequest, + type CLRequestType, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Chain.Mainnet, + hardfork: Hardfork.Cancun, + eips: [7685, 4788], + }) + + const depositRequestData = { + pubkey: randomBytes(48), + withdrawalCredentials: randomBytes(32), + amount: bytesToBigInt(randomBytes(8)), + signature: randomBytes(96), + index: bytesToBigInt(randomBytes(8)), + } + const request = DepositRequest.fromRequestData(depositRequestData) as CLRequest<CLRequestType> + const requests = [request] + const requestsRoot = await Block.genRequestsTrieRoot(requests) + + const block = Block.fromBlockData( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +main() +``` + +Have a look at the EIP for some guidance on how to use and fill in the various deposit request parameters. + +#### EIP-7002 Withdrawal Requests + +[EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) introduces the possibility for validators to trigger exits and partial withdrawals via the execution layer. Starting with v5.3.0, this library supports withdrawal requests, and a block containing them can be instantiated as follows: + +```ts +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block } from '@ethereumjs/block' +import { + bytesToBigInt, + randomBytes, + WithdrawalRequest, + type CLRequest, + type CLRequestType, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Chain.Mainnet, + hardfork: Hardfork.Prague, + }) + + const withdrawalRequestData = { + sourceAddress: randomBytes(20), + validatorPubkey: randomBytes(48), + amount: bytesToBigInt(randomBytes(8)), + } + const request = WithdrawalRequest.fromRequestData( + withdrawalRequestData, + ) as CLRequest<CLRequestType> + const requests = [request] + const requestsRoot = await Block.genRequestsTrieRoot(requests) + + const block = Block.fromBlockData( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } withdrawal request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +main() +``` + +Have a look at the EIP for some guidance on how to use and fill in the various withdrawal request parameters. + +#### EIP-7251 Consolidation Requests + +[EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) introduces consolidation requests, allowing staked ETH from more than one validator on the beacon chain to be consolidated into one validator, triggered from the execution layer.
Starting with v5.3.0, this library supports consolidation requests, and a block containing them can be instantiated as follows: + +```ts +// ./examples/7251Requests.ts + +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block } from '@ethereumjs/block' +import { + bytesToBigInt, + ConsolidationRequest, + randomBytes, + type CLRequest, + type CLRequestType, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Chain.Mainnet, + hardfork: Hardfork.Prague, + }) + + const consolidationRequestData = { + sourceAddress: randomBytes(20), + sourcePubkey: randomBytes(48), + targetPubkey: randomBytes(48), + } + const request = ConsolidationRequest.fromRequestData( + consolidationRequestData, + ) as CLRequest<CLRequestType> + const requests = [request] + const requestsRoot = await Block.genRequestsTrieRoot(requests) + + const block = Block.fromBlockData( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } consolidation request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +main() +``` + +Have a look at the EIP for some guidance on how to use and fill in the various consolidation request parameters. + ### Consensus Types The block library supports the creation as well as consensus format validation of PoW `ethash` and PoA `clique` blocks (so e.g. do specific `extraData` checks on Clique/PoA blocks). @@ -297,7 +459,7 @@ const block = Block.fromBlockData( { // Provide your block data here or use default values }, - { common } + { common }, ) console.log(`Proof-of-Stake (default) block created with hardfork=${block.common.hardfork()}`) diff --git a/packages/block/examples/1559.ts b/packages/block/examples/1559.ts index a77afbc323..7dec3657e9 100644 --- a/packages/block/examples/1559.ts +++ b/packages/block/examples/1559.ts @@ -1,8 +1,8 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) +import { createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) -const block = createBlockFromBlockData( +const block = createBlock( { header: { baseFeePerGas: BigInt(10), @@ -10,7 +10,7 @@ const block = createBlockFromBlockData( gasUsed: BigInt(60), }, }, - { common } + { common }, ) // Base fee will increase for next block since the @@ -19,7 +19,7 @@ console.log(Number(block.header.calcNextBaseFee())) // 11 // So for creating a block with a matching base fee in a certain // chain context you can do: -const blockWithMatchingBaseFee = createBlockFromBlockData( +const blockWithMatchingBaseFee = createBlock( { header: { baseFeePerGas: block.header.calcNextBaseFee(), @@ -27,7 +27,7 @@ const blockWithMatchingBaseFee = createBlockFromBlockData( gasUsed: BigInt(60), }, }, - { common } + { common }, ) console.log(Number(blockWithMatchingBaseFee.header.baseFeePerGas)) // 11 diff --git a/packages/block/examples/4844.ts b/packages/block/examples/4844.ts index 7039c84189..20401f2809 100644 --- a/packages/block/examples/4844.ts +++ b/packages/block/examples/4844.ts @@ -1,26 +1,26 @@ -import { Common, Chain, Hardfork } from '@ethereumjs/common' -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { BlobEIP4844Transaction } from '@ethereumjs/tx' -import { Address } from '@ethereumjs/util'
-import { loadKZG } from 'kzg-wasm' +import { createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createBlob4844Tx } from '@ethereumjs/tx' +import { createAddressFromPrivateKey } from '@ethereumjs/util' import { randomBytes } from 'crypto' +import { loadKZG } from 'kzg-wasm' const main = async () => { const kzg = await loadKZG() const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Cancun, customCrypto: { kzg, }, }) - const blobTx = BlobEIP4844Transaction.fromTxData( - { blobsData: ['myFirstBlob'], to: Address.fromPrivateKey(randomBytes(32)) }, - { common } + const blobTx = createBlob4844Tx( + { blobsData: ['myFirstBlob'], to: createAddressFromPrivateKey(randomBytes(32)) }, + { common }, ) - const block = createBlockFromBlockData( + const block = createBlock( { header: { excessBlobGas: 0n, @@ -30,14 +30,14 @@ const main = async () => { { common, skipConsensusFormatValidation: true, - } + }, ) console.log( `4844 block header with excessBlobGas=${block.header.excessBlobGas} created and ${ block.transactions.filter((tx) => tx.type === 3).length - } blob transactions` + } blob transactions`, ) } -main() +void main() diff --git a/packages/block/examples/6110Requests.ts b/packages/block/examples/6110Requests.ts new file mode 100644 index 0000000000..2f50752e94 --- /dev/null +++ b/packages/block/examples/6110Requests.ts @@ -0,0 +1,42 @@ +import { createBlock, genRequestsTrieRoot } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { + type CLRequest, + type CLRequestType, + DepositRequest, + bytesToBigInt, + randomBytes, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Mainnet, + hardfork: Hardfork.Prague, + }) + + const depositRequestData = { + pubkey: randomBytes(48), + withdrawalCredentials: randomBytes(32), + amount: bytesToBigInt(randomBytes(8)), + signature: randomBytes(96), + index: bytesToBigInt(randomBytes(8)), + } + const request = DepositRequest.fromRequestData(depositRequestData) as CLRequest + const requests = [request] + const requestsRoot = await genRequestsTrieRoot(requests) + + const block = createBlock( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } deposit request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +void main() diff --git a/packages/block/examples/7002Requests.ts b/packages/block/examples/7002Requests.ts new file mode 100644 index 0000000000..acfcb45d80 --- /dev/null +++ b/packages/block/examples/7002Requests.ts @@ -0,0 +1,42 @@ +import { createBlock, genRequestsTrieRoot } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { + type CLRequest, + type CLRequestType, + WithdrawalRequest, + bytesToBigInt, + randomBytes, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Mainnet, + hardfork: Hardfork.Prague, + }) + + const withdrawalRequestData = { + sourceAddress: randomBytes(20), + validatorPubkey: randomBytes(48), + amount: bytesToBigInt(randomBytes(8)), + } + const request = WithdrawalRequest.fromRequestData( + withdrawalRequestData, + ) as CLRequest + const requests = [request] + const requestsRoot = await genRequestsTrieRoot(requests) + + const block = createBlock( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + 
block.requests?.length + } withdrawal request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +void main() diff --git a/packages/block/examples/7251Requests.ts b/packages/block/examples/7251Requests.ts new file mode 100644 index 0000000000..23c5878d51 --- /dev/null +++ b/packages/block/examples/7251Requests.ts @@ -0,0 +1,41 @@ +import { createBlock, genRequestsTrieRoot } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { + type CLRequest, + type CLRequestType, + ConsolidationRequest, + randomBytes, +} from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ + chain: Mainnet, + hardfork: Hardfork.Prague, + }) + + const consolidationRequestData = { + sourceAddress: randomBytes(20), + sourcePubkey: randomBytes(48), + targetPubkey: randomBytes(48), + } + const request = ConsolidationRequest.fromRequestData( + consolidationRequestData, + ) as CLRequest + const requests = [request] + const requestsRoot = await genRequestsTrieRoot(requests) + + const block = createBlock( + { + requests, + header: { requestsRoot }, + }, + { common }, + ) + console.log( + `Instantiated block with ${ + block.requests?.length + } consolidation request, requestTrieValid=${await block.requestsTrieIsValid()}`, + ) +} + +void main() diff --git a/packages/block/examples/clique.ts b/packages/block/examples/clique.ts index ba32b58cc1..43630a1ca8 100644 --- a/packages/block/examples/clique.ts +++ b/packages/block/examples/clique.ts @@ -1,10 +1,10 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { Common, Goerli, Hardfork } from '@ethereumjs/common' -const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) +const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) console.log(common.consensusType()) // 'poa' console.log(common.consensusAlgorithm()) // 'clique' -createBlockFromBlockData({ header: { extraData: new Uint8Array(97) } }, { common }) +createBlock({ header: { extraData: new Uint8Array(97) } }, { common }) console.log(`Old Clique Proof-of-Authority block created`) diff --git a/packages/block/examples/pos.ts b/packages/block/examples/pos.ts index 1096669d55..09a132aa04 100644 --- a/packages/block/examples/pos.ts +++ b/packages/block/examples/pos.ts @@ -1,13 +1,13 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { Common, Mainnet } from '@ethereumjs/common' -const common = new Common({ chain: Chain.Mainnet }) +const common = new Common({ chain: Mainnet }) -const block = createBlockFromBlockData( +const block = createBlock( { // Provide your block data here or use default values }, - { common } + { common }, ) console.log(`Proof-of-Stake (default) block created with hardfork=${block.common.hardfork()}`) diff --git a/packages/block/examples/pow.ts b/packages/block/examples/pow.ts index 997e47c26c..adb7de1a68 100644 --- a/packages/block/examples/pow.ts +++ b/packages/block/examples/pow.ts @@ -1,10 +1,10 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' -const common = new Common({ chain: Chain.Mainnet, hardfork: 
Hardfork.Chainstart }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) console.log(common.consensusType()) // 'pow' console.log(common.consensusAlgorithm()) // 'ethash' -createBlockFromBlockData({}, { common }) +createBlock({}, { common }) console.log(`Old Proof-of-Work block created`) diff --git a/packages/block/examples/simple.ts b/packages/block/examples/simple.ts index f571d1d63e..c03ee2b3a0 100644 --- a/packages/block/examples/simple.ts +++ b/packages/block/examples/simple.ts @@ -1,11 +1,13 @@ -import { BlockHeader } from '@ethereumjs/block' +import { createBlockHeader } from '@ethereumjs/block' import { bytesToHex } from '@ethereumjs/util' -const headerData = { +import type { HeaderData } from '@ethereumjs/block' + +const headerData: HeaderData = { number: 15, parentHash: '0x6bfee7294bf44572b7266358e627f3c35105e1c3851f3de09e6d646f955725a7', gasLimit: 8000000, timestamp: 1562422144, } -const header = BlockHeader.fromHeaderData(headerData) +const header = createBlockHeader(headerData) console.log(`Created block header with hash=${bytesToHex(header.hash())}`) diff --git a/packages/block/examples/withdrawals.ts b/packages/block/examples/withdrawals.ts index 73e5e7dfc1..2b68e13e93 100644 --- a/packages/block/examples/withdrawals.ts +++ b/packages/block/examples/withdrawals.ts @@ -1,9 +1,10 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' -import { Common, Chain } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { Common, Mainnet } from '@ethereumjs/common' import { Address, hexToBytes } from '@ethereumjs/util' + import type { WithdrawalData } from '@ethereumjs/util' -const common = new Common({ chain: Chain.Mainnet }) +const common = new Common({ chain: Mainnet }) const withdrawal = { index: BigInt(0), @@ -12,18 +13,18 @@ const withdrawal = { amount: BigInt(1000), } -const block = createBlockFromBlockData( +const block = createBlock( { header: { withdrawalsRoot: hexToBytes( - '0x69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357' + '0x69f28913c562b0d38f8dc81e72eb0d99052444d301bf8158dc1f3f94a4526357', ), }, withdrawals: [withdrawal], }, { common, - } + }, ) console.log(`Block with ${block.withdrawals!.length} withdrawal(s) created`) diff --git a/packages/block/package.json b/packages/block/package.json index 4591a64184..1674ce9c57 100644 --- a/packages/block/package.json +++ b/packages/block/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/block", - "version": "5.2.0", + "version": "5.3.0", "description": "Provides Block serialization and help functions", "keywords": [ "ethereum", @@ -47,11 +47,11 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { diff --git a/packages/block/src/block.ts b/packages/block/src/block/block.ts similarity index 91% rename from packages/block/src/block.ts rename to packages/block/src/block/block.ts index 66bc14e7db..a59b332558 100644 --- a/packages/block/src/block.ts +++ b/packages/block/src/block/block.ts @@ -1,7 +1,7 @@ import { ConsensusType } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { Trie } from '@ethereumjs/trie' -import { BlobEIP4844Transaction, Capability } from 
'@ethereumjs/tx' +import { Blob4844Tx, Capability } from '@ethereumjs/tx' import { BIGINT_0, CLRequestType, @@ -12,32 +12,29 @@ import { } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' -import { BlockHeader } from './header.js' -import { genRequestsTrieRoot, genTransactionsTrieRoot, genWithdrawalsTrieRoot } from './helpers.js' - /* eslint-disable */ // This is to allow for a proper and linked collection of constructors for the class header. // For tree shaking/code size this should be no problem since types go away on transpilation. // TODO: See if there is an easier way to achieve the same result. // See: https://github.com/microsoft/TypeScript/issues/47558 // (situation will eventually improve on Typescript and/or Eslint update) -import type { - createBlockFromBeaconPayloadJson, - createBlockFromBlockData, - createBlockFromExecutionPayload, - createBlockFromJsonRpcProvider, - createBlockFromRLPSerializedBlock, - createBlockFromRPC, - createBlockFromValuesArray, -} from './index.js' +import { + genRequestsTrieRoot, + genTransactionsTrieRoot, + genWithdrawalsTrieRoot, + BlockHeader, + type createBlockFromBeaconPayloadJson, + type createBlock, + type createBlockFromExecutionPayload, + type createBlockFromJsonRpcProvider, + type createBlockFromRLPSerializedBlock, + type createBlockFromRPC, + type createBlockFromBytesArray, +} from '../index.js' /* eslint-enable */ -import type { BlockBytes, BlockOptions, ExecutionPayload, JsonBlock } from './types.js' +import type { BlockBytes, BlockOptions, ExecutionPayload, JsonBlock } from '../types.js' import type { Common } from '@ethereumjs/common' -import type { - FeeMarketEIP1559Transaction, - LegacyTransaction, - TypedTransaction, -} from '@ethereumjs/tx' +import type { FeeMarket1559Tx, LegacyTx, TypedTransaction } from '@ethereumjs/tx' import type { CLRequest, ConsolidationRequest, @@ -55,8 +52,8 @@ import type { * A block object can be created with one of the following constructor methods * (separate from the Block class to allow for tree shaking): * - * - {@link createBlockFromBlockData } - * - {@link createBlockFromValuesArray } + * - {@link createBlock } + * - {@link createBlockFromBytesArray } * - {@link createBlockFromRLPSerializedBlock } * - {@link createBlockFromRPC } * - {@link createBlockFromJsonRpcProvider } @@ -98,9 +95,9 @@ export class Block { withdrawals?: Withdrawal[], opts: BlockOptions = {}, requests?: CLRequest[], - executionWitness?: VerkleExecutionWitness | null + executionWitness?: VerkleExecutionWitness | null, ) { - this.header = header ?? BlockHeader.fromHeaderData({}, opts) + this.header = header ?? new BlockHeader({}, opts) this.common = this.header.common this.keccakFunction = this.common.customCrypto.keccak256 ?? keccak256 @@ -134,13 +131,13 @@ export class Block { this.validateUncles() if (this.common.consensusType() === ConsensusType.ProofOfAuthority) { const msg = this._errorMsg( - 'Block initialization with uncleHeaders on a PoA network is not allowed' + 'Block initialization with uncleHeaders on a PoA network is not allowed', ) throw new Error(msg) } if (this.common.consensusType() === ConsensusType.ProofOfStake) { const msg = this._errorMsg( - 'Block initialization with uncleHeaders on a PoS network is not allowed' + 'Block initialization with uncleHeaders on a PoS network is not allowed', ) throw new Error(msg) } @@ -183,7 +180,7 @@ export class Block { const bytesArray = [ this.header.raw(), this.transactions.map((tx) => - tx.supports(Capability.EIP2718TypedTransaction) ? 
tx.serialize() : tx.raw() + tx.supports(Capability.EIP2718TypedTransaction) ? tx.serialize() : tx.raw(), ) as Uint8Array[], this.uncleHeaders.map((uh) => uh.raw()), ] @@ -279,31 +276,31 @@ export class Block { getTransactionsValidationErrors(): string[] { const errors: string[] = [] let blobGasUsed = BIGINT_0 - const blobGasLimit = this.common.param('gasConfig', 'maxblobGasPerBlock') - const blobGasPerBlob = this.common.param('gasConfig', 'blobGasPerBlob') // eslint-disable-next-line prefer-const for (let [i, tx] of this.transactions.entries()) { const errs = tx.getValidationErrors() if (this.common.isActivatedEIP(1559)) { if (tx.supports(Capability.EIP1559FeeMarket)) { - tx = tx as FeeMarketEIP1559Transaction + tx = tx as FeeMarket1559Tx if (tx.maxFeePerGas < this.header.baseFeePerGas!) { errs.push('tx unable to pay base fee (EIP-1559 tx)') } } else { - tx = tx as LegacyTransaction + tx = tx as LegacyTx if (tx.gasPrice < this.header.baseFeePerGas!) { errs.push('tx unable to pay base fee (non EIP-1559 tx)') } } } if (this.common.isActivatedEIP(4844)) { - if (tx instanceof BlobEIP4844Transaction) { + const blobGasLimit = this.common.param('maxblobGasPerBlock') + const blobGasPerBlob = this.common.param('blobGasPerBlob') + if (tx instanceof Blob4844Tx) { blobGasUsed += BigInt(tx.numBlobs()) * blobGasPerBlob if (blobGasUsed > blobGasLimit) { errs.push( - `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}` + `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}`, ) } } @@ -359,7 +356,7 @@ export class Block { for (const [index, tx] of this.transactions.entries()) { if (!tx.isSigned()) { const msg = this._errorMsg( - `invalid transactions: transaction at index ${index} is unsigned` + `invalid transactions: transaction at index ${index} is unsigned`, ) throw new Error(msg) } @@ -401,27 +398,27 @@ export class Block { */ validateBlobTransactions(parentHeader: BlockHeader) { if (this.common.isActivatedEIP(4844)) { - const blobGasLimit = this.common.param('gasConfig', 'maxblobGasPerBlock') - const blobGasPerBlob = this.common.param('gasConfig', 'blobGasPerBlob') + const blobGasLimit = this.common.param('maxblobGasPerBlock') + const blobGasPerBlob = this.common.param('blobGasPerBlob') let blobGasUsed = BIGINT_0 const expectedExcessBlobGas = parentHeader.calcNextExcessBlobGas() if (this.header.excessBlobGas !== expectedExcessBlobGas) { throw new Error( - `block excessBlobGas mismatch: have ${this.header.excessBlobGas}, want ${expectedExcessBlobGas}` + `block excessBlobGas mismatch: have ${this.header.excessBlobGas}, want ${expectedExcessBlobGas}`, ) } let blobGasPrice for (const tx of this.transactions) { - if (tx instanceof BlobEIP4844Transaction) { + if (tx instanceof Blob4844Tx) { blobGasPrice = blobGasPrice ?? 
this.header.getBlobGasPrice() if (tx.maxFeePerBlobGas < blobGasPrice) { throw new Error( `blob transaction maxFeePerBlobGas ${ tx.maxFeePerBlobGas - } < than block blob gas price ${blobGasPrice} - ${this.errorStr()}` + } < than block blob gas price ${blobGasPrice} - ${this.errorStr()}`, ) } @@ -429,7 +426,7 @@ export class Block { if (blobGasUsed > blobGasLimit) { throw new Error( - `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}` + `tx causes total blob gas of ${blobGasUsed} to exceed maximum blob gas per block of ${blobGasLimit}`, ) } } @@ -437,7 +434,7 @@ export class Block { if (this.header.blobGasUsed !== blobGasUsed) { throw new Error( - `block blobGasUsed mismatch: have ${this.header.blobGasUsed}, want ${blobGasUsed}` + `block blobGasUsed mismatch: have ${this.header.blobGasUsed}, want ${blobGasUsed}`, ) } } @@ -474,7 +471,7 @@ export class Block { if (this.cache.withdrawalsTrieRoot === undefined) { this.cache.withdrawalsTrieRoot = await genWithdrawalsTrieRoot( this.withdrawals!, - new Trie({ common: this.common }) + new Trie({ common: this.common }), ) } result = equalsBytes(this.cache.withdrawalsTrieRoot, this.header.withdrawalsRoot!) @@ -509,15 +506,6 @@ export class Block { } } - /** - * Returns the canonical difficulty for this block. - * - * @param parentBlock - the parent of this `Block` - */ - ethashCanonicalDifficulty(parentBlock: Block): bigint { - return this.header.ethashCanonicalDifficulty(parentBlock.header) - } - /** * Validates if the block gasLimit remains in the boundaries set by the protocol. * Throws if invalid @@ -546,6 +534,12 @@ export class Block { } } + /** + * Maps the block properties to the execution payload structure from the beacon chain, + * see https://github.com/ethereum/consensus-specs/blob/dev/specs/bellatrix/beacon-chain.md#ExecutionPayload + * + * @returns dict with the execution payload parameters with camel case naming + */ toExecutionPayload(): ExecutionPayload { const blockJson = this.toJSON() const header = blockJson.header! 
diff --git a/packages/block/src/constructors.ts b/packages/block/src/block/constructors.ts similarity index 78% rename from packages/block/src/constructors.ts rename to packages/block/src/block/constructors.ts index f06fbc359e..bab7a78495 100644 --- a/packages/block/src/constructors.ts +++ b/packages/block/src/block/constructors.ts @@ -1,6 +1,12 @@ import { RLP } from '@ethereumjs/rlp' import { Trie } from '@ethereumjs/trie' -import { TransactionFactory } from '@ethereumjs/tx' +import { + type TxOptions, + createTxFromBlockBodyData, + createTxFromSerializedData, + createTxFromTxData, + normalizeTxParams, +} from '@ethereumjs/tx' import { CLRequestFactory, ConsolidationRequest, @@ -18,12 +24,17 @@ import { isHexString, } from '@ethereumjs/util' -import { createBlockFromRpc } from './from-rpc.js' -import { genRequestsTrieRoot, genTransactionsTrieRoot, genWithdrawalsTrieRoot } from './helpers.js' - -import { Block, BlockHeader, executionPayloadFromBeaconPayload } from './index.js' - -import type { BeaconPayloadJson } from './from-beacon-payload.js' +import { generateCliqueBlockExtraData } from '../consensus/clique.js' +import { genRequestsTrieRoot, genTransactionsTrieRoot, genWithdrawalsTrieRoot } from '../helpers.js' +import { + Block, + blockHeaderFromRpc, + createBlockHeader, + createBlockHeaderFromBytesArray, + executionPayloadFromBeaconPayload, +} from '../index.js' + +import type { BeaconPayloadJson } from '../from-beacon-payload.js' import type { BlockBytes, BlockData, @@ -34,8 +45,8 @@ import type { JsonRpcBlock, RequestsBytes, WithdrawalsBytes, -} from './types.js' -import type { TxOptions } from '@ethereumjs/tx' +} from '../types.js' +import type { TypedTransaction } from '@ethereumjs/tx' import type { CLRequest, CLRequestType, @@ -51,7 +62,7 @@ import type { * @param blockData * @param opts */ -export function createBlockFromBlockData(blockData: BlockData = {}, opts?: BlockOptions) { +export function createBlock(blockData: BlockData = {}, opts?: BlockOptions) { const { header: headerData, transactions: txsData, @@ -61,12 +72,12 @@ export function createBlockFromBlockData(blockData: BlockData = {}, opts?: Block requests: clRequests, } = blockData - const header = BlockHeader.fromHeaderData(headerData, opts) + const header = createBlockHeader(headerData, opts) // parse transactions const transactions = [] for (const txData of txsData ?? []) { - const tx = TransactionFactory.fromTxData(txData, { + const tx = createTxFromTxData(txData, { ...opts, // Use header common in case of setHardfork being activated common: header.common, @@ -88,7 +99,7 @@ export function createBlockFromBlockData(blockData: BlockData = {}, opts?: Block uncleOpts.setHardfork = true } for (const uhData of uhsData ?? 
[]) { - const uh = BlockHeader.fromHeaderData(uhData, uncleOpts) + const uh = createBlockHeader(uhData, uncleOpts) uncleHeaders.push(uh) } @@ -104,7 +115,7 @@ export function createBlockFromBlockData(blockData: BlockData = {}, opts?: Block withdrawals, opts, clRequests, - executionWitness + executionWitness, ) } @@ -114,7 +125,7 @@ export function createBlockFromBlockData(blockData: BlockData = {}, opts?: Block * @param values * @param opts */ -export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptions) { +export function createBlockFromBytesArray(values: BlockBytes, opts?: BlockOptions) { if (values.length > 5) { throw new Error(`invalid More values=${values.length} than expected were received (at most 5)`) } @@ -122,7 +133,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio // First try to load header so that we can use its common (in case of setHardfork being activated) // to correctly make checks on the hardforks const [headerData, txsData, uhsData, ...valuesTail] = values - const header = BlockHeader.fromValuesArray(headerData, opts) + const header = createBlockHeaderFromBytesArray(headerData, opts) // conditional assignment of rest of values and splicing them out from the valuesTail const withdrawalBytes = header.common.isActivatedEIP(4895) @@ -142,7 +153,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio (withdrawalBytes === undefined || !Array.isArray(withdrawalBytes)) ) { throw new Error( - 'Invalid serialized block input: EIP-4895 is active, and no withdrawals were provided as array' + 'Invalid serialized block input: EIP-4895 is active, and no withdrawals were provided as array', ) } @@ -151,13 +162,13 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio (requestBytes === undefined || !Array.isArray(requestBytes)) ) { throw new Error( - 'Invalid serialized block input: EIP-7685 is active, and no requestBytes were provided as array' + 'Invalid serialized block input: EIP-7685 is active, and no requestBytes were provided as array', ) } if (header.common.isActivatedEIP(6800) && executionWitnessBytes === undefined) { throw new Error( - 'Invalid serialized block input: EIP-6800 is active, and execution witness is undefined' + 'Invalid serialized block input: EIP-6800 is active, and execution witness is undefined', ) } @@ -165,11 +176,11 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio const transactions = [] for (const txData of txsData ?? []) { transactions.push( - TransactionFactory.fromBlockBodyData(txData, { + createTxFromBlockBodyData(txData, { ...opts, // Use header common in case of setHardfork being activated common: header.common, - }) + }), ) } @@ -187,7 +198,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio uncleOpts.setHardfork = true } for (const uncleHeaderData of uhsData ?? 
[]) { - uncleHeaders.push(BlockHeader.fromValuesArray(uncleHeaderData, uncleOpts)) + uncleHeaders.push(createBlockHeaderFromBytesArray(uncleHeaderData, uncleOpts)) } const withdrawals = (withdrawalBytes as WithdrawalBytes[]) @@ -202,7 +213,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio let requests if (header.common.isActivatedEIP(7685)) { requests = (requestBytes as RequestBytes[]).map((bytes) => - CLRequestFactory.fromSerializedRequest(bytes) + CLRequestFactory.fromSerializedRequest(bytes), ) } // executionWitness are not part of the EL fetched blocks via eth_ bodies method @@ -227,7 +238,7 @@ export function createBlockFromValuesArray(values: BlockBytes, opts?: BlockOptio withdrawals, opts, requests, - executionWitness + executionWitness, ) } @@ -244,7 +255,7 @@ export function createBlockFromRLPSerializedBlock(serialized: Uint8Array, opts?: throw new Error('Invalid serialized block input. Must be array') } - return createBlockFromValuesArray(values, opts) + return createBlockFromBytesArray(values, opts) } /** @@ -254,8 +265,31 @@ export function createBlockFromRLPSerializedBlock(serialized: Uint8Array, opts?: * @param uncles - Optional list of Ethereum JSON RPC of uncles (eth_getUncleByBlockHashAndIndex) * @param opts - An object describing the blockchain */ -export function createBlockFromRPC(blockData: JsonRpcBlock, uncles?: any[], opts?: BlockOptions) { - return createBlockFromRpc(blockData, uncles, opts) +export function createBlockFromRPC( + blockParams: JsonRpcBlock, + uncles: any[] = [], + options?: BlockOptions, +) { + const header = blockHeaderFromRpc(blockParams, options) + + const transactions: TypedTransaction[] = [] + const opts = { common: header.common } + for (const _txParams of blockParams.transactions ?? 
[]) { + const txParams = normalizeTxParams(_txParams) + const tx = createTxFromTxData(txParams, opts) + transactions.push(tx) + } + + const uncleHeaders = uncles.map((uh) => blockHeaderFromRpc(uh, options)) + + const requests = blockParams.requests?.map((req) => { + const bytes = hexToBytes(req as PrefixedHexString) + return CLRequestFactory.fromSerializedRequest(bytes) + }) + return createBlock( + { header, transactions, uncleHeaders, withdrawals: blockParams.withdrawals, requests }, + options, + ) } /** @@ -268,7 +302,7 @@ export function createBlockFromRPC(blockData: JsonRpcBlock, uncles?: any[], opts export const createBlockFromJsonRpcProvider = async ( provider: string | EthersProvider, blockTag: string | bigint, - opts: BlockOptions + opts: BlockOptions, ) => { let blockData const providerUrl = getProvider(provider) @@ -297,7 +331,7 @@ export const createBlockFromJsonRpcProvider = async ( }) } else { throw new Error( - `expected blockTag to be block hash, bigint, hex prefixed string, or earliest/latest/pending; got ${blockTag}` + `expected blockTag to be block hash, bigint, hex prefixed string, or earliest/latest/pending; got ${blockTag}`, ) } @@ -316,7 +350,7 @@ export const createBlockFromJsonRpcProvider = async ( } } - return createBlockFromRpc(blockData, uncleHeaders, opts) + return createBlockFromRPC(blockData, uncleHeaders, opts) } /** @@ -327,7 +361,7 @@ export const createBlockFromJsonRpcProvider = async ( */ export async function createBlockFromExecutionPayload( payload: ExecutionPayload, - opts?: BlockOptions + opts?: BlockOptions, ): Promise { const { blockNumber: number, @@ -345,12 +379,9 @@ export async function createBlockFromExecutionPayload( const txs = [] for (const [index, serializedTx] of transactions.entries()) { try { - const tx = TransactionFactory.fromSerializedData( - hexToBytes(serializedTx as PrefixedHexString), - { - common: opts?.common, - } - ) + const tx = createTxFromSerializedData(hexToBytes(serializedTx as PrefixedHexString), { + common: opts?.common, + }) txs.push(tx) } catch (error) { const validationError = `Invalid tx at index ${index}: ${error}` @@ -406,9 +437,9 @@ export async function createBlockFromExecutionPayload( } // we are not setting setHardfork as common is already set to the correct hf - const block = createBlockFromBlockData( + const block = createBlock( { header, transactions: txs, withdrawals, executionWitness, requests }, - opts + opts, ) if ( block.common.isActivatedEIP(6800) && @@ -435,8 +466,32 @@ export async function createBlockFromExecutionPayload( */ export async function createBlockFromBeaconPayloadJson( payload: BeaconPayloadJson, - opts?: BlockOptions + opts?: BlockOptions, ): Promise { const executionPayload = executionPayloadFromBeaconPayload(payload) return createBlockFromExecutionPayload(executionPayload, opts) } + +export function createSealedCliqueBlock( + blockData: BlockData = {}, + cliqueSigner: Uint8Array, + opts: BlockOptions = {}, +): Block { + const sealedCliqueBlock = createBlock(blockData, { + ...opts, + ...{ freeze: false, skipConsensusFormatValidation: true }, + }) + ;(sealedCliqueBlock.header.extraData as any) = generateCliqueBlockExtraData( + sealedCliqueBlock.header, + cliqueSigner, + ) + if (opts?.freeze === true) { + // We have to freeze here since we can't freeze the block when constructing it since we are overwriting `extraData` + Object.freeze(sealedCliqueBlock) + } + if (opts?.skipConsensusFormatValidation === false) { + // We need to validate the consensus format here since we skipped it when 
constructing the block + sealedCliqueBlock.header['_consensusFormatValidation']() + } + return sealedCliqueBlock +} diff --git a/packages/block/src/block/index.ts b/packages/block/src/block/index.ts new file mode 100644 index 0000000000..2209b04cea --- /dev/null +++ b/packages/block/src/block/index.ts @@ -0,0 +1,2 @@ +export * from './block.js' +export * from './constructors.js' diff --git a/packages/block/src/clique.ts b/packages/block/src/clique.ts deleted file mode 100644 index 16f1edfcfc..0000000000 --- a/packages/block/src/clique.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Fixed number of extra-data prefix bytes reserved for signer vanity -export const CLIQUE_EXTRA_VANITY = 32 -// Fixed number of extra-data suffix bytes reserved for signer seal -export const CLIQUE_EXTRA_SEAL = 65 diff --git a/packages/block/src/consensus/clique.ts b/packages/block/src/consensus/clique.ts new file mode 100644 index 0000000000..3a89a46319 --- /dev/null +++ b/packages/block/src/consensus/clique.ts @@ -0,0 +1,163 @@ +import { ConsensusAlgorithm } from '@ethereumjs/common' +import { RLP } from '@ethereumjs/rlp' +import { + Address, + BIGINT_0, + BIGINT_27, + bigIntToBytes, + bytesToBigInt, + concatBytes, + createAddressFromPublicKey, + createZeroAddress, + ecrecover, + ecsign, + equalsBytes, +} from '@ethereumjs/util' + +import type { BlockHeader } from '../index.js' +import type { CliqueConfig } from '@ethereumjs/common' + +// Fixed number of extra-data prefix bytes reserved for signer vanity +export const CLIQUE_EXTRA_VANITY = 32 +// Fixed number of extra-data suffix bytes reserved for signer seal +export const CLIQUE_EXTRA_SEAL = 65 + +// This function is not exported in the index file to keep it internal +export function requireClique(header: BlockHeader, name: string) { + if (header.common.consensusAlgorithm() !== ConsensusAlgorithm.Clique) { + const msg = header['_errorMsg']( + `BlockHeader.${name}() call only supported for clique PoA networks`, + ) + throw new Error(msg) + } +} + +/** + * PoA clique signature hash without the seal. 
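+ *
+ * A hedged usage sketch (assumes `header` belongs to a Clique/PoA chain): the
+ * seal bytes are stripped from `extraData` before hashing, so the returned
+ * 32-byte hash is the message the block signer actually signed over:
+ * ```ts
+ * const msgHash = cliqueSigHash(header)
+ * const signer = cliqueSigner(header) // recovers the signer address from the seal
+ * ```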
+ */ +export function cliqueSigHash(header: BlockHeader) { + requireClique(header, 'cliqueSigHash') + const raw = header.raw() + raw[12] = header.extraData.subarray(0, header.extraData.length - CLIQUE_EXTRA_SEAL) + return header['keccakFunction'](RLP.encode(raw)) +} + +/** + * Checks if the block header is an epoch transition + * header (only clique PoA, throws otherwise) + */ +export function cliqueIsEpochTransition(header: BlockHeader): boolean { + requireClique(header, 'cliqueIsEpochTransition') + const epoch = BigInt((header.common.consensusConfig() as CliqueConfig).epoch) + // Epoch transition block if the block number has no + // remainder on the division by the epoch length + return header.number % epoch === BIGINT_0 +} + +/** + * Returns extra vanity data + * (only clique PoA, throws otherwise) + */ +export function cliqueExtraVanity(header: BlockHeader): Uint8Array { + requireClique(header, 'cliqueExtraVanity') + return header.extraData.subarray(0, CLIQUE_EXTRA_VANITY) +} + +/** + * Returns extra seal data + * (only clique PoA, throws otherwise) + */ +export function cliqueExtraSeal(header: BlockHeader): Uint8Array { + requireClique(header, 'cliqueExtraSeal') + return header.extraData.subarray(-CLIQUE_EXTRA_SEAL) +} + +/** + * Returns a list of signers + * (only clique PoA, throws otherwise) + * + * This function throws if not called on an epoch + * transition block and should therefore be used + * in conjunction with {@link BlockHeader.cliqueIsEpochTransition} + */ +export function cliqueEpochTransitionSigners(header: BlockHeader): Address[] { + requireClique(header, 'cliqueEpochTransitionSigners') + if (!cliqueIsEpochTransition(header)) { + const msg = header['_errorMsg']('Signers are only included in epoch transition blocks (clique)') + throw new Error(msg) + } + + const start = CLIQUE_EXTRA_VANITY + const end = header.extraData.length - CLIQUE_EXTRA_SEAL + const signerBytes = header.extraData.subarray(start, end) + + const signerList: Uint8Array[] = [] + const signerLength = 20 + for (let start = 0; start <= signerBytes.length - signerLength; start += signerLength) { + signerList.push(signerBytes.subarray(start, start + signerLength)) + } + return signerList.map((buf) => new Address(buf)) +} + +/** + * Returns the signer address + */ +export function cliqueSigner(header: BlockHeader): Address { + requireClique(header, 'cliqueSigner') + const extraSeal = cliqueExtraSeal(header) + // Reasonable default for default blocks + if (extraSeal.length === 0 || equalsBytes(extraSeal, new Uint8Array(65))) { + return createZeroAddress() + } + const r = extraSeal.subarray(0, 32) + const s = extraSeal.subarray(32, 64) + const v = bytesToBigInt(extraSeal.subarray(64, 65)) + BIGINT_27 + const pubKey = ecrecover(cliqueSigHash(header), v, r, s) + return createAddressFromPublicKey(pubKey) +} + +/** + * Verifies the signature of the block (last 65 bytes of extraData field) + * (only clique PoA, throws otherwise) + * + * Method throws if signature is invalid + */ +export function cliqueVerifySignature(header: BlockHeader, signerList: Address[]): boolean { + requireClique(header, 'cliqueVerifySignature') + const signerAddress = cliqueSigner(header) + const signerFound = signerList.find((signer) => { + return signer.equals(signerAddress) + }) + return !!signerFound +} + +/** + * Generates the extraData from a sealed block header + * @param header block header from which to retrieve extraData + * @param cliqueSigner clique signer key used for creating sealed block + * @returns clique seal (i.e. 
extradata) for the block + */ +export function generateCliqueBlockExtraData( + header: BlockHeader, + cliqueSigner: Uint8Array, +): Uint8Array { + // Ensure extraData is at least length CLIQUE_EXTRA_VANITY + CLIQUE_EXTRA_SEAL + const minExtraDataLength = CLIQUE_EXTRA_VANITY + CLIQUE_EXTRA_SEAL + if (header.extraData.length < minExtraDataLength) { + const remainingLength = minExtraDataLength - header.extraData.length + ;(header.extraData as any) = concatBytes(header.extraData, new Uint8Array(remainingLength)) + } + + requireClique(header, 'generateCliqueBlockExtraData') + + const ecSignFunction = header.common.customCrypto?.ecsign ?? ecsign + const signature = ecSignFunction(cliqueSigHash(header), cliqueSigner) + const signatureB = concatBytes(signature.r, signature.s, bigIntToBytes(signature.v - BIGINT_27)) + + const extraDataWithoutSeal = header.extraData.subarray( + 0, + header.extraData.length - CLIQUE_EXTRA_SEAL, + ) + const extraData = concatBytes(extraDataWithoutSeal, signatureB) + return extraData +} diff --git a/packages/block/src/consensus/ethash.ts b/packages/block/src/consensus/ethash.ts new file mode 100644 index 0000000000..fec0b0378a --- /dev/null +++ b/packages/block/src/consensus/ethash.ts @@ -0,0 +1,10 @@ +import type { Block } from '../index.js' + +/** + * Returns the canonical difficulty for this block. + * + * @param parentBlock - the parent of this `Block` + */ +export function ethashCanonicalDifficulty(block: Block, parentBlock: Block): bigint { + return block.header.ethashCanonicalDifficulty(parentBlock.header) +} diff --git a/packages/block/src/consensus/index.ts b/packages/block/src/consensus/index.ts new file mode 100644 index 0000000000..dd60522000 --- /dev/null +++ b/packages/block/src/consensus/index.ts @@ -0,0 +1,12 @@ +export { + CLIQUE_EXTRA_SEAL, + CLIQUE_EXTRA_VANITY, + cliqueEpochTransitionSigners, + cliqueExtraSeal, + cliqueExtraVanity, + cliqueIsEpochTransition, + cliqueSigHash, + cliqueSigner, + cliqueVerifySignature, +} from './clique.js' +export * from './ethash.js' diff --git a/packages/block/src/from-beacon-payload.ts b/packages/block/src/from-beacon-payload.ts index aa42effecf..15bd90e1ac 100644 --- a/packages/block/src/from-beacon-payload.ts +++ b/packages/block/src/from-beacon-payload.ts @@ -159,27 +159,29 @@ export function executionPayloadFromBeaconPayload(payload: BeaconPayloadJson): E // requests if (payload.deposit_requests !== undefined && payload.deposit_requests !== null) { - executionPayload.depositRequests = payload.deposit_requests.map((breq) => ({ - pubkey: breq.pubkey, - withdrawalCredentials: breq.withdrawal_credentials, - amount: breq.amount, - signature: breq.signature, - index: breq.index, + executionPayload.depositRequests = payload.deposit_requests.map((beaconRequest) => ({ + pubkey: beaconRequest.pubkey, + withdrawalCredentials: beaconRequest.withdrawal_credentials, + amount: beaconRequest.amount, + signature: beaconRequest.signature, + index: beaconRequest.index, })) } if (payload.withdrawal_requests !== undefined && payload.withdrawal_requests !== null) { - executionPayload.withdrawalRequests = payload.withdrawal_requests.map((breq) => ({ - sourceAddress: breq.source_address, - validatorPubkey: breq.validator_pubkey, - amount: breq.amount, + executionPayload.withdrawalRequests = payload.withdrawal_requests.map((beaconRequest) => ({ + sourceAddress: beaconRequest.source_address, + validatorPubkey: beaconRequest.validator_pubkey, + amount: beaconRequest.amount, })) } if (payload.consolidation_requests !== undefined && 
payload.consolidation_requests !== null) { - executionPayload.consolidationRequests = payload.consolidation_requests.map((breq) => ({ - sourceAddress: breq.source_address, - sourcePubkey: breq.source_pubkey, - targetPubkey: breq.target_pubkey, - })) + executionPayload.consolidationRequests = payload.consolidation_requests.map( + (beaconRequest) => ({ + sourceAddress: beaconRequest.source_address, + sourcePubkey: beaconRequest.source_pubkey, + targetPubkey: beaconRequest.target_pubkey, + }), + ) } if (payload.execution_witness !== undefined && payload.execution_witness !== null) { @@ -188,7 +190,7 @@ export function executionPayloadFromBeaconPayload(payload: BeaconPayloadJson): E payload.execution_witness.verkleProof !== undefined ? payload.execution_witness : parseExecutionWitnessFromSnakeJson( - payload.execution_witness as unknown as VerkleExecutionWitnessSnakeJson + payload.execution_witness as unknown as VerkleExecutionWitnessSnakeJson, ) } diff --git a/packages/block/src/from-rpc.ts b/packages/block/src/from-rpc.ts deleted file mode 100644 index 20ec9cfc5c..0000000000 --- a/packages/block/src/from-rpc.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { TransactionFactory } from '@ethereumjs/tx' -import { - CLRequestFactory, - TypeOutput, - hexToBytes, - setLengthLeft, - toBytes, - toType, -} from '@ethereumjs/util' - -import { createBlockFromBlockData } from './constructors.js' -import { blockHeaderFromRpc } from './header-from-rpc.js' - -import type { BlockOptions, JsonRpcBlock } from './index.js' -import type { TypedTransaction } from '@ethereumjs/tx' -import type { PrefixedHexString } from '@ethereumjs/util' - -function normalizeTxParams(_txParams: any) { - const txParams = Object.assign({}, _txParams) - - txParams.gasLimit = toType(txParams.gasLimit ?? txParams.gas, TypeOutput.BigInt) - txParams.data = txParams.data === undefined ? txParams.input : txParams.data - - // check and convert gasPrice and value params - txParams.gasPrice = txParams.gasPrice !== undefined ? BigInt(txParams.gasPrice) : undefined - txParams.value = txParams.value !== undefined ? BigInt(txParams.value) : undefined - - // strict byte length checking - txParams.to = - txParams.to !== null && txParams.to !== undefined - ? setLengthLeft(toBytes(txParams.to), 20) - : null - - txParams.v = toType(txParams.v, TypeOutput.BigInt) - - return txParams -} - -/** - * Creates a new block object from Ethereum JSON RPC. - * - * @param blockParams - Ethereum JSON RPC of block (eth_getBlockByNumber) - * @param uncles - Optional list of Ethereum JSON RPC of uncles (eth_getUncleByBlockHashAndIndex) - * @param options - An object describing the blockchain - * @deprecated - */ -export function createBlockFromRpc( - blockParams: JsonRpcBlock, - uncles: any[] = [], - options?: BlockOptions -) { - const header = blockHeaderFromRpc(blockParams, options) - - const transactions: TypedTransaction[] = [] - const opts = { common: header.common } - for (const _txParams of blockParams.transactions ?? 
[]) { - const txParams = normalizeTxParams(_txParams) - const tx = TransactionFactory.fromTxData(txParams, opts) - transactions.push(tx) - } - - const uncleHeaders = uncles.map((uh) => blockHeaderFromRpc(uh, options)) - - const requests = blockParams.requests?.map((req) => { - const bytes = hexToBytes(req as PrefixedHexString) - return CLRequestFactory.fromSerializedRequest(bytes) - }) - return createBlockFromBlockData( - { header, transactions, uncleHeaders, withdrawals: blockParams.withdrawals, requests }, - options - ) -} diff --git a/packages/block/src/header-from-rpc.ts b/packages/block/src/header-from-rpc.ts deleted file mode 100644 index a4ba8f3d45..0000000000 --- a/packages/block/src/header-from-rpc.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { BlockHeader } from './header.js' -import { numberToHex } from './helpers.js' - -import type { BlockOptions, JsonRpcBlock } from './types.js' - -/** - * Creates a new block header object from Ethereum JSON RPC. - * - * @param blockParams - Ethereum JSON RPC of block (eth_getBlockByNumber) - * @param options - An object describing the blockchain - */ -export function blockHeaderFromRpc(blockParams: JsonRpcBlock, options?: BlockOptions) { - const { - parentHash, - sha3Uncles, - miner, - stateRoot, - transactionsRoot, - receiptsRoot, - logsBloom, - difficulty, - number, - gasLimit, - gasUsed, - timestamp, - extraData, - mixHash, - nonce, - baseFeePerGas, - withdrawalsRoot, - blobGasUsed, - excessBlobGas, - parentBeaconBlockRoot, - requestsRoot, - } = blockParams - - const blockHeader = BlockHeader.fromHeaderData( - { - parentHash, - uncleHash: sha3Uncles, - coinbase: miner, - stateRoot, - transactionsTrie: transactionsRoot, - receiptTrie: receiptsRoot, - logsBloom, - difficulty: numberToHex(difficulty), - number, - gasLimit, - gasUsed, - timestamp, - extraData, - mixHash, - nonce, - baseFeePerGas, - withdrawalsRoot, - blobGasUsed, - excessBlobGas, - parentBeaconBlockRoot, - requestsRoot, - }, - options - ) - - return blockHeader -} diff --git a/packages/block/src/header/constructors.ts b/packages/block/src/header/constructors.ts new file mode 100644 index 0000000000..6d32b39350 --- /dev/null +++ b/packages/block/src/header/constructors.ts @@ -0,0 +1,152 @@ +import { RLP } from '@ethereumjs/rlp' +import { bigIntToBytes, equalsBytes } from '@ethereumjs/util' + +import { generateCliqueBlockExtraData } from '../consensus/clique.js' +import { numberToHex, valuesArrayToHeaderData } from '../helpers.js' +import { BlockHeader } from '../index.js' + +import type { BlockHeaderBytes, BlockOptions, HeaderData, JsonRpcBlock } from '../types.js' + +/** + * Static constructor to create a block header from a header data dictionary + * + * @param headerData + * @param opts + */ +export function createBlockHeader(headerData: HeaderData = {}, opts: BlockOptions = {}) { + return new BlockHeader(headerData, opts) +} + +/** + * Static constructor to create a block header from an array of bytes values + * + * @param values + * @param opts + */ +export function createBlockHeaderFromBytesArray(values: BlockHeaderBytes, opts: BlockOptions = {}) { + const headerData = valuesArrayToHeaderData(values) + const { number, baseFeePerGas, excessBlobGas, blobGasUsed, parentBeaconBlockRoot, requestsRoot } = + headerData + const header = createBlockHeader(headerData, opts) + if (header.common.isActivatedEIP(1559) && baseFeePerGas === undefined) { + const eip1559ActivationBlock = bigIntToBytes(header.common.eipBlock(1559)!) 
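+ // On the exact EIP-1559 activation block a baseFeePerGas value must be supplied,
+ // so a bytes array lacking it is rejected by the check below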
+ if ( + eip1559ActivationBlock !== undefined && + equalsBytes(eip1559ActivationBlock, number as Uint8Array) + ) { + throw new Error('invalid header. baseFeePerGas should be provided') + } + } + if (header.common.isActivatedEIP(4844)) { + if (excessBlobGas === undefined) { + throw new Error('invalid header. excessBlobGas should be provided') + } else if (blobGasUsed === undefined) { + throw new Error('invalid header. blobGasUsed should be provided') + } + } + if (header.common.isActivatedEIP(4788) && parentBeaconBlockRoot === undefined) { + throw new Error('invalid header. parentBeaconBlockRoot should be provided') + } + + if (header.common.isActivatedEIP(7685) && requestsRoot === undefined) { + throw new Error('invalid header. requestsRoot should be provided') + } + return header +} + +/** + * Static constructor to create a block header from a RLP-serialized header + * + * @param serializedHeaderData + * @param opts + */ +export function createBlockHeaderFromRLP( + serializedHeaderData: Uint8Array, + opts: BlockOptions = {}, +) { + const values = RLP.decode(serializedHeaderData) + if (!Array.isArray(values)) { + throw new Error('Invalid serialized header input. Must be array') + } + return createBlockHeaderFromBytesArray(values as Uint8Array[], opts) +} + +export function createSealedCliqueBlockHeader( + headerData: HeaderData = {}, + cliqueSigner: Uint8Array, + opts: BlockOptions = {}, +): BlockHeader { + const sealedCliqueBlockHeader = new BlockHeader(headerData, { + ...opts, + ...{ skipConsensusFormatValidation: true }, + }) + ;(sealedCliqueBlockHeader.extraData as any) = generateCliqueBlockExtraData( + sealedCliqueBlockHeader, + cliqueSigner, + ) + if (opts.skipConsensusFormatValidation === false) + // We need to validate the consensus format here since we skipped it when constructing the block header + sealedCliqueBlockHeader['_consensusFormatValidation']() + return sealedCliqueBlockHeader +} + +/** + * Creates a new block header object from Ethereum JSON RPC. 
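+ *
+ * A hedged usage sketch (assumes `rpcBlock` is the raw object returned by an
+ * eth_getBlockByNumber call and `common` is a preconfigured Common instance):
+ * ```ts
+ * const header = blockHeaderFromRpc(rpcBlock, { common })
+ * ```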
+ * + * @param blockParams - Ethereum JSON RPC of block (eth_getBlockByNumber) + * @param options - An object describing the blockchain + */ +export function blockHeaderFromRpc(blockParams: JsonRpcBlock, options?: BlockOptions) { + const { + parentHash, + sha3Uncles, + miner, + stateRoot, + transactionsRoot, + receiptsRoot, + logsBloom, + difficulty, + number, + gasLimit, + gasUsed, + timestamp, + extraData, + mixHash, + nonce, + baseFeePerGas, + withdrawalsRoot, + blobGasUsed, + excessBlobGas, + parentBeaconBlockRoot, + requestsRoot, + } = blockParams + + const blockHeader = new BlockHeader( + { + parentHash, + uncleHash: sha3Uncles, + coinbase: miner, + stateRoot, + transactionsTrie: transactionsRoot, + receiptTrie: receiptsRoot, + logsBloom, + difficulty: numberToHex(difficulty), + number, + gasLimit, + gasUsed, + timestamp, + extraData, + mixHash, + nonce, + baseFeePerGas, + withdrawalsRoot, + blobGasUsed, + excessBlobGas, + parentBeaconBlockRoot, + requestsRoot, + }, + options, + ) + + return blockHeader +} diff --git a/packages/block/src/header.ts b/packages/block/src/header/header.ts similarity index 72% rename from packages/block/src/header.ts rename to packages/block/src/header/header.ts index 189f9ff883..f5afcfaedf 100644 --- a/packages/block/src/header.ts +++ b/packages/block/src/header/header.ts @@ -1,24 +1,19 @@ -import { Chain, Common, ConsensusAlgorithm, ConsensusType, Hardfork } from '@ethereumjs/common' +import { Common, ConsensusAlgorithm, ConsensusType, Hardfork, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { Address, BIGINT_0, BIGINT_1, BIGINT_2, - BIGINT_27, BIGINT_7, KECCAK256_RLP, KECCAK256_RLP_ARRAY, TypeOutput, - bigIntToBytes, bigIntToHex, bigIntToUnpaddedBytes, - bytesToBigInt, bytesToHex, bytesToUtf8, - concatBytes, - ecrecover, - ecsign, + createZeroAddress, equalsBytes, hexToBytes, toType, @@ -26,12 +21,15 @@ import { } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' -import { CLIQUE_EXTRA_SEAL, CLIQUE_EXTRA_VANITY } from './clique.js' -import { fakeExponential, valuesArrayToHeaderData } from './helpers.js' +import { + CLIQUE_EXTRA_SEAL, + CLIQUE_EXTRA_VANITY, + cliqueIsEpochTransition, +} from '../consensus/clique.js' +import { fakeExponential } from '../helpers.js' +import { paramsBlock } from '../params.js' -import type { BlockHeaderBytes, BlockOptions, HeaderData, JsonHeader } from './types.js' -import type { CliqueConfig } from '@ethereumjs/common' -import type { BigIntLike } from '@ethereumjs/util' +import type { BlockHeaderBytes, BlockOptions, HeaderData, JsonHeader } from '../types.js' interface HeaderCache { hash: Uint8Array | undefined @@ -79,84 +77,18 @@ export class BlockHeader { get prevRandao() { if (!this.common.isActivatedEIP(4399)) { const msg = this._errorMsg( - 'The prevRandao parameter can only be accessed when EIP-4399 is activated' + 'The prevRandao parameter can only be accessed when EIP-4399 is activated', ) throw new Error(msg) } return this.mixHash } - /** - * Static constructor to create a block header from a header data dictionary - * - * @param headerData - * @param opts - */ - public static fromHeaderData(headerData: HeaderData = {}, opts: BlockOptions = {}) { - return new BlockHeader(headerData, opts) - } - - /** - * Static constructor to create a block header from a RLP-serialized header - * - * @param serializedHeaderData - * @param opts - */ - public static fromRLPSerializedHeader(serializedHeaderData: Uint8Array, opts: BlockOptions = {}) { - const values 
= RLP.decode(serializedHeaderData) - if (!Array.isArray(values)) { - throw new Error('Invalid serialized header input. Must be array') - } - return BlockHeader.fromValuesArray(values as Uint8Array[], opts) - } - - /** - * Static constructor to create a block header from an array of Bytes values - * - * @param values - * @param opts - */ - public static fromValuesArray(values: BlockHeaderBytes, opts: BlockOptions = {}) { - const headerData = valuesArrayToHeaderData(values) - const { - number, - baseFeePerGas, - excessBlobGas, - blobGasUsed, - parentBeaconBlockRoot, - requestsRoot, - } = headerData - const header = BlockHeader.fromHeaderData(headerData, opts) - if (header.common.isActivatedEIP(1559) && baseFeePerGas === undefined) { - const eip1559ActivationBlock = bigIntToBytes(header.common.eipBlock(1559)!) - if ( - eip1559ActivationBlock !== undefined && - equalsBytes(eip1559ActivationBlock, number as Uint8Array) - ) { - throw new Error('invalid header. baseFeePerGas should be provided') - } - } - if (header.common.isActivatedEIP(4844)) { - if (excessBlobGas === undefined) { - throw new Error('invalid header. excessBlobGas should be provided') - } else if (blobGasUsed === undefined) { - throw new Error('invalid header. blobGasUsed should be provided') - } - } - if (header.common.isActivatedEIP(4788) && parentBeaconBlockRoot === undefined) { - throw new Error('invalid header. parentBeaconBlockRoot should be provided') - } - - if (header.common.isActivatedEIP(7685) && requestsRoot === undefined) { - throw new Error('invalid header. requestsRoot should be provided') - } - return header - } /** * This constructor takes the values, validates them, assigns them and freezes the object. * * @deprecated Use the public static factory methods to assist in creating a Header object from - * varying data types. For a default empty header, use {@link BlockHeader.fromHeaderData}. + * varying data types. For a default empty header, use {@link createBlockHeader}. * */ constructor(headerData: HeaderData, opts: BlockOptions = {}) { @@ -164,9 +96,11 @@ export class BlockHeader { this.common = opts.common.copy() } else { this.common = new Common({ - chain: Chain.Mainnet, // default + chain: Mainnet, // default }) } + this.common.updateParams(opts.params ?? paramsBlock) + this.keccakFunction = this.common.customCrypto.keccak256 ?? keccak256 const skipValidateConsensusFormat = opts.skipConsensusFormatValidation ?? false @@ -174,7 +108,7 @@ export class BlockHeader { const defaults = { parentHash: zeros(32), uncleHash: KECCAK256_RLP_ARRAY, - coinbase: Address.zero(), + coinbase: createZeroAddress(), stateRoot: zeros(32), transactionsTrie: KECCAK256_RLP, receiptTrie: KECCAK256_RLP, @@ -192,7 +126,7 @@ export class BlockHeader { const parentHash = toType(headerData.parentHash, TypeOutput.Uint8Array) ?? defaults.parentHash const uncleHash = toType(headerData.uncleHash, TypeOutput.Uint8Array) ?? defaults.uncleHash const coinbase = new Address( - toType(headerData.coinbase ?? defaults.coinbase, TypeOutput.Uint8Array) + toType(headerData.coinbase ?? defaults.coinbase, TypeOutput.Uint8Array), ) const stateRoot = toType(headerData.stateRoot, TypeOutput.Uint8Array) ?? 
defaults.stateRoot const transactionsTrie = @@ -215,19 +149,13 @@ export class BlockHeader { blockNumber: number, timestamp, }) - } else if (typeof setHardfork !== 'boolean') { - this.common.setHardforkBy({ - blockNumber: number, - td: setHardfork as BigIntLike, - timestamp, - }) } // Hardfork defaults which couldn't be paired with earlier defaults const hardforkDefaults = { baseFeePerGas: this.common.isActivatedEIP(1559) ? number === this.common.hardforkBlock(Hardfork.London) - ? this.common.param('gasConfig', 'initialBaseFee') + ? this.common.param('initialBaseFee') : BIGINT_7 : undefined, withdrawalsRoot: this.common.isActivatedEIP(4895) ? KECCAK256_RLP : undefined, @@ -257,7 +185,7 @@ export class BlockHeader { if (!this.common.isActivatedEIP(4895) && withdrawalsRoot !== undefined) { throw new Error( - 'A withdrawalsRoot for a header can only be provided with EIP4895 being activated' + 'A withdrawalsRoot for a header can only be provided with EIP4895 being activated', ) } @@ -273,7 +201,7 @@ export class BlockHeader { if (!this.common.isActivatedEIP(4788) && parentBeaconBlockRoot !== undefined) { throw new Error( - 'A parentBeaconBlockRoot for a header can only be provided with EIP4788 being activated' + 'A parentBeaconBlockRoot for a header can only be provided with EIP4788 being activated', ) } @@ -315,18 +243,6 @@ export class BlockHeader { this.difficulty = this.ethashCanonicalDifficulty(opts.calcDifficultyFromHeader) } - // If cliqueSigner is provided, seal block with provided privateKey. - if (opts.cliqueSigner) { - // Ensure extraData is at least length CLIQUE_EXTRA_VANITY + CLIQUE_EXTRA_SEAL - const minExtraDataLength = CLIQUE_EXTRA_VANITY + CLIQUE_EXTRA_SEAL - if (this.extraData.length < minExtraDataLength) { - const remainingLength = minExtraDataLength - this.extraData.length - this.extraData = concatBytes(this.extraData, new Uint8Array(remainingLength)) - } - - this.extraData = this.cliqueSealBlock(opts.cliqueSigner) - } - // Validate consensus format after block is sealed (if applicable) so extraData checks will pass if (skipValidateConsensusFormat === false) this._consensusFormatValidation() @@ -352,13 +268,13 @@ export class BlockHeader { } if (transactionsTrie.length !== 32) { const msg = this._errorMsg( - `transactionsTrie must be 32 bytes, received ${transactionsTrie.length} bytes` + `transactionsTrie must be 32 bytes, received ${transactionsTrie.length} bytes`, ) throw new Error(msg) } if (receiptTrie.length !== 32) { const msg = this._errorMsg( - `receiptTrie must be 32 bytes, received ${receiptTrie.length} bytes` + `receiptTrie must be 32 bytes, received ${receiptTrie.length} bytes`, ) throw new Error(msg) } @@ -375,7 +291,7 @@ export class BlockHeader { // check if the block used too much gas if (this.gasUsed > this.gasLimit) { const msg = this._errorMsg( - `Invalid block: too much gas used. Used: ${this.gasUsed}, gas limit: ${this.gasLimit}` + `Invalid block: too much gas used. 
Used: ${this.gasUsed}, gas limit: ${this.gasLimit}`, ) throw new Error(msg) } @@ -392,7 +308,7 @@ export class BlockHeader { londonHfBlock !== BIGINT_0 && this.number === londonHfBlock ) { - const initialBaseFee = this.common.param('gasConfig', 'initialBaseFee') + const initialBaseFee = this.common.param('initialBaseFee') if (this.baseFeePerGas !== initialBaseFee) { const msg = this._errorMsg('Initial EIP1559 block does not have initial base fee') throw new Error(msg) @@ -407,7 +323,7 @@ export class BlockHeader { } if (this.withdrawalsRoot?.length !== 32) { const msg = this._errorMsg( - `withdrawalsRoot must be 32 bytes, received ${this.withdrawalsRoot!.length} bytes` + `withdrawalsRoot must be 32 bytes, received ${this.withdrawalsRoot!.length} bytes`, ) throw new Error(msg) } @@ -422,7 +338,7 @@ export class BlockHeader { const msg = this._errorMsg( `parentBeaconBlockRoot must be 32 bytes, received ${ this.parentBeaconBlockRoot!.length - } bytes` + } bytes`, ) throw new Error(msg) } @@ -446,10 +362,7 @@ export class BlockHeader { // Consensus type dependent checks if (this.common.consensusAlgorithm() === ConsensusAlgorithm.Ethash) { // PoW/Ethash - if ( - number > BIGINT_0 && - this.extraData.length > this.common.param('vm', 'maxExtraDataSize') - ) { + if (number > BIGINT_0 && this.extraData.length > this.common.param('maxExtraDataSize')) { // Check length of data on all post-genesis blocks const msg = this._errorMsg('invalid amount of extra data') throw new Error(msg) @@ -458,11 +371,11 @@ export class BlockHeader { if (this.common.consensusAlgorithm() === ConsensusAlgorithm.Clique) { // PoA/Clique const minLength = CLIQUE_EXTRA_VANITY + CLIQUE_EXTRA_SEAL - if (!this.cliqueIsEpochTransition()) { + if (!cliqueIsEpochTransition(this)) { // ExtraData length on epoch transition if (this.extraData.length !== minLength) { const msg = this._errorMsg( - `extraData must be ${minLength} bytes on non-epoch transition blocks, received ${this.extraData.length} bytes` + `extraData must be ${minLength} bytes on non-epoch transition blocks, received ${this.extraData.length} bytes`, ) throw new Error(msg) } @@ -470,14 +383,14 @@ export class BlockHeader { const signerLength = this.extraData.length - minLength if (signerLength % 20 !== 0) { const msg = this._errorMsg( - `invalid signer list length in extraData, received signer length of ${signerLength} (not divisible by 20)` + `invalid signer list length in extraData, received signer length of ${signerLength} (not divisible by 20)`, ) throw new Error(msg) } // coinbase (beneficiary) on epoch transition if (!this.coinbase.isZero()) { const msg = this._errorMsg( - `coinbase must be filled with zeros on epoch transition blocks, received ${this.coinbase}` + `coinbase must be filled with zeros on epoch transition blocks, received ${this.coinbase}`, ) throw new Error(msg) } @@ -495,7 +408,7 @@ export class BlockHeader { if (!equalsBytes(uncleHash, KECCAK256_RLP_ARRAY)) { errorMsg += `, uncleHash: ${bytesToHex(uncleHash)} (expected: ${bytesToHex( - KECCAK256_RLP_ARRAY + KECCAK256_RLP_ARRAY, )})` error = true } @@ -507,7 +420,7 @@ export class BlockHeader { } if (extraData.length > 32) { errorMsg += `, extraData: ${bytesToHex( - extraData + extraData, )} (cannot exceed 32 bytes length, received ${extraData.length} bytes)` error = true } @@ -539,35 +452,34 @@ export class BlockHeader { londonHardforkBlock !== BIGINT_0 && this.number === londonHardforkBlock ) { - const elasticity = this.common.param('gasConfig', 'elasticityMultiplier') + const elasticity = 
this.common.param('elasticityMultiplier') parentGasLimit = parentGasLimit * elasticity } const gasLimit = this.gasLimit - const a = parentGasLimit / this.common.param('gasConfig', 'gasLimitBoundDivisor') + const a = parentGasLimit / this.common.param('gasLimitBoundDivisor') const maxGasLimit = parentGasLimit + a const minGasLimit = parentGasLimit - a if (gasLimit >= maxGasLimit) { const msg = this._errorMsg( - `gas limit increased too much. Gas limit: ${gasLimit}, max gas limit: ${maxGasLimit}` + `gas limit increased too much. Gas limit: ${gasLimit}, max gas limit: ${maxGasLimit}`, ) throw new Error(msg) } if (gasLimit <= minGasLimit) { const msg = this._errorMsg( - `gas limit decreased too much. Gas limit: ${gasLimit}, min gas limit: ${minGasLimit}` + `gas limit decreased too much. Gas limit: ${gasLimit}, min gas limit: ${minGasLimit}`, ) throw new Error(msg) } - if (gasLimit < this.common.param('gasConfig', 'minGasLimit')) { + if (gasLimit < this.common.param('minGasLimit')) { const msg = this._errorMsg( `gas limit decreased below minimum gas limit. Gas limit: ${gasLimit}, minimum gas limit: ${this.common.param( - 'gasConfig', - 'minGasLimit' - )}` + 'minGasLimit', + )}`, ) throw new Error(msg) } @@ -579,32 +491,26 @@ export class BlockHeader { public calcNextBaseFee(): bigint { if (!this.common.isActivatedEIP(1559)) { const msg = this._errorMsg( - 'calcNextBaseFee() can only be called with EIP1559 being activated' + 'calcNextBaseFee() can only be called with EIP1559 being activated', ) throw new Error(msg) } let nextBaseFee: bigint - const elasticity = this.common.param('gasConfig', 'elasticityMultiplier') + const elasticity = this.common.param('elasticityMultiplier') const parentGasTarget = this.gasLimit / elasticity if (parentGasTarget === this.gasUsed) { nextBaseFee = this.baseFeePerGas! } else if (this.gasUsed > parentGasTarget) { const gasUsedDelta = this.gasUsed - parentGasTarget - const baseFeeMaxChangeDenominator = this.common.param( - 'gasConfig', - 'baseFeeMaxChangeDenominator' - ) + const baseFeeMaxChangeDenominator = this.common.param('baseFeeMaxChangeDenominator') const calculatedDelta = (this.baseFeePerGas! * gasUsedDelta) / parentGasTarget / baseFeeMaxChangeDenominator nextBaseFee = (calculatedDelta > BIGINT_1 ? calculatedDelta : BIGINT_1) + this.baseFeePerGas! } else { const gasUsedDelta = parentGasTarget - this.gasUsed - const baseFeeMaxChangeDenominator = this.common.param( - 'gasConfig', - 'baseFeeMaxChangeDenominator' - ) + const baseFeeMaxChangeDenominator = this.common.param('baseFeeMaxChangeDenominator') const calculatedDelta = (this.baseFeePerGas! 
* gasUsedDelta) / parentGasTarget / baseFeeMaxChangeDenominator @@ -633,9 +539,9 @@ export class BlockHeader { */ private _getBlobGasPrice(excessBlobGas: bigint) { return fakeExponential( - this.common.param('gasPrices', 'minBlobGasPrice'), + this.common.param('minBlobGas'), excessBlobGas, - this.common.param('gasConfig', 'blobGasPriceUpdateFraction') + this.common.param('blobGasPriceUpdateFraction'), ) } @@ -646,7 +552,7 @@ export class BlockHeader { * @returns the total blob gas fee for numBlobs blobs */ calcDataFee(numBlobs: number): bigint { - const blobGasPerBlob = this.common.param('gasConfig', 'blobGasPerBlob') + const blobGasPerBlob = this.common.param('blobGasPerBlob') const blobGasUsed = blobGasPerBlob * BigInt(numBlobs) const blobGasPrice = this.getBlobGasPrice() @@ -659,7 +565,7 @@ export class BlockHeader { public calcNextExcessBlobGas(): bigint { // The validation of the fields and 4844 activation is already taken care in BlockHeader constructor const targetGasConsumed = (this.excessBlobGas ?? BIGINT_0) + (this.blobGasUsed ?? BIGINT_0) - const targetBlobGasPerBlock = this.common.param('gasConfig', 'targetBlobGasPerBlock') + const targetBlobGasPerBlock = this.common.param('targetBlobGasPerBlock') if (targetGasConsumed <= targetBlobGasPerBlock) { return BIGINT_0 @@ -706,7 +612,7 @@ export class BlockHeader { rawItems.push(this.withdrawalsRoot!) } - // in kaunstinen 2 verkle is scheduled after withdrawals, will eventually be post deneb hopefully + // in kaustinen 2 verkle is scheduled after withdrawals, will eventually be post deneb hopefully if (this.common.isActivatedEIP(6800)) { // execution witness is not mandatory part of the the block so nothing to push here // but keep this comment segment for clarity regarding the same and move it according as per the @@ -747,15 +653,6 @@ export class BlockHeader { return this.number === BIGINT_0 } - protected _requireClique(name: string) { - if (this.common.consensusAlgorithm() !== ConsensusAlgorithm.Clique) { - const msg = this._errorMsg( - `BlockHeader.${name}() call only supported for clique PoA networks` - ) - throw new Error(msg) - } - } - /** * Returns the canonical difficulty for this block. * @@ -768,14 +665,14 @@ export class BlockHeader { } if (this.common.consensusAlgorithm() !== ConsensusAlgorithm.Ethash) { const msg = this._errorMsg( - 'difficulty calculation currently only supports the ethash algorithm' + 'difficulty calculation currently only supports the ethash algorithm', ) throw new Error(msg) } const blockTs = this.timestamp const { timestamp: parentTs, difficulty: parentDif } = parentBlockHeader - const minimumDifficulty = this.common.param('pow', 'minimumDifficulty') - const offset = parentDif / this.common.param('pow', 'difficultyBoundDivisor') + const minimumDifficulty = this.common.param('minimumDifficulty') + const offset = parentDif / this.common.param('difficultyBoundDivisor') let num = this.number // We use a ! 
here as TS cannot follow this hardfork-dependent logic, but it always gets assigned @@ -795,7 +692,7 @@ export class BlockHeader { if (this.common.gteHardfork(Hardfork.Byzantium)) { // Get delay as parameter from common - num = num - this.common.param('pow', 'difficultyBombDelay') + num = num - this.common.param('difficultyBombDelay') if (num < BIGINT_0) { num = BIGINT_0 } @@ -810,7 +707,7 @@ export class BlockHeader { dif = parentDif + offset * a } else { // pre-homestead - if (parentTs + this.common.param('pow', 'durationLimit') > blockTs) { + if (parentTs + this.common.param('durationLimit') > blockTs) { dif = offset + parentDif } else { dif = parentDif - offset @@ -829,125 +726,6 @@ export class BlockHeader { return dif } - /** - * PoA clique signature hash without the seal. - */ - cliqueSigHash() { - this._requireClique('cliqueSigHash') - const raw = this.raw() - raw[12] = this.extraData.subarray(0, this.extraData.length - CLIQUE_EXTRA_SEAL) - return this.keccakFunction(RLP.encode(raw)) - } - - /** - * Checks if the block header is an epoch transition - * header (only clique PoA, throws otherwise) - */ - cliqueIsEpochTransition(): boolean { - this._requireClique('cliqueIsEpochTransition') - const epoch = BigInt((this.common.consensusConfig() as CliqueConfig).epoch) - // Epoch transition block if the block number has no - // remainder on the division by the epoch length - return this.number % epoch === BIGINT_0 - } - - /** - * Returns extra vanity data - * (only clique PoA, throws otherwise) - */ - cliqueExtraVanity(): Uint8Array { - this._requireClique('cliqueExtraVanity') - return this.extraData.subarray(0, CLIQUE_EXTRA_VANITY) - } - - /** - * Returns extra seal data - * (only clique PoA, throws otherwise) - */ - cliqueExtraSeal(): Uint8Array { - this._requireClique('cliqueExtraSeal') - return this.extraData.subarray(-CLIQUE_EXTRA_SEAL) - } - - /** - * Seal block with the provided signer. - * Returns the final extraData field to be assigned to `this.extraData`. - * @hidden - */ - private cliqueSealBlock(privateKey: Uint8Array) { - this._requireClique('cliqueSealBlock') - - const ecSignFunction = this.common.customCrypto?.ecsign ?? 
ecsign - const signature = ecSignFunction(this.cliqueSigHash(), privateKey) - const signatureB = concatBytes(signature.r, signature.s, bigIntToBytes(signature.v - BIGINT_27)) - - const extraDataWithoutSeal = this.extraData.subarray( - 0, - this.extraData.length - CLIQUE_EXTRA_SEAL - ) - const extraData = concatBytes(extraDataWithoutSeal, signatureB) - return extraData - } - - /** - * Returns a list of signers - * (only clique PoA, throws otherwise) - * - * This function throws if not called on an epoch - * transition block and should therefore be used - * in conjunction with {@link BlockHeader.cliqueIsEpochTransition} - */ - cliqueEpochTransitionSigners(): Address[] { - this._requireClique('cliqueEpochTransitionSigners') - if (!this.cliqueIsEpochTransition()) { - const msg = this._errorMsg('Signers are only included in epoch transition blocks (clique)') - throw new Error(msg) - } - - const start = CLIQUE_EXTRA_VANITY - const end = this.extraData.length - CLIQUE_EXTRA_SEAL - const signerBytes = this.extraData.subarray(start, end) - - const signerList: Uint8Array[] = [] - const signerLength = 20 - for (let start = 0; start <= signerBytes.length - signerLength; start += signerLength) { - signerList.push(signerBytes.subarray(start, start + signerLength)) - } - return signerList.map((buf) => new Address(buf)) - } - - /** - * Verifies the signature of the block (last 65 bytes of extraData field) - * (only clique PoA, throws otherwise) - * - * Method throws if signature is invalid - */ - cliqueVerifySignature(signerList: Address[]): boolean { - this._requireClique('cliqueVerifySignature') - const signerAddress = this.cliqueSigner() - const signerFound = signerList.find((signer) => { - return signer.equals(signerAddress) - }) - return !!signerFound - } - - /** - * Returns the signer address - */ - cliqueSigner(): Address { - this._requireClique('cliqueSigner') - const extraSeal = this.cliqueExtraSeal() - // Reasonable default for default blocks - if (extraSeal.length === 0 || equalsBytes(extraSeal, new Uint8Array(65))) { - return Address.zero() - } - const r = extraSeal.subarray(0, 32) - const s = extraSeal.subarray(32, 64) - const v = bytesToBigInt(extraSeal.subarray(64, 65)) + BIGINT_27 - const pubKey = ecrecover(this.cliqueSigHash(), v, r, s) - return Address.fromPublicKey(pubKey) - } - /** * Returns the rlp encoding of the block header. 
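 *
 * Hedged note: this is presumed to be the RLP encoding of the raw header field
 * list, i.e. roughly equivalent to `RLP.encode(header.raw())`.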
*/ @@ -1014,8 +792,8 @@ export class BlockHeader { if (drift <= DAO_ForceExtraDataRange && !equalsBytes(this.extraData, DAO_ExtraData)) { const msg = this._errorMsg( `extraData should be 'dao-hard-fork', got ${bytesToUtf8(this.extraData)} (hex: ${bytesToHex( - this.extraData - )})` + this.extraData, + )})`, ) throw new Error(msg) } diff --git a/packages/block/src/header/index.ts b/packages/block/src/header/index.ts new file mode 100644 index 0000000000..3d53654058 --- /dev/null +++ b/packages/block/src/header/index.ts @@ -0,0 +1,2 @@ +export * from './constructors.js' +export * from './header.js' diff --git a/packages/block/src/helpers.ts b/packages/block/src/helpers.ts index 145d051bb7..79538dd20d 100644 --- a/packages/block/src/helpers.ts +++ b/packages/block/src/helpers.ts @@ -1,6 +1,6 @@ import { RLP } from '@ethereumjs/rlp' import { Trie } from '@ethereumjs/trie' -import { BlobEIP4844Transaction } from '@ethereumjs/tx' +import { Blob4844Tx } from '@ethereumjs/tx' import { BIGINT_0, BIGINT_1, TypeOutput, isHexString, toType } from '@ethereumjs/util' import type { BlockHeaderBytes, HeaderData } from './types.js' @@ -51,12 +51,12 @@ export function valuesArrayToHeaderData(values: BlockHeaderBytes): HeaderData { if (values.length > 21) { throw new Error( - `invalid header. More values than expected were received. Max: 20, got: ${values.length}` + `invalid header. More values than expected were received. Max: 20, got: ${values.length}`, ) } if (values.length < 15) { throw new Error( - `invalid header. Less values than expected were received. Min: 15, got: ${values.length}` + `invalid header. Less values than expected were received. Min: 15, got: ${values.length}`, ) } @@ -96,7 +96,7 @@ export function getDifficulty(headerData: HeaderData): bigint | null { export const getNumBlobs = (transactions: TypedTransaction[]) => { let numBlobs = 0 for (const tx of transactions) { - if (tx instanceof BlobEIP4844Transaction) { + if (tx instanceof Blob4844Tx) { numBlobs += tx.blobVersionedHashes.length } } @@ -109,10 +109,10 @@ export const getNumBlobs = (transactions: TypedTransaction[]) => { export const fakeExponential = (factor: bigint, numerator: bigint, denominator: bigint) => { let i = BIGINT_1 let output = BIGINT_0 - let numerator_accum = factor * denominator - while (numerator_accum > BIGINT_0) { - output += numerator_accum - numerator_accum = (numerator_accum * numerator) / (denominator * i) + let numerator_accumulator = factor * denominator + while (numerator_accumulator > BIGINT_0) { + output += numerator_accumulator + numerator_accumulator = (numerator_accumulator * numerator) / (denominator * i) i++ } diff --git a/packages/block/src/index.ts b/packages/block/src/index.ts index a4e2bacd4d..50379f3719 100644 --- a/packages/block/src/index.ts +++ b/packages/block/src/index.ts @@ -1,7 +1,8 @@ -export { Block } from './block.js' -export * from './constructors.js' +export { Block } from './block/block.js' +export * from './block/index.js' +export * from './consensus/index.js' export { type BeaconPayloadJson, executionPayloadFromBeaconPayload } from './from-beacon-payload.js' -export { BlockHeader } from './header.js' +export * from './header/index.js' export { genRequestsTrieRoot, genTransactionsTrieRoot, @@ -9,4 +10,5 @@ export { getDifficulty, valuesArrayToHeaderData, } from './helpers.js' +export * from './params.js' export * from './types.js' diff --git a/packages/block/src/params.ts b/packages/block/src/params.ts new file mode 100644 index 0000000000..d4277f3329 --- /dev/null +++ 
b/packages/block/src/params.ts @@ -0,0 +1,88 @@ +import type { ParamsDict } from '@ethereumjs/common' + +export const paramsBlock: ParamsDict = { + /** + * Frontier/Chainstart + */ + 1: { + // gasConfig + minGasLimit: 5000, // Minimum the gas limit may ever be + gasLimitBoundDivisor: 1024, // The bound divisor of the gas limit, used in update calculations + targetBlobGasPerBlock: 0, // Base value needed here since called pre-4844 in BlockHeader.calcNextExcessBlobGas() + blobGasPerBlob: 0, + maxblobGasPerBlock: 0, + // format + maxExtraDataSize: 32, // Maximum size extra data may be after Genesis + // pow + minimumDifficulty: 131072, // The minimum that the difficulty may ever be + difficultyBoundDivisor: 2048, // The bound divisor of the difficulty, used in the update calculations + durationLimit: 13, // The decision boundary on the blocktime duration used to determine whether difficulty should go up or not + epochDuration: 30000, // Duration between proof-of-work epochs + timebombPeriod: 100000, // Exponential difficulty timebomb period + difficultyBombDelay: 0, // the amount of blocks to delay the difficulty bomb with + }, + /** +. * Byzantium HF Meta EIP +. */ + 609: { + // pow + difficultyBombDelay: 3000000, // the amount of blocks to delay the difficulty bomb with + }, + /** +. * Constantinople HF Meta EIP +. */ + 1013: { + // pow + difficultyBombDelay: 5000000, // the amount of blocks to delay the difficulty bomb with + }, + /** +. * MuirGlacier HF Meta EIP +. */ + 2384: { + // pow + difficultyBombDelay: 9000000, // the amount of blocks to delay the difficulty bomb with + }, + /** +. * Fee market change for ETH 1.0 chain +. */ + 1559: { + // gasConfig + baseFeeMaxChangeDenominator: 8, // Maximum base fee change denominator + elasticityMultiplier: 2, // Maximum block gas target elasticity + initialBaseFee: 1000000000, // Initial base fee on first EIP1559 block + }, + /** +. * Difficulty Bomb Delay to December 1st 2021 +. */ + 3554: { + // pow + difficultyBombDelay: 9500000, // the amount of blocks to delay the difficulty bomb with + }, + /** +. * Difficulty Bomb Delay to June 2022 +. */ + 4345: { + // pow + difficultyBombDelay: 10700000, // the amount of blocks to delay the difficulty bomb with + }, + /** +. * Shard Blob Transactions +. 
*/ + 4844: { + // gasConfig + targetBlobGasPerBlock: 393216, // The target blob gas consumed per block + blobGasPerBlob: 131072, // The base fee for blob gas per blob + maxblobGasPerBlock: 786432, // The max blob gas allowable per block + blobGasPriceUpdateFraction: 3338477, // The denominator used in the exponential when calculating a blob gas price + // gasPrices + simplePerBlobGas: 12000, // The basic gas fee for each blob + minBlobGas: 1, // The minimum fee per blob gas + }, + /** + * Delaying Difficulty Bomb to mid-September 2022 + */ + 5133: { + // pow + difficultyBombDelay: 11400000, // the amount of blocks to delay the difficulty bomb with + }, +} diff --git a/packages/block/src/types.ts b/packages/block/src/types.ts index 409705b60d..abcb550709 100644 --- a/packages/block/src/types.ts +++ b/packages/block/src/types.ts @@ -1,5 +1,5 @@ -import type { BlockHeader } from './header.js' -import type { Common } from '@ethereumjs/common' +import type { BlockHeader } from './index.js' +import type { Common, ParamsDict } from '@ethereumjs/common' import type { JsonRpcTx, JsonTx, TransactionType, TxData } from '@ethereumjs/tx' import type { AddressLike, @@ -40,12 +40,26 @@ export interface BlockOptions { * Set the hardfork either by timestamp (for HFs from Shanghai onwards) or by block number * for older Hfs. * - * Additionally it is possible to pass in a specific TD value to support live-Merge-HF - * transitions. Note that this should only be needed in very rare and specific scenarios. - * * Default: `false` (HF is set to whatever default HF is set by the {@link Common} instance) */ - setHardfork?: boolean | BigIntLike + setHardfork?: boolean + /** + * Block parameters sorted by EIP can be found in the exported `paramsBlock` dictionary, + * which is internally passed to the associated `@ethereumjs/common` instance which + * manages parameter selection based on the hardfork and EIP settings. + * + * This option allows providing a custom set of parameters. Note that parameters + * get fully overwritten, so you need to extend the default parameter dict + * to provide the full parameter set. + * + * It is recommended to deep-clone the params object for this to avoid side effects: + * + * ```ts + * const params = JSON.parse(JSON.stringify(paramsBlock)) + * params['1']['minGasLimit'] = 3000 // 5000 + * ``` + */ + params?: ParamsDict /** * If a preceding {@link BlockHeader} (usually the parent header) is given the preceding * header will be used to calculate the difficulty for this block and the calculated @@ -67,11 +81,6 @@ export interface BlockOptions { * Default: true */ freeze?: boolean - /** - * Provide a clique signer's privateKey to seal this block. - * Will throw if provided on a non-PoA chain. - */ - cliqueSigner?: Uint8Array /** * Skip consensus format validation checks on header if set. Defaults to false. */ @@ -83,29 +92,28 @@ export interface BlockOptions { /** * A block header's data. 
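 *
 * Hedged illustration of a minimal HeaderData object (all fields are optional;
 * anything omitted falls back to the defaults applied in the BlockHeader constructor):
 * ```ts
 * const headerData: HeaderData = { number: 1n, timestamp: 1700000000n, gasLimit: 30000000n }
 * ```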
*/ -// TODO: Deprecate the string type and only keep BytesLike/AddressLike/BigIntLike export interface HeaderData { - parentHash?: BytesLike | string - uncleHash?: BytesLike | string - coinbase?: AddressLike | string - stateRoot?: BytesLike | string - transactionsTrie?: BytesLike | string - receiptTrie?: BytesLike | string - logsBloom?: BytesLike | string - difficulty?: BigIntLike | string - number?: BigIntLike | string - gasLimit?: BigIntLike | string - gasUsed?: BigIntLike | string - timestamp?: BigIntLike | string - extraData?: BytesLike | string - mixHash?: BytesLike | string - nonce?: BytesLike | string - baseFeePerGas?: BigIntLike | string - withdrawalsRoot?: BytesLike | string - blobGasUsed?: BigIntLike | string - excessBlobGas?: BigIntLike | string - parentBeaconBlockRoot?: BytesLike | string - requestsRoot?: BytesLike | string + parentHash?: BytesLike + uncleHash?: BytesLike + coinbase?: AddressLike + stateRoot?: BytesLike + transactionsTrie?: BytesLike + receiptTrie?: BytesLike + logsBloom?: BytesLike + difficulty?: BigIntLike + number?: BigIntLike + gasLimit?: BigIntLike + gasUsed?: BigIntLike + timestamp?: BigIntLike + extraData?: BytesLike + mixHash?: BytesLike + nonce?: BytesLike + baseFeePerGas?: BigIntLike + withdrawalsRoot?: BytesLike + blobGasUsed?: BigIntLike + excessBlobGas?: BigIntLike + parentBeaconBlockRoot?: BytesLike + requestsRoot?: BytesLike } /** @@ -140,7 +148,7 @@ export type BlockBytes = UncleHeadersBytes, WithdrawalsBytes, RequestsBytes, - ExecutionWitnessBytes + ExecutionWitnessBytes, ] /** @@ -151,7 +159,7 @@ export type BlockBodyBytes = [ TransactionsBytes, UncleHeadersBytes, WithdrawalsBytes?, - RequestBytes? + RequestBytes?, ] /** * TransactionsBytes can be an array of serialized txs for Typed Transactions or an array of Uint8Array Arrays for legacy transactions. @@ -177,65 +185,63 @@ export interface JsonBlock { /** * An object with the block header's data represented as 0x-prefixed hex strings. 
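 *
 * Hedged note: an object of this shape is typically produced by the header's
 * `toJSON()` method, with every populated field encoded as a 0x-prefixed hex string.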
*/ -// TODO: Remove the string type and only keep PrefixedHexString export interface JsonHeader { - parentHash?: PrefixedHexString | string - uncleHash?: PrefixedHexString | string - coinbase?: PrefixedHexString | string - stateRoot?: PrefixedHexString | string - transactionsTrie?: PrefixedHexString | string - receiptTrie?: PrefixedHexString | string - logsBloom?: PrefixedHexString | string - difficulty?: PrefixedHexString | string - number?: PrefixedHexString | string - gasLimit?: PrefixedHexString | string - gasUsed?: PrefixedHexString | string - timestamp?: PrefixedHexString | string - extraData?: PrefixedHexString | string - mixHash?: PrefixedHexString | string - nonce?: PrefixedHexString | string - baseFeePerGas?: PrefixedHexString | string - withdrawalsRoot?: PrefixedHexString | string - blobGasUsed?: PrefixedHexString | string - excessBlobGas?: PrefixedHexString | string - parentBeaconBlockRoot?: PrefixedHexString | string - requestsRoot?: PrefixedHexString | string + parentHash?: PrefixedHexString + uncleHash?: PrefixedHexString + coinbase?: PrefixedHexString + stateRoot?: PrefixedHexString + transactionsTrie?: PrefixedHexString + receiptTrie?: PrefixedHexString + logsBloom?: PrefixedHexString + difficulty?: PrefixedHexString + number?: PrefixedHexString + gasLimit?: PrefixedHexString + gasUsed?: PrefixedHexString + timestamp?: PrefixedHexString + extraData?: PrefixedHexString + mixHash?: PrefixedHexString + nonce?: PrefixedHexString + baseFeePerGas?: PrefixedHexString + withdrawalsRoot?: PrefixedHexString + blobGasUsed?: PrefixedHexString + excessBlobGas?: PrefixedHexString + parentBeaconBlockRoot?: PrefixedHexString + requestsRoot?: PrefixedHexString } /* * Based on https://ethereum.org/en/developers/docs/apis/json-rpc/ */ -// TODO: Remove the string type and only keep PrefixedHexString export interface JsonRpcBlock { - number: PrefixedHexString | string // the block number. null when pending block. - hash: PrefixedHexString | string // hash of the block. null when pending block. - parentHash: PrefixedHexString | string // hash of the parent block. - mixHash?: PrefixedHexString | string // bit hash which proves combined with the nonce that a sufficient amount of computation has been carried out on this block. - nonce: PrefixedHexString | string // hash of the generated proof-of-work. null when pending block. - sha3Uncles: PrefixedHexString | string // SHA3 of the uncles data in the block. - logsBloom: PrefixedHexString | string // the bloom filter for the logs of the block. null when pending block. - transactionsRoot: PrefixedHexString | string // the root of the transaction trie of the block. - stateRoot: PrefixedHexString | string // the root of the final state trie of the block. - receiptsRoot: PrefixedHexString | string // the root of the receipts trie of the block. - miner: PrefixedHexString | string // the address of the beneficiary to whom the mining rewards were given. - difficulty: PrefixedHexString | string // integer of the difficulty for this block. - totalDifficulty: PrefixedHexString | string // integer of the total difficulty of the chain until this block. - extraData: PrefixedHexString | string // the “extra data” field of this block. - size: PrefixedHexString | string // integer the size of this block in bytes. - gasLimit: PrefixedHexString | string // the maximum gas allowed in this block. - gasUsed: PrefixedHexString | string // the total used gas by all transactions in this block. 
- timestamp: PrefixedHexString | string // the unix timestamp for when the block was collated. - transactions: Array // Array of transaction objects, or 32 Bytes transaction hashes depending on the last given parameter. - uncles: PrefixedHexString[] | string[] // Array of uncle hashes - baseFeePerGas?: PrefixedHexString | string // If EIP-1559 is enabled for this block, returns the base fee per gas + number: PrefixedHexString // the block number. + hash: PrefixedHexString // hash of the block. + parentHash: PrefixedHexString // hash of the parent block. + mixHash?: PrefixedHexString // bit hash which proves combined with the nonce that a sufficient amount of computation has been carried out on this block. + nonce: PrefixedHexString // hash of the generated proof-of-work. + sha3Uncles: PrefixedHexString // SHA3 of the uncles data in the block. + logsBloom: PrefixedHexString // the bloom filter for the logs of the block. + transactionsRoot: PrefixedHexString // the root of the transaction trie of the block. + stateRoot: PrefixedHexString // the root of the final state trie of the block. + receiptsRoot: PrefixedHexString // the root of the receipts trie of the block. + miner: PrefixedHexString // the address of the beneficiary to whom the mining rewards were given. + difficulty: PrefixedHexString // integer of the difficulty for this block. + totalDifficulty: PrefixedHexString // integer of the total difficulty of the chain until this block. + extraData: PrefixedHexString // the “extra data” field of this block. + size: PrefixedHexString // integer the size of this block in bytes. + gasLimit: PrefixedHexString // the maximum gas allowed in this block. + gasUsed: PrefixedHexString // the total used gas by all transactions in this block. + timestamp: PrefixedHexString // the unix timestamp for when the block was collated. + transactions: Array // Array of transaction objects, or 32 Bytes transaction hashes depending on the last given parameter. + uncles: PrefixedHexString[] // Array of uncle hashes + baseFeePerGas?: PrefixedHexString // If EIP-1559 is enabled for this block, returns the base fee per gas withdrawals?: Array // If EIP-4895 is enabled for this block, array of withdrawals - withdrawalsRoot?: PrefixedHexString | string // If EIP-4895 is enabled for this block, the root of the withdrawal trie of the block. - blobGasUsed?: PrefixedHexString | string // If EIP-4844 is enabled for this block, returns the blob gas used for the block - excessBlobGas?: PrefixedHexString | string // If EIP-4844 is enabled for this block, returns the excess blob gas for the block - parentBeaconBlockRoot?: PrefixedHexString | string // If EIP-4788 is enabled for this block, returns parent beacon block root + withdrawalsRoot?: PrefixedHexString // If EIP-4895 is enabled for this block, the root of the withdrawal trie of the block. 
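+ // Hedged note: the following blob-related fields appear together when EIP-4844 is active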
+ blobGasUsed?: PrefixedHexString // If EIP-4844 is enabled for this block, returns the blob gas used for the block + excessBlobGas?: PrefixedHexString // If EIP-4844 is enabled for this block, returns the excess blob gas for the block + parentBeaconBlockRoot?: PrefixedHexString // If EIP-4788 is enabled for this block, returns parent beacon block root executionWitness?: VerkleExecutionWitness | null // If Verkle is enabled for this block - requestsRoot?: PrefixedHexString | string // If EIP-7685 is enabled for this block, returns the requests root - requests?: Array // If EIP-7685 is enabled for this block, array of serialized CL requests + requestsRoot?: PrefixedHexString // If EIP-7685 is enabled for this block, returns the requests root + requests?: Array // If EIP-7685 is enabled for this block, array of serialized CL requests } export type WithdrawalV1 = { @@ -246,26 +252,25 @@ export type WithdrawalV1 = { } // Note: all these strings are 0x-prefixed -// TODO: Remove the string type and only keep PrefixedHexString export type ExecutionPayload = { - parentHash: PrefixedHexString | string // DATA, 32 Bytes - feeRecipient: PrefixedHexString | string // DATA, 20 Bytes - stateRoot: PrefixedHexString | string // DATA, 32 Bytes - receiptsRoot: PrefixedHexString | string // DATA, 32 bytes - logsBloom: PrefixedHexString | string // DATA, 256 Bytes - prevRandao: PrefixedHexString | string // DATA, 32 Bytes - blockNumber: PrefixedHexString | string // QUANTITY, 64 Bits - gasLimit: PrefixedHexString | string // QUANTITY, 64 Bits - gasUsed: PrefixedHexString | string // QUANTITY, 64 Bits - timestamp: PrefixedHexString | string // QUANTITY, 64 Bits - extraData: PrefixedHexString | string // DATA, 0 to 32 Bytes - baseFeePerGas: PrefixedHexString | string // QUANTITY, 256 Bits - blockHash: PrefixedHexString | string // DATA, 32 Bytes - transactions: PrefixedHexString[] | string[] // Array of DATA - Array of transaction rlp strings, + parentHash: PrefixedHexString // DATA, 32 Bytes + feeRecipient: PrefixedHexString // DATA, 20 Bytes + stateRoot: PrefixedHexString // DATA, 32 Bytes + receiptsRoot: PrefixedHexString // DATA, 32 bytes + logsBloom: PrefixedHexString // DATA, 256 Bytes + prevRandao: PrefixedHexString // DATA, 32 Bytes + blockNumber: PrefixedHexString // QUANTITY, 64 Bits + gasLimit: PrefixedHexString // QUANTITY, 64 Bits + gasUsed: PrefixedHexString // QUANTITY, 64 Bits + timestamp: PrefixedHexString // QUANTITY, 64 Bits + extraData: PrefixedHexString // DATA, 0 to 32 Bytes + baseFeePerGas: PrefixedHexString // QUANTITY, 256 Bits + blockHash: PrefixedHexString // DATA, 32 Bytes + transactions: PrefixedHexString[] // Array of DATA - Array of transaction rlp strings, withdrawals?: WithdrawalV1[] // Array of withdrawal objects - blobGasUsed?: PrefixedHexString | string // QUANTITY, 64 Bits - excessBlobGas?: PrefixedHexString | string // QUANTITY, 64 Bits - parentBeaconBlockRoot?: PrefixedHexString | string // QUANTITY, 64 Bits + blobGasUsed?: PrefixedHexString // QUANTITY, 64 Bits + excessBlobGas?: PrefixedHexString // QUANTITY, 64 Bits + parentBeaconBlockRoot?: PrefixedHexString // QUANTITY, 64 Bits // VerkleExecutionWitness is already a hex serialized object executionWitness?: VerkleExecutionWitness | null // QUANTITY, 64 Bits, null implies not available depositRequests?: DepositRequestV1[] // Array of 6110 deposit requests diff --git a/packages/block/test/block.spec.ts b/packages/block/test/block.spec.ts index 8bccdd1d63..b830005b7f 100644 --- a/packages/block/test/block.spec.ts +++ 
b/packages/block/test/block.spec.ts @@ -1,6 +1,6 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' -import { LegacyTransaction } from '@ethereumjs/tx' +import { createLegacyTx } from '@ethereumjs/tx' import { KECCAK256_RLP_ARRAY, bytesToHex, @@ -11,39 +11,50 @@ import { } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' +import { genTransactionsTrieRoot } from '../src/helpers.js' import { - createBlockFromBlockData, + type Block, + type BlockBytes, + type JsonRpcBlock, + createBlock, + createBlockFromBytesArray, createBlockFromRLPSerializedBlock, - createBlockFromValuesArray, -} from '../src/constructors.js' -import { createBlockFromRpc } from '../src/from-rpc.js' -import { genTransactionsTrieRoot } from '../src/helpers.js' + createBlockFromRPC, + paramsBlock, +} from '../src/index.js' -import * as testDataGenesis from './testdata/genesishashestest.json' +import * as testDataGenesis from './testdata/genesisHashesTest.json' import * as testDataFromRpcGoerli from './testdata/testdata-from-rpc-goerli.json' import * as testDataPreLondon2 from './testdata/testdata_pre-london-2.json' import * as testDataPreLondon from './testdata/testdata_pre-london.json' import * as testnetMerge from './testdata/testnetMerge.json' -import type { Block, BlockBytes, JsonRpcBlock } from '../src/index.js' -import type { ChainConfig } from '@ethereumjs/common' import type { NestedUint8Array, PrefixedHexString } from '@ethereumjs/util' describe('[Block]: block functions', () => { it('should test block initialization', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const genesis = createBlockFromBlockData({}, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const genesis = createBlock({}, { common }) assert.ok(bytesToHex(genesis.hash()), 'block should initialize') + const params = JSON.parse(JSON.stringify(paramsBlock)) + params['1']['minGasLimit'] = 3000 // 5000 + let block = createBlock({}, { params }) + assert.equal( + block.common.param('minGasLimit'), + BigInt(3000), + 'should use custom parameters provided', + ) + // test default freeze values // also test if the options are carried over to the constructor - let block = createBlockFromBlockData({}) + block = createBlock({}) assert.ok(Object.isFrozen(block), 'block should be frozen by default') - block = createBlockFromBlockData({}, { freeze: false }) + block = createBlock({}, { freeze: false }) assert.ok( !Object.isFrozen(block), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) const rlpBlock = block.serialize() @@ -53,7 +64,7 @@ describe('[Block]: block functions', () => { block = createBlockFromRLPSerializedBlock(rlpBlock, { freeze: false }) assert.ok( !Object.isFrozen(block), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) const zero = new Uint8Array(0) @@ -73,93 +84,73 @@ describe('[Block]: block functions', () => { const valuesArray = [headerArray, [], []] - block = createBlockFromValuesArray(valuesArray, { common }) + block = createBlockFromBytesArray(valuesArray, { common }) assert.ok(Object.isFrozen(block), 'block should be frozen by default') - block = createBlockFromValuesArray(valuesArray, { common, freeze: false }) + block = 
createBlockFromBytesArray(valuesArray, { common, freeze: false }) assert.ok( !Object.isFrozen(block), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) }) it('initialization -> setHardfork option', () => { - const customChains = [testnetMerge] - const common = new Common({ - chain: 'testnetMerge', - hardfork: Hardfork.Istanbul, - customChains: customChains as ChainConfig[], + // @ts-ignore type is too strict in this case + const common = createCustomCommon(testnetMerge.default, Mainnet, { + name: 'testnetMerge', }) - let block = createBlockFromBlockData( + let block = createBlock( { header: { number: 12, // Berlin block extraData: new Uint8Array(97), }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) assert.equal(block.common.hardfork(), Hardfork.Berlin, 'should use setHardfork option') - block = createBlockFromBlockData( + block = createBlock( { header: { number: 20, // Future block }, }, - { common, setHardfork: 5001 } + { common, setHardfork: true }, ) assert.equal( block.common.hardfork(), Hardfork.Paris, - 'should use setHardfork option (td > threshold)' - ) - - block = createBlockFromBlockData( - { - header: { - number: 12, // Berlin block, - extraData: new Uint8Array(97), - }, - }, - { common, setHardfork: 3000 } - ) - assert.equal( - block.common.hardfork(), - Hardfork.Berlin, - 'should work with setHardfork option (td < threshold)' + 'should use setHardfork option post merge', ) }) it('should initialize with undefined parameters without throwing', () => { assert.doesNotThrow(function () { - createBlockFromBlockData() + createBlock() }) }) it('should initialize with null parameters without throwing', () => { - const common = new Common({ chain: Chain.Goerli }) + const common = new Common({ chain: Goerli }) const opts = { common } assert.doesNotThrow(function () { - createBlockFromBlockData({}, opts) + createBlock({}, opts) }) }) it('should throw when trying to initialize with uncle headers on a PoA network', () => { - const common = new Common({ chain: Chain.Mainnet }) - const uncleBlock = createBlockFromBlockData( - { header: { extraData: new Uint8Array(117) } }, - { common } - ) + const common = new Common({ chain: Mainnet }) + const uncleBlock = createBlock({ header: { extraData: new Uint8Array(117) } }, { common }) assert.throws(function () { - createBlockFromBlockData({ uncleHeaders: [uncleBlock.header] }, { common }) + createBlock({ uncleHeaders: [uncleBlock.header] }, { common }) }) }) it('should test block validation on pow chain', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const blockRlp = hexToBytes(testDataPreLondon.blocks[0].rlp as PrefixedHexString) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const blockRlp = hexToBytes(testDataPreLondon.default.blocks[0].rlp as PrefixedHexString) try { createBlockFromRLPSerializedBlock(blockRlp, { common }) assert.ok(true, 'should pass') @@ -169,10 +160,10 @@ describe('[Block]: block functions', () => { }) it('should test block validation on poa chain', async () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) try { - createBlockFromRpc(testDataFromRpcGoerli as JsonRpcBlock, [], { common }) + createBlockFromRPC(testDataFromRpcGoerli.default as JsonRpcBlock, [], { common }) assert.ok(true, 'does not throw') } catch (error: 
any) { assert.fail('error thrown') @@ -185,8 +176,8 @@ describe('[Block]: block functions', () => { } it('should test transaction validation - invalid tx trie', async () => { - const blockRlp = hexToBytes(testDataPreLondon.blocks[0].rlp as PrefixedHexString) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const blockRlp = hexToBytes(testDataPreLondon.default.blocks[0].rlp as PrefixedHexString) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const block = createBlockFromRLPSerializedBlock(blockRlp, { common, freeze: false }) await testTransactionValidation(block) ;(block.header as any).transactionsTrie = new Uint8Array(32) @@ -199,13 +190,13 @@ describe('[Block]: block functions', () => { }) it('should test transaction validation - transaction not signed', async () => { - const tx = LegacyTransaction.fromTxData({ + const tx = createLegacyTx({ gasLimit: 53000, gasPrice: 7, }) - const blockTest = createBlockFromBlockData({ transactions: [tx] }) + const blockTest = createBlock({ transactions: [tx] }) const txTrie = await blockTest.genTxTrie() - const block = createBlockFromBlockData({ + const block = createBlock({ header: { transactionsTrie: txTrie, }, @@ -220,26 +211,26 @@ describe('[Block]: block functions', () => { }) it('should test transaction validation with empty transaction list', async () => { - const block = createBlockFromBlockData({}) + const block = createBlock({}) await testTransactionValidation(block) }) it('should test transaction validation with legacy tx in london', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const blockRlp = hexToBytes(testDataPreLondon.blocks[0].rlp as PrefixedHexString) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const blockRlp = hexToBytes(testDataPreLondon.default.blocks[0].rlp as PrefixedHexString) const block = createBlockFromRLPSerializedBlock(blockRlp, { common, freeze: false }) await testTransactionValidation(block) ;(block.transactions[0] as any).gasPrice = BigInt(0) const result = block.getTransactionsValidationErrors() assert.ok( result[0].includes('tx unable to pay base fee (non EIP-1559 tx)'), - 'should throw when legacy tx is unable to pay base fee' + 'should throw when legacy tx is unable to pay base fee', ) }) it('should test uncles hash validation', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const blockRlp = hexToBytes(testDataPreLondon2.blocks[2].rlp as PrefixedHexString) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const blockRlp = hexToBytes(testDataPreLondon2.default.blocks[2].rlp as PrefixedHexString) const block = createBlockFromRLPSerializedBlock(blockRlp, { common, freeze: false }) assert.equal(block.uncleHashIsValid(), true) ;(block.header as any).uncleHash = new Uint8Array(32) @@ -252,10 +243,10 @@ describe('[Block]: block functions', () => { }) it('should test data integrity', async () => { - const unsignedTx = LegacyTransaction.fromTxData({}) + const unsignedTx = createLegacyTx({}) const txRoot = await genTransactionsTrieRoot([unsignedTx]) - let block = createBlockFromBlockData({ + let block = createBlock({ transactions: [unsignedTx], header: { transactionsTrie: txRoot, @@ -277,7 +268,7 @@ describe('[Block]: block functions', () => { const zeroRoot = zeros(32) // Tx root - block = createBlockFromBlockData({ + block = createBlock({ transactions: [unsignedTx], header: { transactionsTrie: zeroRoot, 
@@ -286,63 +277,63 @@ describe('[Block]: block functions', () => { await checkThrowsAsync(block.validateData(false, false), 'invalid transaction trie') // Withdrawals root - block = createBlockFromBlockData( + block = createBlock( { header: { withdrawalsRoot: zeroRoot, uncleHash: KECCAK256_RLP_ARRAY, }, }, - { common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) } + { common: new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) }, ) await checkThrowsAsync(block.validateData(false, false), 'invalid withdrawals trie') // Uncle root - block = createBlockFromBlockData( + block = createBlock( { header: { uncleHash: zeroRoot, }, }, - { common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) } + { common: new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) }, ) await checkThrowsAsync(block.validateData(false, false), 'invalid uncle hash') - // Verkle withness - const common = new Common({ chain: Chain.Mainnet, eips: [6800], hardfork: Hardfork.Cancun }) + // Verkle witness + const common = new Common({ chain: Mainnet, eips: [6800], hardfork: Hardfork.Cancun }) // Note: `executionWitness: undefined` will still initialize an execution witness in the block // So, only testing for `null` here - block = createBlockFromBlockData({ executionWitness: null }, { common }) + block = createBlock({ executionWitness: null }, { common }) await checkThrowsAsync( block.validateData(false, false), - 'Invalid block: ethereumjs stateless client needs executionWitness' + 'Invalid block: ethereumjs stateless client needs executionWitness', ) }) it('should test isGenesis (mainnet default)', () => { - const block = createBlockFromBlockData({ header: { number: 1 } }) + const block = createBlock({ header: { number: 1 } }) assert.notEqual(block.isGenesis(), true) - const genesisBlock = createBlockFromBlockData({ header: { number: 0 } }) + const genesisBlock = createBlock({ header: { number: 0 } }) assert.equal(genesisBlock.isGenesis(), true) }) it('should test genesis hashes (mainnet default)', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const rlp = hexToBytes(`0x${testDataGenesis.test.genesis_rlp_hex}`) - const hash = hexToBytes(`0x${testDataGenesis.test.genesis_hash}`) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const rlp = hexToBytes(`0x${testDataGenesis.default.test.genesis_rlp_hex}`) + const hash = hexToBytes(`0x${testDataGenesis.default.test.genesis_hash}`) const block = createBlockFromRLPSerializedBlock(rlp, { common }) assert.ok(equalsBytes(block.hash(), hash), 'genesis hash match') }) it('should test hash() method (mainnet default)', () => { - let common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const rlp = hexToBytes(`0x${testDataGenesis.test.genesis_rlp_hex}`) - const hash = hexToBytes(`0x${testDataGenesis.test.genesis_hash}`) + let common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const rlp = hexToBytes(`0x${testDataGenesis.default.test.genesis_rlp_hex}`) + const hash = hexToBytes(`0x${testDataGenesis.default.test.genesis_hash}`) let block = createBlockFromRLPSerializedBlock(rlp, { common }) assert.ok(equalsBytes(block.hash(), hash), 'genesis hash match') common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Chainstart, customCrypto: { keccak256: () => { @@ -361,109 +352,109 @@ describe('[Block]: block functions', () => { }, undefined, undefined, - 'input must be array' + 'input must be 
array', ) assert.throws( () => { - createBlockFromValuesArray([1, 2, 3, 4] as any) + createBlockFromBytesArray([1, 2, 3, 4] as any) }, undefined, undefined, - 'input length must be 3 or less' + 'input length must be 3 or less', ) }) it('should return the same block data from raw()', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) const block = createBlockFromRLPSerializedBlock( - toBytes(testDataPreLondon2.blocks[2].rlp as PrefixedHexString), + toBytes(testDataPreLondon2.default.blocks[2].rlp as PrefixedHexString), { common, - } + }, ) - const createBlockFromRaw = createBlockFromValuesArray(block.raw(), { common }) + const createBlockFromRaw = createBlockFromBytesArray(block.raw(), { common }) assert.ok(equalsBytes(block.hash(), createBlockFromRaw.hash())) }) it('should test toJSON', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) const block = createBlockFromRLPSerializedBlock( - toBytes(testDataPreLondon2.blocks[2].rlp as PrefixedHexString), + toBytes(testDataPreLondon2.default.blocks[2].rlp as PrefixedHexString), { common, - } + }, ) assert.equal(typeof block.toJSON(), 'object') }) it('DAO hardfork', () => { const blockData = RLP.decode( - testDataPreLondon2.blocks[0].rlp as PrefixedHexString + testDataPreLondon2.default.blocks[0].rlp as PrefixedHexString, ) as NestedUint8Array // Set block number from test block to mainnet DAO fork block 1920000 blockData[0][8] = hexToBytes('0x1D4C00') - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Dao }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Dao }) assert.throws( function () { - createBlockFromValuesArray(blockData as BlockBytes, { common }) + createBlockFromBytesArray(blockData as BlockBytes, { common }) }, /extraData should be 'dao-hard-fork/, undefined, - 'should throw on DAO HF block with wrong extra data' + 'should throw on DAO HF block with wrong extra data', ) // eslint-disable-line // Set extraData to dao-hard-fork blockData[0][12] = hexToBytes('0x64616f2d686172642d666f726b') assert.doesNotThrow(function () { - createBlockFromValuesArray(blockData as BlockBytes, { common }) + createBlockFromBytesArray(blockData as BlockBytes, { common }) }, 'should not throw on DAO HF block with correct extra data') }) it('should set canonical difficulty if I provide a calcDifficultyFromHeader header', () => { - let common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const genesis = createBlockFromBlockData({}, { common }) + let common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const genesis = createBlock({}, { common }) const nextBlockHeaderData = { number: genesis.header.number + BigInt(1), timestamp: genesis.header.timestamp + BigInt(10), } - common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const blockWithoutDifficultyCalculation = createBlockFromBlockData( + common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const blockWithoutDifficultyCalculation = createBlock( { header: nextBlockHeaderData, }, - { common } + { common }, ) // test if difficulty defaults to 0 assert.equal( blockWithoutDifficultyCalculation.header.difficulty, BigInt(0), - 'header difficulty should default to 0' + 'header difficulty should default to 0', ) // test if we set difficulty if we have a "difficulty 
header" in options; also verify this is equal to reported canonical difficulty. - const blockWithDifficultyCalculation = createBlockFromBlockData( + const blockWithDifficultyCalculation = createBlock( { header: nextBlockHeaderData, }, { common, calcDifficultyFromHeader: genesis.header, - } + }, ) assert.ok( blockWithDifficultyCalculation.header.difficulty > BigInt(0), - 'header difficulty should be set if difficulty header is given' + 'header difficulty should be set if difficulty header is given', ) assert.ok( blockWithDifficultyCalculation.header.ethashCanonicalDifficulty(genesis.header) === blockWithDifficultyCalculation.header.difficulty, - 'header difficulty is canonical difficulty if difficulty header is given' + 'header difficulty is canonical difficulty if difficulty header is given', ) // test if we can provide a block which is too far ahead to still calculate difficulty @@ -472,25 +463,25 @@ describe('[Block]: block functions', () => { timestamp: genesis.header.timestamp + BigInt(10), } - const block_farAhead = createBlockFromBlockData( + const block_farAhead = createBlock( { header: noParentHeaderData, }, { common, calcDifficultyFromHeader: genesis.header, - } + }, ) assert.ok( block_farAhead.header.difficulty > BigInt(0), - 'should allow me to provide a bogus next block to calculate difficulty on when providing a difficulty header' + 'should allow me to provide a bogus next block to calculate difficulty on when providing a difficulty header', ) }) it('should be able to initialize shanghai blocks with correct hardfork defaults', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) - const block = createBlockFromBlockData({}, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) + const block = createBlock({}, { common }) assert.equal(block.common.hardfork(), Hardfork.Shanghai, 'hardfork should be set to shanghai') assert.deepEqual(block.withdrawals, [], 'withdrawals should be set to default empty array') }) diff --git a/packages/block/test/clique.spec.ts b/packages/block/test/clique.spec.ts index bd5803ee0b..a170d42c80 100644 --- a/packages/block/test/clique.spec.ts +++ b/packages/block/test/clique.spec.ts @@ -1,74 +1,84 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address, hexToBytes } from '@ethereumjs/util' +import { Common, Goerli, Hardfork } from '@ethereumjs/common' +import { Address, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { BlockHeader } from '../src/header.js' +import { + cliqueEpochTransitionSigners, + cliqueExtraSeal, + cliqueExtraVanity, + cliqueIsEpochTransition, + cliqueSigner, + cliqueVerifySignature, + createBlockHeader, + createSealedCliqueBlockHeader, +} from '../src/index.js' describe('[Header]: Clique PoA Functionality', () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) it('Header Data', () => { - let header = BlockHeader.fromHeaderData({ number: 1 }) + let header = createBlockHeader({ number: 1 }) assert.throws( () => { - header.cliqueIsEpochTransition() + cliqueIsEpochTransition(header) }, undefined, undefined, - 'cliqueIsEpochTransition() -> should throw on PoW networks' + 'cliqueIsEpochTransition() -> should throw on PoW networks', ) - header = BlockHeader.fromHeaderData({ extraData: new Uint8Array(97) }, { common }) + header = createBlockHeader({ extraData: new 
Uint8Array(97) }, { common }) assert.ok( - header.cliqueIsEpochTransition(), - 'cliqueIsEpochTransition() -> should indicate an epoch transition for the genesis block' + cliqueIsEpochTransition(header), + 'cliqueIsEpochTransition() -> should indicate an epoch transition for the genesis block', ) - header = BlockHeader.fromHeaderData({ number: 1, extraData: new Uint8Array(97) }, { common }) + header = createBlockHeader({ number: 1, extraData: new Uint8Array(97) }, { common }) assert.notOk( - header.cliqueIsEpochTransition(), - 'cliqueIsEpochTransition() -> should correctly identify a non-epoch block' + cliqueIsEpochTransition(header), + 'cliqueIsEpochTransition() -> should correctly identify a non-epoch block', ) assert.deepEqual( - header.cliqueExtraVanity(), + cliqueExtraVanity(header), new Uint8Array(32), - 'cliqueExtraVanity() -> should return correct extra vanity value' + 'cliqueExtraVanity() -> should return correct extra vanity value', ) assert.deepEqual( - header.cliqueExtraSeal(), + cliqueExtraSeal(header), new Uint8Array(65), - 'cliqueExtraSeal() -> should return correct extra seal value' + 'cliqueExtraSeal() -> should return correct extra seal value', ) assert.throws( () => { - header.cliqueEpochTransitionSigners() + cliqueEpochTransitionSigners(header) }, undefined, undefined, - 'cliqueEpochTransitionSigners() -> should throw on non-epch block' + 'cliqueEpochTransitionSigners() -> should throw on non-epoch block', ) - header = BlockHeader.fromHeaderData( - { number: 60000, extraData: new Uint8Array(137) }, - { common } - ) + header = createBlockHeader({ number: 60000, extraData: new Uint8Array(137) }, { common }) assert.ok( - header.cliqueIsEpochTransition(), - 'cliqueIsEpochTransition() -> should correctly identify an epoch block' + cliqueIsEpochTransition(header), + 'cliqueIsEpochTransition() -> should correctly identify an epoch block', ) assert.deepEqual( - header.cliqueExtraVanity(), + cliqueExtraVanity(header), new Uint8Array(32), - 'cliqueExtraVanity() -> should return correct extra vanity value' + 'cliqueExtraVanity() -> should return correct extra vanity value', ) assert.deepEqual( - header.cliqueExtraSeal(), + cliqueExtraSeal(header), new Uint8Array(65), - 'cliqueExtraSeal() -> should return correct extra seal value' + 'cliqueExtraSeal() -> should return correct extra seal value', ) const msg = 'cliqueEpochTransitionSigners() -> should return the correct epoch transition signer list on epoch block' - assert.deepEqual(header.cliqueEpochTransitionSigners(), [Address.zero(), Address.zero()], msg) + assert.deepEqual( + cliqueEpochTransitionSigners(header), + [createZeroAddress(), createZeroAddress()], + msg, + ) }) type Signer = { @@ -81,26 +91,27 @@ describe('[Header]: Clique PoA Functionality', () => { address: new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), privateKey: hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), publicKey: hexToBytes( - '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195' + '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195', ), } it('Signing', () => { - const cliqueSigner = A.privateKey + const cliqueSignerKey = A.privateKey - let header = BlockHeader.fromHeaderData( + let header = createSealedCliqueBlockHeader( { number: 1, extraData: new Uint8Array(97) }, - { common, freeze: false, cliqueSigner } + cliqueSignerKey, + { common, 
freeze: false }, ) assert.equal(header.extraData.length, 97) - assert.ok(header.cliqueVerifySignature([A.address]), 'should verify signature') - assert.ok(header.cliqueSigner().equals(A.address), 'should recover the correct signer address') + assert.ok(cliqueVerifySignature(header, [A.address]), 'should verify signature') + assert.ok(cliqueSigner(header).equals(A.address), 'should recover the correct signer address') - header = BlockHeader.fromHeaderData({ extraData: new Uint8Array(97) }, { common }) + header = createBlockHeader({ extraData: new Uint8Array(97) }, { common }) assert.ok( - header.cliqueSigner().equals(Address.zero()), - 'should return zero address on default block' + cliqueSigner(header).equals(createZeroAddress()), + 'should return zero address on default block', ) }) }) diff --git a/packages/block/test/difficulty.spec.ts b/packages/block/test/difficulty.spec.ts index 945d69fc03..3bb4afe830 100644 --- a/packages/block/test/difficulty.spec.ts +++ b/packages/block/test/difficulty.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' import * as difficultyMainNetwork from '../../ethereum-tests/BasicTests/difficultyMainNetwork.json' @@ -11,29 +11,27 @@ import * as difficultyEIP2384_random_to20M from '../../ethereum-tests/Difficulty import * as difficultyFrontier from '../../ethereum-tests/DifficultyTests/dfFrontier/difficultyFrontier.json' import * as difficultyGrayGlacier from '../../ethereum-tests/DifficultyTests/dfGrayGlacier/difficultyGrayGlacier.json' import * as difficultyHomestead from '../../ethereum-tests/DifficultyTests/dfHomestead/difficultyHomestead.json' -import { createBlockFromBlockData } from '../src/constructors.js' - -import type { Block } from '../src/index.js' +import { type Block, createBlock, ethashCanonicalDifficulty } from '../src/index.js' function runDifficultyTests(test: any, parentBlock: Block, block: Block, msg: string) { - const dif = block.ethashCanonicalDifficulty(parentBlock) + const dif = ethashCanonicalDifficulty(block, parentBlock) assert.equal(dif, BigInt(test.currentDifficulty), `test ethashCanonicalDifficulty: ${msg}`) } type TestData = { [key: string]: any } const hardforkTestData: TestData = { - chainstart: difficultyFrontier.difficultyFrontier.Frontier, - homestead: difficultyHomestead.difficultyHomestead.Homestead, - byzantium: difficultyByzantium.difficultyByzantium.Byzantium, - constantinople: difficultyConstantinople.difficultyConstantinople.Constantinople, + chainstart: difficultyFrontier.default.difficultyFrontier.Frontier, + homestead: difficultyHomestead.default.difficultyHomestead.Homestead, + byzantium: difficultyByzantium.default.difficultyByzantium.Byzantium, + constantinople: difficultyConstantinople.default.difficultyConstantinople.Constantinople, muirGlacier: Object.assign( - difficultyEIP2384.difficultyEIP2384.Berlin, - difficultyEIP2384_random.difficultyEIP2384_random.Berlin, - difficultyEIP2384_random_to20M.difficultyEIP2384_random_to20M.Berlin + difficultyEIP2384.default.difficultyEIP2384.Berlin, + difficultyEIP2384_random.default.difficultyEIP2384_random.Berlin, + difficultyEIP2384_random_to20M.default.difficultyEIP2384_random_to20M.Berlin, ), - arrowGlacier: difficultyArrowGlacier.difficultyArrowGlacier.ArrowGlacier, - grayGlacier: difficultyGrayGlacier.difficultyGrayGlacier.GrayGlacier, + arrowGlacier: difficultyArrowGlacier.default.difficultyArrowGlacier.ArrowGlacier, + grayGlacier: 
difficultyGrayGlacier.default.difficultyGrayGlacier.GrayGlacier, } const chainTestData: TestData = { @@ -47,7 +45,7 @@ describe('[Header]: difficulty tests', () => { const testData = hardforkTestData[hardfork] for (const testName in testData) { const test = testData[testName] - const common = new Common({ chain: Chain.Mainnet, hardfork }) + const common = new Common({ chain: Mainnet, hardfork }) // Unschedule any timestamp since tests are not configured for timestamps common .hardforks() @@ -57,7 +55,7 @@ describe('[Header]: difficulty tests', () => { }) const blockOpts = { common } const uncleHash = test.parentUncles === '0x00' ? undefined : test.parentUncles - const parentBlock = createBlockFromBlockData( + const parentBlock = createBlock( { header: { timestamp: test.parentTimestamp, @@ -65,10 +63,10 @@ describe('[Header]: difficulty tests', () => { uncleHash, }, }, - blockOpts + blockOpts, ) - const block = createBlockFromBlockData( + const block = createBlock( { header: { timestamp: test.currentTimestamp, @@ -76,7 +74,7 @@ describe('[Header]: difficulty tests', () => { number: test.currentBlockNumber, }, }, - blockOpts + blockOpts, ) runDifficultyTests(test, parentBlock, block, `fork determination by hardfork (${hardfork})`) @@ -89,10 +87,10 @@ describe('[Header]: difficulty tests', () => { const testData = chainTestData[chain] for (const testName in testData.default) { const test = testData[testName] - const common = new Common({ chain }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const blockOpts = { common, setHardfork: true } const uncleHash = test.parentUncles === '0x00' ? undefined : test.parentUncles - const parentBlock = createBlockFromBlockData( + const parentBlock = createBlock( { header: { timestamp: test.parentTimestamp, @@ -101,10 +99,10 @@ describe('[Header]: difficulty tests', () => { uncleHash, }, }, - blockOpts + blockOpts, ) - const block = createBlockFromBlockData( + const block = createBlock( { header: { timestamp: test.currentTimestamp, @@ -112,14 +110,14 @@ describe('[Header]: difficulty tests', () => { number: test.currentBlockNumber, }, }, - blockOpts + blockOpts, ) runDifficultyTests( test, parentBlock, block, - `fork determination by block number (${test.currentBlockNumber})` + `fork determination by block number (${test.currentBlockNumber})`, ) } } diff --git a/packages/block/test/eip1559block.spec.ts b/packages/block/test/eip1559block.spec.ts index 328ce8cc36..abffc96a9e 100644 --- a/packages/block/test/eip1559block.spec.ts +++ b/packages/block/test/eip1559block.spec.ts @@ -1,22 +1,23 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createFeeMarket1559Tx } from '@ethereumjs/tx' import { hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { createBlockFromBlockData } from '../src/constructors.js' -import { BlockHeader } from '../src/header.js' - +import { createBlock, createBlockHeader } from '../src/index.js' // Test data from Besu (retrieved via Discord) // Older version at https://github.com/abdelhamidbakhta/besu/blob/bf54b6c0b40d3015fc85ff9b078fbc26592d80c0/ethereum/core/src/test/resources/org/hyperledger/besu/ethereum/core/fees/basefee-test.json +import { paramsBlock } from '../src/params.js' + import * as eip1559BaseFee from './testdata/eip1559baseFee.json' const common = new Common({ eips: [1559], - chain: Chain.Mainnet, + chain: 
Mainnet, hardfork: Hardfork.London, + params: paramsBlock, }) -const genesis = createBlockFromBlockData({}) +const genesis = createBlock({}) // Small hack to hack in the activation block number // (Otherwise there would be need for a custom chain only for testing purposes) @@ -32,10 +33,10 @@ common.hardforkBlock = function (hardfork: string | undefined) { describe('EIP1559 tests', () => { it('Header -> Initialization', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws( () => { - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), @@ -45,18 +46,18 @@ describe('EIP1559 tests', () => { }, { common, - } + }, ) }, undefined, undefined, - 'should throw when setting baseFeePerGas with EIP1559 not being activated' + 'should throw when setting baseFeePerGas with EIP1559 not being activated', ) }) it('Header -> genericFormatValidation checks', async () => { try { - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), @@ -68,19 +69,19 @@ describe('EIP1559 tests', () => { calcDifficultyFromHeader: genesis.header, common, freeze: false, - } + }, ) assert.fail('should throw when baseFeePerGas is not set to initial base fee') } catch (e: any) { const expectedError = 'Initial EIP1559 block does not have initial base fee' assert.ok( e.message.includes(expectedError), - 'should throw if base fee is not set to initial value' + 'should throw if base fee is not set to initial value', ) } try { - const header = BlockHeader.fromHeaderData( + const header = createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), @@ -91,7 +92,7 @@ describe('EIP1559 tests', () => { calcDifficultyFromHeader: genesis.header, common, freeze: false, - } + }, ) ;(header as any).baseFeePerGas = undefined await (header as any)._genericFormatValidation() @@ -99,26 +100,26 @@ describe('EIP1559 tests', () => { const expectedError = 'EIP1559 block has no base fee field' assert.ok( e.message.includes(expectedError), - 'should throw with no base fee field when EIP1559 is activated' + 'should throw with no base fee field when EIP1559 is activated', ) } }) it('Header -> _genericFormValidation -> success case', async () => { - createBlockFromBlockData( + createBlock( { header: { number: BigInt(1), parentHash: genesis.hash(), gasLimit: genesis.header.gasLimit * BigInt(2), // Special case on EIP-1559 transition block timestamp: BigInt(1), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'Valid initial EIP1559 header should be valid') @@ -126,7 +127,7 @@ describe('EIP1559 tests', () => { it('Header -> validate()', async () => { try { - BlockHeader.fromHeaderData( + createBlockHeader( { baseFeePerGas: BigInt(1000), number: BigInt(1), @@ -137,7 +138,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.fail('should throw') } catch (e: any) { @@ -146,7 +147,7 @@ describe('EIP1559 tests', () => { }) it('Header -> validate() -> success cases', async () => { - const block1 = createBlockFromBlockData( + const block1 = createBlock( { header: { number: BigInt(1), @@ -159,9 +160,9 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: genesis.header, common, - } + }, ) - createBlockFromBlockData( + createBlock( { header: 
{ number: BigInt(2), @@ -174,29 +175,28 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) assert.ok(true, 'should correctly validate subsequent EIP-1559 blocks') }) it('Header -> validate() -> gas usage', async () => { try { - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), timestamp: BigInt(1), gasLimit: genesis.header.gasLimit * BigInt(2), // Special case on EIP-1559 transition block gasUsed: - genesis.header.gasLimit * - (common.param('gasConfig', 'elasticityMultiplier') ?? BigInt(0)) + + genesis.header.gasLimit * (common.param('elasticityMultiplier') ?? BigInt(0)) + BigInt(1), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.fail('should throw') } catch (e: any) { @@ -205,76 +205,76 @@ describe('EIP1559 tests', () => { }) it('Header -> validate() -> gas usage', async () => { - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), timestamp: BigInt(1), gasLimit: genesis.header.gasLimit * BigInt(2), // Special case on EIP-1559 transition block gasUsed: genesis.header.gasLimit * BigInt(2), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'should not throw when elasticity is exactly matched') }) - const block1 = createBlockFromBlockData( + const block1 = createBlock( { header: { number: BigInt(1), parentHash: genesis.hash(), gasLimit: genesis.header.gasLimit * BigInt(2), // Special case on EIP-1559 transition block timestamp: BigInt(1), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) it('Header -> validate() -> gasLimit -> success cases', async () => { let parentGasLimit = genesis.header.gasLimit * BigInt(2) - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), timestamp: BigInt(1), gasLimit: parentGasLimit + parentGasLimit / BigInt(1024) - BigInt(1), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (HF transition block)') - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), timestamp: BigInt(1), gasLimit: parentGasLimit - parentGasLimit / BigInt(1024) + BigInt(1), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (HF transition block)') parentGasLimit = block1.header.gasLimit - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(2), parentHash: block1.hash(), @@ -285,12 +285,12 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (post-HF transition block)') - BlockHeader.fromHeaderData( + createBlockHeader( { number: BigInt(2), parentHash: block1.hash(), @@ -301,7 +301,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, 
common, - } + }, ) assert.ok(true, 'should not throw if gas limit is between bounds (post-HF transition block)') @@ -309,18 +309,18 @@ describe('EIP1559 tests', () => { it('Header -> validateGasLimit() -> error cases', async () => { let parentGasLimit = genesis.header.gasLimit * BigInt(2) - let header = BlockHeader.fromHeaderData( + let header = createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), timestamp: BigInt(1), gasLimit: parentGasLimit + parentGasLimit, - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) try { header.validateGasLimit(genesis.header) @@ -328,12 +328,12 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit increased too much'), - 'should throw if gas limit is increased too much (HF transition block)' + 'should throw if gas limit is increased too much (HF transition block)', ) } parentGasLimit = block1.header.gasLimit - header = BlockHeader.fromHeaderData( + header = createBlockHeader( { number: BigInt(2), parentHash: block1.hash(), @@ -344,7 +344,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) try { header.validateGasLimit(block1.header) @@ -352,25 +352,25 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit increased too much'), - 'should throw if gas limit is increased too much (post-HF transition block)' + 'should throw if gas limit is increased too much (post-HF transition block)', ) } }) it('Header -> validateGasLimit() -> error cases', async () => { let parentGasLimit = genesis.header.gasLimit * BigInt(2) - let header = BlockHeader.fromHeaderData( + let header = createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), timestamp: BigInt(1), gasLimit: parentGasLimit - parentGasLimit / BigInt(1024), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, { calcDifficultyFromHeader: genesis.header, common, - } + }, ) try { header.validateGasLimit(genesis.header) @@ -378,12 +378,12 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit decreased too much'), - 'should throw if gas limit is decreased too much (HF transition block)' + 'should throw if gas limit is decreased too much (HF transition block)', ) } parentGasLimit = block1.header.gasLimit - header = BlockHeader.fromHeaderData( + header = createBlockHeader( { number: BigInt(2), parentHash: block1.hash(), @@ -394,7 +394,7 @@ describe('EIP1559 tests', () => { { calcDifficultyFromHeader: block1.header, common, - } + }, ) try { header.validateGasLimit(block1.header) @@ -402,27 +402,27 @@ describe('EIP1559 tests', () => { } catch (e: any) { assert.ok( e.message.includes('gas limit decreased too much'), - 'should throw if gas limit is decreased too much (post-HF transition block)' + 'should throw if gas limit is decreased too much (post-HF transition block)', ) } }) it('Header -> validateTransactions() -> tx', async () => { - const transaction = FeeMarketEIP1559Transaction.fromTxData( + const transaction = createFeeMarket1559Tx( { maxFeePerGas: BigInt(0), maxPriorityFeePerGas: BigInt(0), }, - { common } + { common }, ).sign(hexToBytes(`0x${'46'.repeat(32)}`)) - const block = createBlockFromBlockData( + const block = createBlock( { header: { number: BigInt(1), parentHash: genesis.hash(), gasLimit: genesis.header.gasLimit * 
BigInt(2), // Special case on EIP-1559 transition block timestamp: BigInt(1), - baseFeePerGas: common.param('gasConfig', 'initialBaseFee'), + baseFeePerGas: common.param('initialBaseFee'), }, transactions: [ { @@ -439,26 +439,26 @@ describe('EIP1559 tests', () => { { common, calcDifficultyFromHeader: genesis.header, - } + }, ) const errs = block.getTransactionsValidationErrors() assert.ok( errs[0].includes('unable to pay base fee'), - 'should throw if transaction is unable to pay base fee' + 'should throw if transaction is unable to pay base fee', ) }) it('Header -> calcNextBaseFee()', () => { for (let index = 0; index < eip1559BaseFee.length; index++) { const item = eip1559BaseFee[index] - const result = BlockHeader.fromHeaderData( + const result = createBlockHeader( { baseFeePerGas: BigInt(item.parentBaseFee), gasUsed: BigInt(item.parentGasUsed), gasLimit: BigInt(item.parentTargetGasUsed) * BigInt(2), }, - { common } + { common }, ).calcNextBaseFee() const expected = BigInt(item.expectedBaseFee) assert.equal(expected, result, 'base fee correct') @@ -466,7 +466,7 @@ describe('EIP1559 tests', () => { }) it('Header -> toJSON()', () => { - const header = BlockHeader.fromHeaderData( + const header = createBlockHeader( { number: BigInt(3), parentHash: genesis.hash(), @@ -476,7 +476,7 @@ describe('EIP1559 tests', () => { }, { common, - } + }, ) assert.equal(header.toJSON().baseFeePerGas, '0x5') }) diff --git a/packages/block/test/eip4788block.spec.ts b/packages/block/test/eip4788block.spec.ts index 7f294c832b..3100c08c76 100644 --- a/packages/block/test/eip4788block.spec.ts +++ b/packages/block/test/eip4788block.spec.ts @@ -1,48 +1,47 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex, zeros } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { createBlockFromBlockData } from '../src/constructors.js' -import { BlockHeader } from '../src/header.js' +import { createBlock, createBlockHeader } from '../src/index.js' describe('EIP4788 header tests', () => { it('should work', () => { - const earlyCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Cancun, eips: [4788] }) + const earlyCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Cancun, eips: [4788] }) assert.throws( () => { - BlockHeader.fromHeaderData( + createBlockHeader( { parentBeaconBlockRoot: zeros(32), }, { common: earlyCommon, - } + }, ) }, 'A parentBeaconBlockRoot for a header can only be provided with EIP4788 being activated', undefined, - 'should throw when setting parentBeaconBlockRoot with EIP4788 not being activated' + 'should throw when setting parentBeaconBlockRoot with EIP4788 not being activated', ) assert.throws( () => { - BlockHeader.fromHeaderData( + createBlockHeader( { blobGasUsed: 1n, }, { common: earlyCommon, - } + }, ) }, 'blob gas used can only be provided with EIP4844 activated', undefined, - 'should throw when setting blobGasUsed with EIP4844 not being activated' + 'should throw when setting blobGasUsed with EIP4844 not being activated', ) assert.doesNotThrow(() => { - BlockHeader.fromHeaderData( + createBlockHeader( { excessBlobGas: 0n, blobGasUsed: 0n, @@ -51,20 +50,20 @@ describe('EIP4788 header tests', () => { { common, skipConsensusFormatValidation: true, - } + }, ) }, 'correctly instantiates an EIP4788 block 
header') - const block = createBlockFromBlockData( + const block = createBlock( { - header: BlockHeader.fromHeaderData({}, { common }), + header: createBlockHeader({}, { common }), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) assert.equal( block.toJSON().header?.parentBeaconBlockRoot, bytesToHex(zeros(32)), - 'JSON output includes excessBlobGas' + 'JSON output includes excessBlobGas', ) }) }) diff --git a/packages/block/test/eip4844block.spec.ts b/packages/block/test/eip4844block.spec.ts index cb169c07c7..a81c55dd0f 100644 --- a/packages/block/test/eip4844block.spec.ts +++ b/packages/block/test/eip4844block.spec.ts @@ -1,5 +1,5 @@ -import { Chain, Common, Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { BlobEIP4844Transaction } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet, createCommonFromGethGenesis } from '@ethereumjs/common' +import { createBlob4844Tx } from '@ethereumjs/tx' import { blobsToCommitments, commitmentsToVersionedHashes, @@ -9,9 +9,9 @@ import { import { loadKZG } from 'kzg-wasm' import { assert, beforeAll, describe, it } from 'vitest' -import { createBlockFromBlockData } from '../src/constructors.js' -import { BlockHeader } from '../src/header.js' import { fakeExponential, getNumBlobs } from '../src/helpers.js' +import { createBlock, createBlockHeader } from '../src/index.js' +import { paramsBlock } from '../src/params.js' import gethGenesis from './testdata/4844-hardfork.json' @@ -32,66 +32,66 @@ describe('EIP4844 header tests', () => { }) it('should work', () => { - const earlyCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const earlyCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws( () => { - BlockHeader.fromHeaderData( + createBlockHeader( { excessBlobGas: 1n, }, { common: earlyCommon, - } + }, ) }, 'excess blob gas can only be provided with EIP4844 activated', undefined, - 'should throw when setting excessBlobGas with EIP4844 not being activated' + 'should throw when setting excessBlobGas with EIP4844 not being activated', ) assert.throws( () => { - BlockHeader.fromHeaderData( + createBlockHeader( { blobGasUsed: 1n, }, { common: earlyCommon, - } + }, ) }, 'blob gas used can only be provided with EIP4844 activated', undefined, - 'should throw when setting blobGasUsed with EIP4844 not being activated' + 'should throw when setting blobGasUsed with EIP4844 not being activated', ) - const excessBlobGas = BlockHeader.fromHeaderData( + const excessBlobGas = createBlockHeader( {}, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ).excessBlobGas assert.equal( excessBlobGas, 0n, - 'instantiates block with reasonable default excess blob gas value when not provided' + 'instantiates block with reasonable default excess blob gas value when not provided', ) assert.doesNotThrow(() => { - BlockHeader.fromHeaderData( + createBlockHeader( { excessBlobGas: 0n, }, { common, skipConsensusFormatValidation: true, - } + }, ) }, 'correctly instantiates an EIP4844 block header') - const block = createBlockFromBlockData( + const block = createBlock( { - header: BlockHeader.fromHeaderData({}, { common, skipConsensusFormatValidation: true }), + header: createBlockHeader({}, { common, skipConsensusFormatValidation: true }), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) assert.equal(block.toJSON().header?.excessBlobGas, '0x0', 
'JSON output includes excessBlobGas') }) @@ -105,39 +105,43 @@ describe('blob gas tests', () => { common = createCommonFromGethGenesis(gethGenesis, { chain: 'customChain', hardfork: Hardfork.Cancun, + params: paramsBlock, customCrypto: { kzg }, }) - blobGasPerBlob = common.param('gasConfig', 'blobGasPerBlob') + blobGasPerBlob = common.param('blobGasPerBlob') }) it('should work', () => { - const preShardingHeader = BlockHeader.fromHeaderData({}) + const preShardingHeader = createBlockHeader( + {}, + { common: new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) }, + ) let excessBlobGas = preShardingHeader.calcNextExcessBlobGas() assert.equal( excessBlobGas, 0n, - 'excess blob gas where 4844 is not active on header should be 0' + 'excess blob gas where 4844 is not active on header should be 0', ) assert.throws( () => preShardingHeader.calcDataFee(1), 'header must have excessBlobGas field', undefined, - 'calcDataFee throws when header has no excessBlobGas field' + 'calcDataFee throws when header has no excessBlobGas field', ) - const lowGasHeader = BlockHeader.fromHeaderData( + const lowGasHeader = createBlockHeader( { number: 1, excessBlobGas: 5000 }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) excessBlobGas = lowGasHeader.calcNextExcessBlobGas() let blobGasPrice = lowGasHeader.getBlobGasPrice() assert.equal(excessBlobGas, 0n, 'excess blob gas should be 0 for small parent header blob gas') assert.equal(blobGasPrice, 1n, 'blob gas price should be 1n when low or no excess blob gas') - const highGasHeader = BlockHeader.fromHeaderData( + const highGasHeader = createBlockHeader( { number: 1, excessBlobGas: 6291456, blobGasUsed: BigInt(6) * blobGasPerBlob }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) excessBlobGas = highGasHeader.calcNextExcessBlobGas() blobGasPrice = highGasHeader.getBlobGasPrice() @@ -162,16 +166,17 @@ describe('transaction validation tests', () => { common = createCommonFromGethGenesis(gethGenesis, { chain: 'customChain', hardfork: Hardfork.Cancun, + params: paramsBlock, customCrypto: { kzg }, }) - blobGasPerBlob = common.param('gasConfig', 'blobGasPerBlob') + blobGasPerBlob = common.param('blobGasPerBlob') }) it('should work', () => { const blobs = getBlobs('hello world') const commitments = blobsToCommitments(kzg, blobs) const blobVersionedHashes = commitmentsToVersionedHashes(commitments) - const tx1 = BlobEIP4844Transaction.fromTxData( + const tx1 = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -180,9 +185,9 @@ describe('transaction validation tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ).sign(randomBytes(32)) - const tx2 = BlobEIP4844Transaction.fromTxData( + const tx2 = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -191,12 +196,12 @@ describe('transaction validation tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ).sign(randomBytes(32)) - const parentHeader = BlockHeader.fromHeaderData( + const parentHeader = createBlockHeader( { number: 1n, excessBlobGas: 4194304, blobGasUsed: 0 }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) const excessBlobGas = parentHeader.calcNextExcessBlobGas() @@ -204,18 +209,18 @@ describe('transaction validation tests', () => { function getBlock(transactions: TypedTransaction[]) { const blobs = getNumBlobs(transactions) - const blockHeader = BlockHeader.fromHeaderData( + 
const blockHeader = createBlockHeader( { number: 2n, parentHash: parentHeader.hash(), excessBlobGas, blobGasUsed: BigInt(blobs) * blobGasPerBlob, }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) - const block = createBlockFromBlockData( + const block = createBlock( { header: blockHeader, transactions }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) return block } @@ -228,35 +233,35 @@ describe('transaction validation tests', () => { assert.doesNotThrow( () => blockWithValidTx.validateBlobTransactions(parentHeader), - 'does not throw when all tx maxFeePerBlobGas are >= to block blob gas fee' + 'does not throw when all tx maxFeePerBlobGas are >= to block blob gas fee', ) const blockJson = blockWithValidTx.toJSON() blockJson.header!.blobGasUsed = '0x0' - const blockWithInvalidHeader = createBlockFromBlockData(blockJson, { common }) + const blockWithInvalidHeader = createBlock(blockJson, { common }) assert.throws( () => blockWithInvalidHeader.validateBlobTransactions(parentHeader), 'block blobGasUsed mismatch', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.throws( () => blockWithInvalidTx.validateBlobTransactions(parentHeader), 'than block blob gas price', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.throws( () => blockWithInvalidTx.validateBlobTransactions(parentHeader), 'than block blob gas price', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.throws( () => blockWithTooManyBlobs.validateBlobTransactions(parentHeader), 'exceed maximum blob gas per block', undefined, - 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee' + 'throws with correct error message when tx maxFeePerBlobGas less than block blob gas fee', ) assert.ok( @@ -264,7 +269,7 @@ describe('transaction validation tests', () => { .getTransactionsValidationErrors() .join(' ') .includes('exceed maximum blob gas per block'), - 'tx erros includes correct error message when too many blobs in a block' + 'tx errors includes correct error message when too many blobs in a block', ) }) }) @@ -292,7 +297,7 @@ describe('fake exponential', () => { assert.equal( fakeExponential(BigInt(input[0]), BigInt(input[1]), BigInt(input[2])), BigInt(input[3]), - 'fake exponential produced expected output' + 'fake exponential produced expected output', ) } }) diff --git a/packages/block/test/eip4895block.spec.ts b/packages/block/test/eip4895block.spec.ts index 2159b51bbc..13e7835363 100644 --- a/packages/block/test/eip4895block.spec.ts +++ b/packages/block/test/eip4895block.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { Address, @@ -10,9 +10,8 @@ import { } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { createBlockFromBlockData, createBlockFromRLPSerializedBlock } from '../src/constructors.js' -import { BlockHeader } from '../src/header.js' import { 
genWithdrawalsTrieRoot } from '../src/helpers.js' +import { createBlock, createBlockFromRLPSerializedBlock, createBlockHeader } from '../src/index.js' import type { WithdrawalBytes, WithdrawalData } from '@ethereumjs/util' @@ -20,7 +19,7 @@ const gethWithdrawals8BlockRlp = 'f903e1f90213a0fe950635b1bd2a416ff6283b0bbd30176e1b1125ad06fa729da9f3f4c1c61710a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794aa00000000000000000000000000000000000000a07f7510a0cb6203f456e34ec3e2ce30d6c5590ded42c10a9cf3f24784119c5afba056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080018401c9c380802f80a0ff0000000000000000000000000000000000000000000000000000000000000088000000000000000007a0b695b29ec7ee934ef6a68838b13729f2d49fffe26718de16a1a9ed94a4d7d06dc0c0f901c6da8082ffff94000000000000000000000000000000000000000080f83b0183010000940100000000000000000000000000000000000000a00100000000000000000000000000000000000000000000000000000000000000f83b0283010001940200000000000000000000000000000000000000a00200000000000000000000000000000000000000000000000000000000000000f83b0383010002940300000000000000000000000000000000000000a00300000000000000000000000000000000000000000000000000000000000000f83b0483010003940400000000000000000000000000000000000000a00400000000000000000000000000000000000000000000000000000000000000f83b0583010004940500000000000000000000000000000000000000a00500000000000000000000000000000000000000000000000000000000000000f83b0683010005940600000000000000000000000000000000000000a00600000000000000000000000000000000000000000000000000000000000000f83b0783010006940700000000000000000000000000000000000000a00700000000000000000000000000000000000000000000000000000000000000' const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Shanghai, }) @@ -38,83 +37,83 @@ common.hardforkBlock = function (hardfork: string | undefined) { describe('EIP4895 tests', () => { it('should correctly generate withdrawalsRoot', async () => { - // get withdwalsArray + // get withdrawalsArray const gethBlockBytesArray = RLP.decode(hexToBytes(`0x${gethWithdrawals8BlockRlp}`)) const withdrawals = (gethBlockBytesArray[3] as WithdrawalBytes[]).map((wa) => - Withdrawal.fromValuesArray(wa) + Withdrawal.fromValuesArray(wa), ) assert.equal(withdrawals.length, 8, '8 withdrawals should have been found') - const gethWitdrawalsRoot = (gethBlockBytesArray[0] as Uint8Array[])[16] as Uint8Array + const gethWithdrawalsRoot = (gethBlockBytesArray[0] as Uint8Array[])[16] as Uint8Array assert.deepEqual( await genWithdrawalsTrieRoot(withdrawals), - gethWitdrawalsRoot, - 'withdrawalsRoot should be valid' + gethWithdrawalsRoot, + 'withdrawalsRoot should be valid', ) }) it('Header tests', () => { - const earlyCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const earlyCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws( () => { - BlockHeader.fromHeaderData( + createBlockHeader( { 
withdrawalsRoot: zeros(32), }, { common: earlyCommon, - } + }, ) }, undefined, undefined, - 'should throw when setting withdrawalsRoot with EIP4895 not being activated' + 'should throw when setting withdrawalsRoot with EIP4895 not being activated', ) assert.doesNotThrow(() => { - BlockHeader.fromHeaderData( + createBlockHeader( {}, { common, - } + }, ) }, 'should not throw when withdrawalsRoot is undefined with EIP4895 being activated') assert.doesNotThrow(() => { - BlockHeader.fromHeaderData( + createBlockHeader( { withdrawalsRoot: zeros(32), }, { common, - } + }, ) }, 'correctly instantiates an EIP4895 block header') }) it('Block tests', async () => { - const earlyCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const earlyCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws( () => { - createBlockFromBlockData( + createBlock( { withdrawals: [], }, { common: earlyCommon, - } + }, ) }, undefined, undefined, - 'should throw when setting withdrawals with EIP4895 not being activated' + 'should throw when setting withdrawals with EIP4895 not being activated', ) assert.doesNotThrow(() => { - createBlockFromBlockData( + createBlock( {}, { common, - } + }, ) }, 'should not throw when withdrawals is undefined with EIP4895 being activated') assert.doesNotThrow(() => { - createBlockFromBlockData( + createBlock( { header: { withdrawalsRoot: zeros(32), @@ -123,10 +122,10 @@ describe('EIP4895 tests', () => { }, { common, - } + }, ) }) - const block = createBlockFromBlockData( + const block = createBlock( { header: { withdrawalsRoot: zeros(32), @@ -135,26 +134,26 @@ describe('EIP4895 tests', () => { }, { common, - } + }, ) assert.notOk( await block.withdrawalsTrieIsValid(), - 'should invalidate the empty withdrawals root' + 'should invalidate the empty withdrawals root', ) - const validHeader = BlockHeader.fromHeaderData( + const validHeader = createBlockHeader( { withdrawalsRoot: KECCAK256_RLP, }, - { common } + { common }, ) - const validBlock = createBlockFromBlockData( + const validBlock = createBlock( { header: validHeader, withdrawals: [], }, { common, - } + }, ) assert.ok(await validBlock.withdrawalsTrieIsValid(), 'should validate empty withdrawals root') @@ -165,22 +164,22 @@ describe('EIP4895 tests', () => { amount: BigInt(1000), } - const validBlockWithWithdrawal = createBlockFromBlockData( + const validBlockWithWithdrawal = createBlock( { header: { withdrawalsRoot: hexToBytes( - '0x897ca49edcb278aecab2688bcc2b7b7ee43524cc489672534fee332a172f1718' + '0x897ca49edcb278aecab2688bcc2b7b7ee43524cc489672534fee332a172f1718', ), }, withdrawals: [withdrawal], }, { common, - } + }, ) assert.ok( await validBlockWithWithdrawal.withdrawalsTrieIsValid(), - 'should validate withdrawals root' + 'should validate withdrawals root', ) const withdrawal2 = { @@ -190,22 +189,22 @@ describe('EIP4895 tests', () => { amount: BigInt(2000), } - const validBlockWithWithdrawal2 = createBlockFromBlockData( + const validBlockWithWithdrawal2 = createBlock( { header: { withdrawalsRoot: hexToBytes( - '0x3b514862c42008079d461392e29d5b6775dd5ed370a6c4441ccb8ab742bf2436' + '0x3b514862c42008079d461392e29d5b6775dd5ed370a6c4441ccb8ab742bf2436', ), }, withdrawals: [withdrawal, withdrawal2], }, { common, - } + }, ) assert.ok( await validBlockWithWithdrawal2.withdrawalsTrieIsValid(), - 'should validate withdrawals root' + 'should validate withdrawals root', ) assert.doesNotThrow(() => { validBlockWithWithdrawal.hash() @@ -215,7 +214,7 @@ describe('EIP4895 tests', () => { }, 
'hashed block with withdrawals') }) it('should throw if no withdrawal array is provided', () => { - const blockWithWithdrawals = createBlockFromBlockData({}, { common }) + const blockWithWithdrawals = createBlock({}, { common }) const rlp = blockWithWithdrawals.serialize() const rlpDecoded = RLP.decode(rlp) as Uint8Array[] // remove withdrawals root @@ -229,17 +228,17 @@ describe('EIP4895 tests', () => { }, undefined, undefined, - 'should provide withdrawals array when 4895 is active' + 'should provide withdrawals array when 4895 is active', ) }) it('should return early when withdrawals root equals KECCAK256_RLP', async () => { - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) // Set invalid withdrawalsRoot in cache block['cache'].withdrawalsTrieRoot = randomBytes(32) assert.ok( await block.withdrawalsTrieIsValid(), - 'correctly executed code path where withdrawals length is 0' + 'correctly executed code path where withdrawals length is 0', ) }) }) diff --git a/packages/block/test/eip7685block.spec.ts b/packages/block/test/eip7685block.spec.ts index f51f129bfd..928e2db558 100644 --- a/packages/block/test/eip7685block.spec.ts +++ b/packages/block/test/eip7685block.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { DepositRequest, KECCAK256_RLP, @@ -8,13 +8,14 @@ import { } from '@ethereumjs/util' import { assert, describe, expect, it } from 'vitest' +import { genRequestsTrieRoot } from '../src/helpers.js' import { - createBlockFromBlockData, + Block, + createBlock, + createBlockFromBytesArray, createBlockFromRPC, - createBlockFromValuesArray, -} from '../src/constructors.js' -import { genRequestsTrieRoot } from '../src/helpers.js' -import { Block, BlockHeader } from '../src/index.js' + createBlockHeader, +} from '../src/index.js' import type { CLRequest, CLRequestType } from '@ethereumjs/util' @@ -39,13 +40,13 @@ function getRandomWithdrawalRequest(): CLRequest { } const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Cancun, eips: [7685, 4844, 4788], }) describe('7685 tests', () => { it('should instantiate block with defaults', () => { - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) assert.deepEqual(block.header.requestsRoot, KECCAK256_RLP) const block2 = new Block(undefined, undefined, undefined, undefined, { common }) assert.deepEqual(block.header.requestsRoot, KECCAK256_RLP) @@ -54,24 +55,24 @@ describe('7685 tests', () => { it('should instantiate a block with requests', async () => { const request = getRandomDepositRequest() const requestsRoot = await genRequestsTrieRoot([request]) - const block = createBlockFromBlockData( + const block = createBlock( { requests: [request], header: { requestsRoot }, }, - { common } + { common }, ) assert.equal(block.requests?.length, 1) assert.deepEqual(block.header.requestsRoot, requestsRoot) }) it('RequestsRootIsValid should return false when requestsRoot is invalid', async () => { const request = getRandomDepositRequest() - const block = createBlockFromBlockData( + const block = createBlock( { requests: [request], header: { requestsRoot: randomBytes(32) }, }, - { common } + { common }, ) assert.equal(await block.requestsTrieIsValid(), false) @@ -85,36 +86,36 @@ describe('7685 tests', () => { // Construct block with requests in correct order - const block = createBlockFromBlockData( + const block = 
createBlock( { requests, header: { requestsRoot }, }, - { common } + { common }, ) assert.ok(await block.requestsTrieIsValid()) // Throws when requests are not ordered correctly await expect(async () => - createBlockFromBlockData( + createBlock( { requests: [request1, request3, request2], header: { requestsRoot }, }, - { common } - ) + { common }, + ), ).rejects.toThrow('ascending order') }) }) describe('fromValuesArray tests', () => { it('should construct a block with empty requests root', () => { - const block = createBlockFromValuesArray( - [BlockHeader.fromHeaderData({}, { common }).raw(), [], [], [], []], + const block = createBlockFromBytesArray( + [createBlockHeader({}, { common }).raw(), [], [], [], []], { common, - } + }, ) assert.deepEqual(block.header.requestsRoot, KECCAK256_RLP) }) @@ -126,17 +127,11 @@ describe('fromValuesArray tests', () => { const requestsRoot = await genRequestsTrieRoot(requests) const serializedRequests = [request1.serialize(), request2.serialize(), request3.serialize()] - const block = createBlockFromValuesArray( - [ - BlockHeader.fromHeaderData({ requestsRoot }, { common }).raw(), - [], - [], - [], - serializedRequests, - ], + const block = createBlockFromBytesArray( + [createBlockHeader({ requestsRoot }, { common }).raw(), [], [], [], serializedRequests], { common, - } + }, ) assert.deepEqual(block.header.requestsRoot, requestsRoot) assert.equal(block.requests?.length, 3) @@ -152,17 +147,11 @@ describe('fromRPC tests', () => { const requestsRoot = await genRequestsTrieRoot(requests) const serializedRequests = [request1.serialize(), request2.serialize(), request3.serialize()] - const block = createBlockFromValuesArray( - [ - BlockHeader.fromHeaderData({ requestsRoot }, { common }).raw(), - [], - [], - [], - serializedRequests, - ], + const block = createBlockFromBytesArray( + [createBlockHeader({ requestsRoot }, { common }).raw(), [], [], [], serializedRequests], { common, - } + }, ) const jsonBlock = block.toJSON() const rpcBlock: any = { ...jsonBlock.header, requests: jsonBlock.requests } diff --git a/packages/block/test/from-beacon-payload.spec.ts b/packages/block/test/from-beacon-payload.spec.ts index 519c649adc..f634235ea7 100644 --- a/packages/block/test/from-beacon-payload.spec.ts +++ b/packages/block/test/from-beacon-payload.spec.ts @@ -3,8 +3,7 @@ import { loadKZG } from 'kzg-wasm' import { assert, beforeAll, describe, it } from 'vitest' import * as shardingJson from '../../client/test/sim/configs/4844-devnet.json' -import { createBlockFromBeaconPayloadJson } from '../src/constructors.js' -import { BlockHeader } from '../src/index.js' +import { createBlockFromBeaconPayloadJson, createBlockHeader } from '../src/index.js' import * as payloadKaustinen from './testdata/payload-kaustinen.json' import * as payload87335 from './testdata/payload-slot-87335.json' @@ -24,7 +23,7 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { commonJson.config = { ...commonJson.config, chainId: 4844001005 } const network = 'sharding' common = createCommonFromGethGenesis(commonJson, { chain: network, customCrypto: { kzg } }) - // safely change chainId without modifying undelying json + // safely change chainId without modifying underlying json common.setHardfork(Hardfork.Cancun) }) @@ -35,9 +34,9 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { const block = await createBlockFromBeaconPayloadJson(payload as BeaconPayloadJson, { common, }) - const parentHeader = BlockHeader.fromHeaderData( + const parentHeader = createBlockHeader( { 
excessBlobGas: BigInt(0), blobGasUsed: block.header.excessBlobGas! + BigInt(393216) }, - { common } + { common }, ) block.validateBlobTransactions(parentHeader) assert.ok(true, `successfully constructed block=${block.header.number}`) @@ -55,7 +54,7 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { ...payload87335, block_hash: payload87475.block_hash, } as BeaconPayloadJson, - { common } + { common }, ) assert.fail(`should have failed constructing the block`) } catch (e) { @@ -72,9 +71,9 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { ...payload87475, block_hash: '0x573714bdd0ca5e47bc32008751c4fc74237f8cb354fbc1475c1d0ece38236ea4', } as BeaconPayloadJson, - { common } + { common }, ) - const parentHeader = BlockHeader.fromHeaderData({ excessBlobGas: BigInt(0) }, { common }) + const parentHeader = createBlockHeader({ excessBlobGas: BigInt(0) }, { common }) block.validateBlobTransactions(parentHeader) assert.fail(`should have failed constructing the block`) } catch (e) { @@ -87,7 +86,7 @@ describe('[fromExecutionPayloadJson]: 4844 devnet 5', () => { describe('[fromExecutionPayloadJson]: kaustinen', () => { const network = 'kaustinen' - // safely change chainId without modifying undelying json + // safely change chainId without modifying underlying json const common = createCommonFromGethGenesis(testnetVerkleKaustinen, { chain: network, eips: [6800], @@ -102,7 +101,7 @@ describe('[fromExecutionPayloadJson]: kaustinen', () => { assert.deepEqual( block.executionWitness, payloadKaustinen.execution_witness as VerkleExecutionWitness, - 'execution witness should match' + 'execution witness should match', ) }) }) diff --git a/packages/block/test/from-rpc.spec.ts b/packages/block/test/from-rpc.spec.ts index 23c255fbf2..950cdbec25 100644 --- a/packages/block/test/from-rpc.spec.ts +++ b/packages/block/test/from-rpc.spec.ts @@ -1,17 +1,19 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex, equalsBytes, hexToBytes, randomBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { createBlockFromJsonRpcProvider } from '../src/constructors.js' -import { createBlockFromRpc } from '../src/from-rpc.js' -import { blockHeaderFromRpc } from '../src/header-from-rpc.js' +import { + blockHeaderFromRpc, + createBlockFromJsonRpcProvider, + createBlockFromRPC, +} from '../src/index.js' import * as alchemy14151203 from './testdata/alchemy14151203.json' import * as infuraGoerliBlock10536893 from './testdata/infura-goerli-block-10536893.json' import * as infura15571241woTxs from './testdata/infura15571241.json' -import * as infura15571241wTxs from './testdata/infura15571241wtxns.json' -import * as infura2000004woTxs from './testdata/infura2000004wotxns.json' -import * as infura2000004wTxs from './testdata/infura2000004wtxs.json' +import * as infura15571241wTxs from './testdata/infura15571241wtxns.json' // cspell:disable-line +import * as infura2000004woTxs from './testdata/infura2000004wotxns.json' // cspell:disable-line +import * as infura2000004wTxs from './testdata/infura2000004wtxs.json' // cspell:disable-line import * as blockDataDifficultyAsInteger from './testdata/testdata-from-rpc-difficulty-as-integer.json' import * as testDataFromRpcGoerliLondon from './testdata/testdata-from-rpc-goerli-london.json' import * as blockDataWithUncles from './testdata/testdata-from-rpc-with-uncles.json' @@ -20,14 +22,14 @@ import * as blockDataWithWithdrawals from 
'./testdata/testdata-from-rpc-with-wit import * as blockData from './testdata/testdata-from-rpc.json' import type { JsonRpcBlock } from '../src/index.js' -import type { LegacyTransaction } from '@ethereumjs/tx' +import type { LegacyTx } from '@ethereumjs/tx' import type { PrefixedHexString } from '@ethereumjs/util' describe('[fromRPC]: block #2924874', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) it('should create a block with transactions with valid signatures', () => { - const block = createBlockFromRpc(blockData as JsonRpcBlock, [], { common }) + const block = createBlockFromRPC(blockData, [], { common }) const allValid = block.transactions.every((tx) => tx.verifySignature()) assert.equal(allValid, true, 'all transaction signatures are valid') }) @@ -41,80 +43,78 @@ describe('[fromRPC]: block #2924874', () => { describe('[fromRPC]:', () => { it('Should create a block with json data that includes a transaction with value parameter as integer string', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const valueAsIntegerString = '1' const blockDataTransactionValueAsInteger = blockData blockDataTransactionValueAsInteger.transactions[0].value = valueAsIntegerString - const createBlockFromTransactionValueAsInteger = createBlockFromRpc( + const createBlockFromTransactionValueAsInteger = createBlockFromRPC( blockDataTransactionValueAsInteger as JsonRpcBlock, undefined, - { common } + { common }, ) assert.equal( createBlockFromTransactionValueAsInteger.transactions[0].value.toString(), - valueAsIntegerString + valueAsIntegerString, ) }) it('Should create a block with json data that includes a transaction with defaults with gasPrice parameter as integer string', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const gasPriceAsIntegerString = '1' const blockDataTransactionGasPriceAsInteger = blockData blockDataTransactionGasPriceAsInteger.transactions[0].gasPrice = gasPriceAsIntegerString - const createBlockFromTransactionGasPriceAsInteger = createBlockFromRpc( + const createBlockFromTransactionGasPriceAsInteger = createBlockFromRPC( blockDataTransactionGasPriceAsInteger as JsonRpcBlock, undefined, - { common } + { common }, ) assert.equal( - ( - createBlockFromTransactionGasPriceAsInteger.transactions[0] as LegacyTransaction - ).gasPrice.toString(), - gasPriceAsIntegerString + (createBlockFromTransactionGasPriceAsInteger.transactions[0] as LegacyTx).gasPrice.toString(), + gasPriceAsIntegerString, ) }) it('should create a block given json data that includes a difficulty parameter of type integer string', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const blockDifficultyAsInteger = createBlockFromRpc( + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const blockDifficultyAsInteger = createBlockFromRPC( blockDataDifficultyAsInteger as JsonRpcBlock, undefined, { common, - } + }, ) assert.equal( blockDifficultyAsInteger.header.difficulty.toString(), - blockDataDifficultyAsInteger.difficulty + blockDataDifficultyAsInteger.difficulty, ) }) it('should create a block from london hardfork', () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.London }) - const 
block = createBlockFromRpc(testDataFromRpcGoerliLondon as JsonRpcBlock, [], { common }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.London }) + const block = createBlockFromRPC(testDataFromRpcGoerliLondon as JsonRpcBlock, [], { common }) assert.equal( `0x${block.header.baseFeePerGas?.toString(16)}`, - testDataFromRpcGoerliLondon.baseFeePerGas + testDataFromRpcGoerliLondon.baseFeePerGas, ) assert.equal(bytesToHex(block.hash()), testDataFromRpcGoerliLondon.hash) }) it('should create a block with uncles', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const block = createBlockFromRpc(blockDataWithUncles as JsonRpcBlock, [uncleBlockData], { + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const block = createBlockFromRPC(blockDataWithUncles as JsonRpcBlock, [uncleBlockData], { common, }) assert.ok(block.uncleHashIsValid()) }) it('should create a block with EIP-4896 withdrawals', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) - const block = createBlockFromRpc(blockDataWithWithdrawals as JsonRpcBlock, [], { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) + const block = createBlockFromRPC(blockDataWithWithdrawals as JsonRpcBlock, [], { common }) assert.ok(block.withdrawalsTrieIsValid()) }) it('should create a block header with the correct hash when EIP-4896 withdrawals are present', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) const block = blockHeaderFromRpc(blockDataWithWithdrawals as JsonRpcBlock, { common }) const hash = blockDataWithWithdrawals.hash assert.equal(bytesToHex(block.hash()), hash) @@ -123,52 +123,52 @@ describe('[fromRPC]:', () => { describe('[fromRPC] - Alchemy/Infura API block responses', () => { it('should create pre merge block from Alchemy API response to eth_getBlockByHash', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const block = createBlockFromRpc(alchemy14151203 as JsonRpcBlock, [], { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const block = createBlockFromRPC(alchemy14151203 as JsonRpcBlock, [], { common }) assert.equal(bytesToHex(block.hash()), alchemy14151203.hash) }) it('should create pre and post merge blocks from Infura API responses to eth_getBlockByHash and eth_getBlockByNumber', () => { - const common = new Common({ chain: Chain.Mainnet }) - let block = createBlockFromRpc(infura2000004woTxs as JsonRpcBlock, [], { + const common = new Common({ chain: Mainnet }) + let block = createBlockFromRPC(infura2000004woTxs as JsonRpcBlock, [], { common, setHardfork: true, }) assert.equal( bytesToHex(block.hash()), infura2000004woTxs.hash, - 'created premerge block w/o txns' + 'created premerge block w/o txns', ) - block = createBlockFromRpc(infura2000004wTxs as JsonRpcBlock, [], { common, setHardfork: true }) + block = createBlockFromRPC(infura2000004wTxs as JsonRpcBlock, [], { common, setHardfork: true }) assert.equal( bytesToHex(block.hash()), infura2000004wTxs.hash, - 'created premerge block with txns' + 'created premerge block with txns', ) - block = createBlockFromRpc(infura15571241woTxs as JsonRpcBlock, [], { + block = createBlockFromRPC(infura15571241woTxs as JsonRpcBlock, [], { common, - setHardfork: 58750000000000000000000n, + setHardfork: true, }) assert.equal( 
bytesToHex(block.hash()), infura15571241woTxs.hash, - 'created post merge block without txns' + 'created post merge block without txns', ) - block = createBlockFromRpc(infura15571241wTxs as JsonRpcBlock, [], { + block = createBlockFromRPC(infura15571241wTxs as JsonRpcBlock, [], { common, - setHardfork: 58750000000000000000000n, + setHardfork: true, }) assert.equal( bytesToHex(block.hash()), infura15571241wTxs.hash, - 'created post merge block with txns' + 'created post merge block with txns', ) }) it('should correctly parse a cancun block over rpc', () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Cancun }) - const block = blockHeaderFromRpc(infuraGoerliBlock10536893 as JsonRpcBlock, { common }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Cancun }) + const block = blockHeaderFromRpc(infuraGoerliBlock10536893 as JsonRpcBlock, { common }) // cspell:disable-line const hash = hexToBytes(infuraGoerliBlock10536893.hash as PrefixedHexString) assert.ok(equalsBytes(block.hash(), hash)) }) @@ -176,7 +176,7 @@ describe('[fromRPC] - Alchemy/Infura API block responses', () => { describe('[fromJsonRpcProvider]', () => { it('should work', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const provider = 'https://my.json.rpc.provider.com:8545' const realFetch = global.fetch @@ -184,7 +184,7 @@ describe('[fromJsonRpcProvider]', () => { global.fetch = async (_url: string, req: any) => { const json = JSON.parse(req.body) if (json.params[0] === '0x1850b014065b23d804ecf71a8a4691d076ca87c2e6fb8fe81ee20a4d8e884c24') { - const txData = await import(`./testdata/infura15571241wtxns.json`) + const txData = await import(`./testdata/infura15571241wtxns.json`) // cspell:disable-line return { ok: true, status: 200, @@ -212,7 +212,7 @@ describe('[fromJsonRpcProvider]', () => { assert.equal( bytesToHex(block.hash()), blockHash, - 'assembled a block from blockdata from a provider' + 'assembled a block from blockdata from a provider', ) try { await createBlockFromJsonRpcProvider(provider, bytesToHex(randomBytes(32)), {}) @@ -220,7 +220,7 @@ describe('[fromJsonRpcProvider]', () => { } catch (err: any) { assert.ok( err.message.includes('No block data returned from provider'), - 'returned correct error message' + 'returned correct error message', ) } global.fetch = realFetch diff --git a/packages/block/test/header.spec.ts b/packages/block/test/header.spec.ts index 4d4cafc066..a56e3c41f7 100644 --- a/packages/block/test/header.spec.ts +++ b/packages/block/test/header.spec.ts @@ -1,25 +1,31 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { - Address, KECCAK256_RLP, KECCAK256_RLP_ARRAY, bytesToHex, concatBytes, + createZeroAddress, equalsBytes, hexToBytes, zeros, } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { createBlockFromBlockData, createBlockFromRLPSerializedBlock } from '../src/constructors.js' -import { BlockHeader } from '../src/header.js' -import { Block } from '../src/index.js' +import { + Block, + createBlock, + createBlockFromRLPSerializedBlock, + createBlockHeader, + createBlockHeaderFromBytesArray, + createBlockHeaderFromRLP, +} from '../src/index.js' import * as testData from './testdata/bcBlockGasLimitTest.json' import * as blocksGoerli from './testdata/blocks_goerli.json' import * 
as blocksMainnet from './testdata/blocks_mainnet.json' +import type { BlockHeader } from '../src/index.js' import type { CliqueConfig } from '@ethereumjs/common' import type { PrefixedHexString } from '@ethereumjs/util' @@ -28,7 +34,7 @@ describe('[Block]: Header functions', () => { function compareDefaultHeader(header: BlockHeader) { assert.ok(equalsBytes(header.parentHash, zeros(32))) assert.ok(equalsBytes(header.uncleHash, KECCAK256_RLP_ARRAY)) - assert.ok(header.coinbase.equals(Address.zero())) + assert.ok(header.coinbase.equals(createZeroAddress())) assert.ok(equalsBytes(header.stateRoot, zeros(32))) assert.ok(equalsBytes(header.transactionsTrie, KECCAK256_RLP)) assert.ok(equalsBytes(header.receiptTrie, KECCAK256_RLP)) @@ -43,7 +49,7 @@ describe('[Block]: Header functions', () => { assert.ok(equalsBytes(header.nonce, zeros(8))) } - const header = BlockHeader.fromHeaderData() + const header = createBlockHeader() compareDefaultHeader(header) const block = new Block() @@ -51,84 +57,72 @@ describe('[Block]: Header functions', () => { }) it('Initialization -> fromHeaderData()', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - let header = BlockHeader.fromHeaderData(undefined, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + let header = createBlockHeader(undefined, { common }) assert.ok(bytesToHex(header.hash()), 'genesis block should initialize') assert.equal( header.common.hardfork(), 'chainstart', - 'should initialize with correct HF provided' + 'should initialize with correct HF provided', ) common.setHardfork(Hardfork.Byzantium) assert.equal( header.common.hardfork(), 'chainstart', - 'should stay on correct HF if outer common HF changes' + 'should stay on correct HF if outer common HF changes', ) - header = BlockHeader.fromHeaderData({}, { common }) + header = createBlockHeader({}, { common }) assert.ok(bytesToHex(header.hash()), 'default block should initialize') // test default freeze values // also test if the options are carried over to the constructor - header = BlockHeader.fromHeaderData({}) + header = createBlockHeader({}) assert.ok(Object.isFrozen(header), 'block should be frozen by default') - header = BlockHeader.fromHeaderData({}, { freeze: false }) + header = createBlockHeader({}, { freeze: false }) assert.ok( !Object.isFrozen(header), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) }) it('Initialization -> fromRLPSerializedHeader()', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - let header = BlockHeader.fromHeaderData({}, { common, freeze: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + let header = createBlockHeader({}, { common, freeze: false }) const rlpHeader = header.serialize() - header = BlockHeader.fromRLPSerializedHeader(rlpHeader, { + header = createBlockHeaderFromRLP(rlpHeader, { common, }) assert.ok(Object.isFrozen(header), 'block should be frozen by default') - header = BlockHeader.fromRLPSerializedHeader(rlpHeader, { + header = createBlockHeaderFromRLP(rlpHeader, { common, freeze: false, }) assert.ok( !Object.isFrozen(header), - 'block should not be frozen when freeze deactivated in options' - ) - - assert.throws( - () => - BlockHeader.fromRLPSerializedHeader(rlpHeader, { - common, - freeze: false, - setHardfork: 1n, // Added to bypass defaulting setHardfork to true in static constructor - }), - 'A base 
fee', - undefined, - 'throws when RLP serialized block with no base fee on default hardfork (london) and setHardfork left undefined' + 'block should not be frozen when freeze deactivated in options', ) - header = BlockHeader.fromRLPSerializedHeader( + header = createBlockHeaderFromRLP( hexToBytes( - '0xf90214a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000850400000000808213888080a011bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82faa00000000000000000000000000000000000000000000000000000000000000000880000000000000042' + '0xf90214a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a0d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000850400000000808213888080a011bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82faa00000000000000000000000000000000000000000000000000000000000000000880000000000000042', ), - { common, setHardfork: false } + { common, setHardfork: false }, ) assert.equal( bytesToHex(header.hash()), '0xf0f936910ebf101b7b168bbe08e3f166ce1e75e16f513dd5a97af02fbe7de7c0', - 'genesis block should produce incorrect hash since default hardfork is london' + 'genesis block should produce incorrect hash since default hardfork is london', ) }) it('Initialization -> fromRLPSerializedHeader() -> error cases', () => { try { - BlockHeader.fromRLPSerializedHeader(RLP.encode('a')) + createBlockHeaderFromRLP(RLP.encode('a')) } catch (e: any) { const expectedError = 'Invalid serialized header input. 
Must be array' assert.ok(e.message.includes(expectedError), 'should throw with header as rlp encoded string') @@ -136,7 +130,7 @@ describe('[Block]: Header functions', () => { }) it('Initialization -> fromValuesArray()', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const zero = new Uint8Array(0) const headerArray = [] for (let item = 0; item < 15; item++) { @@ -152,13 +146,13 @@ describe('[Block]: Header functions', () => { headerArray[13] = zeros(32) // mixHash headerArray[14] = zeros(8) // nonce - let header = BlockHeader.fromValuesArray(headerArray, { common }) + let header = createBlockHeaderFromBytesArray(headerArray, { common }) assert.ok(Object.isFrozen(header), 'block should be frozen by default') - header = BlockHeader.fromValuesArray(headerArray, { common, freeze: false }) + header = createBlockHeaderFromBytesArray(headerArray, { common, freeze: false }) assert.ok( !Object.isFrozen(header), - 'block should not be frozen when freeze deactivated in options' + 'block should not be frozen when freeze deactivated in options', ) }) @@ -175,14 +169,14 @@ describe('[Block]: Header functions', () => { headerArray[14] = zeros(8) // nonce headerArray[15] = zeros(4) // bad data try { - BlockHeader.fromValuesArray(headerArray) + createBlockHeaderFromBytesArray(headerArray) } catch (e: any) { const expectedError = 'invalid header. More values than expected were received' assert.ok(e.message.includes(expectedError), 'should throw on more values than expected') } try { - BlockHeader.fromValuesArray(headerArray.slice(0, 5)) + createBlockHeaderFromBytesArray(headerArray.slice(0, 5)) } catch (e: any) { const expectedError = 'invalid header. 
Less values than expected were received' assert.ok(e.message.includes(expectedError), 'should throw on less values than expected') @@ -190,15 +184,15 @@ describe('[Block]: Header functions', () => { }) it('Initialization -> Clique Blocks', () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) - const header = BlockHeader.fromHeaderData({ extraData: new Uint8Array(97) }, { common }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) + const header = createBlockHeader({ extraData: new Uint8Array(97) }, { common }) assert.ok(bytesToHex(header.hash()), 'default block should initialize') }) it('should validate extraData', async () => { // PoW - let common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - let genesis = createBlockFromBlockData({}, { common }) + let common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + let genesis = createBlock({}, { common }) const number = 1 let parentHash = genesis.hash() @@ -212,7 +206,7 @@ describe('[Block]: Header functions', () => { let extraData = new Uint8Array(32) try { - BlockHeader.fromHeaderData({ ...data, extraData }, opts) + createBlockHeader({ ...data, extraData }, opts) assert.ok(true, testCase) } catch (error: any) { assert.fail(testCase) @@ -223,7 +217,7 @@ describe('[Block]: Header functions', () => { extraData = new Uint8Array(12) try { - BlockHeader.fromHeaderData({ ...data, extraData }, opts) + createBlockHeader({ ...data, extraData }, opts) assert.ok(testCase) } catch (error: any) { assert.fail(testCase) @@ -234,15 +228,15 @@ describe('[Block]: Header functions', () => { extraData = new Uint8Array(42) try { - BlockHeader.fromHeaderData({ ...data, extraData }, opts) + createBlockHeader({ ...data, extraData }, opts) assert.fail(testCase) } catch (error: any) { assert.ok((error.message as string).includes('invalid amount of extra data'), testCase) } // PoA - common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) - genesis = createBlockFromBlockData({ header: { extraData: new Uint8Array(97) } }, { common }) + common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) + genesis = createBlock({ header: { extraData: new Uint8Array(97) } }, { common }) parentHash = genesis.hash() gasLimit = genesis.header.gasLimit @@ -254,7 +248,7 @@ describe('[Block]: Header functions', () => { 'clique block should validate with valid number of bytes in extraData: 32 byte vanity + 65 byte seal' extraData = concatBytes(new Uint8Array(32), new Uint8Array(65)) try { - BlockHeader.fromHeaderData({ ...data, extraData }, opts) + createBlockHeader({ ...data, extraData }, opts) assert.ok(true, testCase) } catch (error: any) { assert.fail(testCase) @@ -264,14 +258,14 @@ describe('[Block]: Header functions', () => { testCase = 'clique block should throw on invalid extraData length' extraData = new Uint8Array(32) try { - BlockHeader.fromHeaderData({ ...data, extraData }, opts) + createBlockHeader({ ...data, extraData }, opts) assert.fail(testCase) } catch (error: any) { assert.ok( (error.message as string).includes( - 'extraData must be 97 bytes on non-epoch transition blocks, received 32 bytes' + 'extraData must be 97 bytes on non-epoch transition blocks, received 32 bytes', ), - testCase + testCase, ) } @@ -281,31 +275,31 @@ describe('[Block]: Header functions', () => { new Uint8Array(32), new Uint8Array(65), new Uint8Array(20), - new Uint8Array(21) + new Uint8Array(21), ) const epoch = BigInt((common.consensusConfig() as 
CliqueConfig).epoch) try { - BlockHeader.fromHeaderData({ ...data, number: epoch, extraData }, opts) + createBlockHeader({ ...data, number: epoch, extraData }, opts) assert.fail(testCase) } catch (error: any) { assert.ok( (error.message as string).includes( - 'invalid signer list length in extraData, received signer length of 41 (not divisible by 20)' + 'invalid signer list length in extraData, received signer length of 41 (not divisible by 20)', ), - testCase + testCase, ) } }) it('should skip consensusFormatValidation if flag is set to false', () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) const extraData = concatBytes(new Uint8Array(1)) try { - BlockHeader.fromHeaderData({ extraData }, { common, skipConsensusFormatValidation: true }) + createBlockHeader({ extraData }, { common, skipConsensusFormatValidation: true }) assert.ok( true, - 'should instantiate header with invalid extraData when skipConsensusFormatValidation === true' + 'should instantiate header with invalid extraData when skipConsensusFormatValidation === true', ) } catch (error: any) { assert.fail('should not throw') @@ -316,29 +310,29 @@ describe('[Block]: Header functions', () => { const badHash = new Uint8Array(31) assert.throws( - () => BlockHeader.fromHeaderData({ parentHash: badHash }), + () => createBlockHeader({ parentHash: badHash }), 'parentHash must be 32 bytes', undefined, - 'throws on invalid parent hash length' + 'throws on invalid parent hash length', ) assert.throws( - () => BlockHeader.fromHeaderData({ stateRoot: badHash }), + () => createBlockHeader({ stateRoot: badHash }), 'stateRoot must be 32 bytes', undefined, - 'throws on invalid state root hash length' + 'throws on invalid state root hash length', ) assert.throws( - () => BlockHeader.fromHeaderData({ transactionsTrie: badHash }), + () => createBlockHeader({ transactionsTrie: badHash }), 'transactionsTrie must be 32 bytes', undefined, - 'throws on invalid transactionsTrie root hash length' + 'throws on invalid transactionsTrie root hash length', ) assert.throws( - () => BlockHeader.fromHeaderData({ nonce: new Uint8Array(5) }), + () => createBlockHeader({ nonce: new Uint8Array(5) }), 'nonce must be 8 bytes', undefined, - 'contains nonce length error message' + 'contains nonce length error message', ) }) /* @@ -346,7 +340,7 @@ describe('[Block]: Header functions', () => { it('header validation -> poa checks', async () => { const headerData = testDataPreLondon.blocks[0].blockHeader - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Istanbul }) const blockchain = new Mockchain() const genesisRlp = toBytes(testDataPreLondon.genesisRLP) @@ -360,7 +354,7 @@ describe('[Block]: Header functions', () => { headerData.difficulty = BigInt(2) let testCase = 'should throw on lower than period timestamp diffs' - let header = BlockHeader.fromHeaderData(headerData, { common }) + let header = createBlockHeader(headerData, { common }) try { await header.validate(blockchain) assert.fail(testCase) @@ -370,7 +364,7 @@ describe('[Block]: Header functions', () => { testCase = 'should not throw on timestamp diff equal to period' headerData.timestamp = BigInt(1422494864) - header = BlockHeader.fromHeaderData(headerData, { common }) + header = createBlockHeader(headerData, { common }) try { await header.validate(blockchain) assert.ok(true, testCase) @@ -380,8 
+374,8 @@ describe('[Block]: Header functions', () => { testCase = 'should throw on non-zero beneficiary (coinbase) for epoch transition block' headerData.number = common.consensusConfig().epoch - headerData.coinbase = Address.fromString('0x091dcd914fCEB1d47423e532955d1E62d1b2dAEf') - header = BlockHeader.fromHeaderData(headerData, { common }) + headerData.coinbase = createAddressFromString('0x091dcd914fCEB1d47423e532955d1E62d1b2dAEf') + header = createBlockHeader(headerData, { common }) try { await header.validate(blockchain) assert.fail('should throw') @@ -393,11 +387,11 @@ describe('[Block]: Header functions', () => { } } headerData.number = 1 - headerData.coinbase = Address.zero() + headerData.coinbase = createZeroAddress() testCase = 'should throw on non-zero mixHash' headerData.mixHash = new Uint8Array(32).fill(1) - header = BlockHeader.fromHeaderData(headerData, { common }) + header = createBlockHeader(headerData, { common }) try { await header.validate(blockchain) assert.fail('should throw') @@ -412,7 +406,7 @@ describe('[Block]: Header functions', () => { testCase = 'should throw on invalid clique difficulty' headerData.difficulty = BigInt(3) - header = BlockHeader.fromHeaderData(headerData, { common }) + header = createBlockHeader(headerData, { common }) try { header.validateCliqueDifficulty(blockchain) assert.fail(testCase) @@ -433,7 +427,7 @@ describe('[Block]: Header functions', () => { const poaBlock = createBlockFromRLPSerializedBlock(genesisRlp, { common, cliqueSigner }) await poaBlockchain.putBlock(poaBlock) - header = BlockHeader.fromHeaderData(headerData, { common, cliqueSigner }) + header = createBlockHeader(headerData, { common, cliqueSigner }) try { const res = header.validateCliqueDifficulty(poaBlockchain) assert.equal(res, true, testCase) @@ -444,7 +438,7 @@ describe('[Block]: Header functions', () => { testCase = 'validateCliqueDifficulty() should return false with INTURN difficulty and one signer' headerData.difficulty = BigInt(1) - header = BlockHeader.fromHeaderData(headerData, { common, cliqueSigner }) + header = createBlockHeader(headerData, { common, cliqueSigner }) try { const res = header.validateCliqueDifficulty(poaBlockchain) assert.equal(res, false, testCase) @@ -454,18 +448,18 @@ describe('[Block]: Header functions', () => { }) */ it('should test validateGasLimit()', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const bcBlockGasLimitTestData = testData.tests.BlockGasLimit2p63m1 + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const bcBlockGasLimitTestData = testData.default.tests.BlockGasLimit2p63m1 for (const key of Object.keys(bcBlockGasLimitTestData)) { const genesisRlp = hexToBytes( bcBlockGasLimitTestData[key as keyof typeof bcBlockGasLimitTestData] - .genesisRLP as PrefixedHexString + .genesisRLP as PrefixedHexString, ) const parentBlock = createBlockFromRLPSerializedBlock(genesisRlp, { common }) const blockRlp = hexToBytes( bcBlockGasLimitTestData[key as keyof typeof bcBlockGasLimitTestData].blocks[0] - .rlp as PrefixedHexString + .rlp as PrefixedHexString, ) const block = createBlockFromRLPSerializedBlock(blockRlp, { common }) assert.doesNotThrow(() => block.validateGasLimit(parentBlock)) @@ -473,40 +467,40 @@ describe('[Block]: Header functions', () => { }) it('should test isGenesis()', () => { - const header1 = BlockHeader.fromHeaderData({ number: 1 }) + const header1 = createBlockHeader({ number: 1 }) assert.equal(header1.isGenesis(), false) - const header2 = 
BlockHeader.fromHeaderData() + const header2 = createBlockHeader() assert.equal(header2.isGenesis(), true) }) it('should test hash() function', () => { - let common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - let header = BlockHeader.fromHeaderData((blocksMainnet as any).default[0]['header'], { common }) + let common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + let header = createBlockHeader((blocksMainnet as any).default[0]['header'], { common }) assert.equal( bytesToHex(header.hash()), '0x88e96d4537bea4d9c05d12549907b32561d3bf31f45aae734cdc119f13406cb6', - 'correct PoW hash (mainnet block 1)' + 'correct PoW hash (mainnet block 1)', ) - common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) - header = BlockHeader.fromHeaderData((blocksGoerli as any).default[0]['header'], { common }) + common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) + header = createBlockHeader((blocksGoerli as any).default[0]['header'], { common }) assert.equal( bytesToHex(header.hash()), '0x8f5bab218b6bb34476f51ca588e9f4553a3a7ce5e13a66c660a5283e97e9a85a', - 'correct PoA clique hash (goerli block 1)' + 'correct PoA clique hash (goerli block 1)', ) }) it('should be able to initialize shanghai header with correct hardfork defaults', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) - const header = BlockHeader.fromHeaderData({}, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) + const header = createBlockHeader({}, { common }) assert.equal(header.common.hardfork(), Hardfork.Shanghai, 'hardfork should be set to shanghai') assert.equal(header.baseFeePerGas, BigInt(7), 'baseFeePerGas should be set to minimum default') assert.deepEqual( header.withdrawalsRoot, KECCAK256_RLP, - 'withdrawalsRoot should be set to KECCAK256_RLP' + 'withdrawalsRoot should be set to KECCAK256_RLP', ) }) }) diff --git a/packages/block/test/mergeBlock.spec.ts b/packages/block/test/mergeBlock.spec.ts index f9275d225a..ccc7a6f8bd 100644 --- a/packages/block/test/mergeBlock.spec.ts +++ b/packages/block/test/mergeBlock.spec.ts @@ -1,27 +1,27 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { - Address, KECCAK256_RLP, KECCAK256_RLP_ARRAY, + createZeroAddress, equalsBytes, hexToBytes, zeros, } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Block } from '../src/block.js' -import { createBlockFromBlockData } from '../src/constructors.js' -import { BlockHeader } from '../src/header.js' +import { Block, createBlock, createBlockHeader } from '../src/index.js' + +import type { BlockHeader } from '../src/index.js' const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Paris, }) function validateMergeHeader(header: BlockHeader) { assert.ok(equalsBytes(header.parentHash, zeros(32)), 'parentHash') assert.ok(equalsBytes(header.uncleHash, KECCAK256_RLP_ARRAY), 'uncleHash') - assert.ok(header.coinbase.equals(Address.zero()), 'coinbase') + assert.ok(header.coinbase.equals(createZeroAddress()), 'coinbase') assert.ok(equalsBytes(header.stateRoot, zeros(32)), 'stateRoot') assert.ok(equalsBytes(header.transactionsTrie, KECCAK256_RLP), 'transactionsTrie') assert.ok(equalsBytes(header.receiptTrie, KECCAK256_RLP), 'receiptTrie') @@ -38,7 +38,7 @@ function validateMergeHeader(header: BlockHeader) { describe('[Header]: Casper PoS / The Merge Functionality', 
() => { it('should construct default blocks with post-merge PoS constants fields', () => { - const header = BlockHeader.fromHeaderData({}, { common }) + const header = createBlockHeader({}, { common }) validateMergeHeader(header) const block = new Block(undefined, undefined, undefined, undefined, { common }, undefined) @@ -51,7 +51,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { const headerData = { uncleHash: hexToBytes('0x123abc'), } - BlockHeader.fromHeaderData(headerData, { common }) + createBlockHeader(headerData, { common }) assert.fail('should throw') } catch (e: any) { assert.ok(true, 'should throw on wrong uncleHash') @@ -62,7 +62,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { difficulty: BigInt(123456), number: 1n, } - BlockHeader.fromHeaderData(headerData, { common }) + createBlockHeader(headerData, { common }) assert.fail('should throw') } catch (e: any) { assert.ok(true, 'should throw on wrong difficulty') @@ -73,7 +73,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { extraData: new Uint8Array(33).fill(1), number: 1n, } - BlockHeader.fromHeaderData(headerData, { common }) + createBlockHeader(headerData, { common }) assert.fail('should throw') } catch (e: any) { assert.ok(true, 'should throw on invalid extraData length') @@ -83,7 +83,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { const headerData = { mixHash: new Uint8Array(30).fill(1), } - BlockHeader.fromHeaderData(headerData, { common }) + createBlockHeader(headerData, { common }) assert.fail('should throw') } catch (e: any) { assert.ok(true, 'should throw on invalid mixHash length') @@ -94,7 +94,7 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { nonce: new Uint8Array(8).fill(1), number: 1n, } - BlockHeader.fromHeaderData(headerData, { common }) + createBlockHeader(headerData, { common }) assert.fail('should throw') } catch (e: any) { assert.ok(true, 'should throw on wrong nonce') @@ -103,32 +103,26 @@ describe('[Header]: Casper PoS / The Merge Functionality', () => { it('test that a PoS block with uncles cannot be produced', () => { try { - new Block( - undefined, - undefined, - [BlockHeader.fromHeaderData(undefined, { common })], - undefined, - { - common, - } - ) + new Block(undefined, undefined, [createBlockHeader(undefined, { common })], undefined, { + common, + }) assert.fail('should have thrown') } catch (e: any) { assert.ok(true, 'should throw') } }) - it('EIP-4399: prevRando should return mixHash value', () => { + it('EIP-4399: prevRandao should return mixHash value', () => { const mixHash = new Uint8Array(32).fill(3) - let block = createBlockFromBlockData({ header: { mixHash } }, { common }) + let block = createBlock({ header: { mixHash } }, { common }) assert.ok( equalsBytes(block.header.prevRandao, mixHash), - 'prevRandao should return mixHash value' + 'prevRandao should return mixHash value', ) const commonLondon = common.copy() commonLondon.setHardfork(Hardfork.London) - block = createBlockFromBlockData({ header: { mixHash } }, { common: commonLondon }) + block = createBlock({ header: { mixHash } }, { common: commonLondon }) try { block.header.prevRandao assert.fail('should have thrown') diff --git a/packages/block/test/testdata/4844-hardfork.json b/packages/block/test/testdata/4844-hardfork.json index 9ee53c6112..bb954f80e4 100644 --- a/packages/block/test/testdata/4844-hardfork.json +++ b/packages/block/test/testdata/4844-hardfork.json @@ -19,7 +19,7 @@ "period": 5, "epoch": 30000 }, - 
"terminalTotalDifficulty": 2, + "terminalTotalDifficulty": 0, "terminalTotalDifficultyPassed": true }, "nonce": "0x42", diff --git a/packages/block/test/testdata/genesishashestest.json b/packages/block/test/testdata/genesisHashesTest.json similarity index 100% rename from packages/block/test/testdata/genesishashestest.json rename to packages/block/test/testdata/genesisHashesTest.json diff --git a/packages/block/test/testdata/testnetMerge.json b/packages/block/test/testdata/testnetMerge.json index 995d7b1d2a..5103197fcd 100644 --- a/packages/block/test/testdata/testnetMerge.json +++ b/packages/block/test/testdata/testnetMerge.json @@ -1,7 +1,6 @@ { "name": "testnetMerge", "chainId": 55555, - "networkId": 55555, "defaultHardfork": "istanbul", "consensus": { "type": "poa", @@ -54,8 +53,7 @@ }, { "name": "paris", - "block": null, - "ttd": "5000" + "block": 15 }, { "name": "shanghai", diff --git a/packages/block/test/util.ts b/packages/block/test/util.ts deleted file mode 100644 index 1383be87a0..0000000000 --- a/packages/block/test/util.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { RLP } from '@ethereumjs/rlp' -import { BIGINT_0, BIGINT_1, utf8ToBytes } from '@ethereumjs/util' -import { keccak256 } from 'ethereum-cryptography/keccak' - -import { createBlockFromBlockData } from '../src/constructors.js' - -import type { Block, BlockHeader } from '../src/index.js' - -/** - * This helper function creates a valid block (except the PoW) with the ability to add uncles. Returns a Block. - * @param parentBlock - The Parent block to build upon - * @param extraData - Extra data graffiti in order to create equal blocks (like block number) but with different hashes - * @param uncles - Optional, an array of uncle headers. Automatically calculates the uncleHash. - */ -function createBlock( - parentBlock: Block, - extraData: string, - uncles?: BlockHeader[], - common?: Common -): Block { - uncles = uncles ?? [] - common = common ?? new Common({ chain: Chain.Mainnet }) - - if (extraData.length > 32) { - throw new Error('extra data graffiti must be 32 bytes or less') - } - - const number = parentBlock.header.number + BIGINT_1 - const timestamp = parentBlock.header.timestamp + BIGINT_1 - - const uncleHash = keccak256(RLP.encode(uncles.map((uh) => uh.raw()))) - - const londonHfBlock = common.hardforkBlock(Hardfork.London) - const baseFeePerGas = - typeof londonHfBlock === 'bigint' && londonHfBlock !== BIGINT_0 && number > londonHfBlock - ? 
parentBlock.header.calcNextBaseFee() - : undefined - - return createBlockFromBlockData( - { - header: { - number, - parentHash: parentBlock.hash(), - timestamp, - gasLimit: parentBlock.header.gasLimit, - extraData: utf8ToBytes(extraData), - uncleHash, - baseFeePerGas, - }, - uncleHeaders: uncles, - }, - { - common, - calcDifficultyFromHeader: parentBlock.header, - } - ) -} - -export { createBlock } diff --git a/packages/block/tsconfig.lint.json b/packages/block/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/block/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/blockchain/.eslintrc.cjs b/packages/blockchain/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/blockchain/.eslintrc.cjs +++ b/packages/blockchain/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/blockchain/CHANGELOG.md b/packages/blockchain/CHANGELOG.md index c5c5d84484..7daea0dbec 100644 --- a/packages/blockchain/CHANGELOG.md +++ b/packages/blockchain/CHANGELOG.md @@ -6,7 +6,25 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 7.2.0 - 2024-03-05 +## 7.3.0 - 2024-08-15 + +### EIP-7685 Requests: EIP-6110 (Deposits) / EIP-7002 (Withdrawals) / EIP-7251 (Consolidations) + +This library now supports `EIP-6110` deposit requests, see PR [#3390](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3390), `EIP-7002` withdrawal requests, see PR [#3385](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3385) and `EIP-7251` consolidation requests, see PR [#3477](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3477) as well as the underlying generic execution layer request logic introduced with `EIP-7685` (PR [#3372](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3372)). + +These new request types will be activated with the `Prague` hardfork, see [@ethereumjs/block](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/block) README for detailed documentation. 
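As a minimal, non-authoritative sketch of what this activation means in practice: with a `Common` instance set to the `Prague` hardfork (assuming `Hardfork.Prague` is available in the `@ethereumjs/common` release referenced here), the request-related EIPs from the paragraph above can be checked via `isActivatedEIP()`:

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'

// Assumption: Hardfork.Prague is exposed by the @ethereumjs/common version used in this release
const common = new Common({ chain: Mainnet, hardfork: Hardfork.Prague })

// EIP-7685 generic requests plus the three concrete request types
// (deposits, withdrawal requests, consolidation requests)
for (const eip of [7685, 6110, 7002, 7251]) {
  // Expected to report these as active once Prague is selected, per the changelog entry above
  console.log(`EIP-${eip} active:`, common.isActivatedEIP(eip))
}
```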
+ +### Verkle Updates + +- Fix the block body parsing as well as save/load from blockchain, PR [#3392](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3392) +- Handle nil block bodies for backwards compatibility, PR [#3394](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3394) + +### Other Features + +- Support for EIP-7685 blocks containing withdrawal and/or deposit requests (see @ethereumjs/block for main documentation), PR [#3372](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3372) +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +## 7.2.0 - 2024-03-18 ### Full 4844 Browser Readiness @@ -133,7 +151,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -258,7 +276,7 @@ Please therefore check you code base on updating and ensure that values you are - Breaking: `Blockchain._common` property has been renamed to `Blockchain.common`, PR [#2857](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2857) - Fixed clique signer reorg scenario, PR [#2610](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2610) - Fix handling of nested uint8Arrays in JSON in DB, PR [#2666](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2666) -- Save iterator head to last successfuly executed even on errors, PR [#2680](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2680) +- Save iterator head to last successfully executed even on errors, PR [#2680](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2680) ## 6.2.2 - 2023-04-20 @@ -303,7 +321,7 @@ So the Blockchain interface was - falsely - claiming that there would be the pos We now fixed this by removing the `null` from the interface return values - see PR [#2524](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2524), after exploring the other way around as well (and the reverting), see PR [#2516](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2516). 
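As a hedged sketch of the calling pattern this implies (written against the current `createBlockchain` constructor; the exact error wording may differ between versions): a lookup for a block that does not exist now surfaces as a rejected promise instead of a `null` return value, so absence is handled via try/catch rather than a null check:

```ts
import { createBlockchain } from '@ethereumjs/blockchain'
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'

const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart })
const blockchain = await createBlockchain({ common })

try {
  // Block number 1 has not been added to this fresh chain, so the lookup is expected to fail
  const block = await blockchain.getBlock(1n)
  console.log('found block', block.header.number)
} catch (e: any) {
  // With `null` removed from the interface return types, a missing block is signaled via an error
  console.log('block not found:', e.message)
}
```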
-While this might lead to breaking code constallations on the TypeScript level if this `null` value is picked up we felt this is the right thing to do since this divergence would otherwise continue to "trick" people into assuming and dealing with `null` values for non-existing-block assumptions in their code and continue to produce eventual bugs (we actually fell over this ourselves). +While this might lead to breaking code constellations on the TypeScript level if this `null` value is picked up we felt this is the right thing to do since this divergence would otherwise continue to "trick" people into assuming and dealing with `null` values for non-existing-block assumptions in their code and continue to produce eventual bugs (we actually fell over this ourselves). A bit on the verge of breaking vs. bug fixing, sorry if you are eventually affected, but we just can't do a single breaking release update for a fix on that level. @@ -416,7 +434,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely dissalows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -424,7 +442,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -458,7 +476,7 @@ import { Blockchain } from '@ethereumjs/blockchain' ## Blockchain Consensus Option -The Blockchain library now has a new optional `consensus` constructor options parameter which can be used to pass in a customized or own consensus class respectively implementation, e.g. a modfifed Ethash version or a Clique implementation with adopted parameters or the like, see PR [#2002](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2002) to get a grasp on the integration. +The Blockchain library now has a new optional `consensus` constructor options parameter which can be used to pass in a customized or own consensus class respectively implementation, e.g. a modified Ethash version or a Clique implementation with adopted parameters or the like, see PR [#2002](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2002) to get a grasp on the integration. 
## Other Changes @@ -544,7 +562,7 @@ The following methods have been taken out of the `Block` package and moved into - `BlockHeader.validateDifficulty()`, `BlockHeader.validateCliqueDifficulty()` -> `Blockchain.consensus.validateDifficulty()` - `Block.validateUncles()` -> to `Blockchain`, kept private (let us know if you need to call into the functionality) -### New File Structue +### New File Structure The file structure of the package has been reworked and aligned with other libraries, see PR [#1986](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1986). There is now a dedicated `blockchain.ts` file for the main source code. The `index.ts` is now re-exporting the `Blockchain` class and `Consensus` implementations as well as the `BlockchainInterface` interface, the `BlockchainOptions` dictionary and types from a dedicated `types.ts` file. @@ -741,13 +759,13 @@ Genesis handling has been reworked to now be safer and reduce the risk of wiping ### Removed deprecated `validate` option -The deprecated `validate` option has been removed, please use `valdiateBlock` and `validatePow` for options when instantiating a new `Blockchain`. +The deprecated `validate` option has been removed, please use `validateBlock` and `validatePow` for options when instantiating a new `Blockchain`. ### Dual ES5 and ES2017 Builds We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -866,7 +884,7 @@ const blockchain = new Blockchain({ common }) ### Removed deprecated `validate` option -The deprecated `validate` option has been removed, please use `valdiateBlock` and `validatePow` for options when instantiating a new `Blockchain`. +The deprecated `validate` option has been removed, please use `validateBlock` and `validatePow` for options when instantiating a new `Blockchain`. ### Dual ES5 and ES2017 Builds @@ -877,7 +895,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. 
+in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes diff --git a/packages/blockchain/README.md b/packages/blockchain/README.md index 8fbd1a09d6..b3f67525da 100644 --- a/packages/blockchain/README.md +++ b/packages/blockchain/README.md @@ -61,7 +61,7 @@ const main = async () => { difficulty: blockchain.genesisBlock.header.difficulty + 1n, }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) const block2 = Block.fromBlockData( { @@ -71,7 +71,7 @@ const main = async () => { difficulty: block.header.difficulty + 1n, }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) // See @ethereumjs/block for more details on how to create a block await blockchain.putBlock(block) @@ -157,7 +157,7 @@ const main = async () => { const genesisBlockHash = blockchain.genesisBlock.hash() common.setForkHashes(genesisBlockHash) console.log( - `Genesis hash from geth genesis parameters - ${bytesToHex(blockchain.genesisBlock.hash())}` + `Genesis hash from geth genesis parameters - ${bytesToHex(blockchain.genesisBlock.hash())}`, ) } @@ -180,6 +180,14 @@ The blockchain library now allows for blob transactions to be validated and incl **Note:** Working with blob transactions needs a manual KZG library installation and global initialization, see [KZG Setup](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/tx/README.md#kzg-setup) for instructions. +### EIP-7685 Requests Support + +This library supports blocks including the following [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) requests: + +- [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) - Deposit Requests (`v7.3.0`+) +- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) - Withdrawal Requests (`v7.3.0`+) +- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) - Consolidation Requests (`v7.3.0`+) + ## Browser With the breaking release round in Summer 2023 we have added hybrid ESM/CJS builds for all our libraries (see section below) and have eliminated many of the caveats which had previously prevented a frictionless browser usage. 
diff --git a/packages/blockchain/examples/clique.ts b/packages/blockchain/examples/clique.ts new file mode 100644 index 0000000000..2a67eb3e4c --- /dev/null +++ b/packages/blockchain/examples/clique.ts @@ -0,0 +1,14 @@ +import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' +import { Common, ConsensusAlgorithm, Goerli, Hardfork } from '@ethereumjs/common' + +import type { ConsensusDict } from '@ethereumjs/blockchain' + +const common = new Common({ chain: Goerli, hardfork: Hardfork.London }) + +const consensusDict: ConsensusDict = {} +consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() +const blockchain = await createBlockchain({ + consensusDict, + common, +}) +console.log(`Created blockchain with ${blockchain.consensus!.algorithm} consensus algorithm`) diff --git a/packages/blockchain/examples/gethGenesis.ts b/packages/blockchain/examples/gethGenesis.ts index 97b7df1db4..b435181136 100644 --- a/packages/blockchain/examples/gethGenesis.ts +++ b/packages/blockchain/examples/gethGenesis.ts @@ -1,6 +1,7 @@ import { createBlockchain } from '@ethereumjs/blockchain' -import { Common, createCommonFromGethGenesis, parseGethGenesis } from '@ethereumjs/common' +import { createCommonFromGethGenesis } from '@ethereumjs/common' import { bytesToHex, parseGethGenesisState } from '@ethereumjs/util' + import gethGenesisJson from './genesisData/post-merge.json' const main = async () => { @@ -14,8 +15,8 @@ const main = async () => { const genesisBlockHash = blockchain.genesisBlock.hash() common.setForkHashes(genesisBlockHash) console.log( - `Genesis hash from geth genesis parameters - ${bytesToHex(blockchain.genesisBlock.hash())}` + `Genesis hash from geth genesis parameters - ${bytesToHex(blockchain.genesisBlock.hash())}`, ) } -main() +void main() diff --git a/packages/blockchain/examples/simple.ts b/packages/blockchain/examples/simple.ts index 38c4024bee..9fb24a1307 100644 --- a/packages/blockchain/examples/simple.ts +++ b/packages/blockchain/examples/simple.ts @@ -1,10 +1,10 @@ -import { Block, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex } from '@ethereumjs/util' const main = async () => { - const common = new Common({ chain: 'mainnet', hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) // Use the safe static constructor which awaits the init method const blockchain = await createBlockchain({ validateBlocks: false, // Skipping validation so we can make a simple chain without having to provide complete blocks @@ -13,7 +13,7 @@ const main = async () => { }) // We use minimal data to provide a sequence of blocks (increasing number, difficulty, and then setting parent hash to previous block) - const block = createBlockFromBlockData( + const block = createBlock( { header: { number: 1n, @@ -21,9 +21,9 @@ const main = async () => { difficulty: blockchain.genesisBlock.header.difficulty + 1n, }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { header: { number: 2n, @@ -31,7 +31,7 @@ const main = async () => { difficulty: block.header.difficulty + 1n, }, }, - { common, setHardfork: true } + { common, setHardfork: true }, ) // See @ethereumjs/block for more details on how to create a block 
await blockchain.putBlock(block) @@ -47,4 +47,4 @@ const main = async () => { // Block 1: 0xa1a061528d74ba81f560e1ebc4f29d6b58171fc13b72b876cdffe6e43b01bdc5 // Block 2: 0x5583be91cf9fb14f5dbeb03ad56e8cef19d1728f267c35a25ba5a355a528f602 } -main() +void main() diff --git a/packages/blockchain/package.json b/packages/blockchain/package.json index 1ea6f94b08..ed2b367178 100644 --- a/packages/blockchain/package.json +++ b/packages/blockchain/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/blockchain", - "version": "7.2.0", + "version": "7.3.0", "description": "A module to store and interact with blocks", "keywords": [ "ethereum", @@ -47,18 +47,19 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/ethash": "^3.0.3", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1", "lru-cache": "10.1.0" }, - "devDependencies": {}, + "devDependencies": { + "@ethereumjs/ethash": "^3.0.3" + }, "engines": { "node": ">=18" } diff --git a/packages/blockchain/src/blockchain.ts b/packages/blockchain/src/blockchain.ts index ff6d8ac8b0..89db94edda 100644 --- a/packages/blockchain/src/blockchain.ts +++ b/packages/blockchain/src/blockchain.ts @@ -1,5 +1,5 @@ -import { Block, BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, ConsensusAlgorithm, ConsensusType, Hardfork } from '@ethereumjs/common' +import { Block, BlockHeader, createBlock } from '@ethereumjs/block' +import { Common, ConsensusAlgorithm, ConsensusType, Hardfork, Mainnet } from '@ethereumjs/common' import { AsyncEventEmitter, BIGINT_0, @@ -14,7 +14,7 @@ import { equalsBytes, } from '@ethereumjs/util' -import { CasperConsensus, CliqueConsensus, EthashConsensus } from './consensus/index.js' +import { CasperConsensus } from './consensus/casper.js' import { DBOp, DBSaveLookups, @@ -30,6 +30,7 @@ import type { BlockchainInterface, BlockchainOptions, Consensus, + ConsensusDict, OnBlock, } from './types.js' import type { HeaderData } from '@ethereumjs/block' @@ -37,10 +38,16 @@ import type { CliqueConfig } from '@ethereumjs/common' import type { BigIntLike, DB, DBObject, GenesisState } from '@ethereumjs/util' /** - * This class stores and interacts with blocks. + * Blockchain implementation to create and maintain a valid canonical chain + * of block headers or blocks with support for reorgs and the ability to provide + * custom DB backends. + * + * By default consensus validation is not provided since with the switch to + * Proof-of-Stake consensus is validated by the Ethereum consensus layer. + * If consensus validation is desired for Ethash or Clique blockchains the + * optional `consensusDict` option can be used to pass in validation objects. 
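 *
 * A minimal sketch of such a setup (assumption: an Ethash/PoW chain config; the
 * `EthashConsensus` and `Ethash` constructors used here are the ones shown
 * elsewhere in this diff):
 * ```ts
 * import { EthashConsensus, createBlockchain } from '@ethereumjs/blockchain'
 * import { Common, ConsensusAlgorithm, Hardfork, Mainnet } from '@ethereumjs/common'
 * import { Ethash } from '@ethereumjs/ethash'
 * import type { ConsensusDict } from '@ethereumjs/blockchain'
 *
 * const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart })
 * const consensusDict: ConsensusDict = {}
 * consensusDict[ConsensusAlgorithm.Ethash] = new EthashConsensus(new Ethash())
 * // Ethash PoW validation is only performed because validateConsensus is set
 * const blockchain = await createBlockchain({ common, validateConsensus: true, consensusDict })
 * ```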
*/ export class Blockchain implements BlockchainInterface { - consensus: Consensus db: DB dbManager: DBManager events: AsyncEventEmitter @@ -70,8 +77,9 @@ export class Blockchain implements BlockchainInterface { public readonly common: Common private _hardforkByHeadBlockNumber: boolean - private readonly _validateConsensus: boolean private readonly _validateBlocks: boolean + private readonly _validateConsensus: boolean + private _consensusDict: ConsensusDict /** * This is used to track which canonical blocks are deleted. After a method calls @@ -94,7 +102,7 @@ export class Blockchain implements BlockchainInterface { if (opts.common) { this.common = opts.common } else { - const DEFAULT_CHAIN = Chain.Mainnet + const DEFAULT_CHAIN = Mainnet const DEFAULT_HARDFORK = Hardfork.Chainstart this.common = new Common({ chain: DEFAULT_CHAIN, @@ -103,8 +111,8 @@ export class Blockchain implements BlockchainInterface { } this._hardforkByHeadBlockNumber = opts.hardforkByHeadBlockNumber ?? false - this._validateConsensus = opts.validateConsensus ?? true this._validateBlocks = opts.validateBlocks ?? true + this._validateConsensus = opts.validateConsensus ?? false this._customGenesisState = opts.genesisState this.db = opts.db !== undefined ? opts.db : new MapDB() @@ -113,38 +121,13 @@ export class Blockchain implements BlockchainInterface { this.events = new AsyncEventEmitter() - if (opts.consensus) { - this.consensus = opts.consensus - } else { - switch (this.common.consensusAlgorithm()) { - case ConsensusAlgorithm.Casper: - this.consensus = new CasperConsensus() - break - case ConsensusAlgorithm.Clique: - this.consensus = new CliqueConsensus() - break - case ConsensusAlgorithm.Ethash: - this.consensus = new EthashConsensus() - break - default: - throw new Error(`consensus algorithm ${this.common.consensusAlgorithm()} not supported`) - } - } + this._consensusDict = {} + this._consensusDict[ConsensusAlgorithm.Casper] = new CasperConsensus() - if (this._validateConsensus) { - if (this.common.consensusType() === ConsensusType.ProofOfWork) { - if (this.common.consensusAlgorithm() !== ConsensusAlgorithm.Ethash) { - throw new Error('consensus validation only supported for pow ethash algorithm') - } - } - if (this.common.consensusType() === ConsensusType.ProofOfAuthority) { - if (this.common.consensusAlgorithm() !== ConsensusAlgorithm.Clique) { - throw new Error( - 'consensus (signature) validation only supported for poa clique algorithm' - ) - } - } + if (opts.consensusDict !== undefined) { + this._consensusDict = { ...this._consensusDict, ...opts.consensusDict } } + this._consensusCheck() this._heads = {} @@ -155,6 +138,22 @@ export class Blockchain implements BlockchainInterface { } } + private _consensusCheck() { + if (this._validateConsensus && this.consensus === undefined) { + throw new Error( + `Consensus object for ${this.common.consensusAlgorithm()} must be passed (see consensusDict option) if consensus validation is activated`, + ) + } + } + + /** + * Returns an eventual consensus object matching the current consensus algorithm from Common + * or undefined if non available + */ + get consensus(): Consensus | undefined { + return this._consensusDict[this.common.consensusAlgorithm()] + } + /** * Returns a deep copy of this {@link Blockchain} instance. 
* @@ -169,7 +168,7 @@ export class Blockchain implements BlockchainInterface { shallowCopy(): Blockchain { const copiedBlockchain = Object.create( Object.getPrototypeOf(this), - Object.getOwnPropertyDescriptors(this) + Object.getOwnPropertyDescriptors(this), ) copiedBlockchain.common = this.common.copy() return copiedBlockchain @@ -319,14 +318,13 @@ export class Blockchain implements BlockchainInterface { throw new Error(`no block for ${canonicalHead} found in DB`) } const header = await this._getHeader(hash, canonicalHead) - const td = await this.getParentTD(header) const dbOps: DBOp[] = [] await this._deleteCanonicalChainReferences(canonicalHead + BIGINT_1, hash, dbOps) const ops = dbOps.concat(this._saveHeadOps()) await this.dbManager.batch(ops) - await this.checkAndTransitionHardForkByNumber(canonicalHead, td, header.timestamp) + await this.checkAndTransitionHardForkByNumber(canonicalHead, header.timestamp) }) if (this._deletedBlocks.length > 0) { this.events.emit('deletedCanonicalBlocks', this._deletedBlocks) @@ -367,7 +365,7 @@ export class Blockchain implements BlockchainInterface { return } throw new Error( - 'Cannot put a different genesis block than current blockchain genesis: create a new Blockchain' + 'Cannot put a different genesis block than current blockchain genesis: create a new Blockchain', ) } @@ -380,7 +378,7 @@ export class Blockchain implements BlockchainInterface { if (block.common.chainId() !== this.common.chainId()) { throw new Error( - `Chain mismatch while trying to put block or header. Chain ID of block: ${block.common.chainId}, chain ID of blockchain : ${this.common.chainId}` + `Chain mismatch while trying to put block or header. Chain ID of block: ${block.common.chainId}, chain ID of blockchain : ${this.common.chainId}`, ) } @@ -390,7 +388,7 @@ export class Blockchain implements BlockchainInterface { } if (this._validateConsensus) { - await this.consensus.validateConsensus(block) + await this.consensus!.validateConsensus(block) } // set total difficulty in the current context scope @@ -430,7 +428,7 @@ export class Blockchain implements BlockchainInterface { this._headBlockHash = blockHash } if (this._hardforkByHeadBlockNumber) { - await this.checkAndTransitionHardForkByNumber(blockNumber, parentTd, header.timestamp) + await this.checkAndTransitionHardForkByNumber(blockNumber, header.timestamp) } // delete higher number assignments and overwrite stale canonical chain @@ -453,9 +451,9 @@ export class Blockchain implements BlockchainInterface { const ops = dbOps.concat(this._saveHeadOps()) await this.dbManager.batch(ops) - await this.consensus.newBlock(block, commonAncestor, ancestorHeaders) + await this.consensus?.newBlock(block, commonAncestor, ancestorHeaders) } catch (e) { - // restore head to the previouly sane state + // restore head to the previously sane state this._heads = oldHeads this._headHeaderHash = oldHeadHeaderHash this._headBlockHash = oldHeadBlockHash @@ -499,7 +497,7 @@ export class Blockchain implements BlockchainInterface { throw new Error(`invalid timestamp ${header.errorStr()}`) } - if (!(header.common.consensusType() === 'pos')) await this.consensus.validateDifficulty(header) + if (!(header.common.consensusType() === 'pos')) await this.consensus?.validateDifficulty(header) if (this.common.consensusAlgorithm() === ConsensusAlgorithm.Clique) { const period = (this.common.consensusConfig() as CliqueConfig).period @@ -516,7 +514,7 @@ export class Blockchain implements BlockchainInterface { if (!(dif < BIGINT_8 && dif > BIGINT_1)) { throw new 
Error( - `uncle block has a parent that is too old or too young ${header.errorStr()}` + `uncle block has a parent that is too old or too young ${header.errorStr()}`, ) } } @@ -528,7 +526,7 @@ export class Blockchain implements BlockchainInterface { const londonHfBlock = this.common.hardforkBlock(Hardfork.London) const isInitialEIP1559Block = number === londonHfBlock if (isInitialEIP1559Block) { - expectedBaseFee = header.common.param('gasConfig', 'initialBaseFee') + expectedBaseFee = header.common.param('initialBaseFee') } else { expectedBaseFee = parentHeader.calcNextBaseFee() } @@ -638,7 +636,7 @@ export class Blockchain implements BlockchainInterface { if (!canonicalChainHashes[parentHash]) { throw new Error( - `The parent hash of the uncle header is not part of the canonical chain ${block.errorStr()}` + `The parent hash of the uncle header is not part of the canonical chain ${block.errorStr()}`, ) } @@ -713,7 +711,7 @@ export class Blockchain implements BlockchainInterface { blockId: Uint8Array | bigint | number, maxBlocks: number, skip: number, - reverse: boolean + reverse: boolean, ): Promise { return this.runWithLock(async () => { const blocks: Block[] = [] @@ -854,7 +852,7 @@ export class Blockchain implements BlockchainInterface { blockHash: Uint8Array, blockNumber: bigint, headHash: Uint8Array | null, - ops: DBOp[] + ops: DBOp[], ) { // delete header, body, hash to number mapping and td ops.push(DBOp.del(DBTarget.Header, { blockHash, blockNumber })) @@ -902,7 +900,7 @@ export class Blockchain implements BlockchainInterface { name: string, onBlock: OnBlock, maxBlocks?: number, - releaseLockOnCallback?: boolean + releaseLockOnCallback?: boolean, ): Promise { return this.runWithLock(async (): Promise => { let headHash = this._heads[name] ?? this.genesisBlock.hash() @@ -942,9 +940,9 @@ export class Blockchain implements BlockchainInterface { } finally { if (releaseLockOnCallback === true) { await this._lock.acquire() - // If lock was released check if reorg occured + // If lock was released check if reorg occurred const nextBlockMayBeReorged = await this.getBlock(nextBlockNumber).catch( - (_e) => null + (_e) => null, ) reorgWhileOnBlock = nextBlockMayBeReorged ? 
!equalsBytes(nextBlockMayBeReorged.hash(), nextBlock.hash()) @@ -1042,7 +1040,7 @@ export class Blockchain implements BlockchainInterface { private async _deleteCanonicalChainReferences( blockNumber: bigint, headHash: Uint8Array, - ops: DBOp[] + ops: DBOp[], ) { try { let hash: Uint8Array | false @@ -1178,7 +1176,7 @@ export class Blockchain implements BlockchainInterface { // LevelDB doesn't handle Uint8Arrays properly when they are part // of a JSON object being stored as a value in the DB const hexHeads = Object.fromEntries( - Object.entries(this._heads).map((entry) => [entry[0], bytesToUnprefixedHex(entry[1])]) + Object.entries(this._heads).map((entry) => [entry[0], bytesToUnprefixedHex(entry[1])]), ) return [ DBOp.set(DBTarget.Heads, hexHeads), @@ -1212,40 +1210,16 @@ export class Blockchain implements BlockchainInterface { async checkAndTransitionHardForkByNumber( number: BigIntLike, - td?: BigIntLike, - timestamp?: BigIntLike + timestamp?: BigIntLike, ): Promise { this.common.setHardforkBy({ blockNumber: number, - td, timestamp, }) - // If custom consensus algorithm is used, skip merge hardfork consensus checks - if (!Object.values(ConsensusAlgorithm).includes(this.consensus.algorithm as ConsensusAlgorithm)) - return - - switch (this.common.consensusAlgorithm()) { - case ConsensusAlgorithm.Casper: - if (!(this.consensus instanceof CasperConsensus)) { - this.consensus = new CasperConsensus() - } - break - case ConsensusAlgorithm.Clique: - if (!(this.consensus instanceof CliqueConsensus)) { - this.consensus = new CliqueConsensus() - } - break - case ConsensusAlgorithm.Ethash: - if (!(this.consensus instanceof EthashConsensus)) { - this.consensus = new EthashConsensus() - } - break - default: - throw new Error(`consensus algorithm ${this.common.consensusAlgorithm()} not supported`) - } - await this.consensus.setup({ blockchain: this }) - await this.consensus.genesisInit(this.genesisBlock) + this._consensusCheck() + await this.consensus?.setup({ blockchain: this }) + await this.consensus?.genesisInit(this.genesisBlock) } /** @@ -1286,7 +1260,6 @@ export class Blockchain implements BlockchainInterface { const common = this.common.copy() common.setHardforkBy({ blockNumber: 0, - td: BigInt(common.genesis().difficulty), timestamp: common.genesis().timestamp, }) @@ -1298,16 +1271,16 @@ export class Blockchain implements BlockchainInterface { } if (common.consensusType() === 'poa') { if (common.genesis().extraData) { - // Ensure exta data is populated from genesis data if provided + // Ensure extra data is populated from genesis data if provided header.extraData = common.genesis().extraData } else { // Add required extraData (32 bytes vanity + 65 bytes filled with zeroes header.extraData = concatBytes(new Uint8Array(32), new Uint8Array(65)) } } - return createBlockFromBlockData( + return createBlock( { header, withdrawals: common.isActivatedEIP(4895) ? 
[] : undefined }, - { common } + { common }, ) } } diff --git a/packages/blockchain/src/consensus/casper.ts b/packages/blockchain/src/consensus/casper.ts index 1cd3779d64..8d006e7367 100644 --- a/packages/blockchain/src/consensus/casper.ts +++ b/packages/blockchain/src/consensus/casper.ts @@ -21,6 +21,9 @@ export class CasperConsensus implements Consensus { public async validateConsensus(): Promise {} public async validateDifficulty(header: BlockHeader): Promise { + // TODO: This is not really part of consensus validation and it should be analyzed + // if it is possible to replace by a more generic hardfork check between block and + // blockchain along adding new blocks or headers if (header.difficulty !== BIGINT_0) { const msg = 'invalid difficulty. PoS blocks must have difficulty 0' throw new Error(`${msg} ${header.errorStr()}`) diff --git a/packages/blockchain/src/consensus/clique.ts b/packages/blockchain/src/consensus/clique.ts index 9f59a7a95b..c7d78cbf8c 100644 --- a/packages/blockchain/src/consensus/clique.ts +++ b/packages/blockchain/src/consensus/clique.ts @@ -1,3 +1,9 @@ +import { + cliqueEpochTransitionSigners, + cliqueIsEpochTransition, + cliqueSigner, + cliqueVerifySignature, +} from '@ethereumjs/block' import { ConsensusAlgorithm } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { @@ -42,7 +48,7 @@ type CliqueLatestSignerStates = CliqueSignerState[] // Clique Vote type CliqueVote = [ blockNumber: bigint, - vote: [signer: Address, beneficiary: Address, cliqueNonce: Uint8Array] + vote: [signer: Address, beneficiary: Address, cliqueNonce: Uint8Array], ] type CliqueLatestVotes = CliqueVote[] @@ -114,7 +120,7 @@ export class CliqueConsensus implements Consensus { /** * - * @param param dictionary containin a {@link Blockchain} object + * @param param dictionary containing a {@link Blockchain} object * * Note: this method must be called before consensus checks are used or type errors will occur */ @@ -136,7 +142,7 @@ export class CliqueConsensus implements Consensus { } const { header } = block - const valid = header.cliqueVerifySignature(this.cliqueActiveSigners(header.number)) + const valid = cliqueVerifySignature(header, this.cliqueActiveSigners(header.number)) if (!valid) { throw new Error('invalid PoA block signature (clique)') } @@ -145,16 +151,16 @@ export class CliqueConsensus implements Consensus { } // validate checkpoint signers towards active signers on epoch transition blocks - if (header.cliqueIsEpochTransition()) { + if (cliqueIsEpochTransition(header)) { // note: keep votes on epoch transition blocks in case of reorgs. 
// only active (non-stale) votes will counted (if vote.blockNumber >= lastEpochBlockNumber - const checkpointSigners = header.cliqueEpochTransitionSigners() + const checkpointSigners = cliqueEpochTransitionSigners(header) const activeSigners = this.cliqueActiveSigners(header.number) for (const [i, cSigner] of checkpointSigners.entries()) { if (activeSigners[i]?.equals(cSigner) !== true) { throw new Error( - `checkpoint signer not found in active signers list at index ${i}: ${cSigner}` + `checkpoint signer not found in active signers list at index ${i}: ${cSigner}`, ) } } @@ -178,7 +184,7 @@ export class CliqueConsensus implements Consensus { throw new Error(`${msg} ${header.errorStr()}`) } const signerIndex = signers.findIndex((address: Address) => - address.equals(header.cliqueSigner()) + address.equals(cliqueSigner(header)), ) const inTurn = header.number % BigInt(signers.length) === BigInt(signerIndex) if ( @@ -211,7 +217,7 @@ export class CliqueConsensus implements Consensus { private async cliqueSaveGenesisSigners(genesisBlock: Block) { const genesisSignerState: CliqueSignerState = [ BIGINT_0, - genesisBlock.header.cliqueEpochTransitionSigners(), + cliqueEpochTransitionSigners(genesisBlock.header), ] await this.cliqueUpdateSignerStates(genesisSignerState) debug(`[Block 0] Genesis block -> update signer states`) @@ -280,7 +286,7 @@ export class CliqueConsensus implements Consensus { private async cliqueUpdateVotes(header?: BlockHeader) { // Block contains a vote on a new signer if (header && !header.coinbase.isZero()) { - const signer = header.cliqueSigner() + const signer = cliqueSigner(header) const beneficiary = header.coinbase const nonce = header.nonce const latestVote: CliqueVote = [header.number, [signer, beneficiary, nonce]] @@ -336,7 +342,7 @@ export class CliqueConsensus implements Consensus { }) // Discard votes for added signer this._cliqueLatestVotes = this._cliqueLatestVotes.filter( - (vote) => !vote[1][1].equals(beneficiary) + (vote) => !vote[1][1].equals(beneficiary), ) debug(`[Block ${header.number}] Clique majority consensus (AUTH ${beneficiary})`) } @@ -370,7 +376,7 @@ export class CliqueConsensus implements Consensus { activeSigners = activeSigners.filter((signer) => !signer.equals(beneficiary)) this._cliqueLatestVotes = this._cliqueLatestVotes.filter( // Discard votes from removed signer and for removed signer - (vote) => !vote[1][0].equals(beneficiary) && !vote[1][1].equals(beneficiary) + (vote) => !vote[1][0].equals(beneficiary) && !vote[1][1].equals(beneficiary), ) debug(`[Block ${header.number}] Clique majority consensus (DROP ${beneficiary})`) } @@ -381,17 +387,17 @@ export class CliqueConsensus implements Consensus { debug( `[Block ${header.number}] New clique vote: ${signer} -> ${beneficiary} ${ equalsBytes(nonce, CLIQUE_NONCE_AUTH) ? 
'AUTH' : 'DROP' - }` + }`, ) } if (consensus) { if (round === 1) { debug( - `[Block ${header.number}] Clique majority consensus on existing votes -> update signer states` + `[Block ${header.number}] Clique majority consensus on existing votes -> update signer states`, ) } else { debug( - `[Block ${header.number}] Clique majority consensus on new vote -> update signer states` + `[Block ${header.number}] Clique majority consensus on new vote -> update signer states`, ) } const newSignerState: CliqueSignerState = [header.number, activeSigners] @@ -469,8 +475,8 @@ export class CliqueConsensus implements Consensus { // we do not have a complete picture of the state to verify if too recently signed return false } - signers.push([header.number, header.cliqueSigner()]) - const seen = signers.filter((s) => s[1].equals(header.cliqueSigner())).length + signers.push([header.number, cliqueSigner(header)]) + const seen = signers.filter((s) => s[1].equals(cliqueSigner(header))).length return seen > 1 } @@ -483,7 +489,7 @@ export class CliqueConsensus implements Consensus { // remove blockNumber from clique snapshots // (latest signer states, latest votes, latest block signers) this._cliqueLatestSignerStates = this._cliqueLatestSignerStates.filter( - (s) => s[0] <= blockNumber + (s) => s[0] <= blockNumber, ) await this.cliqueUpdateSignerStates() @@ -491,7 +497,7 @@ export class CliqueConsensus implements Consensus { await this.cliqueUpdateVotes() this._cliqueLatestBlockSigners = this._cliqueLatestBlockSigners.filter( - (s) => s[0] <= blockNumber + (s) => s[0] <= blockNumber, ) await this.cliqueUpdateLatestBlockSigners() } @@ -509,7 +515,7 @@ export class CliqueConsensus implements Consensus { return } // add this block's signer - const signer: CliqueBlockSigner = [header.number, header.cliqueSigner()] + const signer: CliqueBlockSigner = [header.number, cliqueSigner(header)] this._cliqueLatestBlockSigners.push(signer) // trim length to `this.cliqueSignerLimit()` @@ -518,7 +524,7 @@ export class CliqueConsensus implements Consensus { if (length > limit) { this._cliqueLatestBlockSigners = this._cliqueLatestBlockSigners.slice( length - limit, - length + length, ) } } @@ -541,8 +547,8 @@ export class CliqueConsensus implements Consensus { const states = RLP.decode(signerStates as Uint8Array) as [Uint8Array, Uint8Array[]] return states.map((state) => { const blockNum = bytesToBigInt(state[0] as Uint8Array) - const addrs = (state[1]).map((bytes: Uint8Array) => new Address(bytes)) - return [blockNum, addrs] + const addresses = (state[1]).map((bytes: Uint8Array) => new Address(bytes)) + return [blockNum, addresses] }) as CliqueLatestSignerStates } @@ -555,7 +561,7 @@ export class CliqueConsensus implements Consensus { if (signerVotes === undefined) return [] const votes = RLP.decode(signerVotes as Uint8Array) as [ Uint8Array, - [Uint8Array, Uint8Array, Uint8Array] + [Uint8Array, Uint8Array, Uint8Array], ] return votes.map((vote) => { const blockNum = bytesToBigInt(vote[0] as Uint8Array) @@ -587,7 +593,7 @@ export class CliqueConsensus implements Consensus { * @hidden */ private async _cliqueBuildSnapshots(header: BlockHeader) { - if (!header.cliqueIsEpochTransition()) { + if (!cliqueIsEpochTransition(header)) { await this.cliqueUpdateVotes(header) } await this.cliqueUpdateLatestBlockSigners(header) diff --git a/packages/blockchain/src/consensus/ethash.ts b/packages/blockchain/src/consensus/ethash.ts index b02f4229c8..5b93014c19 100644 --- a/packages/blockchain/src/consensus/ethash.ts +++ 
b/packages/blockchain/src/consensus/ethash.ts @@ -1,26 +1,28 @@ import { ConsensusAlgorithm } from '@ethereumjs/common' -import { Ethash } from '@ethereumjs/ethash' import type { Blockchain } from '../index.js' import type { Consensus, ConsensusOptions } from '../types.js' import type { Block, BlockHeader } from '@ethereumjs/block' +type MinimalEthashInterface = { + cacheDB?: any + verifyPOW(block: Block): Promise +} + /** * This class encapsulates Ethash-related consensus functionality when used with the Blockchain class. */ export class EthashConsensus implements Consensus { blockchain: Blockchain | undefined algorithm: ConsensusAlgorithm - _ethash: Ethash | undefined + _ethash: MinimalEthashInterface - constructor() { + constructor(ethash: MinimalEthashInterface) { this.algorithm = ConsensusAlgorithm.Ethash + this._ethash = ethash } async validateConsensus(block: Block): Promise { - if (!this._ethash) { - throw new Error('blockchain not provided') - } const valid = await this._ethash.verifyPOW(block) if (!valid) { throw new Error('invalid POW') @@ -44,7 +46,7 @@ export class EthashConsensus implements Consensus { public async genesisInit(): Promise {} public async setup({ blockchain }: ConsensusOptions): Promise { this.blockchain = blockchain - this._ethash = new Ethash(this.blockchain!.db as any) + this._ethash.cacheDB = this.blockchain.db } public async newBlock(): Promise {} } diff --git a/packages/blockchain/src/constructors.ts b/packages/blockchain/src/constructors.ts index ad310b9d8d..824957f63f 100644 --- a/packages/blockchain/src/constructors.ts +++ b/packages/blockchain/src/constructors.ts @@ -1,4 +1,4 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { BIGINT_0, equalsBytes } from '@ethereumjs/util' import { @@ -17,7 +17,7 @@ import type { Chain } from '@ethereumjs/common' export async function createBlockchain(opts: BlockchainOptions = {}) { const blockchain = new Blockchain(opts) - await blockchain.consensus.setup({ blockchain }) + await blockchain.consensus?.setup({ blockchain }) let stateRoot = opts.genesisBlock?.header.stateRoot ?? opts.genesisStateRoot if (stateRoot === undefined) { @@ -26,7 +26,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { } else { stateRoot = await getGenesisStateRoot( Number(blockchain.common.chainId()) as Chain, - blockchain.common + blockchain.common, ) } } @@ -42,7 +42,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { // DB is indeed the Genesis block generated or assigned. if (dbGenesisBlock !== undefined && !equalsBytes(genesisBlock.hash(), dbGenesisBlock.hash())) { throw new Error( - 'The genesis block in the DB has a different hash than the provided genesis block.' 
+ 'The genesis block in the DB has a different hash than the provided genesis block.', ) } @@ -56,7 +56,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { DBSetBlockOrHeader(genesisBlock).map((op) => dbOps.push(op)) DBSaveLookups(genesisHash, BIGINT_0).map((op) => dbOps.push(op)) await blockchain.dbManager.batch(dbOps) - await blockchain.consensus.genesisInit(genesisBlock) + await blockchain.consensus?.genesisInit(genesisBlock) } // At this point, we can safely set the genesis: @@ -78,12 +78,7 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { if (blockchain['_hardforkByHeadBlockNumber']) { const latestHeader = await blockchain['_getHeader'](blockchain['_headHeaderHash']) - const td = await blockchain.getParentTD(latestHeader) - await blockchain.checkAndTransitionHardForkByNumber( - latestHeader.number, - td, - latestHeader.timestamp - ) + await blockchain.checkAndTransitionHardForkByNumber(latestHeader.number, latestHeader.timestamp) } return blockchain @@ -91,18 +86,18 @@ export async function createBlockchain(opts: BlockchainOptions = {}) { /** * Creates a blockchain from a list of block objects, - * objects must be readable by {@link Block.fromBlockData} + * objects must be readable by {@link createBlock} * * @param blockData List of block objects * @param opts Constructor options, see {@link BlockchainOptions} */ export async function createBlockchainFromBlocksData( blocksData: BlockData[], - opts: BlockchainOptions = {} + opts: BlockchainOptions = {}, ) { const blockchain = await createBlockchain(opts) for (const blockData of blocksData) { - const block = createBlockFromBlockData(blockData, { + const block = createBlock(blockData, { common: blockchain.common, setHardfork: true, }) diff --git a/packages/blockchain/src/db/constants.ts b/packages/blockchain/src/db/constants.ts index a7936b804f..5b365937ac 100644 --- a/packages/blockchain/src/db/constants.ts +++ b/packages/blockchain/src/db/constants.ts @@ -32,7 +32,7 @@ const NUM_SUFFIX = utf8ToBytes('n') /** * blockHashPrefix + hash -> number */ -const BLOCK_HASH_PEFIX = utf8ToBytes('H') +const BLOCK_HASH_PREFIX = utf8ToBytes('H') /** * bodyPrefix + number + hash -> block body @@ -55,7 +55,7 @@ const bodyKey = (n: bigint, hash: Uint8Array) => concatBytes(BODY_PREFIX, bytesB const numberToHashKey = (n: bigint) => concatBytes(HEADER_PREFIX, bytesBE8(n), NUM_SUFFIX) -const hashToNumberKey = (hash: Uint8Array) => concatBytes(BLOCK_HASH_PEFIX, hash) +const hashToNumberKey = (hash: Uint8Array) => concatBytes(BLOCK_HASH_PREFIX, hash) /** * @hidden diff --git a/packages/blockchain/src/db/helpers.ts b/packages/blockchain/src/db/helpers.ts index 141f9e7ce3..29685634d2 100644 --- a/packages/blockchain/src/db/helpers.ts +++ b/packages/blockchain/src/db/helpers.ts @@ -38,7 +38,7 @@ function DBSetBlockOrHeader(blockBody: Block | BlockHeader): DBOp[] { DBOp.set(DBTarget.Header, headerValue, { blockNumber, blockHash, - }) + }), ) const isGenesis = header.number === BIGINT_0 @@ -49,7 +49,7 @@ function DBSetBlockOrHeader(blockBody: Block | BlockHeader): DBOp[] { DBOp.set(DBTarget.Body, bodyValue, { blockNumber, blockHash, - }) + }), ) } @@ -73,7 +73,7 @@ function DBSaveLookups(blockHash: Uint8Array, blockNumber: bigint, skipNumIndex? 
ops.push( DBOp.set(DBTarget.HashToNumber, blockNumber8Bytes, { blockHash, - }) + }), ) return ops } diff --git a/packages/blockchain/src/db/manager.ts b/packages/blockchain/src/db/manager.ts index c66310ea5b..42f826c464 100644 --- a/packages/blockchain/src/db/manager.ts +++ b/packages/blockchain/src/db/manager.ts @@ -1,8 +1,6 @@ -import { BlockHeader, createBlockFromValuesArray, valuesArrayToHeaderData } from '@ethereumjs/block' +import { createBlockFromBytesArray, createBlockHeaderFromBytesArray } from '@ethereumjs/block' import { RLP } from '@ethereumjs/rlp' import { - BIGINT_0, - BIGINT_1, KECCAK256_RLP, KECCAK256_RLP_ARRAY, bytesToBigInt, @@ -109,7 +107,7 @@ export class DBManager { let body = await this.getBody(hash, number) // be backward compatible where we didn't use to store a body with no txs, uncles, withdrawals - // otherwise the body is never partially stored and if we have some body, its in entirity + // otherwise the body is never partially stored and if we have some body, its in entirety if (body === undefined) { body = [[], []] as BlockBodyBytes // Do extra validations on the header since we are assuming empty transactions and uncles @@ -142,13 +140,8 @@ export class DBManager { } const blockData = [header.raw(), ...body] as BlockBytes - const opts: BlockOptions = { common: this.common } - if (number === BIGINT_0) { - opts.setHardfork = await this.getTotalDifficulty(hash, BIGINT_0) - } else { - opts.setHardfork = await this.getTotalDifficulty(header.parentHash, number - BIGINT_1) - } - return createBlockFromValuesArray(blockData, opts) + const opts: BlockOptions = { common: this.common, setHardfork: true } + return createBlockFromBytesArray(blockData, opts) } /** @@ -166,17 +159,8 @@ export class DBManager { const encodedHeader = await this.get(DBTarget.Header, { blockHash, blockNumber }) const headerValues = RLP.decode(encodedHeader) - const opts: BlockOptions = { common: this.common } - if (blockNumber === BIGINT_0) { - opts.setHardfork = await this.getTotalDifficulty(blockHash, BIGINT_0) - } else { - // Lets fetch the parent hash but not by number since this block might not - // be in canonical chain - const headerData = valuesArrayToHeaderData(headerValues as Uint8Array[]) - const parentHash = headerData.parentHash as Uint8Array - opts.setHardfork = await this.getTotalDifficulty(parentHash, blockNumber - BIGINT_1) - } - return BlockHeader.fromValuesArray(headerValues as Uint8Array[], opts) + const opts: BlockOptions = { common: this.common, setHardfork: true } + return createBlockHeaderFromBytesArray(headerValues as Uint8Array[], opts) } /** @@ -249,8 +233,8 @@ export class DBManager { op.baseDBOp.type !== undefined ? op.baseDBOp.type : op.baseDBOp.value !== undefined - ? 'put' - : 'del' + ? 
'put' + : 'del' const convertedOp = { key: op.baseDBOp.key, value: op.baseDBOp.value, diff --git a/packages/blockchain/src/db/operation.ts b/packages/blockchain/src/db/operation.ts index f1b66a7b28..a861b9605f 100644 --- a/packages/blockchain/src/db/operation.ts +++ b/packages/blockchain/src/db/operation.ts @@ -114,7 +114,7 @@ export class DBOp { public static set( operationTarget: DBTarget, value: Uint8Array | object, - key?: DatabaseKey + key?: DatabaseKey, ): DBOp { const dbOperation = new DBOp(operationTarget, key) dbOperation.baseDBOp.value = value diff --git a/packages/blockchain/src/helpers.ts b/packages/blockchain/src/helpers.ts index 2d4d6c3046..fc21c891fd 100644 --- a/packages/blockchain/src/helpers.ts +++ b/packages/blockchain/src/helpers.ts @@ -19,12 +19,11 @@ import type { GenesisState } from '@ethereumjs/util' */ export async function genGenesisStateRoot( genesisState: GenesisState, - common: Common + common: Common, ): Promise { const genCommon = common.copy() genCommon.setHardforkBy({ blockNumber: 0, - td: BigInt(genCommon.genesis().difficulty), timestamp: genCommon.genesis().timestamp, }) if (genCommon.isActivatedEIP(6800)) { diff --git a/packages/blockchain/src/types.ts b/packages/blockchain/src/types.ts index 7ae0b75539..0eee200bf8 100644 --- a/packages/blockchain/src/types.ts +++ b/packages/blockchain/src/types.ts @@ -10,7 +10,7 @@ export type BlockchainEvents = { } export interface BlockchainInterface { - consensus: Consensus + consensus: Consensus | undefined /** * Adds a block to the blockchain. * @@ -44,7 +44,7 @@ export interface BlockchainInterface { name: string, onBlock: OnBlock, maxBlocks?: number, - releaseLockOnCallback?: boolean + releaseLockOnCallback?: boolean, ): Promise /** @@ -132,6 +132,10 @@ export interface GenesisOptions { genesisStateRoot?: Uint8Array } +export type ConsensusDict = { + [consensusAlgorithm: ConsensusAlgorithm | string]: Consensus +} + /** * This are the options that the Blockchain constructor can receive. */ @@ -161,17 +165,6 @@ export interface BlockchainOptions extends GenesisOptions { */ db?: DB - /** - * This flags indicates if a block should be validated along the consensus algorithm - * or protocol used by the chain, e.g. by verifying the PoW on the block. - * - * Supported consensus types and algorithms (taken from the `Common` instance): - * - 'pow' with 'ethash' algorithm (validates the proof-of-work) - * - 'poa' with 'clique' algorithm (verifies the block signatures) - * Default: `true`. - */ - validateConsensus?: boolean - /** * This flag indicates if protocol-given consistency checks on * block headers and included uncles and transactions should be performed, @@ -181,9 +174,40 @@ export interface BlockchainOptions extends GenesisOptions { validateBlocks?: boolean /** - * Optional custom consensus that implements the {@link Consensus} class + * Validate the consensus with the respective consensus implementation passed + * to `consensusDict` (see respective option) `CasperConsensus` (which effectively + * does nothing) is available by default. + * + * For the build-in validation classes the following validations take place. + * - 'pow' with 'ethash' algorithm (validates the proof-of-work) + * - 'poa' with 'clique' algorithm (verifies the block signatures) + * Default: `false`. + */ + validateConsensus?: boolean + + /** + * Optional dictionary with consensus objects (adhering to the {@link Consensus} interface) + * if consensus validation is wished for certain consensus algorithms. 
+ * + * Since consensus validation moved to the Ethereum consensus layer with Proof-of-Stake + * consensus is not validated by default. For `ConsensusAlgorithm.Ethash` and + * `ConsensusAlgorithm.Clique` consensus validation can be activated by passing in the + * respective consensus validation objects `EthashConsensus` or `CliqueConsensus`. + * + * ```ts + * import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' + * import type { ConsensusDict } from '@ethereumjs/blockchain' + * + * const consensusDict: ConsensusDict = {} + * consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() + * const blockchain = await createBlockchain({ common, consensusDict }) + * ``` + * + * Additionally it is possible to provide a fully custom consensus implementation. + * Note that this needs a custom `Common` object passed to the blockchain where + * the `ConsensusAlgorithm` string matches the string used here. */ - consensus?: Consensus + consensusDict?: ConsensusDict } /** @@ -219,7 +243,7 @@ export interface Consensus { newBlock( block: Block, commonAncestor?: BlockHeader, - ancientHeaders?: BlockHeader[] + ancientHeaders?: BlockHeader[], ): Promise } diff --git a/packages/blockchain/test/blockValidation.spec.ts b/packages/blockchain/test/blockValidation.spec.ts index 2cdef214a8..0a9a57732e 100644 --- a/packages/blockchain/test/blockValidation.spec.ts +++ b/packages/blockchain/test/blockValidation.spec.ts @@ -1,26 +1,29 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlock, createBlockHeader } from '@ethereumjs/block' +import { Common, ConsensusAlgorithm, Hardfork, Mainnet } from '@ethereumjs/common' +import { Ethash } from '@ethereumjs/ethash' import { RLP } from '@ethereumjs/rlp' import { KECCAK256_RLP, bytesToHex, randomBytes } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' import { assert, describe, expect, it } from 'vitest' -import { createBlockchain } from '../src/index.js' +import { EthashConsensus, createBlockchain } from '../src/index.js' -import { createBlock } from './util.js' +import { generateBlock } from './util.js' + +import type { ConsensusDict } from '../src/index.js' describe('[Blockchain]: Block validation tests', () => { it('should throw if an uncle is included before', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock - const uncleBlock = createBlock(genesis, 'uncle', [], common) + const uncleBlock = generateBlock(genesis, 'uncle', [], common) - const block1 = createBlock(genesis, 'block1', [], common) - const block2 = createBlock(block1, 'block2', [uncleBlock.header], common) - const block3 = createBlock(block2, 'block3', [uncleBlock.header], common) + const block1 = generateBlock(genesis, 'block1', [], common) + const block2 = generateBlock(block1, 'block2', [uncleBlock.header], common) + const block3 = generateBlock(block2, 'block3', [uncleBlock.header], common) await blockchain.putBlock(uncleBlock) await blockchain.putBlock(block1) @@ -32,23 +35,23 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('uncle is already included'), - 'block throws if uncle 
is already included' + 'block throws if uncle is already included', ) } }) it('should throw if the uncle parent block is not part of the canonical chain', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock - const emptyBlock = createBlockFromBlockData({ header: { number: BigInt(1) } }, { common }) + const emptyBlock = createBlock({ header: { number: BigInt(1) } }, { common }) - const uncleBlock = createBlock(emptyBlock, 'uncle', [], common) - const block1 = createBlock(genesis, 'block1', [], common) - const block2 = createBlock(block1, 'block2', [], common) - const block3 = createBlock(block2, 'block3', [uncleBlock.header], common) + const uncleBlock = generateBlock(emptyBlock, 'uncle', [], common) + const block1 = generateBlock(genesis, 'block1', [], common) + const block2 = generateBlock(block1, 'block2', [], common) + const block3 = generateBlock(block2, 'block3', [uncleBlock.header], common) await blockchain.putBlock(block1) await blockchain.putBlock(block2) @@ -59,31 +62,31 @@ describe('[Blockchain]: Block validation tests', () => { } catch (err: any) { assert.ok( err.message.includes('not found in DB'), - 'block throws if uncle parent hash is not part of the canonical chain' + 'block throws if uncle parent hash is not part of the canonical chain', ) } }) it('should throw if the uncle is too old', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock - const uncleBlock = createBlock(genesis, 'uncle', [], common) + const uncleBlock = generateBlock(genesis, 'uncle', [], common) let lastBlock = genesis for (let i = 0; i < 7; i++) { - const block = createBlock(lastBlock, 'block' + i.toString(), [], common) + const block = generateBlock(lastBlock, 'block' + i.toString(), [], common) await blockchain.putBlock(block) lastBlock = block } - const blockWithUnclesTooOld = createBlock( + const blockWithUnclesTooOld = generateBlock( lastBlock, 'too-old-uncle', [uncleBlock.header], - common + common, ) try { @@ -92,19 +95,19 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('uncle block has a parent that is too old'), - 'block throws uncle is too old' + 'block throws uncle is too old', ) } }) it('should throw if uncle is too young', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock - const uncleBlock = createBlock(genesis, 'uncle', [], common) - const block1 = createBlock(genesis, 'block1', [uncleBlock.header], common) + const uncleBlock = generateBlock(genesis, 'uncle', [], common) + const block1 = generateBlock(genesis, 'block1', [uncleBlock.header], common) await blockchain.putBlock(uncleBlock) @@ -114,18 +117,20 @@ describe('[Blockchain]: 
Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('uncle block has a parent that is too old or too young'), - 'block throws uncle is too young' + 'block throws uncle is too young', ) } }) it('should throw if the uncle header is invalid', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const consensusDict: ConsensusDict = {} + consensusDict[ConsensusAlgorithm.Ethash] = new EthashConsensus(new Ethash()) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common, validateConsensus: false, consensusDict }) const genesis = blockchain.genesisBlock - const uncleBlock = createBlockFromBlockData( + const uncleBlock = createBlock( { header: { number: genesis.header.number + BigInt(1), @@ -134,11 +139,11 @@ describe('[Blockchain]: Block validation tests', () => { gasLimit: BigInt(5000), }, }, - { common } + { common }, ) - const block1 = createBlock(genesis, 'block1', [], common) - const block2 = createBlock(block1, 'block2', [uncleBlock.header], common) + const block1 = generateBlock(genesis, 'block1', [], common) + const block2 = generateBlock(block1, 'block2', [uncleBlock.header], common) await blockchain.putBlock(block1) @@ -148,19 +153,19 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('invalid difficulty block header number=1 '), - 'block throws when uncle header is invalid' + 'block throws when uncle header is invalid', ) } }) it('throws if uncle is a canonical block', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock - const block1 = createBlock(genesis, 'block1', [], common) - const block2 = createBlock(block1, 'block2', [block1.header], common) + const block1 = generateBlock(genesis, 'block1', [], common) + const block2 = generateBlock(block1, 'block2', [block1.header], common) await blockchain.putBlock(block1) @@ -171,40 +176,40 @@ describe('[Blockchain]: Block validation tests', () => { } catch (e: any) { assert.ok( e.message.includes('The uncle is a canonical block'), - 'block throws if an uncle is a canonical block' + 'block throws if an uncle is a canonical block', ) } }) it('successfully validates uncles', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock - const uncleBlock = createBlock(genesis, 'uncle', [], common) + const uncleBlock = generateBlock(genesis, 'uncle', [], common) await blockchain.putBlock(uncleBlock) - const block1 = createBlock(genesis, 'block1', [], common) - const block2 = createBlock(block1, 'block2', [uncleBlock.header], common) + const block1 = generateBlock(genesis, 'block1', [], common) + const block2 = generateBlock(block1, 'block2', [uncleBlock.header], common) await blockchain.putBlock(block1) await blockchain.putBlock(block2) assert.deepEqual( (await 
blockchain.getCanonicalHeadHeader()).uncleHash, block2.header.uncleHash, - 'uncle blocks validated successfully' + 'uncle blocks validated successfully', ) }) it('EIP1559 base fee tests', async () => { const common = new Common({ eips: [1559], - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, }) - const blockchain = await createBlockchain({ common, validateConsensus: false }) + const blockchain = await createBlockchain({ common }) const genesis = blockchain.genesisBlock // Small hack to hack in the activation block number @@ -219,7 +224,7 @@ describe('[Blockchain]: Block validation tests', () => { return BigInt(0) } - const header = BlockHeader.fromHeaderData( + const header = createBlockHeader( { number: BigInt(1), parentHash: genesis.hash(), @@ -231,13 +236,13 @@ describe('[Blockchain]: Block validation tests', () => { calcDifficultyFromHeader: genesis.header, common, freeze: false, - } + }, ) - const block = createBlockFromBlockData({ header }, { common }) + const block = createBlock({ header }, { common }) await blockchain.putBlock(block) try { - const header = BlockHeader.fromHeaderData( + const header = createBlockHeader( { number: BigInt(2), parentHash: block.hash(), @@ -248,15 +253,15 @@ describe('[Blockchain]: Block validation tests', () => { { calcDifficultyFromHeader: block.header, common, - } + }, ) - const block2 = createBlockFromBlockData({ header }, { common }) + const block2 = createBlock({ header }, { common }) await blockchain.putBlock(block2) } catch (e: any) { const expectedError = 'Invalid block: base fee not correct' assert.ok( (e.message as string).includes(expectedError), - 'should throw when base fee is not correct' + 'should throw when base fee is not correct', ) } }) @@ -287,7 +292,7 @@ describe('[Blockchain]: Block validation tests', () => { * It is tested that common does not change */ - const common = new Common({ chain: Chain.Mainnet }) + const common = new Common({ chain: Mainnet }) common.hardforkBlock = function (hardfork: string | undefined) { if (hardfork === 'london') { return BigInt(4) @@ -300,14 +305,13 @@ describe('[Blockchain]: Block validation tests', () => { const blockchain = await createBlockchain({ common, - validateConsensus: false, validateBlocks: false, }) common.setHardfork(Hardfork.Berlin) const mainnetForkBlock = common.hardforkBlock(Hardfork.London) - const rootBlock = createBlockFromBlockData( + const rootBlock = createBlock( { header: { parentHash: blockchain.genesisBlock.hash(), @@ -315,23 +319,28 @@ describe('[Blockchain]: Block validation tests', () => { gasLimit: BigInt(5000), }, }, - { common } + { common }, ) await blockchain.putBlock(rootBlock) - const unclePreFork = createBlock(rootBlock, 'unclePreFork', [], common) - const canonicalBlock = createBlock(rootBlock, 'canonicalBlock', [], common) + const unclePreFork = generateBlock(rootBlock, 'unclePreFork', [], common) + const canonicalBlock = generateBlock(rootBlock, 'canonicalBlock', [], common) await blockchain.putBlock(canonicalBlock) - const preForkBlock = createBlock(canonicalBlock, 'preForkBlock', [unclePreFork.header], common) + const preForkBlock = generateBlock( + canonicalBlock, + 'preForkBlock', + [unclePreFork.header], + common, + ) await blockchain.putBlock(preForkBlock) assert.deepEqual( (await blockchain.getCanonicalHeadHeader()).uncleHash, preForkBlock.header.uncleHash, - 'able to put pre-london block in chain with pre-london uncles' + 'able to put pre-london block in chain with pre-london uncles', ) common.setHardfork(Hardfork.London) - const 
forkBlock = createBlock(preForkBlock, 'forkBlock', [], common) + const forkBlock = generateBlock(preForkBlock, 'forkBlock', [], common) await blockchain.putBlock(forkBlock) assert.equal(common.hardfork(), Hardfork.London, 'validation did not change common hardfork') @@ -339,13 +348,13 @@ describe('[Blockchain]: Block validation tests', () => { const uncleHeaderData = unclePreFork.header.toJSON() uncleHeaderData.extraData = '0xffff' - const uncleHeader = BlockHeader.fromHeaderData(uncleHeaderData, { - common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }), + const uncleHeader = createBlockHeader(uncleHeaderData, { + common: new Common({ chain: Mainnet, hardfork: Hardfork.Berlin }), }) forkBlockHeaderData.uncleHash = bytesToHex(keccak256(RLP.encode([uncleHeader.raw()]))) - const forkBlock_ValidCommon = createBlockFromBlockData( + const forkBlock_ValidCommon = createBlock( { header: forkBlockHeaderData, uncleHeaders: [uncleHeaderData], @@ -353,19 +362,19 @@ describe('[Blockchain]: Block validation tests', () => { { common, setHardfork: false, - } + }, ) assert.deepEqual( forkBlock_ValidCommon.uncleHeaders[0].hash(), uncleHeader.hash(), - 'successfully validated a pre-london uncle on a london block' + 'successfully validated a pre-london uncle on a london block', ) assert.equal(common.hardfork(), Hardfork.London, 'validation did not change common hardfork') assert.doesNotThrow( () => - createBlockFromBlockData( + createBlock( { header: forkBlockHeaderData, uncleHeaders: [uncleHeaderData], @@ -373,9 +382,9 @@ describe('[Blockchain]: Block validation tests', () => { { common, setHardfork: false, - } + }, ), - 'should create block even with pre-London uncle and common evaluated with london since uncle is given default base fee' + 'should create block even with pre-London uncle and common evaluated with london since uncle is given default base fee', ) assert.equal(common.hardfork(), Hardfork.London, 'validation did not change common hardfork') }) @@ -383,15 +392,14 @@ describe('[Blockchain]: Block validation tests', () => { describe('EIP 7685: requests field validation tests', () => { it('should throw when putting a block with an invalid requestsRoot', async () => { const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Cancun, eips: [7685, 1559, 4895, 4844, 4788], }) const blockchain = await createBlockchain({ common, - validateConsensus: false, }) - const block = createBlockFromBlockData( + const block = createBlock( { header: { number: 1n, @@ -402,12 +410,12 @@ describe('EIP 7685: requests field validation tests', () => { gasLimit: 5000, }, }, - { common } + { common }, ) await expect(async () => blockchain.putBlock(block)).rejects.toThrow('invalid requestsRoot') - const blockWithRequest = createBlockFromBlockData( + const blockWithRequest = createBlock( { header: { number: 1n, @@ -419,10 +427,10 @@ describe('EIP 7685: requests field validation tests', () => { }, requests: [{ type: 0x1, bytes: randomBytes(12), serialize: () => randomBytes(32) } as any], }, - { common } + { common }, ) await expect(async () => blockchain.putBlock(blockWithRequest)).rejects.toThrow( - 'invalid requestsRoot' + 'invalid requestsRoot', ) }) }) diff --git a/packages/blockchain/test/clique.spec.ts b/packages/blockchain/test/clique.spec.ts index 832a8b2b7e..08effb50b7 100644 --- a/packages/blockchain/test/clique.spec.ts +++ b/packages/blockchain/test/clique.spec.ts @@ -1,24 +1,33 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' import { - Chain, + 
cliqueEpochTransitionSigners, + createBlock, + createSealedCliqueBlock, +} from '@ethereumjs/block' +import { Common, ConsensusAlgorithm, ConsensusType, + Goerli, Hardfork, createCustomCommon, } from '@ethereumjs/common' -import { Address, concatBytes, hexToBytes } from '@ethereumjs/util' +import { + Address, + concatBytes, + createAddressFromString, + createZeroAddress, + hexToBytes, +} from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { CLIQUE_NONCE_AUTH, CLIQUE_NONCE_DROP } from '../src/consensus/clique.js' +import { CLIQUE_NONCE_AUTH, CLIQUE_NONCE_DROP, CliqueConsensus } from '../src/consensus/clique.js' import { createBlockchain } from '../src/index.js' -import type { CliqueConsensus } from '../src/consensus/clique.js' -import type { Blockchain } from '../src/index.js' +import type { Blockchain, ConsensusDict } from '../src/index.js' import type { Block } from '@ethereumjs/block' import type { CliqueConfig } from '@ethereumjs/common' -const COMMON = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) +const COMMON = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) const EXTRA_DATA = new Uint8Array(97) const GAS_LIMIT = BigInt(8000000) @@ -32,7 +41,7 @@ const A: Signer = { address: new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), privateKey: hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), publicKey: hexToBytes( - '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195' + '0x40b2ebdf4b53206d2d3d3d59e7e2f13b1ea68305aec71d5d24cefe7f24ecae886d241f9267f04702d7f693655eb7b4aa23f30dcd0c3c5f2b970aad7c8a828195', ), } @@ -40,7 +49,7 @@ const B: Signer = { address: new Address(hexToBytes('0x6f62d8382bf2587361db73ceca28be91b2acb6df')), privateKey: hexToBytes('0x2a6e9ad5a6a8e4f17149b8bc7128bf090566a11dbd63c30e5a0ee9f161309cd6'), publicKey: hexToBytes( - '0xca0a55f6e81cb897aee6a1c390aa83435c41048faa0564b226cfc9f3df48b73e846377fb0fd606df073addc7bd851f22547afbbdd5c3b028c91399df802083a2' + '0xca0a55f6e81cb897aee6a1c390aa83435c41048faa0564b226cfc9f3df48b73e846377fb0fd606df073addc7bd851f22547afbbdd5c3b028c91399df802083a2', ), } @@ -48,7 +57,7 @@ const C: Signer = { address: new Address(hexToBytes('0x83c30730d1972baa09765a1ac72a43db27fedce5')), privateKey: hexToBytes('0xf216ddcf276079043c52b5dd144aa073e6b272ad4bfeaf4fbbc044aa478d1927'), publicKey: hexToBytes( - '0x555b19a5cbe6dd082a4a1e1e0520dd52a82ba24fd5598ea31f0f31666c40905ed319314c5fb06d887b760229e1c0e616294e7b1cb5dfefb71507c9112132ce56' + '0x555b19a5cbe6dd082a4a1e1e0520dd52a82ba24fd5598ea31f0f31666c40905ed319314c5fb06d887b760229e1c0e616294e7b1cb5dfefb71507c9112132ce56', ), } @@ -56,7 +65,7 @@ const D: Signer = { address: new Address(hexToBytes('0x8458f408106c4875c96679f3f556a511beabe138')), privateKey: hexToBytes('0x159e95d07a6c64ddbafa6036cdb7b8114e6e8cdc449ca4b0468a6d0c955f991b'), publicKey: hexToBytes( - '0xf02724341e2df54cf53515f079b1354fa8d437e79c5b091b8d8cc7cbcca00fd8ad854cb3b3a85b06c44ecb7269404a67be88b561f2224c94d133e5fc21be915c' + '0xf02724341e2df54cf53515f079b1354fa8d437e79c5b091b8d8cc7cbcca00fd8ad854cb3b3a85b06c44ecb7269404a67be88b561f2224c94d133e5fc21be915c', ), } @@ -64,7 +73,7 @@ const E: Signer = { address: new Address(hexToBytes('0xab80a948c661aa32d09952d2a6c4ad77a4c947be')), privateKey: hexToBytes('0x48ec5a6c4a7fc67b10a9d4c8a8f594a81ae42e41ed061fa5218d96abb6012344'), publicKey: hexToBytes( - 
'0xadefb82b9f54e80aa3532263e4478739de16fcca6828f4ae842f8a07941c347fa59d2da1300569237009f0f122dc1fd6abb0db8fcb534280aa94948a5cc95f94' + '0xadefb82b9f54e80aa3532263e4478739de16fcca6828f4ae842f8a07941c347fa59d2da1300569237009f0f122dc1fd6abb0db8fcb534280aa94948a5cc95f94', ), } @@ -72,7 +81,7 @@ const F: Signer = { address: new Address(hexToBytes('0xdc7bc81ddf67d037d7439f8e6ff12f3d2a100f71')), privateKey: hexToBytes('0x86b0ff7b6cf70786f29f297c57562905ab0b6c32d69e177a46491e56da9e486e'), publicKey: hexToBytes( - '0xd3e3d2b722e325bfc085ff5638a112b4e7e88ff13f92fc7f6cfc14b5a25e8d1545a2f27d8537b96e8919949d5f8c139ae7fc81aea7cf7fe5d43d7faaa038e35b' + '0xd3e3d2b722e325bfc085ff5638a112b4e7e88ff13f92fc7f6cfc14b5a25e8d1545a2f27d8537b96e8919949d5f8c139ae7fc81aea7cf7fe5d43d7faaa038e35b', ), } @@ -83,17 +92,18 @@ const initWithSigners = async (signers: Signer[], common?: Common) => { const extraData = concatBytes( new Uint8Array(32), ...signers.map((s) => s.address.toBytes()), - new Uint8Array(65) - ) - const genesisBlock = createBlockFromBlockData( - { header: { gasLimit: GAS_LIMIT, extraData } }, - { common } + new Uint8Array(65), ) + const genesisBlock = createBlock({ header: { gasLimit: GAS_LIMIT, extraData } }, { common }) blocks.push(genesisBlock) + const consensusDict: ConsensusDict = {} + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() + const blockchain = await createBlockchain({ validateBlocks: true, validateConsensus: true, + consensusDict, genesisBlock, common, }) @@ -106,12 +116,12 @@ function getBlock( signer: Signer, beneficiary?: [Signer, boolean], checkpointSigners?: Signer[], - common?: Common + common?: Common, ) { common = common ?? COMMON const number = lastBlock.header.number + BigInt(1) - let coinbase = Address.zero() + let coinbase = createZeroAddress() let nonce = CLIQUE_NONCE_DROP let extraData = EXTRA_DATA if (beneficiary) { @@ -123,7 +133,7 @@ function getBlock( extraData = concatBytes( new Uint8Array(32), ...checkpointSigners.map((s) => s.address.toBytes()), - new Uint8Array(65) + new Uint8Array(65), ) } @@ -149,7 +159,7 @@ function getBlock( // set signer const cliqueSigner = signer.privateKey - return createBlockFromBlockData(blockData, { common, freeze: false, cliqueSigner }) + return createSealedCliqueBlock(blockData, cliqueSigner, { common }) } const addNextBlockReorg = async ( @@ -159,7 +169,7 @@ const addNextBlockReorg = async ( signer: Signer, beneficiary?: [Signer, boolean], checkpointSigners?: Signer[], - common?: Common + common?: Common, ) => { const block = getBlock(blockchain, forkBlock, signer, beneficiary, checkpointSigners, common) await blockchain.putBlock(block) @@ -173,7 +183,7 @@ const addNextBlock = async ( signer: Signer, beneficiary?: [Signer, boolean], checkpointSigners?: Signer[], - common?: Common + common?: Common, ) => { const block = getBlock( blockchain, @@ -181,7 +191,7 @@ const addNextBlock = async ( signer, beneficiary, checkpointSigners, - common + common, ) await blockchain.putBlock(block) blocks.push(block) @@ -190,16 +200,18 @@ const addNextBlock = async ( describe('Clique: Initialization', () => { it('should initialize a clique blockchain', async () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) - const blockchain = await createBlockchain({ common }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) + const consensusDict: ConsensusDict = {} + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() + const blockchain = await createBlockchain({ 
common, consensusDict }) const head = await blockchain.getIteratorHead() assert.deepEqual(head.hash(), blockchain.genesisBlock.hash(), 'correct genesis hash') assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners(head.header.number + BigInt(1)), - head.header.cliqueEpochTransitionSigners(), - 'correct genesis signers' + cliqueEpochTransitionSigners(head.header), + 'correct genesis signers', ) }) @@ -209,24 +221,24 @@ describe('Clique: Initialization', () => { // _validateConsensus needs to be true to trigger this test condition ;(blockchain as any)._validateConsensus = true const number = (COMMON.consensusConfig() as CliqueConfig).epoch - const unauthorizedSigner = Address.fromString('0x00a839de7922491683f547a67795204763ff8237') + const unauthorizedSigner = createAddressFromString('0x00a839de7922491683f547a67795204763ff8237') const extraData = concatBytes( new Uint8Array(32), A.address.toBytes(), unauthorizedSigner.toBytes(), - new Uint8Array(65) - ) - const block = createBlockFromBlockData( - { header: { number, extraData } }, - { common: COMMON, cliqueSigner: A.privateKey } + new Uint8Array(65), ) + const block = createSealedCliqueBlock({ header: { number, extraData } }, A.privateKey, { + common: COMMON, + freeze: false, + }) try { await blockchain.putBlock(block) assert.fail('should fail') } catch (error: any) { assert.ok( error.message.includes('checkpoint signer not found in active signers list'), - 'correct error' + 'correct error', ) } }) @@ -238,7 +250,7 @@ describe('Clique: Initialization', () => { const number = BigInt(2) const extraData = new Uint8Array(97) let difficulty = BigInt(5) - let block = createBlockFromBlockData( + let block = createBlock( { header: { number, @@ -248,7 +260,7 @@ describe('Clique: Initialization', () => { timestamp: parentHeader.timestamp + BigInt(10000), }, }, - { common: COMMON } + { common: COMMON }, ) try { @@ -257,13 +269,13 @@ describe('Clique: Initialization', () => { } catch (error: any) { assert.ok( error.message.includes('difficulty for clique block must be INTURN (2) or NOTURN (1)'), - 'correct error' + 'correct error', ) } difficulty = BigInt(1) const cliqueSigner = A.privateKey - block = createBlockFromBlockData( + block = createSealedCliqueBlock( { header: { number, @@ -273,7 +285,8 @@ describe('Clique: Initialization', () => { timestamp: parentHeader.timestamp + BigInt(10000), }, }, - { common: COMMON, cliqueSigner } + cliqueSigner, + { common: COMMON }, ) try { @@ -302,9 +315,9 @@ describe('Clique: Initialization', () => { assert.equal(block.header.number, BigInt(1)) assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - block.header.number + BigInt(1) + block.header.number + BigInt(1), ), - [A.address] + [A.address], ) }) @@ -315,10 +328,10 @@ describe('Clique: Initialization', () => { await addNextBlock(blockchain, blocks, A, [C, true]) assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'only accept first, second needs 2 votes' + 'only accept first, second needs 2 votes', ) }) @@ -334,10 +347,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address, C.address, D.address], - 'only accept first two, third needs 3 
votes already' + 'only accept first two, third needs 3 votes already', ) }) @@ -363,10 +376,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [], - 'weird, but one less cornercase by explicitly allowing this' + 'weird, but one less cornercase by explicitly allowing this', ) }) @@ -376,10 +389,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'not fulfilled' + 'not fulfilled', ) }) @@ -390,10 +403,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address], - 'fulfilled' + 'fulfilled', ) }) @@ -404,9 +417,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -417,9 +430,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address, D.address] + [A.address, B.address, C.address, D.address], ) }) @@ -431,9 +444,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address] + [A.address, B.address, C.address], ) }) @@ -447,9 +460,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -466,9 +479,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address, D.address] + [A.address, B.address, C.address, D.address], ) }) @@ -482,9 +495,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -504,9 +517,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -519,10 +532,10 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - 
blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'deauth votes' + 'deauthorized votes', ) }) @@ -535,14 +548,14 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'auth votes' + 'auth votes', ) }) - it('Clique Voting: Changes reaching consensus out of bounds (via a deauth) execute on touch', async () => { + it('Clique Voting: Changes reaching consensus out of bounds (via a deauthorization) execute on touch', async () => { const { blocks, blockchain } = await initWithSigners([A, B, C, D]) await addNextBlock(blockchain, blocks, A, [C, false]) await addNextBlock(blockchain, blocks, B) @@ -558,13 +571,13 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) - it('Clique Voting: Changes reaching consensus out of bounds (via a deauth) may go out of consensus on first touch', async () => { + it('Clique Voting: Changes reaching consensus out of bounds (via a deauthorization) may go out of consensus on first touch', async () => { const { blocks, blockchain } = await initWithSigners([A, B, C, D]) await addNextBlock(blockchain, blocks, A, [C, false]) await addNextBlock(blockchain, blocks, B) @@ -580,9 +593,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address, C.address] + [A.address, B.address, C.address], ) }) @@ -608,9 +621,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [B.address, C.address, D.address, E.address, F.address] + [B.address, C.address, D.address, E.address, F.address], ) }) @@ -626,10 +639,10 @@ describe('Clique: Initialization', () => { }, }, }, + Goerli, { - baseChain: Chain.Goerli, hardfork: Hardfork.Chainstart, - } + }, ) const { blocks, blockchain } = await initWithSigners([A, B], common) await addNextBlock(blockchain, blocks, A, [C, true], undefined, common) @@ -639,9 +652,9 @@ describe('Clique: Initialization', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), - [A.address, B.address] + [A.address, B.address], ) }) @@ -654,7 +667,7 @@ describe('Clique: Initialization', () => { } catch (error: any) { assert.ok( error.message.includes('invalid PoA block signature (clique)'), - 'correct error thrown' + 'correct error thrown', ) } }) @@ -682,10 +695,10 @@ describe('Clique: Initialization', () => { }, }, }, + Goerli, { - baseChain: Chain.Goerli, hardfork: Hardfork.Chainstart, - } + }, ) const { blocks, blockchain } = await initWithSigners([A, B, C], common) await addNextBlock(blockchain, blocks, A, undefined, undefined, common) @@ -706,80 +719,80 @@ describe('Clique: Initialization', () => { 
assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) // block 2: C, next signer: A await addNextBlock(blockchain, blocks, C) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) // block 3: A, next signer: B await addNextBlock(blockchain, blocks, A) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) // block 4: B, next signer: C await addNextBlock(blockchain, blocks, B) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( A.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.notOk( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( B.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) assert.ok( await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( C.address, - blocks[blocks.length - 1].header.number - ) + blocks[blocks.length - 1].header.number, + ), ) }) }) @@ -789,15 +802,15 @@ describe('clique: reorgs', () => { const { blocks, blockchain } = await initWithSigners([A, B]) const genesis = blocks[0] await addNextBlock(blockchain, blocks, A, [C, true]) - const headBlockUnforked = await addNextBlock(blockchain, blocks, B, [C, true]) + const headBlockNotForked = await addNextBlock(blockchain, blocks, B, [C, true]) assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address, C.address], - 'address C added to signers' + 'address C added to signers', ) - assert.deepEqual((await blockchain.getCanonicalHeadBlock()).hash(), headBlockUnforked.hash()) + assert.deepEqual((await blockchain.getCanonicalHeadBlock()).hash(), headBlockNotForked.hash()) await addNextBlockReorg(blockchain, blocks, genesis, B) const headBlock = await addNextBlock(blockchain, blocks, A) assert.deepEqual((await blockchain.getCanonicalHeadBlock()).hash(), headBlock.hash()) @@ -806,10 +819,10 @@ 
describe('clique: reorgs', () => { assert.deepEqual( (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - blocks[blocks.length - 1].header.number + BigInt(1) + blocks[blocks.length - 1].header.number + BigInt(1), ), [A.address, B.address], - 'address C not added to signers' + 'address C not added to signers', ) }) @@ -839,7 +852,7 @@ describe('clique: reorgs', () => { await addNextBlock(blockchain, blocks, A, [C, true], undefined, common) await addNextBlock(blockchain, blocks, B, [C, true], undefined, common) await addNextBlock(blockchain, blocks, A, undefined, undefined, common) - const headBlockUnforked = await addNextBlock( + const headBlockNotForked = await addNextBlock( blockchain, blocks, B, @@ -854,7 +867,7 @@ describe('clique: reorgs', () => { [A.address, B.address, C.address], 'address C added to signers' ) - assert.deepEqual((await blockchain.getCanonicalHeadBlock()).hash(), headBlockUnforked.hash()) + assert.deepEqual((await blockchain.getCanonicalHeadBlock()).hash(), headBlockNotForked.hash()) await addNextBlockReorg(blockchain, blocks, genesis, B, undefined, undefined, common) await addNextBlock(blockchain, blocks, A, undefined, undefined, common) diff --git a/packages/blockchain/test/customConsensus.spec.ts b/packages/blockchain/test/customConsensus.spec.ts index 23dc717441..19083ade27 100644 --- a/packages/blockchain/test/customConsensus.spec.ts +++ b/packages/blockchain/test/customConsensus.spec.ts @@ -1,11 +1,13 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { Common, Hardfork } from '@ethereumjs/common' import { bytesToHex } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EthashConsensus, createBlockchain } from '../src/index.js' +import { createBlockchain } from '../src/index.js' -import type { Consensus } from '../src/index.js' +import * as testnet from './testdata/testnet.json' + +import type { Consensus, ConsensusDict } from '../src/index.js' import type { Block, BlockHeader } from '@ethereumjs/block' class fibonacciConsensus implements Consensus { @@ -22,7 +24,7 @@ class fibonacciConsensus implements Consensus { validateConsensus(_block: Block): Promise { if (bytesToHex(_block.header.extraData) !== '0x12358d') { throw new Error( - 'header contains invalid extradata - must match first 6 elements of fibonacci sequence' + 'header contains invalid extradata - must match first 6 elements of fibonacci sequence', ) } return new Promise((resolve) => resolve()) @@ -41,16 +43,19 @@ class fibonacciConsensus implements Consensus { } } +testnet.default.consensus.algorithm = 'fibonacci' +const consensusDict: ConsensusDict = {} +consensusDict['fibonacci'] = new fibonacciConsensus() + describe('Optional consensus parameter in blockchain constructor', () => { it('blockchain constructor should work with custom consensus', async () => { - const common = new Common({ chain: 'mainnet', hardfork: Hardfork.Chainstart }) - const consensus = new fibonacciConsensus() + const common = new Common({ chain: testnet, hardfork: Hardfork.Chainstart }) try { - const blockchain = await createBlockchain({ common, consensus }) + const blockchain = await createBlockchain({ common, validateConsensus: true, consensusDict }) assert.equal( (blockchain.consensus as fibonacciConsensus).algorithm, 'fibonacciConsensus', - 'consensus algorithm matches' + 'consensus algorithm matches', ) } catch (err) { assert.fail('blockchain should instantiate successfully') @@ -59,11 +64,10 @@ describe('Optional 
consensus parameter in blockchain constructor', () => { }) describe('Custom consensus validation rules', () => { - it('should validat custom consensus rules', async () => { - const common = new Common({ chain: 'mainnet', hardfork: Hardfork.Chainstart }) - const consensus = new fibonacciConsensus() - const blockchain = await createBlockchain({ common, consensus }) - const block = createBlockFromBlockData( + it('should validate custom consensus rules', async () => { + const common = new Common({ chain: testnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common, validateConsensus: true, consensusDict }) + const block = createBlock( { header: { number: 1n, @@ -74,7 +78,7 @@ describe('Custom consensus validation rules', () => { gasLimit: blockchain.genesisBlock.header.gasLimit + 1n, }, }, - { common } + { common }, ) try { @@ -82,13 +86,13 @@ describe('Custom consensus validation rules', () => { assert.deepEqual( (await blockchain.getBlock(block.header.number)).header.hash(), block.header.hash(), - 'put block with valid difficulty and extraData' + 'put block with valid difficulty and extraData', ) } catch { assert.fail('should have put block with valid difficulty and extraData') } - const blockWithBadDifficulty = createBlockFromBlockData( + const blockWithBadDifficulty = createBlock( { header: { number: 2n, @@ -98,7 +102,7 @@ describe('Custom consensus validation rules', () => { timestamp: block.header.timestamp + 1n, }, }, - { common } + { common }, ) try { await blockchain.putBlock(blockWithBadDifficulty) @@ -106,11 +110,11 @@ describe('Custom consensus validation rules', () => { } catch (err: any) { assert.ok( err.message.includes('invalid difficulty'), - 'failed to put block with invalid difficulty' + 'failed to put block with invalid difficulty', ) } - const blockWithBadExtraData = createBlockFromBlockData( + const blockWithBadExtraData = createBlock( { header: { number: 2n, @@ -121,7 +125,7 @@ describe('Custom consensus validation rules', () => { gasLimit: block.header.gasLimit + 1n, }, }, - { common } + { common }, ) try { await blockchain.putBlock(blockWithBadExtraData) @@ -130,7 +134,7 @@ describe('Custom consensus validation rules', () => { assert.ok( err.message === 'header contains invalid extradata - must match first 6 elements of fibonacci sequence', - 'failed to put block with invalid extraData' + 'failed to put block with invalid extraData', ) } }) @@ -138,33 +142,27 @@ describe('Custom consensus validation rules', () => { describe('consensus transition checks', () => { it('should transition correctly', async () => { - const common = new Common({ chain: 'mainnet', hardfork: Hardfork.Chainstart }) - const consensus = new fibonacciConsensus() - const blockchain = await createBlockchain({ common, consensus }) + const common = new Common({ chain: testnet, hardfork: Hardfork.Chainstart }) + const blockchain = await createBlockchain({ common, validateConsensus: true, consensusDict }) try { await blockchain.checkAndTransitionHardForkByNumber(5n) assert.ok('checkAndTransitionHardForkByNumber does not throw with custom consensus') } catch (err: any) { assert.fail( - `checkAndTransitionHardForkByNumber should not throw with custom consensus, error=${err.message}` + `checkAndTransitionHardForkByNumber should not throw with custom consensus, error=${err.message}`, ) } - blockchain.consensus = new EthashConsensus() - blockchain.common.consensusAlgorithm = () => 'fibonacci' + blockchain.common.consensusAlgorithm = () => 'ethash' try { await 
blockchain.checkAndTransitionHardForkByNumber(5n) assert.fail( - 'checkAndTransitionHardForkByNumber should throw when using standard consensus (ethash, clique, casper) but consensus algorithm defined in common is different' + 'checkAndTransitionHardForkByNumber should throw when using standard consensus (ethash, clique, casper) but consensus algorithm defined in common is different', ) } catch (err: any) { - assert.equal( - err.message, - 'consensus algorithm fibonacci not supported', - `checkAndTransitionHardForkByNumber correctly throws when using standard consensus (ethash, clique, casper) but consensus algorithm defined in common is different, error=${err.message}` - ) + assert.ok(err.message.includes('Consensus object for ethash must be passed')) } }) }) diff --git a/packages/blockchain/test/index.spec.ts b/packages/blockchain/test/index.spec.ts index 937ff6a187..e93de60cb3 100644 --- a/packages/blockchain/test/index.spec.ts +++ b/packages/blockchain/test/index.spec.ts @@ -1,9 +1,10 @@ import { - BlockHeader, - createBlockFromBlockData, + createBlock, createBlockFromRLPSerializedBlock, + createBlockHeader, + createBlockHeaderFromBytesArray, } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Holesky, Mainnet, Sepolia } from '@ethereumjs/common' import { MapDB, bytesToHex, equalsBytes, hexToBytes, utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -26,7 +27,7 @@ describe('blockchain test', () => { }) it('should initialize correctly', async () => { - const common = new Common({ chain: Chain.Mainnet }) + const common = new Common({ chain: Mainnet }) let blockchain = await createBlockchain({ common }) const iteratorHead = await blockchain.getIteratorHead() @@ -34,21 +35,21 @@ describe('blockchain test', () => { assert.deepEqual( iteratorHead.hash(), blockchain.genesisBlock.hash(), - 'correct genesis hash (getIteratorHead())' + 'correct genesis hash (getIteratorHead())', ) blockchain = await createBlockchain({ common, hardforkByHeadBlockNumber: true }) assert.equal( common.hardfork(), 'chainstart', - 'correct HF setting with hardforkByHeadBlockNumber option' + 'correct HF setting with hardforkByHeadBlockNumber option', ) }) it('should initialize holesky correctly', async () => { // Taken from: https://github.com/eth-clients/holesky/blob/f1d14b9a80085c3f0cb9d729fea9172cde445588/README.md#hole%C5%A1ky-hole%C5%A1ovice-testnet const holeskyHash = '0xb5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4' - const common = new Common({ chain: Chain.Holesky }) + const common = new Common({ chain: Holesky }) const blockchain = await createBlockchain({ common, }) @@ -58,7 +59,7 @@ describe('blockchain test', () => { }) it('should initialize correctly with createBlockchainFromBlocksData()', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const blockchain = await createBlockchainFromBlocksData(blocksData as BlockData[], { validateBlocks: true, validateConsensus: false, @@ -69,11 +70,11 @@ describe('blockchain test', () => { }) it('should only initialize with supported consensus validation options', async () => { - let common = new Common({ chain: Chain.Mainnet }) + let common = new Common({ chain: Mainnet }) try { await createBlockchain({ common, validateConsensus: true }) await createBlockchain({ common, validateBlocks: true }) - common = new Common({ 
chain: Chain.Goerli }) + common = new Common({ chain: Goerli }) await createBlockchain({ common, validateConsensus: true }) const chain = await createBlockchain({ common, validateBlocks: true }) assert.ok(chain instanceof Blockchain, 'should not throw') @@ -83,8 +84,8 @@ describe('blockchain test', () => { }) it('should add a genesis block without errors', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const genesisBlock = createBlockFromBlockData({ header: { number: 0 } }, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const genesisBlock = createBlock({ header: { number: 0 } }, { common }) const blockchain = await createBlockchain({ common, validateBlocks: true, @@ -94,12 +95,12 @@ describe('blockchain test', () => { assert.deepEqual( genesisBlock.hash(), (await blockchain.getCanonicalHeadHeader()).hash(), - 'genesis block hash should be correct' + 'genesis block hash should be correct', ) }) it('should not validate a block incorrectly flagged as genesis', async () => { - const genesisBlock = createBlockFromBlockData({ header: { number: BigInt(8) } }) + const genesisBlock = createBlock({ header: { number: BigInt(8) } }) try { await createBlockchain({ validateBlocks: true, @@ -123,9 +124,9 @@ describe('blockchain test', () => { it('should add 12 blocks, one at a time', async () => { const blocks: Block[] = [] const gasLimit = 8000000 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) - const genesisBlock = createBlockFromBlockData({ header: { gasLimit } }, { common }) + const genesisBlock = createBlock({ header: { gasLimit } }, { common }) blocks.push(genesisBlock) const blockchain = await createBlockchain({ @@ -146,7 +147,7 @@ describe('blockchain test', () => { gasLimit, }, } - const block = createBlockFromBlockData(blockData, { + const block = createBlock(blockData, { calcDifficultyFromHeader: lastBlock.header, common, }) @@ -168,9 +169,9 @@ describe('blockchain test', () => { it('getBlock(): should get block by number', async () => { const blocks: Block[] = [] const gasLimit = 8000000 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) - const genesisBlock = createBlockFromBlockData({ header: { gasLimit } }, { common }) + const genesisBlock = createBlock({ header: { gasLimit } }, { common }) blocks.push(genesisBlock) const blockchain = await createBlockchain({ @@ -188,7 +189,7 @@ describe('blockchain test', () => { gasLimit, }, } - const block = createBlockFromBlockData(blockData, { + const block = createBlock(blockData, { calcDifficultyFromHeader: genesisBlock.header, common, }) @@ -204,9 +205,9 @@ describe('blockchain test', () => { }) it('getBlock(): should get block by hash / not existing', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const gasLimit = 8000000 - const genesisBlock = createBlockFromBlockData({ header: { gasLimit } }, { common }) + const genesisBlock = createBlock({ header: { gasLimit } }, { common }) const blockchain = await createBlockchain({ common, @@ -223,7 +224,7 @@ describe('blockchain test', () => { } catch (e: any) { assert.ok( e.message.includes('not found in DB'), - `should throw for non-existing block-by-number 
request` + `should throw for non-existing block-by-number request`, ) } @@ -233,7 +234,7 @@ describe('blockchain test', () => { } catch (e: any) { assert.ok( e.message.includes('not found in DB'), - `should throw for non-existing block-by-hash request` + `should throw for non-existing block-by-hash request`, ) } }) @@ -270,7 +271,7 @@ describe('blockchain test', () => { assert.equal( err.message, 'header with number 22 not found in canonical chain', - 'canonical references correctly deleted' + 'canonical references correctly deleted', ) } @@ -285,7 +286,7 @@ describe('blockchain test', () => { assert.equal( bytesToHex(newblock22.hash()), bytesToHex(newheader22.hash()), - 'fetched block should match' + 'fetched block should match', ) }) @@ -308,7 +309,7 @@ describe('blockchain test', () => { assert.equal( getBlocks![1].header.number, blocks[3].header.number, - 'should skip two blocks apart' + 'should skip two blocks apart', ) assert.ok(!isConsecutive(getBlocks!), 'blocks should not be consecutive') }) @@ -431,23 +432,23 @@ describe('blockchain test', () => { await blockchain.putBlocks(blocks.slice(1)) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const headerData = { number: 15, parentHash: blocks[14].hash(), gasLimit: 8000000, timestamp: BigInt(blocks[14].header.timestamp) + BigInt(1), } - const forkHeader = BlockHeader.fromHeaderData(headerData, { + const forkHeader = createBlockHeader(headerData, { common, calcDifficultyFromHeader: blocks[14].header, }) - blockchain._heads['staletest'] = blockchain._headHeaderHash + blockchain._heads['staleTest'] = blockchain._headHeaderHash await blockchain.putHeader(forkHeader) - assert.deepEqual(blockchain._heads['staletest'], blocks[14].hash(), 'should update stale head') + assert.deepEqual(blockchain._heads['staleTest'], blocks[14].hash(), 'should update stale head') assert.deepEqual(blockchain._headBlockHash, blocks[14].hash(), 'should update stale headBlock') }) @@ -455,7 +456,7 @@ describe('blockchain test', () => { const { blockchain, blocks, error } = await generateBlockchain(15) assert.equal(error, null, 'no error') - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const headerData = { number: 15, parentHash: blocks[14].hash(), @@ -463,16 +464,16 @@ describe('blockchain test', () => { //eslint-disable-next-line timestamp: BigInt(blocks[14].header.timestamp) + BigInt(1), } - const forkHeader = BlockHeader.fromHeaderData(headerData, { + const forkHeader = createBlockHeader(headerData, { common, calcDifficultyFromHeader: blocks[14].header, }) - blockchain._heads['staletest'] = blockchain._headHeaderHash + blockchain._heads['staleTest'] = blockchain._headHeaderHash await blockchain.putHeader(forkHeader) - assert.deepEqual(blockchain._heads['staletest'], blocks[14].hash(), 'should update stale head') + assert.deepEqual(blockchain._heads['staleTest'], blocks[14].hash(), 'should update stale head') assert.deepEqual(blockchain._headBlockHash, blocks[14].hash(), 'should update stale headBlock') await blockchain.delBlock(forkHeader.hash()) @@ -530,26 +531,26 @@ describe('blockchain test', () => { const block2HeaderValuesArray = blocks[2].header.raw() block2HeaderValuesArray[1] = new Uint8Array(32) - const block2Header = BlockHeader.fromValuesArray(block2HeaderValuesArray, { + const block2Header = 
createBlockHeaderFromBytesArray(block2HeaderValuesArray, { common: blocks[2].common, }) await blockchain.putHeader(block2Header) try { await blockchain.getBlock(BigInt(2)) - assert.fail('block should not be constucted') + assert.fail('block should not be constructed') } catch (e: any) { assert.equal( e.message, 'uncle hash should be equal to hash of empty array', - 'block not constructed from empty bodies' + 'block not constructed from empty bodies', ) } }) it('should put multiple blocks at once', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const blocks: Block[] = [] - const genesisBlock = createBlockFromBlockData({ header: { gasLimit: 8000000 } }, { common }) + const genesisBlock = createBlock({ header: { gasLimit: 8000000 } }, { common }) blocks.push(...generateBlocks(15, [genesisBlock])) const blockchain = await createBlockchain({ validateBlocks: true, @@ -560,14 +561,14 @@ describe('blockchain test', () => { }) it('should validate', async () => { - const genesisBlock = createBlockFromBlockData({ header: { gasLimit: 8000000 } }) + const genesisBlock = createBlock({ header: { gasLimit: 8000000 } }) const blockchain = await createBlockchain({ validateBlocks: true, validateConsensus: false, genesisBlock, }) - const invalidBlock = createBlockFromBlockData({ header: { number: 50 } }) + const invalidBlock = createBlock({ header: { number: 50 } }) try { await blockchain.putBlock(invalidBlock) assert.fail('should not validate an invalid block') @@ -577,7 +578,7 @@ describe('blockchain test', () => { }) it('should add block with body', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) const genesisRlp = hexToBytes(testDataPreLondon.genesisRLP as PrefixedHexString) const genesisBlock = createBlockFromRLPSerializedBlock(genesisRlp, { common }) const blockchain = await createBlockchain({ @@ -613,8 +614,8 @@ describe('blockchain test', () => { const db = new MapDB() const gasLimit = 8000000 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const genesisBlock = createBlockFromBlockData({ header: { gasLimit } }, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const genesisBlock = createBlock({ header: { gasLimit } }, { common }) let blockchain = await createBlockchain({ db, validateBlocks: true, @@ -628,7 +629,7 @@ describe('blockchain test', () => { gasLimit, timestamp: genesisBlock.header.timestamp + BigInt(1), } - const header = BlockHeader.fromHeaderData(headerData, { + const header = createBlockHeader(headerData, { calcDifficultyFromHeader: genesisBlock.header, common, }) @@ -650,10 +651,10 @@ describe('blockchain test', () => { it('should get latest', async () => { const gasLimit = 8000000 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const opts: BlockOptions = { common } - const genesisBlock = createBlockFromBlockData({ header: { gasLimit } }, opts) + const genesisBlock = createBlock({ header: { gasLimit } }, opts) const blockchain = await createBlockchain({ validateBlocks: true, validateConsensus: false, @@ -669,7 +670,7 @@ describe('blockchain test', () => { }, } opts.calcDifficultyFromHeader = genesisBlock.header - const block = 
createBlockFromBlockData(blockData, opts) + const block = createBlock(blockData, opts) const headerData1 = { number: 1, @@ -678,7 +679,7 @@ describe('blockchain test', () => { gasLimit, } opts.calcDifficultyFromHeader = genesisBlock.header - const header1 = BlockHeader.fromHeaderData(headerData1, opts) + const header1 = createBlockHeader(headerData1, opts) const headers = [header1] const headerData2 = { @@ -688,7 +689,7 @@ describe('blockchain test', () => { gasLimit, } opts.calcDifficultyFromHeader = block.header - const header2 = BlockHeader.fromHeaderData(headerData2, opts) + const header2 = createBlockHeader(headerData2, opts) headers.push(header2) await blockchain.putHeaders(headers) @@ -709,10 +710,10 @@ describe('blockchain test', () => { }) it('mismatched chains', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const gasLimit = 8000000 - const genesisBlock = createBlockFromBlockData({ header: { gasLimit } }, { common }) + const genesisBlock = createBlock({ header: { gasLimit } }, { common }) const blockData1 = { header: { @@ -730,12 +731,12 @@ describe('blockchain test', () => { const blocks = [ genesisBlock, - createBlockFromBlockData(blockData1, { + createBlock(blockData1, { common, calcDifficultyFromHeader: genesisBlock.header, }), - createBlockFromBlockData(blockData2, { - common: new Common({ chain: Chain.Sepolia, hardfork: Hardfork.Chainstart }), + createBlock(blockData2, { + common: new Common({ chain: Sepolia, hardfork: Hardfork.Chainstart }), calcDifficultyFromHeader: genesisBlock.header, }), ] @@ -766,7 +767,7 @@ describe('blockchain test', () => { describe('initialization tests', () => { it('should read genesis from database', async () => { const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Chainstart, }) const blockchain = await createBlockchain({ common }) @@ -775,7 +776,7 @@ describe('initialization tests', () => { assert.deepEqual( (await blockchain.getIteratorHead()).hash(), genesisHash, - 'head hash should equal expected mainnet genesis hash' + 'head hash should equal expected mainnet genesis hash', ) const db = blockchain.db @@ -785,19 +786,19 @@ describe('initialization tests', () => { assert.deepEqual( (await newBlockchain.getIteratorHead()).hash(), genesisHash, - 'head hash should be read from the provided db' + 'head hash should be read from the provided db', ) }) it('should allow to put a custom genesis block', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const genesisBlock = createBlockFromBlockData( + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const genesisBlock = createBlock( { header: { extraData: utf8ToBytes('custom extra data'), }, }, - { common } + { common }, ) const hash = genesisBlock.hash() const blockchain = await createBlockchain({ common, genesisBlock }) @@ -806,38 +807,38 @@ describe('initialization tests', () => { assert.deepEqual( (await blockchain.getIteratorHead()).hash(), hash, - 'blockchain should put custom genesis block' + 'blockchain should put custom genesis block', ) const newBlockchain = await createBlockchain({ db, genesisBlock }) assert.deepEqual( (await newBlockchain.getIteratorHead()).hash(), hash, - 'head hash should be read from the provided db' + 'head hash should be read from the provided db', ) }) it('should not allow to change the genesis block in the database', async 
() => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const genesisBlock = createBlockFromBlockData( + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const genesisBlock = createBlock( { header: { extraData: utf8ToBytes('custom extra data'), }, }, - { common } + { common }, ) const hash = genesisBlock.hash() const blockchain = await createBlockchain({ common, genesisBlock }) const db = blockchain.db - const otherGenesisBlock = createBlockFromBlockData( + const otherGenesisBlock = createBlock( { header: { extraData: utf8ToBytes('other extra data'), }, }, - { common } + { common }, ) // assert that this is a block with a new hash @@ -853,7 +854,7 @@ describe('initialization tests', () => { assert.equal( e.message, 'Cannot put a different genesis block than current blockchain genesis: create a new Blockchain', - 'putting a genesis block did throw (otherGenesisBlock not found in chain)' + 'putting a genesis block did throw (otherGenesisBlock not found in chain)', ) } @@ -865,20 +866,20 @@ describe('initialization tests', () => { assert.equal( e.message, 'The genesis block in the DB has a different hash than the provided genesis block.', - 'creating blockchain with different genesis block than in db throws' + 'creating blockchain with different genesis block than in db throws', ) } }) }) it('should correctly derive mainnet genesis block hash and stateRoot', async () => { - const common = new Common({ chain: Chain.Mainnet }) + const common = new Common({ chain: Mainnet }) const blockchain = await createBlockchain({ common }) const mainnetGenesisBlockHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) const mainnetGenesisStateRoot = hexToBytes( - '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544' + '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544', ) assert.deepEqual(blockchain.genesisBlock.hash(), mainnetGenesisBlockHash) assert.deepEqual(blockchain.genesisBlock.header.stateRoot, mainnetGenesisStateRoot) diff --git a/packages/blockchain/test/iterator.spec.ts b/packages/blockchain/test/iterator.spec.ts index 2e1690eed1..3edd1712ed 100644 --- a/packages/blockchain/test/iterator.spec.ts +++ b/packages/blockchain/test/iterator.spec.ts @@ -57,13 +57,13 @@ describe('blockchain test', () => { } }, undefined, - true + true, ) assert.equal(reorged, 1, 'should have reorged once') assert.equal( servedReorged, reorgedBlocks.length, - 'should have served all 21 reorged blocks with head resetting' + 'should have served all 21 reorged blocks with head resetting', ) assert.equal(iterated, 31, 'should have iterated 10 + 21 blocks in total') }) @@ -79,7 +79,7 @@ describe('blockchain test', () => { i++ } }, - 5 + 5, ) assert.equal(iterated, 5) assert.equal(i, 5) @@ -97,7 +97,7 @@ describe('blockchain test', () => { i++ } }, - 0 + 0, ) .catch(() => { assert.fail('Promise cannot throw when running 0 blocks') @@ -118,7 +118,7 @@ describe('blockchain test', () => { i++ } }, - -1 + -1, ) .catch(() => {}) // Note: if st.end() is not called (Promise did not throw), then this test fails, as it does not end. 
@@ -145,7 +145,7 @@ describe('blockchain test', () => { i++ } }, - 5 + 5, ) assert.equal(i, 1) @@ -186,7 +186,7 @@ describe('blockchain test', () => { assert.equal( bytesToHex((blockchain as any)._heads['head0']), '0xabcd', - 'should get state root heads' + 'should get state root heads', ) } else { assert.fail() diff --git a/packages/blockchain/test/pos.spec.ts b/packages/blockchain/test/pos.spec.ts deleted file mode 100644 index 4ad1c33d5c..0000000000 --- a/packages/blockchain/test/pos.spec.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { bytesToHex } from '@ethereumjs/util' -import { assert, describe, it } from 'vitest' - -import { createBlockchain } from '../src/index.js' - -import * as testnet from './testdata/testnet.json' - -import type { Blockchain } from '../src/index.js' -import type { Block } from '@ethereumjs/block' - -const buildChain = async (blockchain: Blockchain, common: Common, height: number) => { - const blocks: Block[] = [] - const londonBlockNumber = Number(common.hardforkBlock('london')!) - const genesis = blockchain.genesisBlock - blocks.push(genesis) - for (let number = 1; number <= height; number++) { - let baseFeePerGas = BigInt(0) - if (number === londonBlockNumber) { - baseFeePerGas = BigInt(1000000000) - } else if (number > londonBlockNumber) { - baseFeePerGas = blocks[number - 1].header.calcNextBaseFee() - } - const block = createBlockFromBlockData( - { - header: { - number, - parentHash: blocks[number - 1].hash(), - timestamp: blocks[number - 1].header.timestamp + BigInt(1), - gasLimit: number >= londonBlockNumber ? BigInt(10000) : BigInt(5000), - baseFeePerGas: number >= londonBlockNumber ? baseFeePerGas : undefined, - }, - }, - { - calcDifficultyFromHeader: blocks[number - 1].header, - common, - setHardfork: await blockchain.getTotalDifficulty(blocks[number - 1].hash()), - } - ) - blocks.push(block) - await blockchain.putBlock(block) - } -} - -describe('Proof of Stake - inserting blocks into blockchain', () => { - const testnetOnlyTD = JSON.parse(JSON.stringify(testnet)) - testnetOnlyTD['hardforks'][11] = { - name: 'paris', - ttd: BigInt(1313600), - block: null, - } - const scenarios = [ - { - common: new Common({ chain: testnet, hardfork: Hardfork.Chainstart }), - }, - { - common: new Common({ chain: testnetOnlyTD, hardfork: Hardfork.Chainstart }), - }, - ] - - for (const s of scenarios) { - it('should pass', async () => { - const blockchain = await createBlockchain({ - validateBlocks: true, - validateConsensus: false, - common: s.common, - hardforkByHeadBlockNumber: true, - }) - const genesisHeader = await blockchain.getCanonicalHeadHeader() - assert.equal( - bytesToHex(genesisHeader.hash()), - '0x1119dc5ff680bf7b4c3d9cd41168334dee127d46b3626482076025cdd498ed0b', - 'genesis hash matches' - ) - await buildChain(blockchain, s.common, 15) - - const latestHeader = await blockchain.getCanonicalHeadHeader() - assert.equal(latestHeader.number, BigInt(15), 'blockchain is at correct height') - - assert.equal( - (blockchain as any).common.hardfork(), - 'paris', - 'HF should have been correctly updated' - ) - const td = await blockchain.getTotalDifficulty(latestHeader.hash()) - assert.equal( - td, - BigInt(1313601), - 'should have calculated the correct post-Merge total difficulty' - ) - - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const powBlock = createBlockFromBlockData( - { - header: { - number: 16, - 
difficulty: BigInt(1), - parentHash: latestHeader.hash(), - timestamp: latestHeader.timestamp + BigInt(1), - gasLimit: BigInt(10000), - }, - }, - { common } - ) - try { - await blockchain.putBlock(powBlock) - assert.fail('should throw when inserting PoW block') - } catch (err: any) { - assert.ok( - err.message.includes('invalid difficulty'), - 'should throw with invalid difficulty message' - ) - } - }) - } -}) diff --git a/packages/blockchain/test/reorg.spec.ts b/packages/blockchain/test/reorg.spec.ts index 5873765d12..19382953b9 100644 --- a/packages/blockchain/test/reorg.spec.ts +++ b/packages/blockchain/test/reorg.spec.ts @@ -1,20 +1,20 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { cliqueSigner, createBlock } from '@ethereumjs/block' +import { Common, ConsensusAlgorithm, Goerli, Hardfork, Mainnet } from '@ethereumjs/common' import { Address, equalsBytes, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { CLIQUE_NONCE_AUTH } from '../src/consensus/clique.js' +import { CLIQUE_NONCE_AUTH, CliqueConsensus } from '../src/consensus/clique.js' import { createBlockchain } from '../src/index.js' import { generateConsecutiveBlock } from './util.js' -import type { CliqueConsensus } from '../src/consensus/clique.js' +import type { ConsensusDict } from '../src/index.js' import type { Block } from '@ethereumjs/block' describe('reorg tests', () => { it('should correctly reorg the chain if the total difficulty is higher on a lower block number than the current head block', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.MuirGlacier }) - const genesis = createBlockFromBlockData( + const common = new Common({ chain: Mainnet, hardfork: Hardfork.MuirGlacier }) + const genesis = createBlock( { header: { number: BigInt(0), @@ -22,7 +22,7 @@ describe('reorg tests', () => { gasLimit: BigInt(8000000), }, }, - { common } + { common }, ) const blocks_lowTD: Block[] = [] @@ -39,7 +39,7 @@ describe('reorg tests', () => { while (TD_High < TD_Low) { blocks_lowTD.push(generateConsecutiveBlock(blocks_lowTD[blocks_lowTD.length - 1], 0)) blocks_highTD.push( - generateConsecutiveBlock(blocks_highTD[blocks_highTD.length - 1] ?? genesis, 1) + generateConsecutiveBlock(blocks_highTD[blocks_highTD.length - 1] ?? 
genesis, 1), ) TD_Low += blocks_lowTD[blocks_lowTD.length - 1].header.difficulty @@ -56,30 +56,31 @@ describe('reorg tests', () => { // ensure that the block difficulty is higher on the highTD chain when compared to the low TD chain assert.ok( number_lowTD > number_highTD, - 'low TD should have a lower TD than the reported high TD' + 'low TD should have a lower TD than the reported high TD', ) assert.ok( blocks_lowTD[blocks_lowTD.length - 1].header.number > blocks_highTD[blocks_highTD.length - 1].header.number, - 'low TD block should have a higher number than high TD block' + 'low TD block should have a higher number than high TD block', ) }) it('should correctly reorg a poa chain and remove blocks from clique snapshots', async () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) - const genesisBlock = createBlockFromBlockData( - { header: { extraData: new Uint8Array(97) } }, - { common } - ) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) + const genesisBlock = createBlock({ header: { extraData: new Uint8Array(97) } }, { common }) + + const consensusDict: ConsensusDict = {} + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() const blockchain = await createBlockchain({ validateBlocks: false, validateConsensus: false, + consensusDict, common, genesisBlock, }) const extraData = hexToBytes( - '0x506172697479205465636820417574686f7269747900000000000000000000002bbf886181970654ed46e3fae0ded41ee53fec702c47431988a7ae80e6576f3552684f069af80ba11d36327aaf846d470526e4a1c461601b2fd4ebdcdc2b734a01' + '0x506172697479205465636820417574686f7269747900000000000000000000002bbf886181970654ed46e3fae0ded41ee53fec702c47431988a7ae80e6576f3552684f069af80ba11d36327aaf846d470526e4a1c461601b2fd4ebdcdc2b734a01', ) // from goerli block 1 const { gasLimit } = genesisBlock.header const base = { extraData, gasLimit, difficulty: 1 } @@ -88,7 +89,7 @@ describe('reorg tests', () => { const beneficiary1 = new Address(new Uint8Array(20).fill(1)) const beneficiary2 = new Address(new Uint8Array(20).fill(2)) - const block1_low = createBlockFromBlockData( + const block1_low = createBlock( { header: { ...base, @@ -97,9 +98,9 @@ describe('reorg tests', () => { timestamp: genesisBlock.header.timestamp + BigInt(30), }, }, - { common } + { common }, ) - const block2_low = createBlockFromBlockData( + const block2_low = createBlock( { header: { ...base, @@ -110,10 +111,10 @@ describe('reorg tests', () => { coinbase: beneficiary1, }, }, - { common } + { common }, ) - const block1_high = createBlockFromBlockData( + const block1_high = createBlock( { header: { ...base, @@ -122,9 +123,9 @@ describe('reorg tests', () => { timestamp: genesisBlock.header.timestamp + BigInt(15), }, }, - { common } + { common }, ) - const block2_high = createBlockFromBlockData( + const block2_high = createBlock( { header: { ...base, @@ -133,9 +134,9 @@ describe('reorg tests', () => { timestamp: block1_high.header.timestamp + BigInt(15), }, }, - { common } + { common }, ) - const block3_high = createBlockFromBlockData( + const block3_high = createBlock( { header: { ...base, @@ -146,7 +147,7 @@ describe('reorg tests', () => { coinbase: beneficiary2, }, }, - { common } + { common }, ) await blockchain.putBlocks([block1_low, block2_low]) @@ -157,9 +158,9 @@ describe('reorg tests', () => { assert.ok( !signerStates.find( - (s: any) => s[0] === BigInt(2) && s[1].find((a: Address) => a.equals(beneficiary1)) + (s: any) => s[0] === BigInt(2) && s[1].find((a: Address) => 
a.equals(beneficiary1)), ), - 'should not find reorged signer state' + 'should not find reorged signer state', ) let signerVotes = (blockchain.consensus as CliqueConsensus)._cliqueLatestVotes @@ -167,27 +168,27 @@ describe('reorg tests', () => { !signerVotes.find( (v: any) => v[0] === BigInt(2) && - v[1][0].equal(block1_low.header.cliqueSigner()) && + v[1][0].equal(cliqueSigner(block1_low.header)) && v[1][1].equal(beneficiary1) && - equalsBytes(v[1][2], CLIQUE_NONCE_AUTH) + equalsBytes(v[1][2], CLIQUE_NONCE_AUTH), ), - 'should not find reorged clique vote' + 'should not find reorged clique vote', ) let blockSigners = (blockchain.consensus as CliqueConsensus)._cliqueLatestBlockSigners assert.ok( !blockSigners.find( - (s: any) => s[0] === BigInt(1) && s[1].equal(block1_low.header.cliqueSigner()) + (s: any) => s[0] === BigInt(1) && s[1].equal(cliqueSigner(block1_low.header)), ), - 'should not find reorged block signer' + 'should not find reorged block signer', ) signerStates = (blockchain.consensus as CliqueConsensus)._cliqueLatestSignerStates assert.ok( !!signerStates.find( - (s: any) => s[0] === BigInt(3) && s[1].find((a: Address) => a.equals(beneficiary2)) + (s: any) => s[0] === BigInt(3) && s[1].find((a: Address) => a.equals(beneficiary2)), ), - 'should find reorged signer state' + 'should find reorged signer state', ) signerVotes = (blockchain.consensus as CliqueConsensus)._cliqueLatestVotes @@ -196,9 +197,9 @@ describe('reorg tests', () => { blockSigners = (blockchain.consensus as CliqueConsensus)._cliqueLatestBlockSigners assert.ok( !!blockSigners.find( - (s: any) => s[0] === BigInt(3) && s[1].equals(block3_high.header.cliqueSigner()) + (s: any) => s[0] === BigInt(3) && s[1].equals(cliqueSigner(block3_high.header)), ), - 'should find reorged block signer' + 'should find reorged block signer', ) }) }) diff --git a/packages/blockchain/test/testdata/geth-genesis-kiln.json b/packages/blockchain/test/testdata/geth-genesis-kiln.json deleted file mode 100644 index 6d99cf2355..0000000000 --- a/packages/blockchain/test/testdata/geth-genesis-kiln.json +++ /dev/null @@ -1,865 +0,0 @@ -{ - "config": { - "chainId": 1337802, - "homesteadBlock": 0, - "eip150Block": 0, - "eip155Block": 0, - "eip158Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "berlinBlock": 0, - "londonBlock": 0, - "mergeForkBlock": 1000, - "terminalTotalDifficulty": 20000000000000 - }, - "alloc": { - "0x0000000000000000000000000000000000000000": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000001": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000002": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000003": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000004": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000005": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000006": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000007": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000008": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000009": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000e": { - "balance": "1" - }, - 
"0x000000000000000000000000000000000000000f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000010": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000011": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000012": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000013": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000014": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000015": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000016": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000017": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000018": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000019": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000020": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000021": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000022": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000023": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000024": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000025": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000026": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000027": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000028": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000029": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000030": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000031": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000032": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000033": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000034": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000035": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000036": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000037": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000038": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000039": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003f": { - "balance": "1" - }, - 
"0x0000000000000000000000000000000000000040": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000041": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000042": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000043": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000044": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000045": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000046": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000047": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000048": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000049": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000050": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000051": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000052": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000053": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000054": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000055": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000056": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000057": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000058": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000059": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000060": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000061": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000062": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000063": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000064": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000065": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000066": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000067": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000068": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000069": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000070": { - "balance": "1" - }, - 
"0x0000000000000000000000000000000000000071": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000072": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000073": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000074": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000075": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000076": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000077": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000078": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000079": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000080": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000081": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000082": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000083": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000084": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000085": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000086": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000087": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000088": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000089": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000090": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000091": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000092": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000093": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000094": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000095": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000096": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000097": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000098": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000099": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009f": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a1": { - "balance": "1" - }, - 
"0x00000000000000000000000000000000000000a2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000aa": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ab": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ac": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ad": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ae": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000af": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ba": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000be": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bf": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ca": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ce": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cf": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d2": { - "balance": "1" - }, - 
"0x00000000000000000000000000000000000000d3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000da": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000db": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000dc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000dd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000de": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000df": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ea": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000eb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ec": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ed": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ee": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ef": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fa": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fe": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ff": { - "balance": "1" - }, - "0x4242424242424242424242424242424242424242": { - "balance": "0", - "code": 
"0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b
50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b50516040805160208181019490945280820192909252805180830382018152606
09092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b604080516008808252818301909252606091602082018180
36833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a26469706673582212201dd26f37a621703009abf16e77e69c93dc50c79db7f6cc37543e3e0e3decdc9764736f6c634300060b0033", - "storage": { - "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", - "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", - "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", - "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", - "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", - "0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", - "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", - "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", - "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", - "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", - 
"0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", - "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", - "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", - "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", - "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", - "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", - "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", - "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", - "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", - "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", - "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", - "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", - "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", - "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", - "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", - "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", - "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", - "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", - "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", - "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", - "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" - } - }, - "0xf97e180c050e5Ab072211Ad2C213Eb5AEE4DF134": { - "balance": "10000000000000000000000000" - }, - "0x2cA5F489CC1Fd1CEC24747B64E8dE0F4A6A850E1": { - "balance": "10000000000000000000000000" - }, - "0x7203bd333a874D9d329050ecE393820fCD501eaA": { - "balance": "10000000000000000000000000" - }, - "0xA51918aA40D78Ff8be939bf0E8404252875c6aDF": { - "balance": "10000000000000000000000000" - }, - "0xAA81078e6b2121dd7A846690DFdD6b10d7658d8B": { - "balance": "10000000000000000000000000" - }, - "0xFA2d31D8f21c1D1633E9BEB641dF77D21D63ccDd": { - "balance": 
"10000000000000000000000000" - }, - "0xf751C9c6d60614226fE57D2cAD6e10C856a2ddA3": { - "balance": "10000000000000000000000000" - }, - "0x9cD16887f6A808AEaa65D3c840f059EeA4ca1319": { - "balance": "10000000000000000000000000" - }, - "0x2E07043584F11BFF0AC39c927665DF6c6ebaffFB": { - "balance": "10000000000000000000000000" - }, - "0x60e771E5eCA8E26690920de669520Da210D64A9B": { - "balance": "10000000000000000000000000" - }, - "0xFC4db92C2Cf77CE02fBfd7Da0346d2CbFA66aD59": { - "balance": "10000000000000000000000000" - } - }, - "coinbase": "0x0000000000000000000000000000000000000000", - "difficulty": "0x01", - "extraData": "", - "gasLimit": "0x400000", - "nonce": "0x1234", - "mixhash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "timestamp": "0" -} diff --git a/packages/blockchain/test/testdata/post-merge.json b/packages/blockchain/test/testdata/post-merge.json new file mode 100644 index 0000000000..32f5f093d3 --- /dev/null +++ b/packages/blockchain/test/testdata/post-merge.json @@ -0,0 +1,36 @@ +{ + "config": { + "chainId": 1, + "homesteadBlock": 0, + "eip150Block": 0, + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "muirGlacierBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "clique": { + "period": 5, + "epoch": 30000 + }, + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true + }, + "nonce": "0x42", + "timestamp": "0x0", + "extraData": "0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit": "0x1C9C380", + "difficulty": "0x400000000", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x0000000000000000000000000000000000000000", + "alloc": { + "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": { "balance": "0x6d6172697573766477000000" } + }, + "number": "0x0", + "gasUsed": "0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas": "0x7" +} diff --git a/packages/blockchain/test/testdata/testnet.json b/packages/blockchain/test/testdata/testnet.json index 177e9b5baa..3bf8733743 100644 --- a/packages/blockchain/test/testdata/testnet.json +++ b/packages/blockchain/test/testdata/testnet.json @@ -1,7 +1,6 @@ { "name": "mainnet", "chainId": 1, - "networkId": 1, "defaultHardfork": "london", "consensus": { "type": "pow", diff --git a/packages/blockchain/test/util.ts b/packages/blockchain/test/util.ts index 2d519f85dd..abb88488c5 100644 --- a/packages/blockchain/test/util.ts +++ b/packages/blockchain/test/util.ts @@ -1,5 +1,5 @@ -import { Block, BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Block, createBlock, createBlockHeader } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { MapDB, @@ -13,17 +13,18 @@ import { keccak256 } from 'ethereum-cryptography/keccak.js' import { createBlockchain } from '../src/index.js' +import type { BlockHeader } from '@ethereumjs/block' import type { DB } from '@ethereumjs/util' export const generateBlocks = (numberOfBlocks: number, existingBlocks?: Block[]): Block[] => { const blocks = 
existingBlocks ? existingBlocks : [] const gasLimit = 8000000 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const opts = { common } if (blocks.length === 0) { - const genesis = createBlockFromBlockData({ header: { gasLimit } }, opts) + const genesis = createBlock({ header: { gasLimit } }, opts) blocks.push(genesis) } @@ -37,7 +38,7 @@ export const generateBlocks = (numberOfBlocks: number, existingBlocks?: Block[]) timestamp: lastBlock.header.timestamp + BigInt(1), }, } - const block = createBlockFromBlockData(blockData, { + const block = createBlock(blockData, { common, calcDifficultyFromHeader: lastBlock.header, }) @@ -53,7 +54,6 @@ export const generateBlockchain = async (numberOfBlocks: number, genesis?: Block const blockchain = await createBlockchain({ validateBlocks: true, - validateConsensus: false, genesisBlock: genesis ?? blocks[0], }) try { @@ -77,20 +77,20 @@ export const generateBlockchain = async (numberOfBlocks: number, genesis?: Block export const generateConsecutiveBlock = ( parentBlock: Block, difficultyChangeFactor: number, - gasLimit: bigint = BigInt(8000000) + gasLimit: bigint = BigInt(8000000), ): Block => { if (difficultyChangeFactor > 1) { difficultyChangeFactor = 1 } - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.MuirGlacier }) - const tmpHeader = BlockHeader.fromHeaderData( + const common = new Common({ chain: Mainnet, hardfork: Hardfork.MuirGlacier }) + const tmpHeader = createBlockHeader( { number: parentBlock.header.number + BigInt(1), timestamp: parentBlock.header.timestamp + BigInt(10 + -difficultyChangeFactor * 9), }, - { common } + { common }, ) - const header = BlockHeader.fromHeaderData( + const header = createBlockHeader( { number: parentBlock.header.number + BigInt(1), parentHash: parentBlock.hash(), @@ -101,7 +101,7 @@ export const generateConsecutiveBlock = ( { common, calcDifficultyFromHeader: parentBlock.header, - } + }, ) const block = new Block(header, undefined, undefined, undefined, { common }, undefined) @@ -123,8 +123,8 @@ export const isConsecutive = (blocks: Block[]) => { export const createTestDB = async (): Promise< [DB, Block] > => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const genesis = createBlockFromBlockData({ header: { number: 0 } }, { common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const genesis = createBlock({ header: { number: 0 } }, { common }) const db = new MapDB() await db.batch([ @@ -151,21 +151,21 @@ export const createTestDB = async (): Promise< { type: 'put', key: hexToBytes( - '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ), value: genesis.header.serialize(), }, { type: 'put', key: hexToBytes( - '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa374' + '0x680000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa374', ), value: RLP.encode(toBytes(17179869184)), }, { type: 'put', key: hexToBytes( - '0x620000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0x620000000000000000d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ), value: RLP.encode(genesis.raw().slice(1)), }, @@ -184,14 +184,14 @@ export const createTestDB = async (): Promise< * @param extraData - 
Extra data graffiti in order to create equal blocks (like block number) but with different hashes * @param uncles - Optional, an array of uncle headers. Automatically calculates the uncleHash. */ -function createBlock( +function generateBlock( parentBlock: Block, extraData: string, uncles?: BlockHeader[], - common?: Common + common?: Common, ): Block { uncles = uncles ?? [] - common = common ?? new Common({ chain: Chain.Mainnet }) + common = common ?? new Common({ chain: Mainnet }) if (extraData.length > 32) { throw new Error('extra data graffiti must be 32 bytes or less') @@ -208,7 +208,7 @@ function createBlock( ? parentBlock.header.calcNextBaseFee() : undefined - return createBlockFromBlockData( + return createBlock( { header: { number, @@ -224,8 +224,8 @@ function createBlock( { common, calcDifficultyFromHeader: parentBlock.header, - } + }, ) } -export { createBlock } +export { generateBlock } diff --git a/packages/blockchain/test/utils.spec.ts b/packages/blockchain/test/utils.spec.ts index 6900c1fb86..846a5bcab2 100644 --- a/packages/blockchain/test/utils.spec.ts +++ b/packages/blockchain/test/utils.spec.ts @@ -3,10 +3,9 @@ import { genesisStateRoot } from '@ethereumjs/trie' import { bytesToHex, parseGethGenesisState } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -// kiln genesis with deposit contract storage set import { createBlockchain } from '../src/index.js' -import gethGenesisKilnJSON from './testdata/geth-genesis-kiln.json' +import gethGenesisJSON from './testdata/post-merge.json' import type { Blockchain } from '../src/blockchain.js' @@ -22,23 +21,23 @@ async function getBlockchain(gethGenesis: any): Promise { describe('[Utils/Parse]', () => { it('should properly parse genesis state from gethGenesis', async () => { - const genesisState = parseGethGenesisState(gethGenesisKilnJSON) + const genesisState = parseGethGenesisState(gethGenesisJSON) const stateRoot = await genesisStateRoot(genesisState) assert.equal( bytesToHex(stateRoot), - '0x52e628c7f35996ba5a0402d02b34535993c89ff7fc4c430b2763ada8554bee62', - 'kiln stateRoot matches' + '0xca3149fa9e37db08d1cd49c9061db1002ef1cd58db2210f2115c8c989b2bdf45', + 'stateRoot matches', ) }) it('should initialize blockchain from gethGenesis', async () => { - const blockchain = await getBlockchain(gethGenesisKilnJSON) + const blockchain = await getBlockchain(gethGenesisJSON) const genesisHash = blockchain.genesisBlock.hash() assert.equal( bytesToHex(genesisHash), - '0x51c7fe41be669f69c45c33a56982cbde405313342d9e2b00d7c91a7b284dd4f8', - 'kiln genesis hash matches' + '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a', + 'genesis hash matches', ) }) }) diff --git a/packages/blockchain/tsconfig.lint.json b/packages/blockchain/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/blockchain/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/client/.eslintrc.cjs b/packages/client/.eslintrc.cjs index 974e754e63..43d2c22619 100644 --- a/packages/client/.eslintrc.cjs +++ b/packages/client/.eslintrc.cjs @@ -1,14 +1,11 @@ module.exports = { extends: '../../config/eslint.cjs', - rules: { - 'import/extensions': 'off', - }, parserOptions: { - project: ['./tsconfig.json', './tsconfig.browser.json', './tsconfig.eslint.json'], + project: ['./tsconfig.lint.json'], }, overrides: [ { - files: ['bin/**.ts', 'test/sim/**.ts'], + files: ['bin/**.ts', 'test/sim/**.ts', 'examples/**/*.ts'], rules: { 'no-console': 'off', }, 
diff --git a/packages/client/CHANGELOG.md b/packages/client/CHANGELOG.md index 4af9204427..4aeffdba21 100644 --- a/packages/client/CHANGELOG.md +++ b/packages/client/CHANGELOG.md @@ -6,7 +6,50 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 0.10.1 - 2024-03-05 +## 0.10.2 - 2024-08-15 + +This release comes with some RPC improvements as well as various updates to catch up for testnets preparing for the Prague hardfork as well as the Verkle tree integration. Note that for running/participating in the latest Prague and Verkle testnets it is still needed to join with a build from `master` since testnets are evolving so quickly that it is not practical to catch up with official client releases! + +### Verkle Updates + +- Fixes for Kaustinen4 support, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Kaustinen5 related fixes, PR [#3343](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3343) +- CLI option `--ignoreStatelessInvalidExecs` for Verkle debugging, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Kaustinen6 adjustments, `verkle-cryptography-wasm` migration, PRs [#3355](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3355) and [#3356](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3356) +- Update `kzg-wasm` to `0.4.0`, PR [#3358](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3358) +- Shift Verkle to `osaka` hardfork, PR [#3371](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3371) +- Simplify `--ignoreStatelessInvalidExecs` to just a boolean flag, PR [#3395](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3395) +- Add verkle execution support to `executeBlocks()`, PR [#3406](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3406) +- Verkle decoupling in underlying libraries, PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) + +### Other Features + +- Integrates support for [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) EOA code transactions (outdated) (see tx library for full documentation), see PR [#3470](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3470) +- New `--startExecutionFrom` and `--startExecution` CLI options, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Add `eth_blobBaseFee` RPC endpoint, PR [#3436](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3436) +- Add support for `pending` in `eth_getTransactionCount` RPC method, PR [#3415](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3415) +- Add support for multiple sources of rlp blocks when loading with `--loadBlocksFromRlp`, PR [#3442](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3442) +- Basic Prometheus metrics support (not many metrics yet), PR [#3287](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3287) + +### Other Changes + +- ESM-only client build, PRs [#3359](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3359) and [#3414](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3414) +- Add execution api v4 handling to engine, PR [#3399](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3399) +- New mechanism to keep latest block from peers updated, PR [#3354](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3354) +- Better `--execution` flag guard, PR [#3363](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3363) +- Stricter prefixed hex 
typing, PR [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348) +- Update `multiaddress` dependency, PR [#3384](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3384) +- Internalize `QHeap` dependency, PR [#3451](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3451) +- Internalize `jwt-simple` dependency, PR [#3458](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3458) + +### Bugfixes + +- Fixes for the `eth_estimateGas` RPC endpoint, PR [#3416](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3416) +- Fixes tx status in `eth_getTransactionReceipt` RPC method, PR [#3435](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3435) +- Fixes the "block to payload" serialization for `getPayloadV4`, PR [#3409](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3409) +- Fix the `getPayloadV4` with a deposit tx and expected deposit requests, PR [#3410](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3410) + +## 0.10.1 - 2024-03-18 This is mainly a maintenance release coming with a few internal changes and minor bug fixes, single user-focused addition is the support for the `eth_feeHistory` RPC call. @@ -44,7 +87,7 @@ Note that while HF timestamp switches for all testnets are included, a mainnet H ### WASM Crypto Support -With this release the client uses WASM by default for all crypto related operations like hashing or signature verification, see PR [#3192](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3192). As a WASM crypto library [@polkadot/wasm-crypto](https://github.com/polkadot-js/wasm/tree/master/packages/wasm-crypto) is being used and WASM comes into play in the EVM for hashing opcodes and precompiles, block and tx hashing and ECDSA signature verfication down to trie key hashing and all hashing and signature functionality in the devp2p layer. +With this release the client uses WASM by default for all crypto related operations like hashing or signature verification, see PR [#3192](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3192). As a WASM crypto library [@polkadot/wasm-crypto](https://github.com/polkadot-js/wasm/tree/master/packages/wasm-crypto) is being used and WASM comes into play in the EVM for hashing opcodes and precompiles, block and tx hashing and ECDSA signature verification down to trie key hashing and all hashing and signature functionality in the devp2p layer. This makes up for a significantly lighter and sped-up client experience regarding both block execution and sync times. 
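As a minimal sketch of what this wiring can look like (assuming the `@polkadot/wasm-crypto` exports `keccak256`, `sha256` and `waitReady`, and the `customCrypto` option on `Common` that `bin/cli.ts` uses elsewhere in this diff; the client itself hooks up further functions such as `ecrecover` and `ecsign`):

```ts
// Hedged sketch, not the client's actual wiring: route hashing through
// @polkadot/wasm-crypto by passing a CustomCrypto object to Common.
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import { keccak256 as wasmKeccak256, sha256 as wasmSha256, waitReady } from '@polkadot/wasm-crypto'

import type { CustomCrypto } from '@ethereumjs/common'

const createWasmCommon = async (): Promise<Common> => {
  await waitReady() // the WASM module must be initialized before first use
  const customCrypto: CustomCrypto = {
    keccak256: (msg: Uint8Array) => wasmKeccak256(msg),
    sha256: (msg: Uint8Array) => wasmSha256(msg),
  }
  // Block/tx hashing and trie key hashing done through this Common instance
  // now use the WASM implementations instead of the pure-JS defaults.
  return new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai, customCrypto })
}
```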
@@ -91,7 +134,7 @@ Following spec updates included: - Additional `EIP-4788` updates (address + modulus), PR [#3068](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3068) - Update the beacon block root contract address, PR [#3003](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3003) - Fix `newPayloadV2` having `PayloadV3` params, PR [#2954](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2954) -- Include parent beacon block root for proposal payload uniquness, PR [#2967](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2967) +- Include parent beacon block root for proposal payload uniqueness, PR [#2967](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2967) - Fixes for new engine api method validations for hive pr-834, PR [#2973](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2973) - Track and respond to invalid blocks in engine api and other hive engine-cancun fixes, PR [#3077](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3077) - Make the newpayload execution of big blocks non blocking, PR [#3076](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3076) @@ -111,7 +154,7 @@ Both changes together should increase client sync performance by 30% or more. ### New Post-Merge UX Experience -We have gone through the complete post-Merge beacon sync process and reworked the client output along, see PR [#3085](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3085) and some follow-up PRs. It is now substantially easier to follow the different stages of the sync process (backfilling, forwardfilling & execution, following the chain) and track the overall sync progress. Holesky with its newly initialized state is a great testbed to see the new client capabilities in practice! 🤩 +We have gone through the complete post-Merge beacon sync process and reworked the client output along, see PR [#3085](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3085) and some follow-up PRs. It is now substantially easier to follow the different stages of the sync process (backfilling, forward filling & execution, following the chain) and track the overall sync progress. Holesky with its newly initialized state is a great testbed to see the new client capabilities in practice! 
🤩 ### Block/Tx Profiling @@ -127,7 +170,7 @@ A new more fine-tuned discovery `discV4` mechanism has been integrated along PR - Decouple skeleton from beacon sync, PR [#3028](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3028) - Fix canonical reset of the chain by the skeleton, PR [#3078](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3078) - Skeleton reorg head fixes, PR [#3085](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3085) -- Fixes regarding beacon sync, vmexecution and further log improvs, PR [#3094](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3094) +- Fixes regarding beacon sync, vmexecution and further log improvements, PR [#3094](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3094) - Various rebase and sync related improvements along PR [#3031](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3031) ### Other Features @@ -255,7 +298,7 @@ ethereumjs --network=sepolia - Change withdrawal amount representation from Wei to Gwei, PR [#2483](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2483) - Fix forkchoiceUpdateV2 shanghai, PR [#2502](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2502) - Engine-api-validators, newPayloadV2 and newPayloadV3 updates, PR [#2504](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2504) -- Add new shanghai engine apis (getCapabilties, getPayloadBodiesByHashV1, getPayloadBodiesByRangeV1), PR [#2509](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2509) +- Add new shanghai engine apis (getCapabilities, getPayloadBodiesByHashV1, getPayloadBodiesByRangeV1), PR [#2509](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2509) - getPayloadBodiesByRange fixes, PR [#2518](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2518) - Changes engine_forkchoiceUpdatedV2 withdrawals parameter to `optional` to ensure we return the correct error message if a preShanghai payload is sent, PR [#2533](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2533) @@ -339,9 +382,9 @@ The Client is now ready to work with hardforks triggered by timestamp, which wil - Enhanced skeleton sync to process batches of new payloads and fcUs, PR [#2309](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2309) - Various tx pool fixes, PR [#2382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2382) - Fixed skeleton reset scenario when head announced before subchain 0 tail, PR [#2408](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2408) -- Handle genesis and genesis extention properly for skeleton, PR [#2420](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2420) +- Handle genesis and genesis extension properly for skeleton, PR [#2420](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2420) - Fixed enode to ip4 and write the same to disk, PR [#2407](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2407) -- Fixed sendTransactions peer loop and enchance txpool logs, PR [#2412](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2412) +- Fixed sendTransactions peer loop and exchange txpool logs, PR [#2412](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2412) - Used unpadded int/bigint to buffer in net protocols (bug), PR [#2409](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2409) - Fixed handling of post-merge genesis blocks, PR [#2427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2427) - Fixed logic bug in txPool.validate, PR [#2441](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2441) @@ -409,7 +452,7 @@ This is the first client 
release which works with the next generation EthereumJS ### DB Format Changes -Note that DB format has changed along with this release and it is therefore necessary to delete the old chain and state data directories. If this is causing you substantial hazzle please ask in our monorepo or Discord server on a migration script. While this will cause us some extra work it might actually be possible to provide one if there is some demand. +Note that DB format has changed along with this release and it is therefore necessary to delete the old chain and state data directories. If this is causing you substantial hassle please ask in our monorepo or Discord server on a migration script. While this will cause us some extra work it might actually be possible to provide one if there is some demand. ## Default Receipt Saving @@ -417,7 +460,7 @@ With the transition to PoS chains running the client in a CL/EL setup the activa Since the EthereumJS client is still mainly used in not-that-much-grown testnets and environments (so the additional disk space requirements are not that grave) we have therefore decided to turn the respective option `--saveReceipts` on by default, see PR [#2040](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2040). -You can use `--saveReceipts=false` if you would rather want to deactive again. +You can use `--saveReceipts=false` if you would rather want to deactivate again. ### Other Changes @@ -443,7 +486,7 @@ See PR [#1878](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1878) for ### TxPool Validation -With this release our tx pool grows into a still simple but full-grown and base-feature-complete pool by adding all sorts of validation checks to ensure validity with consensus, see PR [#1852](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1852). This is another substantial step towards fullfilling the requirements for acting as an active block-producing client in an Ethereum network. Validation checks - like e.g. "tx nonce is greater than sender's current nonce" - are now run early on when a tx is send via RPC `eth_sendRawTransaction` and feedback (and eventual rejection of the tx) is provided at the point of submission. This allows for building valid blocks when acting as a block producer. +With this release our tx pool grows into a still simple but full-grown and base-feature-complete pool by adding all sorts of validation checks to ensure validity with consensus, see PR [#1852](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1852). This is another substantial step towards fulfilling the requirements for acting as an active block-producing client in an Ethereum network. Validation checks - like e.g. "tx nonce is greater than sender's current nonce" - are now run early on when a tx is send via RPC `eth_sendRawTransaction` and feedback (and eventual rejection of the tx) is provided at the point of submission. This allows for building valid blocks when acting as a block producer. ### Fixes diff --git a/packages/client/DESIGN.md b/packages/client/DESIGN.md index 22f797c100..8056aff1d8 100644 --- a/packages/client/DESIGN.md +++ b/packages/client/DESIGN.md @@ -60,7 +60,7 @@ to help contributors better understand how the project is organized. and `removed` events when new peers are added and removed and also emit the `message` event whenever any of the peers in the pool emit a message. Each `Service` has an associated `PeerPool` and they are used primarily by `Synchronizer`s to help with blockchain synchronization. 
- `Synchronizer` Subclasses of this class implements a specific blockchain synchronization strategy. They - also make use of subclasses of the `Fetcher` class that help fetch headers and bodies from pool peers. The fetchers internally make use of streams to handle things like queuing and backpressure. + also make use of subclasses of the `Fetcher` class that help fetch headers and bodies from pool peers. The fetchers internally make use of streams to handle things like queuing and back pressure. - `FullSynchronizer` [**In Progress**] Implements full syncing of the blockchain - `LightSynchronizer` [**In Progress**] Implements light syncing of the blockchain - `Handler` Subclasses of this class implements a protocol message handler. Handlers respond to incoming requests from peers. diff --git a/packages/client/README.md b/packages/client/README.md index 5eb589fe0f..487ae47962 100644 --- a/packages/client/README.md +++ b/packages/client/README.md @@ -71,7 +71,7 @@ A Docker image is built nightly from the current master branch and can be retrie docker pull ethpandaops/ethereumjs:master ``` -Alternatively, an image from the most recent stable releast can be accessed via: +Alternatively, an image from the most recent stable release can be accessed via: ```sh docker pull ethpandaops/ethereumjs:stable @@ -307,8 +307,8 @@ dist/bin/cli.js --d ## Metrics -The client can optionally collect metrics using the Prometheus metrics platform and expose them via an HTTP endpoint with the following CLI flags. -The current metrics that are reported by the client can be found [here](./src/util//metrics.ts). +The client can optionally collect metrics using the [Prometheus](https://github.com/prometheus/prometheus) metrics platform and expose them via an HTTP endpoint with the following CLI flags. +The current metrics that are reported by the client can be found at the default port and route: `localhost:8000/metrics`. 
```sh # npm installation @@ -318,8 +318,6 @@ ethereumjs --prometheus npm run client:start:ts -- --prometheus --prometheusPort=9123 ``` -Note: The Prometheus endpoint runs on port 8000 by default - ## API [API Reference](./docs/README.md) diff --git a/packages/client/archive/libp2p/index.ts b/packages/client/archive/libp2p/index.ts index f7c1b46164..fb1dc945a5 100644 --- a/packages/client/archive/libp2p/index.ts +++ b/packages/client/archive/libp2p/index.ts @@ -1,3 +1,4 @@ +// cspell:ignore ppeer pnode pserver import { Blockchain } from '@ethereumjs/blockchain' import { Chain, Common } from '@ethereumjs/common' import debug from 'debug' @@ -72,7 +73,7 @@ export async function createClient(args: any) { }) config.events.setMaxListeners(50) const chainDB = new Level( - `${datadir}/${common.chainName()}` + `${datadir}/${common.chainName()}`, ) const blockchain = await createBlockchain({ diff --git a/packages/client/archive/libp2p/libp2pnode.ts b/packages/client/archive/libp2p/libp2pnode.ts index 2e532234fa..01903c52ef 100644 --- a/packages/client/archive/libp2p/libp2pnode.ts +++ b/packages/client/archive/libp2p/libp2pnode.ts @@ -1,3 +1,4 @@ +// cspell:ignore MPLEX Muxer /** * Libp2p Bundle * @memberof module:net/peer diff --git a/packages/client/archive/libp2p/net/peer/libp2pnode.ts b/packages/client/archive/libp2p/net/peer/libp2pnode.ts index 125ffe71f2..34c85539e3 100644 --- a/packages/client/archive/libp2p/net/peer/libp2pnode.ts +++ b/packages/client/archive/libp2p/net/peer/libp2pnode.ts @@ -1,4 +1,5 @@ //@ts-nocheck +// cspell:ignore MPLEX Muxer /** * Libp2p Bundle * @memberof module:net/peer diff --git a/packages/client/archive/libp2p/net/peer/libp2ppeer.ts b/packages/client/archive/libp2p/net/peer/libp2ppeer.ts index b6f3ff5543..8f89e78781 100644 --- a/packages/client/archive/libp2p/net/peer/libp2ppeer.ts +++ b/packages/client/archive/libp2p/net/peer/libp2ppeer.ts @@ -1,4 +1,5 @@ //@ts-nocheck +// cspell:ignore psender pnode muxed muxer import { multiaddr } from '@multiformats/multiaddr' import { Peer } from '../../../src/net/peer/peer' @@ -31,7 +32,7 @@ export interface Libp2pPeerOptions extends Omit { await Promise.all( this.protocols.map(async (p) => { @@ -106,10 +107,10 @@ export class Libp2pPeer extends Peer { } catch (err: any) { const peerInfo = isPeerId(peer) ? 
`id=${peer.toB58String()}` : `multiaddr=${peer}` this.config.logger.debug( - `Peer doesn't support protocol=${protocol} ${peerInfo} ${err.stack}` + `Peer doesn't support protocol=${protocol} ${peerInfo} ${err.stack}`, ) } - }) + }), ) this.server = server this.connected = true diff --git a/packages/client/archive/libp2p/net/server/libp2pserver.ts b/packages/client/archive/libp2p/net/server/libp2pserver.ts index 89d46e3129..d283563e65 100644 --- a/packages/client/archive/libp2p/net/server/libp2pserver.ts +++ b/packages/client/archive/libp2p/net/server/libp2pserver.ts @@ -1,4 +1,5 @@ //@ts-nocheck +// cspell:ignore pnode addrs // eslint-disable-next-line implicit-dependencies/no-implicit, import/no-extraneous-dependencies import { keys } from 'libp2p-crypto' import { multiaddr } from '@multiformats/multiaddr' diff --git a/packages/client/bin/cli.ts b/packages/client/bin/cli.ts index e3d552f829..0067810b83 100755 --- a/packages/client/bin/cli.ts +++ b/packages/client/bin/cli.ts @@ -1,22 +1,25 @@ #!/usr/bin/env node -import { createBlockFromValuesArray } from '@ethereumjs/block' -import { createBlockchain } from '@ethereumjs/blockchain' +import { createBlockFromBytesArray } from '@ethereumjs/block' +import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' import { Chain, Common, ConsensusAlgorithm, Hardfork, + Mainnet, createCommonFromGethGenesis, - getInitializedChains, + createCustomCommon, + getPresetChainConfig, } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { - Address, BIGINT_2, bytesToHex, calculateSigRecovery, concatBytes, + createAddressFromPrivateKey, + createAddressFromString, ecrecover, ecsign, hexToBytes, @@ -63,14 +66,15 @@ import type { FullEthereumService } from '../src/service/index.js' import type { ClientOpts } from '../src/types.js' import type { RPCArgs } from './startRpc.js' import type { Block, BlockBytes } from '@ethereumjs/block' +import type { ConsensusDict } from '@ethereumjs/blockchain' import type { CustomCrypto } from '@ethereumjs/common' -import type { GenesisState, PrefixedHexString } from '@ethereumjs/util' +import type { Address, GenesisState, PrefixedHexString } from '@ethereumjs/util' import type { AbstractLevel } from 'abstract-level' import type { Server as RPCServer } from 'jayson/promise/index.js' type Account = [address: Address, privateKey: Uint8Array] -const networks = Object.entries(getInitializedChains().names) +const networks = Object.keys(Chain).map((network) => network.toLowerCase()) let logger: Logger @@ -81,12 +85,25 @@ const args: ClientOpts = yargs }) .option('network', { describe: 'Network', - choices: networks.map((n) => n[1]).filter((el) => isNaN(parseInt(el))), + choices: networks, + coerce: (arg: string) => arg.toLowerCase(), default: 'mainnet', }) + .option('chainId', { + describe: 'Chain ID', + choices: Object.entries(Chain) + .map((n) => parseInt(n[1] as string)) + .filter((el) => !isNaN(el)), + default: undefined, + conflicts: ['customChain', 'customGenesisState', 'gethGenesis'], // Disallows custom chain data and chainId + }) .option('networkId', { describe: 'Network ID', - choices: networks.map((n) => parseInt(n[0])).filter((el) => !isNaN(el)), + deprecated: true, + deprecate: 'use --chainId instead', + choices: Object.entries(Chain) + .map((n) => parseInt(n[1] as string)) + .filter((el) => !isNaN(el)), default: undefined, conflicts: ['customChain', 'customGenesisState', 'gethGenesis'], // Disallows custom chain data and networkId }) @@ -362,7 +379,7 @@ const args: ClientOpts = yargs 
describe: 'Address for mining rewards (etherbase). If not provided, defaults to the primary account', string: true, - coerce: (coinbase) => Address.fromString(coinbase), + coerce: (coinbase) => createAddressFromString(coinbase), }) .option('saveReceipts', { describe: @@ -413,7 +430,7 @@ const args: ClientOpts = yargs }) .option('isSingleNode', { describe: - 'To run client in single node configuration without need to discover the sync height from peer. Particularly useful in test configurations. This flag is automically activated in the "dev" mode', + 'To run client in single node configuration without need to discover the sync height from peer. Particularly useful in test configurations. This flag is automatically activated in the "dev" mode', boolean: true, }) .option('vmProfileBlocks', { @@ -443,7 +460,7 @@ const args: ClientOpts = yargs }) .option('engineNewpayloadMaxExecute', { describe: - 'Number of unexecuted blocks (including ancestors) that can be blockingly executed in engine`s new payload (if required and possible) to determine the validity of the block', + 'Number of unexecuted blocks (including ancestors) that can be executed per-block in engine`s new payload (if required and possible) to determine the validity of the block', number: true, }) .option('skipEngineExec', { @@ -453,7 +470,7 @@ const args: ClientOpts = yargs }) .option('ignoreStatelessInvalidExecs', { describe: - 'Ignore stateless execution failures and keep moving the vm execution along using execution witnesses available in block (verkle). Sets/overrides --statelessVerkle=true and --engineNewpayloadMaxExecute=0 to prevent engine newPayload direct block execution where block execution faliures may stall the CL client. Useful for debugging the verkle. The invalid blocks will be stored in dataDir/network/invalidPayloads which one may use later for debugging', + 'Ignore stateless execution failures and keep moving the vm execution along using execution witnesses available in block (verkle). Sets/overrides --statelessVerkle=true and --engineNewpayloadMaxExecute=0 to prevent engine newPayload direct block execution where block execution failures may stall the CL client. Useful for debugging the verkle. 
The invalid blocks will be stored in dataDir/network/invalidPayloads which one may use later for debugging', boolean: true, hidden: true, }) @@ -538,7 +555,7 @@ async function executeBlocks(client: EthereumClient) { } } catch (e: any) { client.config.logger.error( - 'Wrong input format for block execution, allowed format types: 5, 5-10, 5[0xba4b5fd92a26badad3cad22eb6f7c7e745053739b5f5d1e8a3afb00f8fb2a280,[TX_HASH_2],...], 5[*] (all txs in verbose mode)' + 'Wrong input format for block execution, allowed format types: 5, 5-10, 5[0xba4b5fd92a26badad3cad22eb6f7c7e745053739b5f5d1e8a3afb00f8fb2a280,[TX_HASH_2],...], 5[*] (all txs in verbose mode)', ) process.exit() } @@ -580,14 +597,9 @@ async function startExecutionFrom(client: EthereumClient) { const startExecutionBlock = await client.chain.getBlock(startExecutionFrom) const startExecutionParent = await client.chain.getBlock(startExecutionBlock.header.parentHash) - const startExecutionParentTd = await client.chain.getTd( - startExecutionParent.hash(), - startExecutionParent.header.number - ) const startExecutionHardfork = client.config.execCommon.getHardforkBy({ blockNumber: startExecutionBlock.header.number, - td: startExecutionParentTd, timestamp: startExecutionBlock.header.timestamp, }) @@ -601,7 +613,7 @@ async function startExecutionFrom(client: EthereumClient) { await client.chain.blockchain.setIteratorHead('vm', startExecutionParent.hash()) await client.chain.update(false) logger.info( - `vmHead set to ${client.chain.headers.height} for starting stateless execution at hardfork=${startExecutionHardfork}` + `vmHead set to ${client.chain.headers.height} for starting stateless execution at hardfork=${startExecutionHardfork}`, ) } catch (err: any) { logger.error(`Error setting vmHead for starting stateless execution: ${err}`) @@ -619,7 +631,7 @@ async function startExecutionFrom(client: EthereumClient) { */ async function startClient( config: Config, - genesisMeta: { genesisState?: GenesisState; genesisStateRoot?: Uint8Array } = {} + genesisMeta: { genesisState?: GenesisState; genesisStateRoot?: Uint8Array } = {}, ) { config.logger.info(`Data directory: ${config.datadir}`) if (config.lightserv) { @@ -630,14 +642,21 @@ async function startClient( let blockchain if (genesisMeta.genesisState !== undefined || genesisMeta.genesisStateRoot !== undefined) { - const validateConsensus = config.chainCommon.consensusAlgorithm() === ConsensusAlgorithm.Clique + let validateConsensus = false + const consensusDict: ConsensusDict = {} + if (config.chainCommon.consensusAlgorithm() === ConsensusAlgorithm.Clique) { + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() + validateConsensus = true + } + blockchain = await createBlockchain({ db: new LevelDB(dbs.chainDB), ...genesisMeta, common: config.chainCommon, hardforkByHeadBlockNumber: true, - validateConsensus, validateBlocks: true, + validateConsensus, + consensusDict, genesisState: genesisMeta.genesisState, genesisStateRoot: genesisMeta.genesisStateRoot, }) @@ -660,7 +679,7 @@ async function startClient( let buf = RLP.decode(blockRlp, true) while (buf.data?.length > 0 || buf.remainder?.length > 0) { try { - const block = createBlockFromValuesArray(buf.data as BlockBytes, { + const block = createBlockFromBytesArray(buf.data as BlockBytes, { common: config.chainCommon, setHardfork: true, }) @@ -669,11 +688,11 @@ async function startClient( config.logger.info( `Preloading block hash=0x${short(bytesToHex(block.header.hash()))} number=${ block.header.number - }` + }`, ) } catch (err: any) { 
config.logger.info( - `Encountered error while while preloading chain data error=${err.message}` + `Encountered error while while preloading chain data error=${err.message}`, ) break } @@ -784,6 +803,8 @@ async function inputAccounts() { const accounts: Account[] = [] const rl = readline.createInterface({ + // @ts-ignore Looks like there is a type incompatibility in NodeJS ReadStream vs what this package expects + // TODO: See whether package needs to be updated or not input: process.stdin, output: process.stdout, }) @@ -813,18 +834,18 @@ async function inputAccounts() { const isFile = existsSync(path.resolve(addresses[0])) if (!isFile) { for (const addressString of addresses) { - const address = Address.fromString(addressString) + const address = createAddressFromString(addressString) const inputKey = (await question( - `Please enter the 0x-prefixed private key to unlock ${address}:\n` + `Please enter the 0x-prefixed private key to unlock ${address}:\n`, )) as PrefixedHexString ;(rl as any).history = (rl as any).history.slice(1) const privKey = hexToBytes(inputKey) - const derivedAddress = Address.fromPrivateKey(privKey) - if (address.equals(derivedAddress)) { + const derivedAddress = createAddressFromPrivateKey(privKey) + if (address.equals(derivedAddress) === true) { accounts.push([address, privKey]) } else { console.error( - `Private key does not match for ${address} (address derived: ${derivedAddress})` + `Private key does not match for ${address} (address derived: ${derivedAddress})`, ) process.exit() } @@ -832,7 +853,7 @@ async function inputAccounts() { } else { const acc = readFileSync(path.resolve(args.unlock!), 'utf-8').replace(/(\r\n|\n|\r)/gm, '') const privKey = hexToBytes(`0x${acc}`) // See docs: acc has to be non-zero prefixed in the file - const derivedAddress = Address.fromPrivateKey(privKey) + const derivedAddress = createAddressFromPrivateKey(privKey) accounts.push([derivedAddress, privKey]) } } catch (e: any) { @@ -848,7 +869,7 @@ async function inputAccounts() { */ function generateAccount(): Account { const privKey = randomBytes(32) - const address = Address.fromPrivateKey(privKey) + const address = createAddressFromPrivateKey(privKey) console.log('='.repeat(50)) console.log('Account generated for mining blocks:') console.log(`Address: ${address}`) @@ -868,7 +889,7 @@ const stopClient = async ( clientStartPromise: Promise<{ client: EthereumClient servers: (RPCServer | http.Server)[] - } | null> + } | null>, ) => { config.logger.info('Caught interrupt signal. Obtaining client handle for clean shutdown...') config.logger.info('(This might take a little longer if client not yet fully started)') @@ -908,8 +929,10 @@ async function run() { // TODO sharding: Just initialize kzg library now, in future it can be optimized to be // loaded and initialized on the sharding hardfork activation - // Give network id precedence over network name - const chain = args.networkId ?? args.network ?? Chain.Mainnet + // Give chainId priority over networkId + // Give networkId precedence over network name + const chainName = args.chainId ?? args.networkId ?? args.network ?? 
Chain.Mainnet + const chain = getPresetChainConfig(chainName) const cryptoFunctions: CustomCrypto = {} const kzg = await loadKZG() @@ -922,14 +945,14 @@ async function run() { v: bigint, r: Uint8Array, s: Uint8Array, - chainID?: bigint + chainID?: bigint, ) => secp256k1Expand( secp256k1Recover( msgHash, concatBytes(setLengthLeft(r, 32), setLengthLeft(s, 32)), - Number(calculateSigRecovery(v, chainID)) - ) + Number(calculateSigRecovery(v, chainID)), + ), ).slice(1) cryptoFunctions.sha256 = wasmSha256 cryptoFunctions.ecsign = (msg: Uint8Array, pk: Uint8Array, chainId?: bigint) => { @@ -994,12 +1017,11 @@ async function run() { try { const customChainParams = JSON.parse(readFileSync(args.customChain, 'utf-8')) customGenesisState = JSON.parse(readFileSync(args.customGenesisState!, 'utf-8')) - common = new Common({ - chain: customChainParams.name, - customChains: [customChainParams], + common = createCustomCommon(customChainParams, Mainnet, { customCrypto: cryptoFunctions, }) } catch (err: any) { + console.error(err) console.error(`invalid chain parameters: ${err.message}`) process.exit() } @@ -1017,7 +1039,7 @@ async function run() { if (args.mine === true && accounts.length === 0) { console.error( - 'Please provide an account to mine blocks with `--unlock [address]` or use `--dev` to generate' + 'Please provide an account to mine blocks with `--unlock [address]` or use `--dev` to generate', ) process.exit() } @@ -1094,10 +1116,16 @@ async function run() { const reqUrl = new url.URL(req.url, `http://${req.headers.host}`) const route = reqUrl.pathname - if (route === '/metrics') { - // Return all metrics in the Prometheus exposition format - res.setHeader('Content-Type', register.contentType) - res.end(await register.metrics()) + switch (route) { + case '/metrics': + // Return all metrics in the Prometheus exposition format + res.setHeader('Content-Type', register.contentType) + res.end(await register.metrics()) + break + default: + res.statusCode = 404 + res.end('Not found') + return } }) // Start the HTTP server which exposes the metrics on http://localhost:${args.prometheusPort}/metrics diff --git a/packages/client/bin/startRpc.ts b/packages/client/bin/startRpc.ts index 7f28bc7b8b..b12dcbd55f 100644 --- a/packages/client/bin/startRpc.ts +++ b/packages/client/bin/startRpc.ts @@ -101,7 +101,7 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { if ((rpc || rpcEngine) && !config.saveReceipts) { logger?.warn( - `Starting client without --saveReceipts might lead to interop issues with a CL especially if the CL intends to propose blocks, omitting methods=${saveReceiptsMethods}` + `Starting client without --saveReceipts might lead to interop issues with a CL especially if the CL intends to propose blocks, omitting methods=${saveReceiptsMethods}`, ) } @@ -136,12 +136,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { logger.info( `Started JSON RPC Server address=http://${rpcAddr}:${rpcPort} namespaces=${namespaces}${ withEngineMethods ? ' rpcEngineAuth=' + rpcEngineAuth.toString() : '' - }` + }`, ) logger.debug( `Methods available at address=http://${rpcAddr}:${rpcPort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) } if (ws) { @@ -160,12 +160,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { logger.info( `Started JSON RPC Server address=ws://${wsAddr}:${wsPort} namespaces=${namespaces}${ withEngineMethods ? 
` rpcEngineAuth=${rpcEngineAuth}` : '' - }` + }`, ) logger.debug( `Methods available at address=ws://${wsAddr}:${wsPort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) } } @@ -189,12 +189,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { }) rpcHttpServer.listen(rpcEnginePort, rpcEngineAddr) logger.info( - `Started JSON RPC server address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}` + `Started JSON RPC server address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}`, ) logger.debug( `Methods available at address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) if (ws) { @@ -212,12 +212,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { const rpcWsServer = createWsRPCServerListener(opts) if (rpcWsServer) rpcWsServer.listen(wsEnginePort, wsEngineAddr) logger.info( - `Started JSON RPC Server address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}` + `Started JSON RPC Server address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}`, ) logger.debug( `Methods available at address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} methods=${Object.keys( - methods - ).join(',')}` + methods, + ).join(',')}`, ) } } diff --git a/packages/client/devnets/4844-interop/tools/txGenerator.ts b/packages/client/devnets/4844-interop/tools/txGenerator.ts index f6d6aea8a1..4d799e65c5 100644 --- a/packages/client/devnets/4844-interop/tools/txGenerator.ts +++ b/packages/client/devnets/4844-interop/tools/txGenerator.ts @@ -1,13 +1,13 @@ // Adapted from - https://github.com/Inphi/eip4844-interop/blob/master/blob_tx_generator/blob.js -import { Common, Hardfork } from '@ethereumjs/common' -import { BlobEIP4844Transaction, TransactionType, TxData } from '@ethereumjs/tx' +import { createCommonFromGethGenesis, Hardfork } from '@ethereumjs/common' +import { createTxFromTxData, TransactionType, TxData } from '@ethereumjs/tx' import { - Address, blobsToCommitments, commitmentsToVersionedHashes, getBlobs, bytesToHex, hexToBytes, + createAddressFromPrivateKey, } from '@ethereumjs/util' import { randomBytes } from '@ethereumjs/util' @@ -18,8 +18,8 @@ import { loadKZG } from 'kzg-wasm' const clientPort = parseInt(process.argv[2]) // EL client port number const input = process.argv[3] // text to generate blob from const genesisJson = require(process.argv[4]) // Genesis parameters -const pkey = hexToBytes('0x' + process.argv[5]) // private key of tx sender as unprefixed hex string (unprefixed in args) -const sender = Address.fromPrivateKey(pkey) +const pkey = hexToBytes(`0x${process.argv[5]}`) // private key of tx sender as unprefixed hex string (unprefixed in args) +const sender = createAddressFromPrivateKey(pkey) async function getNonce(client: Client, account: string) { const nonce = await client.request('eth_getTransactionCount', [account, 'latest'], 2.0) @@ -41,7 +41,7 @@ async function run(data: any) { const commitments = blobsToCommitments(kzg, blobs) const hashes = commitmentsToVersionedHashes(commitments) - const account = Address.fromPrivateKey(randomBytes(32)) + const account = createAddressFromPrivateKey(randomBytes(32)) const txData: TxData[TransactionType.BlobEIP4844] = { to: account.toString(), data: '0x', @@ 
-62,7 +62,7 @@ async function run(data: any) { txData.gasLimit = BigInt(28000000) const nonce = await getNonce(client, sender.toString()) txData.nonce = BigInt(nonce) - const blobTx = BlobEIP4844Transaction.fromTxData(txData, { common }).sign(pkey) + const blobTx = createTxFromTxData(txData, { common }).sign(pkey) const serializedWrapper = blobTx.serializeNetworkWrapper() diff --git a/packages/client/examples/private-geth-network.md b/packages/client/examples/private-geth-network.md index ab20204888..53eb07a96f 100644 --- a/packages/client/examples/private-geth-network.md +++ b/packages/client/examples/private-geth-network.md @@ -14,7 +14,7 @@ Second, get geth configured to use the genesis parameters file just updated. Now, let's run geth and ensure that its sealing blocks. Note, geth will prompt you for a password to unlock your signer account. -`geth --datadir data --nat extip:[your local ip address here] --networkid 15470 --unlock [the signer account you created] --mine --nodiscover` +`geth --datadir data --nat extip:[your local ip address here] --chainId 15470 --unlock [the signer account you created] --mine --nodiscover` You should start seeing logs like below: diff --git a/packages/client/package.json b/packages/client/package.json index 648725f084..f1618b7d2b 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/client", - "version": "0.10.1", + "version": "0.10.2", "description": "EthereumJS Execution Layer (EL) Client Implementation", "keywords": [ "ethereum", @@ -55,22 +55,23 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/block": "5.2.0", - "@ethereumjs/blockchain": "7.2.0", - "@ethereumjs/common": "4.3.0", - "@ethereumjs/devp2p": "6.1.2", - "@ethereumjs/ethash": "3.0.3", - "@ethereumjs/evm": "3.0.0", - "@ethereumjs/genesis": "0.2.2", + "@ethereumjs/block": "5.3.0", + "@ethereumjs/blockchain": "7.3.0", + "@ethereumjs/common": "4.4.0", + "@ethereumjs/devp2p": "6.1.3", + "@ethereumjs/ethash": "3.0.4", + "@ethereumjs/evm": "3.1.0", + "@ethereumjs/genesis": "0.2.3", "@ethereumjs/rlp": "5.0.2", - "@ethereumjs/statemanager": "2.3.0", - "@ethereumjs/trie": "6.2.0", - "@ethereumjs/tx": "5.3.0", - "@ethereumjs/util": "9.0.3", - "@ethereumjs/verkle": "^0.0.2", - "@ethereumjs/vm": "8.0.0", + "@ethereumjs/statemanager": "2.4.0", + "@ethereumjs/trie": "6.2.1", + "@ethereumjs/tx": "5.4.0", + "@ethereumjs/util": "9.1.0", + "@ethereumjs/verkle": "^0.1.0", + "@ethereumjs/vm": "8.1.0", + "@js-sdsl/ordered-map": "^4.4.2", "@multiformats/multiaddr": "^12.2.1", - "@polkadot/util": "^12.6.2", + "@polkadot/util": "^13.0.2", "@polkadot/wasm-crypto": "^7.3.2", "@scure/base": "^1.1.7", "abstract-level": "^1.0.3", @@ -82,12 +83,12 @@ "ethereum-cryptography": "^2.2.1", "it-pipe": "^1.1.0", "jayson": "^4.0.0", - "js-sdsl": "^4.4.0", "kzg-wasm": "^0.4.0", "level": "^8.0.0", "mcl-wasm": "^1.5.0", "memory-level": "^1.0.0", "prom-client": "^15.1.0", + "rustbn-wasm": "^0.4.0", "verkle-cryptography-wasm": "^0.4.5", "winston": "^3.3.3", "winston-daily-rotate-file": "^4.5.5", diff --git a/packages/client/src/blockchain/chain.ts b/packages/client/src/blockchain/chain.ts index a2a8dbe50a..af99f4aa63 100644 --- a/packages/client/src/blockchain/chain.ts +++ b/packages/client/src/blockchain/chain.ts @@ -1,14 +1,14 @@ -import { BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' -import { createBlockchain } from '@ethereumjs/blockchain' +import { createBlockFromBytesArray, createBlockHeaderFromBytesArray } from 
'@ethereumjs/block' +import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' import { ConsensusAlgorithm, Hardfork } from '@ethereumjs/common' -import { BIGINT_0, BIGINT_1, equalsBytes } from '@ethereumjs/util' +import { BIGINT_0, equalsBytes } from '@ethereumjs/util' import { LevelDB } from '../execution/level.js' import { Event } from '../types.js' import type { Config } from '../config.js' -import type { Block } from '@ethereumjs/block' -import type { Blockchain } from '@ethereumjs/blockchain' +import type { Block, BlockHeader } from '@ethereumjs/block' +import type { Blockchain, ConsensusDict } from '@ethereumjs/blockchain' import type { DB, DBObject, GenesisState } from '@ethereumjs/util' import type { AbstractLevel } from 'abstract-level' @@ -158,7 +158,9 @@ export class Chain { */ public static async create(options: ChainOptions) { let validateConsensus = false + const consensusDict: ConsensusDict = {} if (options.config.chainCommon.consensusAlgorithm() === ConsensusAlgorithm.Clique) { + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() validateConsensus = true } @@ -170,6 +172,7 @@ export class Chain { hardforkByHeadBlockNumber: true, validateBlocks: true, validateConsensus, + consensusDict, genesisState: options.genesisState, genesisStateRoot: options.genesisStateRoot, })) @@ -223,10 +226,10 @@ export class Chain { } /** - * Network ID + * Chain ID */ - get networkId(): bigint { - return this.config.chainCommon.networkId() + get chainId(): bigint { + return this.config.chainCommon.chainId() } /** @@ -263,7 +266,7 @@ export class Chain { this.config.chainCommon.events.on('hardforkChanged', async (hardfork: string) => { const block = this.config.chainCommon.hardforkBlock() this.config.superMsg( - `New hardfork reached 🪢 ! hardfork=${hardfork} ${block !== null ? `block=${block}` : ''}` + `New hardfork reached 🪢 ! hardfork=${hardfork} ${block !== null ? `block=${block}` : ''}`, ) }) } @@ -334,45 +337,11 @@ export class Chain { this._headers = headers this._blocks = blocks - const parentTd = await this.blockchain.getParentTD(headers.latest) this.config.chainCommon.setHardforkBy({ blockNumber: headers.latest.number, - td: parentTd, timestamp: headers.latest.timestamp, }) - // Check and log if this is a terminal block and next block could be merge - if (!this.config.chainCommon.gteHardfork(Hardfork.Paris)) { - const nextBlockHf = this.config.chainCommon.getHardforkBy({ - blockNumber: headers.height + BIGINT_1, - td: headers.td, - }) - if (this.config.chainCommon.hardforkGteHardfork(nextBlockHf, Hardfork.Paris)) { - this.config.logger.info('*'.repeat(85)) - this.config.logger.info( - `Paris (Merge) hardfork reached 🐼 👉 👈 🐼 ! block=${headers.height} td=${headers.td}` - ) - this.config.logger.info('-'.repeat(85)) - this.config.logger.info(' ') - this.config.logger.info('Consensus layer client (CL) needed for continued sync:') - this.config.logger.info( - 'https://ethereum.org/en/developers/docs/nodes-and-clients/#consensus-clients' - ) - this.config.logger.info(' ') - this.config.logger.info( - 'Make sure to have the JSON RPC (--rpc) and Engine API (--rpcEngine) endpoints exposed' - ) - this.config.logger.info('and JWT authentication configured (see client README).') - this.config.logger.info(' ') - this.config.logger.info('*'.repeat(85)) - this.config.logger.info( - `Transitioning to PoS! 
First block for CL-framed execution: block=${ - headers.height + BIGINT_1 - }` - ) - } - } - if (emit) { this.config.events.emit(Event.CHAIN_UPDATED) } @@ -390,7 +359,7 @@ export class Chain { block: Uint8Array | bigint, max = 1, skip = 0, - reverse = false + reverse = false, ): Promise { if (!this.opened) throw new Error('Chain closed') return this.blockchain.getBlocks(block, max, skip, reverse) @@ -424,7 +393,7 @@ export class Chain { const canonicalBlock = await this.getBlock(block.header.number) if (!equalsBytes(canonicalBlock.hash(), block.hash())) { throw Error( - `Invalid putBlock for block=${block.header.number} before finalized=${this.headers.finalized.number}` + `Invalid putBlock for block=${block.header.number} before finalized=${this.headers.finalized.number}`, ) } } else { @@ -441,19 +410,17 @@ export class Chain { break } - const td = await this.blockchain.getParentTD(b.header) if (b.header.number <= this.headers.height) { await this.blockchain.checkAndTransitionHardForkByNumber( b.header.number, - td, - b.header.timestamp + b.header.timestamp, ) - await this.blockchain.consensus.setup({ blockchain: this.blockchain }) + await this.blockchain.consensus?.setup({ blockchain: this.blockchain }) } - const block = createBlockFromValuesArray(b.raw(), { + const block = createBlockFromBytesArray(b.raw(), { common: this.config.chainCommon, - setHardfork: td, + setHardfork: true, }) await this.blockchain.putBlock(block) @@ -476,7 +443,7 @@ export class Chain { block: Uint8Array | bigint, max: number, skip: number, - reverse: boolean + reverse: boolean, ): Promise { const blocks = await this.getBlocks(block, max, skip, reverse) return blocks.map((b) => b.header) @@ -501,9 +468,9 @@ export class Chain { } break } - const header = BlockHeader.fromValuesArray(h.raw(), { + const header = createBlockHeaderFromBytesArray(h.raw(), { common: this.config.chainCommon, - setHardfork: this.headers.td, + setHardfork: true, }) await this.blockchain.putHeader(header) numAdded++ diff --git a/packages/client/src/client.ts b/packages/client/src/client.ts index 09113ed96b..0bdc05a320 100644 --- a/packages/client/src/client.ts +++ b/packages/client/src/client.ts @@ -129,11 +129,11 @@ export class EthereumClient { const packageJson = JSON.parse( readFileSync( '/' + import.meta.url.split('client')[0].split('file:///')[1] + 'client/package.json', - 'utf-8' - ) + 'utf-8', + ), ) this.config.logger.info( - `Initializing Ethereumjs client version=v${packageJson.version} network=${name} chainId=${chainId}` + `Initializing Ethereumjs client version=v${packageJson.version} network=${name} chainId=${chainId}`, ) this.config.events.on(Event.SERVER_ERROR, (error) => { @@ -141,7 +141,7 @@ export class EthereumClient { }) this.config.events.on(Event.SERVER_LISTENING, (details) => { this.config.logger.info( - `Server listener up transport=${details.transport} url=${details.url}` + `Server listener up transport=${details.transport} url=${details.url}`, ) }) diff --git a/packages/client/src/config.ts b/packages/client/src/config.ts index 959b89f929..389e615e07 100644 --- a/packages/client/src/config.ts +++ b/packages/client/src/config.ts @@ -1,4 +1,4 @@ -import { Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { genPrivateKey } from '@ethereumjs/devp2p' import { type Address, BIGINT_0, BIGINT_1, BIGINT_2, BIGINT_256 } from '@ethereumjs/util' import { Level } from 'level' @@ -276,7 +276,7 @@ export interface ConfigOptions { /** * If there is a reorg, this is 
a safe distance from which - * to try to refetch and refeed the blocks. + * to try to refetch and re-feed the blocks. */ safeReorgDistance?: number @@ -352,7 +352,7 @@ export class Config { */ public readonly events: EventBusType - public static readonly CHAIN_DEFAULT = 'mainnet' + public static readonly CHAIN_DEFAULT = Mainnet public static readonly SYNCMODE_DEFAULT = SyncMode.Full public static readonly LIGHTSERV_DEFAULT = false public static readonly DATADIR_DEFAULT = `./datadir` @@ -385,10 +385,9 @@ export class Config { public static readonly SYNCED_STATE_REMOVAL_PERIOD = 60000 // engine new payload calls can come in batch of 64, keeping 128 as the lookup factor - public static readonly ENGINE_PARENTLOOKUP_MAX_DEPTH = 128 + public static readonly ENGINE_PARENT_LOOKUP_MAX_DEPTH = 128 public static readonly ENGINE_NEWPAYLOAD_MAX_EXECUTE = 2 - // currently ethereumjs can execute 200 txs in 12 second window so keeping 1/2 target for blocking response - public static readonly ENGINE_NEWPAYLOAD_MAX_TXS_EXECUTE = 100 + public static readonly ENGINE_NEWPAYLOAD_MAX_TXS_EXECUTE = 200 public static readonly SNAP_AVAILABILITY_DEPTH = BigInt(128) // distance from head at which we can safely transition from a synced snapstate to vmexecution // randomly kept it at 5 for fast testing purposes but ideally should be >=32 slots @@ -453,7 +452,7 @@ export class Config { public readonly ignoreStatelessInvalidExecs: boolean public synchronized: boolean - public lastsyncronized?: boolean + public lastSynchronized?: boolean /** lastSyncDate in ms */ public lastSyncDate: number /** Best known block height */ @@ -526,7 +525,7 @@ export class Config { this.syncedStateRemovalPeriod = options.syncedStateRemovalPeriod ?? Config.SYNCED_STATE_REMOVAL_PERIOD this.engineParentLookupMaxDepth = - options.engineParentLookupMaxDepth ?? Config.ENGINE_PARENTLOOKUP_MAX_DEPTH + options.engineParentLookupMaxDepth ?? Config.ENGINE_PARENT_LOOKUP_MAX_DEPTH this.engineNewpayloadMaxExecute = options.engineNewpayloadMaxExecute ?? Config.ENGINE_NEWPAYLOAD_MAX_EXECUTE this.engineNewpayloadMaxTxsExecute = @@ -605,7 +604,7 @@ export class Config { this.synchronized = true // Log to console the sync status this.superMsg( - `Synchronized blockchain at height=${height} hash=${short(latest.hash())} 🎉` + `Synchronized blockchain at height=${height} hash=${short(latest.hash())} 🎉`, ) } @@ -620,21 +619,21 @@ export class Config { if (diff >= this.syncedStateRemovalPeriod) { this.synchronized = false this.logger.info( - `Sync status reset (no chain updates for ${Math.round(diff / 1000)} seconds).` + `Sync status reset (no chain updates for ${Math.round(diff / 1000)} seconds).`, ) } } } - if (this.synchronized !== this.lastsyncronized) { + if (this.synchronized !== this.lastSynchronized) { this.logger.debug( `Client synchronized=${this.synchronized}${ latest !== null && latest !== undefined ? 
' height=' + latest.number : '' } syncTargetHeight=${this.syncTargetHeight} lastSyncDate=${ (Date.now() - this.lastSyncDate) / 1000 - } secs ago` + } secs ago`, ) - this.lastsyncronized = this.synchronized + this.lastSynchronized = this.synchronized } } diff --git a/packages/client/src/execution/level.ts b/packages/client/src/execution/level.ts index 3f80ac704b..dbebf46699 100644 --- a/packages/client/src/execution/level.ts +++ b/packages/client/src/execution/level.ts @@ -41,7 +41,7 @@ const getEncodings = (opts: EncodingOpts = {}) => { */ export class LevelDB< TKey extends Uint8Array | string = Uint8Array | string, - TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject + TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject, > implements DB { _leveldb: AbstractLevel @@ -52,7 +52,7 @@ export class LevelDB< * @param leveldb - An abstract-leveldown compliant store */ constructor( - leveldb?: AbstractLevel + leveldb?: AbstractLevel, ) { this._leveldb = leveldb ?? new MemoryLevel() } diff --git a/packages/client/src/execution/receipt.ts b/packages/client/src/execution/receipt.ts index 2c1f4c0904..b560b60c0f 100644 --- a/packages/client/src/execution/receipt.ts +++ b/packages/client/src/execution/receipt.ts @@ -38,7 +38,7 @@ type GetReceiptByTxHashReturn = [ receipt: TxReceipt, blockHash: Uint8Array, txIndex: number, - logIndex: number + logIndex: number, ] type GetLogsReturn = { log: Log @@ -121,17 +121,17 @@ export class ReceiptsManager extends MetaDBManager { async getReceipts( blockHash: Uint8Array, calcBloom?: boolean, - includeTxType?: true + includeTxType?: true, ): Promise async getReceipts( blockHash: Uint8Array, calcBloom?: boolean, - includeTxType?: false + includeTxType?: false, ): Promise async getReceipts( blockHash: Uint8Array, calcBloom = false, - includeTxType = false + includeTxType = false, ): Promise { const encoded = await this.get(DBKey.Receipts, blockHash) if (!encoded) return [] @@ -176,7 +176,7 @@ export class ReceiptsManager extends MetaDBManager { from: Block, to: Block, addresses?: Uint8Array[], - topics: (Uint8Array | Uint8Array[] | null)[] = [] + topics: (Uint8Array | Uint8Array[] | null)[] = [], ): Promise { const returnedLogs: GetLogsReturn = [] let returnedLogsSize = 0 @@ -194,7 +194,7 @@ export class ReceiptsManager extends MetaDBManager { tx: block.transactions[receiptIndex], txIndex: receiptIndex, logIndex: logIndex++, - })) + })), ) } if (addresses && addresses.length > 0) { @@ -245,7 +245,7 @@ export class ReceiptsManager extends MetaDBManager { private async updateIndex( operation: IndexOperation, type: IndexType.TxHash, - value: Block + value: Block, ): Promise private async updateIndex(operation: IndexOperation, type: IndexType, value: any): Promise { switch (type) { @@ -309,14 +309,14 @@ export class ReceiptsManager extends MetaDBManager { private rlp( conversion: RlpConvert.Decode, type: RlpType.Receipts, - values: Uint8Array + values: Uint8Array, ): TxReceipt[] private rlp(conversion: RlpConvert.Decode, type: RlpType.Logs, value: rlpLog[]): Log[] private rlp(conversion: RlpConvert.Decode, type: RlpType.TxHash, value: Uint8Array): TxHashIndex private rlp( conversion: RlpConvert, type: RlpType, - value: Uint8Array | rlpOut + value: Uint8Array | rlpOut, ): Uint8Array | rlpOut { switch (type) { case RlpType.Receipts: @@ -328,7 +328,7 @@ export class ReceiptsManager extends MetaDBManager { intToBytes((r as PostByzantiumTxReceipt).status), bigIntToBytes(r.cumulativeBlockGasUsed), this.rlp(RlpConvert.Encode, 
RlpType.Logs, r.logs), - ]) + ]), ) } else { const decoded = RLP.decode(value as Uint8Array) as unknown as rlpReceipt[] diff --git a/packages/client/src/execution/vmexecution.ts b/packages/client/src/execution/vmexecution.ts index 40826b970e..45d28962e1 100644 --- a/packages/client/src/execution/vmexecution.ts +++ b/packages/client/src/execution/vmexecution.ts @@ -5,14 +5,15 @@ import { DBSetTD, } from '@ethereumjs/blockchain' import { ConsensusType, Hardfork } from '@ethereumjs/common' -import { MCLBLS } from '@ethereumjs/evm' +import { MCLBLS, RustBN254 } from '@ethereumjs/evm' import { getGenesis } from '@ethereumjs/genesis' import { CacheType, + Caches, DefaultStateManager, StatelessVerkleStateManager, } from '@ethereumjs/statemanager' -import { Trie } from '@ethereumjs/trie' +import { createTrie } from '@ethereumjs/trie' import { BIGINT_0, BIGINT_1, @@ -22,9 +23,10 @@ import { equalsBytes, hexToBytes, } from '@ethereumjs/util' -import { VM } from '@ethereumjs/vm' +import { VM, runBlock, runTx } from '@ethereumjs/vm' import { writeFileSync } from 'fs' import * as mcl from 'mcl-wasm' +import { initRustBN } from 'rustbn-wasm' import { loadVerkleCrypto } from 'verkle-cryptography-wasm' import { Event } from '../types.js' @@ -128,7 +130,7 @@ export class VMExecution extends Execution { if (resolve !== undefined) { resolve() } - } + }, ) } if (this.config.savePreimages) { @@ -145,7 +147,7 @@ export class VMExecution extends Execution { if (this.merkleVM !== undefined) { return } - const trie = await Trie.create({ + const trie = await createTrie({ db: new LevelDB(this.stateDB), useKeyHashing: true, common: this.config.chainCommon, @@ -162,31 +164,32 @@ export class VMExecution extends Execution { const stateManager = new DefaultStateManager({ trie, prefixStorageTrieKeys: this.config.prefixStorageTrieKeys, - accountCacheOpts: { - deactivate: false, - type: CacheType.LRU, - size: this.config.accountCache, - }, - storageCacheOpts: { - deactivate: false, - type: CacheType.LRU, - size: this.config.storageCache, - }, - codeCacheOpts: { - deactivate: false, - type: CacheType.LRU, - size: this.config.codeCache, - }, + caches: new Caches({ + account: { + type: CacheType.LRU, + size: this.config.accountCache, + }, + storage: { + type: CacheType.LRU, + size: this.config.storageCache, + }, + code: { + type: CacheType.LRU, + size: this.config.codeCache, + }, + }), common: this.config.chainCommon, }) await mcl.init(mcl.BLS12_381) + const rustBN = await initRustBN() this.merkleVM = await VM.create({ common: this.config.execCommon, blockchain: this.chain.blockchain, stateManager, evmOpts: { bls: new MCLBLS(mcl), + bn254: new RustBN254(rustBN), }, profilerOpts: this.config.vmProfilerOpts, }) @@ -203,12 +206,14 @@ export class VMExecution extends Execution { verkleCrypto, }) await mcl.init(mcl.BLS12_381) + const rustBN = await initRustBN() this.verkleVM = await VM.create({ common: this.config.execCommon, blockchain: this.chain.blockchain, stateManager, evmOpts: { bls: new MCLBLS(mcl), + bn254: new RustBN254(rustBN), }, profilerOpts: this.config.vmProfilerOpts, }) @@ -234,7 +239,7 @@ export class VMExecution extends Execution { const verkleStateRoot = await verkleStateManager.getTransitionStateRoot( merkleStateManager, - merkleStateRoot + merkleStateRoot, ) await verkleStateManager.setStateRoot(verkleStateRoot) @@ -270,8 +275,7 @@ export class VMExecution extends Execution { if (typeof blockchain.getTotalDifficulty !== 'function') { throw new Error('cannot get iterator head: blockchain has no 
getTotalDifficulty function') } - const td = await blockchain.getTotalDifficulty(headBlock.header.hash()) - this.config.execCommon.setHardforkBy({ blockNumber: number, td, timestamp }) + this.config.execCommon.setHardforkBy({ blockNumber: number, timestamp }) this.hardfork = this.config.execCommon.hardfork() if (this.config.execCommon.gteHardfork(Hardfork.Osaka)) { @@ -283,7 +287,7 @@ export class VMExecution extends Execution { this.vm = this.verkleVM! } else { this.config.logger.info( - `Initializing VM merkle statemanager genesis hardfork=${this.hardfork}` + `Initializing VM merkle statemanager genesis hardfork=${this.hardfork}`, ) await this.setupMerkleVM() this.vm = this.merkleVM! @@ -294,11 +298,11 @@ export class VMExecution extends Execution { this.chain['_customGenesisState'] ?? getGenesis(Number(blockchain.common.chainId())) if ( !genesisState && - (this.vm instanceof DefaultStateManager || !this.config.statelessVerkle) + (!('generateCanonicalGenesis' in this.vm.stateManager) || !this.config.statelessVerkle) ) { throw new Error('genesisState not available') } else { - await this.vm.stateManager.generateCanonicalGenesis(genesisState) + await this.vm.stateManager.generateCanonicalGenesis!(genesisState) } } @@ -330,13 +334,8 @@ export class VMExecution extends Execution { // there could to be checks here that the resetted head is a parent of the chainStatus // but we can skip it for now trusting the chain reset has been correctly performed - const td = - headBlock.header.number === BIGINT_0 - ? headBlock.header.difficulty - : await this.chain.blockchain.getTotalDifficulty(headBlock.header.parentHash) this.hardfork = this.config.execCommon.setHardforkBy({ blockNumber: number, - td, timestamp, }) if (this.config.execCommon.gteHardfork(Hardfork.Osaka)) { @@ -367,7 +366,7 @@ export class VMExecution extends Execution { opts: RunBlockOpts & { parentBlock?: Block }, receipts?: TxReceipt[], blocking: boolean = false, - skipBlockchain: boolean = false + skipBlockchain: boolean = false, ): Promise { // if its not blocking request then return early if its already running else wait to grab the lock if ((!blocking && this.running) || !this.started || this.config.shutdown) return false @@ -389,21 +388,8 @@ export class VMExecution extends Execution { const parentState = root ?? prevVMStateRoot const clearCache = !equalsBytes(prevVMStateRoot, parentState) - // merge TTD might not give correct td, but its sufficient for purposes of determining HF and allows - // stateless execution where blockchain mightnot have all the blocks filling upto the block - let td - if (block.common.gteHardfork(Hardfork.Paris)) { - td = this.config.chainCommon.hardforkTTD(Hardfork.Paris) - if (td === null) { - throw Error(`Invalid null paris TTD for the chain`) - } - } else { - td = await this.chain.getTd(block.header.parentHash, block.header.number - BIGINT_1) - } - const hardfork = this.config.execCommon.getHardforkBy({ blockNumber: block.header.number, - td, timestamp: block.header.timestamp, }) @@ -415,10 +401,8 @@ export class VMExecution extends Execution { // see if this is a transition block const parentBlock = opts?.parentBlock ?? 
(await this.chain.getBlock(block.header.parentHash)) - const parentTd = td - parentBlock.header.difficulty const parentHf = this.config.execCommon.getHardforkBy({ blockNumber: parentBlock.header.number, - td: parentTd, timestamp: parentBlock.header.timestamp, }) @@ -451,7 +435,7 @@ export class VMExecution extends Execution { } const reportPreimages = this.config.savePreimages - const result = await vm.runBlock({ + const result = await runBlock(vm, { clearCache, ...opts, parentStateRoot: prevVMStateRoot, @@ -506,7 +490,7 @@ export class VMExecution extends Execution { */ async setHead( blocks: Block[], - { finalizedBlock, safeBlock }: { finalizedBlock?: Block; safeBlock?: Block } = {} + { finalizedBlock, safeBlock }: { finalizedBlock?: Block; safeBlock?: Block } = {}, ): Promise { if (!this.started || this.config.shutdown) return false @@ -522,8 +506,8 @@ export class VMExecution extends Execution { // execution run will always fail throw Error( `vmHeadBlock's stateRoot not found number=${vmHeadBlock.header.number} root=${short( - vmHeadBlock.header.stateRoot - )}` + vmHeadBlock.header.stateRoot, + )}`, ) } @@ -559,13 +543,8 @@ export class VMExecution extends Execution { status: ExecStatus.VALID, } - const td = await this.chain.getTd( - vmHeadBlock.header.parentHash, - vmHeadBlock.header.number - BIGINT_1 - ) const hardfork = this.config.execCommon.setHardforkBy({ blockNumber: vmHeadBlock.header.number, - td, timestamp: vmHeadBlock.header.timestamp, }) if ( @@ -594,7 +573,7 @@ export class VMExecution extends Execution { return this.runWithLock(async () => { // check if the block is canonical in chain this.config.logger.warn( - `Setting execution head to hash=${short(jumpToHash)} number=${jumpToNumber}` + `Setting execution head to hash=${short(jumpToHash)} number=${jumpToNumber}`, ) await this.vm.blockchain.setIteratorHead('vm', jumpToHash) }) @@ -605,7 +584,7 @@ export class VMExecution extends Execution { * @param loop Whether to continue iterating until vm head equals chain head (default: true) * @returns number of blocks executed */ - async run(loop = true, runOnlybatched = false): Promise { + async run(loop = true, runOnlyBatched = false): Promise { if (this.running || !this.started || !this.config.execution || this.config.shutdown) return 0 return this.runWithLock(async () => { @@ -624,13 +603,13 @@ export class VMExecution extends Execution { if (typeof blockchain.getCanonicalHeadBlock !== 'function') { throw new Error( - 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function' + 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function', ) } let canonicalHead = await blockchain.getCanonicalHeadBlock() this.config.logger.debug( - `Running execution startHeadBlock=${startHeadBlock?.header.number} canonicalHead=${canonicalHead?.header.number} loop=${loop}` + `Running execution startHeadBlock=${startHeadBlock?.header.number} canonicalHead=${canonicalHead?.header.number} loop=${loop}`, ) let headBlock: Block | undefined @@ -645,8 +624,8 @@ export class VMExecution extends Execution { while ( this.started && !this.config.shutdown && - (!runOnlybatched || - (runOnlybatched && + (!runOnlyBatched || + (runOnlyBatched && canonicalHead.header.number - startHeadBlock.header.number >= BigInt(this.config.numBlocksPerIteration))) && (numExecuted === undefined || @@ -670,7 +649,7 @@ export class VMExecution extends Execution { if (reorg) { clearCache = true this.config.logger.info( - `VM run: Chain reorged, setting new head to block 
number=${headBlock.header.number} clearCache=${clearCache}.` + `VM run: Chain reorged, setting new head to block number=${headBlock.header.number} clearCache=${clearCache}.`, ) } else { const prevVMStateRoot = await this.vm.stateManager.getStateRoot() @@ -686,25 +665,22 @@ export class VMExecution extends Execution { const { number, timestamp } = block.header if (typeof blockchain.getTotalDifficulty !== 'function') { throw new Error( - 'cannot get iterator head: blockchain has no getTotalDifficulty function' + 'cannot get iterator head: blockchain has no getTotalDifficulty function', ) } - const td = await blockchain.getTotalDifficulty(block.header.parentHash) const hardfork = this.config.execCommon.getHardforkBy({ blockNumber: number, - td, timestamp, }) if (hardfork !== this.hardfork) { const wasPrePrague = !this.config.execCommon.gteHardfork(Hardfork.Osaka) const hash = short(block.hash()) this.config.superMsg( - `Execution hardfork switch on block number=${number} hash=${hash} old=${this.hardfork} new=${hardfork}` + `Execution hardfork switch on block number=${number} hash=${hash} old=${this.hardfork} new=${hardfork}`, ) this.hardfork = this.config.execCommon.setHardforkBy({ blockNumber: number, - td, timestamp, }) const isPostOsaka = this.config.execCommon.gteHardfork(Hardfork.Osaka) @@ -722,7 +698,7 @@ export class VMExecution extends Execution { throw Error( `Invalid vm stateManager type=${typeof this.vm.stateManager} for fork=${ this.hardfork - }` + }`, ) } @@ -742,7 +718,7 @@ export class VMExecution extends Execution { this._statsVM = this.vm const beforeTS = Date.now() - const result = await this.vm.runBlock({ + const result = await runBlock(this.vm, { block, root: parentState, clearCache, @@ -774,7 +750,7 @@ export class VMExecution extends Execution { parentState = block.header.stateRoot } catch (error: any) { // only marked the block as invalid if it was an actual execution error - // for e.g. absense of executionWitness doesn't make a block invalid + // for e.g. absence of executionWitness doesn't make a block invalid if (!`${error.message}`.includes('Invalid executionWitness=null')) { errorBlock = block } @@ -783,7 +759,7 @@ export class VMExecution extends Execution { }, this.config.numBlocksPerIteration, // release lock on this callback so other blockchain ops can happen while this block is being executed - true + true, ) // Ensure to catch and not throw as this would lead to unCaughtException with process exit .catch(async (error) => { @@ -810,7 +786,7 @@ export class VMExecution extends Execution { hasParentStateRoot = false if (headBlock !== undefined) { hasParentStateRoot = await this.vm.stateManager.hasStateRoot( - headBlock.header.stateRoot + headBlock.header.stateRoot, ) backStepTo = headBlock.header.number ?? BIGINT_0 - BIGINT_1 backStepToHash = headBlock.header.parentHash @@ -820,17 +796,17 @@ export class VMExecution extends Execution { if (hasParentStateRoot === true && backStepToHash !== undefined) { this.config.logger.warn( `${errorMsg}, backStepping vmHead to number=${backStepTo} hash=${short( - backStepToHash ?? 'na' - )} hasParentStateRoot=${short(backStepToRoot ?? 'na')}:\n${error}` + backStepToHash ?? 'na', + )} hasParentStateRoot=${short(backStepToRoot ?? 'na')}:\n${error}`, ) await this.vm.blockchain.setIteratorHead('vm', backStepToHash) } else { this.config.logger.error( `${errorMsg}, couldn't back step to vmHead number=${backStepTo} hash=${short( - backStepToHash ?? 'na' + backStepToHash ?? 
'na', )} hasParentStateRoot=${hasParentStateRoot} backStepToRoot=${short( - backStepToRoot ?? 'na' - )}:\n${error}` + backStepToRoot ?? 'na', + )}:\n${error}`, ) } } else { @@ -867,7 +843,7 @@ export class VMExecution extends Execution { } this.config.events.emit(Event.SYNC_EXECUTION_VM_ERROR, error) const actualExecuted = Number( - errorBlock.header.number - startHeadBlock.header.number + errorBlock.header.number - startHeadBlock.header.number, ) return actualExecuted } else { @@ -883,7 +859,7 @@ export class VMExecution extends Execution { endHeadBlock = await this.vm.blockchain.getIteratorHead('vm') } else { throw new Error( - 'cannot get iterator head: blockchain has no getIteratorHead function' + 'cannot get iterator head: blockchain has no getIteratorHead function', ) } @@ -902,7 +878,7 @@ export class VMExecution extends Execution { ;(this.config.execCommon.gteHardfork(Hardfork.Paris) ? this.config.logger.debug : this.config.logger.info)( - `Executed blocks count=${numExecuted} first=${firstNumber} hash=${firstHash} ${tdAdd}${baseFeeAdd}hardfork=${this.hardfork} last=${lastNumber} hash=${lastHash} txs=${txCounter}` + `Executed blocks count=${numExecuted} first=${firstNumber} hash=${firstHash} ${tdAdd}${baseFeeAdd}hardfork=${this.hardfork} last=${lastNumber} hash=${lastHash} txs=${txCounter}`, ) await this.chain.update(false) @@ -910,13 +886,13 @@ export class VMExecution extends Execution { this.config.logger.debug( `No blocks executed past chain head hash=${short(endHeadBlock.hash())} number=${ endHeadBlock.header.number - }` + }`, ) } startHeadBlock = endHeadBlock if (typeof this.vm.blockchain.getCanonicalHeadBlock !== 'function') { throw new Error( - 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function' + 'cannot get iterator head: blockchain has no getCanonicalHeadBlock function', ) } canonicalHead = await this.vm.blockchain.getCanonicalHeadBlock() @@ -937,7 +913,7 @@ export class VMExecution extends Execution { this._statsInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this.stats.bind(this), - this.STATS_INTERVAL + this.STATS_INTERVAL, ) const { blockchain } = this.vm @@ -1008,12 +984,8 @@ export class VMExecution extends Execution { this.config.logger.info('Preparing for block execution (debug mode, no services started)...') const block = await this.vm.blockchain.getBlock(first) - const parentBlock = await this.vm.blockchain.getBlock(block.header.parentHash) - const startExecutionParentTd = await this.chain.getTd(block.hash(), parentBlock.header.number) - const startExecutionHardfork = this.config.execCommon.getHardforkBy({ blockNumber: block.header.number, - td: startExecutionParentTd, timestamp: block.header.timestamp, }) @@ -1034,10 +1006,8 @@ export class VMExecution extends Execution { if (typeof vm.blockchain.getTotalDifficulty !== 'function') { throw new Error('cannot get iterator head: blockchain has no getTotalDifficulty function') } - const td = await vm.blockchain.getTotalDifficulty(block.header.parentHash) vm.common.setHardforkBy({ blockNumber, - td, timestamp: block.header.timestamp, }) @@ -1047,7 +1017,7 @@ export class VMExecution extends Execution { // we are skipping header validation because the block has been picked from the // blockchain and header should have already been validated while putBlock const beforeTS = Date.now() - const res = await vm.runBlock({ + const res = await runBlock(vm, { block, root, clearCache: false, @@ -1071,9 +1041,9 @@ export class VMExecution extends Execution { 
for (const tx of block.transactions) { const txHash = bytesToHex(tx.hash()) if (allTxs || txHashes.includes(txHash)) { - const res = await vm.runTx({ block, tx }) + const res = await runTx(vm, { block, tx }) this.config.logger.info( - `Executed tx hash=${txHash} gasUsed=${res.totalGasSpent} from block num=${blockNumber}` + `Executed tx hash=${txHash} gasUsed=${res.totalGasSpent} from block num=${blockNumber}`, ) count += 1 } @@ -1091,28 +1061,25 @@ export class VMExecution extends Execution { stats() { if (this._statsVM instanceof DefaultStateManager) { - const sm = this._statsVM.stateManager as any - const disactivatedStats = { size: 0, reads: 0, hits: 0, writes: 0 } + const sm = this._statsVM.stateManager as DefaultStateManager + const deactivatedStats = { size: 0, reads: 0, hits: 0, writes: 0 } let stats - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - stats = !sm._accountCacheSettings.deactivate ? sm._accountCache.stats() : disactivatedStats + stats = sm['_caches']?.account?.stats() ?? deactivatedStats this.config.logger.info( - `Account cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}` + `Account cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - stats = !sm._storageCacheSettings.deactivate ? sm._storageCache.stats() : disactivatedStats + stats = sm['_caches']?.storage?.stats() ?? deactivatedStats this.config.logger.info( - `Storage cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}` + `Storage cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - stats = !sm._codeCacheSettings.deactivate ? sm._codeCache.stats() : disactivatedStats + stats = sm['_caches']?.code?.stats() ?? 
deactivatedStats this.config.logger.info( - `Code cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}` + `Code cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) - const tStats = (sm._trie as Trie).database().stats() + const tStats = sm['_trie'].database().stats() this.config.logger.info( `Trie cache stats size=${tStats.size} reads=${tStats.cache.reads} hits=${tStats.cache.hits} ` + - `writes=${tStats.cache.writes} readsDB=${tStats.db.reads} hitsDB=${tStats.db.hits} writesDB=${tStats.db.writes}` + `writes=${tStats.cache.writes} readsDB=${tStats.db.reads} hitsDB=${tStats.db.hits} writesDB=${tStats.db.writes}`, ) } } diff --git a/packages/client/src/ext/jwt-simple.ts b/packages/client/src/ext/jwt-simple.ts index 9357ef52c3..42972be434 100644 --- a/packages/client/src/ext/jwt-simple.ts +++ b/packages/client/src/ext/jwt-simple.ts @@ -7,7 +7,7 @@ * module dependencies */ import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' -import { base64url } from '@scure/base' +import { base64url, base64urlnopad } from '@scure/base' // cspell:disable-line import crypto from 'crypto' /** @@ -102,7 +102,7 @@ const decode = function jwt_decode( token: string, key: string, noVerify: boolean = false, - algorithm: string = '' + algorithm: string = '', ) { // check token if (!token) { @@ -121,7 +121,7 @@ const decode = function jwt_decode( // base64 decode and parse JSON const header = JSON.parse(bytesToUtf8(base64url.decode(headerSeg))) - const payload = JSON.parse(bytesToUtf8(base64url.decode(payloadSeg))) + const payload = JSON.parse(bytesToUtf8(base64urlnopad.decode(payloadSeg))) if (!noVerify) { if (!algorithm && /BEGIN( RSA)? PUBLIC KEY/.test(key.toString())) { @@ -168,7 +168,7 @@ const encode = function jwt_encode( payload: any, key: string, algorithm: string = '', - options: any = undefined + options: any = undefined, ) { // Check key if (!key) { @@ -193,7 +193,7 @@ const encode = function jwt_encode( // create segments, all segments should be base64 string const segments = [] segments.push(base64url.encode(utf8ToBytes(JSON.stringify(header)))) - segments.push(base64url.encode(utf8ToBytes(JSON.stringify(payload)))) + segments.push(base64urlnopad.encode(utf8ToBytes(JSON.stringify(payload)))) segments.push(sign(segments.join('.'), key, signingMethod, signingType)) return segments.join('.') diff --git a/packages/client/src/ext/qheap.ts b/packages/client/src/ext/qheap.ts index 346245b232..dc83e56f23 100644 --- a/packages/client/src/ext/qheap.ts +++ b/packages/client/src/ext/qheap.ts @@ -63,10 +63,10 @@ export class Heap { // @ts-ignore return opts!.compar!(a, b) < 0 } - : opts.comparBefore ?? + : (opts.comparBefore ?? function (a: any, b: any): boolean { return a < b - } + }) this._sortBefore = opts.compar ?? 
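// A minimal sketch of the JWT segment handling changed in jwt-simple.ts above, where the
// payload segment moves from padded base64url to the no-padding variant. It assumes the
// `base64url` / `base64urlnopad` exports of '@scure/base' and the byte helpers from
// '@ethereumjs/util' used in the diff; the header/payload values are hypothetical.
import { base64url, base64urlnopad } from '@scure/base' // cspell:disable-line
import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util'

const header = { typ: 'JWT', alg: 'HS256' }
const payload = { iat: Math.floor(Date.now() / 1000) }

// Header segment keeps the padded coder, payload segment uses the unpadded one,
// mirroring encode()/decode() above.
const headerSeg = base64url.encode(utf8ToBytes(JSON.stringify(header)))
const payloadSeg = base64urlnopad.encode(utf8ToBytes(JSON.stringify(payload)))

// Round-trip: decoding the payload with the same no-padding coder recovers the claims.
const claims = JSON.parse(bytesToUtf8(base64urlnopad.decode(payloadSeg)))
console.log(headerSeg, claims)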
diff --git a/packages/client/src/logging.ts b/packages/client/src/logging.ts index defa127bc7..b62749ead9 100644 --- a/packages/client/src/logging.ts +++ b/packages/client/src/logging.ts @@ -86,7 +86,7 @@ function logFormat(colors = false) { const msg = `[${info.timestamp}] ${level} ${CLLog}${HFLog}${info.message}` return msg - } + }, ) } @@ -99,7 +99,7 @@ function formatConfig(colors = false) { format.splat(), label({ label: 'ethereumjs' }), timestamp({ format: 'MM-DD|HH:mm:ss' }), - logFormat(colors) + logFormat(colors), ) } diff --git a/packages/client/src/miner/miner.ts b/packages/client/src/miner/miner.ts index f4fe4d78c5..271c92b92d 100644 --- a/packages/client/src/miner/miner.ts +++ b/packages/client/src/miner/miner.ts @@ -1,7 +1,8 @@ -import { BlockHeader } from '@ethereumjs/block' +import { type BlockHeader, createSealedCliqueBlockHeader } from '@ethereumjs/block' import { ConsensusType, Hardfork } from '@ethereumjs/common' import { Ethash } from '@ethereumjs/ethash' import { BIGINT_0, BIGINT_1, BIGINT_2, bytesToHex, equalsBytes } from '@ethereumjs/util' +import { type TxReceipt, buildBlock } from '@ethereumjs/vm' import { MemoryLevel } from 'memory-level' import { LevelDB } from '../execution/level.js' @@ -14,7 +15,6 @@ import type { FullSynchronizer } from '../sync/index.js' import type { CliqueConsensus } from '@ethereumjs/blockchain' import type { CliqueConfig } from '@ethereumjs/common' import type { Miner as EthashMiner, Solution } from '@ethereumjs/ethash' -import type { TxReceipt } from '@ethereumjs/vm' export interface MinerOptions { /* Config */ @@ -90,7 +90,6 @@ export class Miner { // Check if the new block to be minted isn't PoS const nextBlockHf = this.config.chainCommon.getHardforkBy({ blockNumber: this.service.chain.headers.height + BIGINT_1, - td: this.service.chain.headers.td, }) if (this.config.chainCommon.hardforkGteHardfork(nextBlockHf, Hardfork.Paris)) { this.config.logger.info('Miner: reached merge hardfork - stopping') @@ -110,11 +109,11 @@ export class Miner { const number = parentBlock.header.number + BIGINT_1 const inTurn = await (blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( signerAddress, - number + number, ) if (inTurn === false) { const signerCount = (blockchain.consensus as CliqueConsensus).cliqueActiveSigners( - number + number, ).length timeout += Math.random() * signerCount * 500 } @@ -159,7 +158,7 @@ export class Miner { this.config.logger.debug( `Miner: Chain updated with block ${ latestBlockHeader.number - }. Queuing next block assembly in ${Math.round(timeout / 1000)}s` + }. Queuing next block assembly in ${Math.round(timeout / 1000)}s`, ) await this.queueNextAssembly(timeout) } @@ -209,10 +208,10 @@ export class Miner { if (this.config.chainCommon.consensusType() === ConsensusType.ProofOfAuthority) { // Abort if we have too recently signed const cliqueSigner = this.config.accounts[0][1] - const header = BlockHeader.fromHeaderData( - { number }, - { common: this.config.chainCommon, cliqueSigner } - ) + const header = createSealedCliqueBlockHeader({ number }, cliqueSigner, { + common: this.config.chainCommon, + freeze: false, + }) if ( (this.service.chain.blockchain as any).consensus.cliqueCheckRecentlySigned(header) === true ) { @@ -247,7 +246,7 @@ export class Miner { // Determine if signer is INTURN (2) or NOTURN (1) inTurn = await (vmCopy.blockchain.consensus as CliqueConsensus).cliqueSignerInTurn( signerAddress, - number + number, ) difficulty = inTurn ? 
2 : 1 } @@ -260,8 +259,7 @@ export class Miner { number === londonHardforkBlock ) { // Get baseFeePerGas from `paramByEIP` since 1559 not currently active on common - baseFeePerGas = - this.config.chainCommon.paramByEIP('gasConfig', 'initialBaseFee', 1559) ?? BIGINT_0 + baseFeePerGas = vmCopy.common.paramByEIP('initialBaseFee', 1559) ?? BIGINT_0 // Set initial EIP1559 block gas limit to 2x parent gas limit per logic in `block.validateGasLimit` gasLimit = gasLimit * BIGINT_2 } else if (this.config.chainCommon.isActivatedEIP(1559)) { @@ -275,7 +273,7 @@ export class Miner { coinbase = this.config.minerCoinbase ?? this.config.accounts[0][0] } - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { number, @@ -298,7 +296,7 @@ export class Miner { typeof baseFeePerGas === 'bigint' && baseFeePerGas !== BIGINT_0 ? `(baseFee: ${baseFeePerGas})` : '' - }` + }`, ) let index = 0 let blockFull = false @@ -320,14 +318,14 @@ export class Miner { // If block has less than 21000 gas remaining, consider it full blockFull = true this.config.logger.info( - `Miner: Assembled block full (gasLeft: ${gasLimit - blockBuilder.gasUsed})` + `Miner: Assembled block full (gasLeft: ${gasLimit - blockBuilder.gasUsed})`, ) } } else { // If there is an error adding a tx, it will be skipped const hash = bytesToHex(txs[index].hash()) this.config.logger.debug( - `Skipping tx ${hash}, error encountered when trying to add tx:\n${error}` + `Skipping tx ${hash}, error encountered when trying to add tx:\n${error}`, ) } } @@ -344,7 +342,7 @@ export class Miner { this.config.chainCommon.consensusType() === ConsensusType.ProofOfWork ? `(difficulty: ${block.header.difficulty})` : `(${inTurn === true ? 'in turn' : 'not in turn'})` - }` + }`, ) this.assembling = false if (interrupt) return diff --git a/packages/client/src/miner/pendingBlock.ts b/packages/client/src/miner/pendingBlock.ts index 6f548849d0..4f901aea20 100644 --- a/packages/client/src/miner/pendingBlock.ts +++ b/packages/client/src/miner/pendingBlock.ts @@ -1,19 +1,19 @@ import { Hardfork } from '@ethereumjs/common' -import { BlobEIP4844Transaction } from '@ethereumjs/tx' +import { Blob4844Tx } from '@ethereumjs/tx' import { - Address, BIGINT_1, BIGINT_2, TypeOutput, bigIntToUnpaddedBytes, bytesToHex, concatBytes, + createZeroAddress, equalsBytes, toBytes, toType, zeros, } from '@ethereumjs/util' -import { BuildStatus } from '@ethereumjs/vm' +import { BuildStatus, buildBlock } from '@ethereumjs/vm' import { keccak256 } from 'ethereum-cryptography/keccak' import type { Config } from '../config.js' @@ -98,7 +98,7 @@ export class PendingBlock { vm: VM, parentBlock: Block, headerData: Partial = {}, - withdrawals?: WithdrawalData[] + withdrawals?: WithdrawalData[], ) { const number = parentBlock.header.number + BIGINT_1 const { timestamp, mixHash, parentBeaconBlockRoot, coinbase } = headerData @@ -107,10 +107,8 @@ export class PendingBlock { if (typeof vm.blockchain.getTotalDifficulty !== 'function') { throw new Error('cannot get iterator head: blockchain has no getTotalDifficulty function') } - const td = await vm.blockchain.getTotalDifficulty(parentBlock.hash()) vm.common.setHardforkBy({ blockNumber: number, - td, timestamp, }) @@ -138,9 +136,9 @@ export class PendingBlock { for (const withdrawal of withdrawals) { const indexBuf = bigIntToUnpaddedBytes(toType(withdrawal.index ?? 0, TypeOutput.BigInt)) const validatorIndex = bigIntToUnpaddedBytes( - toType(withdrawal.validatorIndex ?? 
0, TypeOutput.BigInt) + toType(withdrawal.validatorIndex ?? 0, TypeOutput.BigInt), ) - const address = toType(withdrawal.address ?? Address.zero(), TypeOutput.Uint8Array) + const address = toType(withdrawal.address ?? createZeroAddress(), TypeOutput.Uint8Array) const amount = bigIntToUnpaddedBytes(toType(withdrawal.amount ?? 0, TypeOutput.BigInt)) withdrawalsBufTemp.push(concatBytes(indexBuf, validatorIndex, address, amount)) } @@ -158,9 +156,9 @@ export class PendingBlock { gasLimitBuf, parentBeaconBlockRootBuf, coinbaseBuf, - withdrawalsBuf - ) - ).subarray(0, 8) + withdrawalsBuf, + ), + ).subarray(0, 8), ) const payloadId = bytesToHex(payloadIdBytes) @@ -176,7 +174,7 @@ export class PendingBlock { // is based on the parent block's state await vm.stateManager.setStateRoot(parentBlock.header.stateRoot) - const builder = await vm.buildBlock({ + const builder = await buildBlock(vm, { parentBlock, // excessBlobGas will be correctly calculated and set in buildBlock constructor, // unless already explicity provided in headerData @@ -189,7 +187,7 @@ export class PendingBlock { withdrawals, blockOpts: { putBlockIntoBlockchain: false, - setHardfork: td, + setHardfork: true, }, }) @@ -198,8 +196,8 @@ export class PendingBlock { // Get if and how many blobs are allowed in the tx let allowedBlobs if (vm.common.isActivatedEIP(4844)) { - const blobGasLimit = vm.common.param('gasConfig', 'maxblobGasPerBlock') - const blobGasPerBlob = vm.common.param('gasConfig', 'blobGasPerBlob') + const blobGasLimit = vm.common.param('maxblobGasPerBlock') + const blobGasPerBlob = vm.common.param('blobGasPerBlob') allowedBlobs = Number(blobGasLimit / blobGasPerBlob) } else { allowedBlobs = 0 @@ -210,12 +208,12 @@ export class PendingBlock { allowedBlobs, }) this.config.logger.info( - `Pending: Assembling block from ${txs.length} eligible txs (baseFee: ${baseFeePerGas})` + `Pending: Assembling block from ${txs.length} eligible txs (baseFee: ${baseFeePerGas})`, ) const { addedTxs, skippedByAddErrors, blobTxs } = await this.addTransactions(builder, txs) this.config.logger.info( - `Pending: Added txs=${addedTxs} skippedByAddErrors=${skippedByAddErrors} from total=${txs.length} tx candidates` + `Pending: Added txs=${addedTxs} skippedByAddErrors=${skippedByAddErrors} from total=${txs.length} tx candidates`, ) // Construct initial blobs bundle when payload is constructed @@ -244,7 +242,7 @@ export class PendingBlock { * Returns the completed block */ async build( - payloadIdBytes: Uint8Array | string + payloadIdBytes: Uint8Array | string, ): Promise { const payloadId = typeof payloadIdBytes !== 'string' ? bytesToHex(payloadIdBytes) : payloadIdBytes @@ -267,8 +265,8 @@ export class PendingBlock { let allowedBlobs if (vm.common.isActivatedEIP(4844)) { const bundle = this.blobsBundles.get(payloadId) ?? 
{ blobs: [], commitments: [], proofs: [] } - const blobGasLimit = vm.common.param('gasConfig', 'maxblobGasPerBlock') - const blobGasPerBlob = vm.common.param('gasConfig', 'blobGasPerBlob') + const blobGasLimit = vm.common.param('maxblobGasPerBlock') + const blobGasPerBlob = vm.common.param('blobGasPerBlob') allowedBlobs = Number(blobGasLimit / blobGasPerBlob) - bundle.blobs.length } else { allowedBlobs = 0 @@ -283,8 +281,8 @@ export class PendingBlock { ).filter( (tx) => (builder as any).transactions.some((t: TypedTransaction) => - equalsBytes(t.hash(), tx.hash()) - ) === false + equalsBytes(t.hash(), tx.hash()), + ) === false, ) const { skippedByAddErrors, blobTxs } = await this.addTransactions(builder, txs) @@ -303,8 +301,8 @@ export class PendingBlock { `Pending: Built block number=${block.header.number} txs=${ block.transactions.length }${withdrawalsStr}${blobsStr} skippedByAddErrors=${skippedByAddErrors} hash=${bytesToHex( - block.hash() - )}` + block.hash(), + )}`, ) return [block, builder.transactionReceipts, builder.minerValue, blobs] @@ -324,7 +322,7 @@ export class PendingBlock { switch (addTxResult) { case AddTxResult.Success: // Push the tx in blobTxs only after successful addTransaction - if (tx instanceof BlobEIP4844Transaction) blobTxs.push(tx) + if (tx instanceof Blob4844Tx) blobTxs.push(tx) break case AddTxResult.BlockFull: @@ -365,15 +363,15 @@ export class PendingBlock { // Remove the blob tx which doesn't has blobs bundled this.txPool.removeByHash(bytesToHex(tx.hash()), tx) this.config.logger.error( - `Pending: Removed from txPool a blob tx ${bytesToHex(tx.hash())} with missing blobs` + `Pending: Removed from txPool a blob tx ${bytesToHex(tx.hash())} with missing blobs`, ) addTxResult = AddTxResult.RemovedByErrors } else { // If there is an error adding a tx, it will be skipped this.config.logger.debug( `Pending: Skipping tx ${bytesToHex( - tx.hash() - )}, error encountered when trying to add tx:\n${error}` + tx.hash(), + )}, error encountered when trying to add tx:\n${error}`, ) addTxResult = AddTxResult.SkippedByErrors } @@ -384,10 +382,10 @@ export class PendingBlock { /** * An internal helper for storing the blob bundle associated with each transaction in an EIP4844 world * @param payloadId the payload Id of the pending block - * @param txs an array of {@BlobEIP4844Transaction } transactions + * @param txs an array of {@Blob4844Tx } transactions * @param blockHash the blockhash of the pending block (computed from the header data provided) */ - private constructBlobsBundle = (payloadId: string, txs: BlobEIP4844Transaction[]) => { + private constructBlobsBundle = (payloadId: string, txs: Blob4844Tx[]) => { let blobs: Uint8Array[] = [] let commitments: Uint8Array[] = [] let proofs: Uint8Array[] = [] @@ -399,7 +397,7 @@ export class PendingBlock { } for (let tx of txs) { - tx = tx as BlobEIP4844Transaction + tx = tx as Blob4844Tx if (tx.blobs !== undefined && tx.blobs.length > 0) { blobs = blobs.concat(tx.blobs) commitments = commitments.concat(tx.kzgCommitments!) diff --git a/packages/client/src/net/peer/rlpxpeer.ts b/packages/client/src/net/peer/rlpxpeer.ts index 92277a2d3a..d2016f20f8 100644 --- a/packages/client/src/net/peer/rlpxpeer.ts +++ b/packages/client/src/net/peer/rlpxpeer.ts @@ -176,19 +176,19 @@ export class RlpxPeer extends Peer { const snapProtocol = snapRlpxProtocol !== undefined ? 
this.protocols.find( - (p) => p.name === snapRlpxProtocol?.constructor.name.toLowerCase() + (p) => p.name === snapRlpxProtocol?.constructor.name.toLowerCase(), ) : undefined if (snapProtocol !== undefined) { const snapSender = new RlpxSender( - snapRlpxProtocol as Devp2pETH | Devp2pLES | Devp2pSNAP + snapRlpxProtocol as Devp2pETH | Devp2pLES | Devp2pSNAP, ) return this.addProtocol(snapSender, snapProtocol) } } }) } - }) + }), ) this.connected = true } diff --git a/packages/client/src/net/peerpool.ts b/packages/client/src/net/peerpool.ts index f48a397405..c02daa800e 100644 --- a/packages/client/src/net/peerpool.ts +++ b/packages/client/src/net/peerpool.ts @@ -91,13 +91,13 @@ export class PeerPool { this._statusCheckInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this._statusCheck.bind(this), - this.DEFAULT_STATUS_CHECK_INTERVAL + this.DEFAULT_STATUS_CHECK_INTERVAL, ) this._peerBestHeaderUpdateInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this._peerBestHeaderUpdate.bind(this), - this.DEFAULT_PEER_BEST_HEADER_UPDATE_INTERVAL + this.DEFAULT_PEER_BEST_HEADER_UPDATE_INTERVAL, ) this.running = true diff --git a/packages/client/src/net/protocol/boundprotocol.ts b/packages/client/src/net/protocol/boundprotocol.ts index 5198d86dfa..299d18c5d2 100644 --- a/packages/client/src/net/protocol/boundprotocol.ts +++ b/packages/client/src/net/protocol/boundprotocol.ts @@ -84,7 +84,7 @@ export class BoundProtocol { } }) this.sender.on('error', (error: Error) => - this.config.events.emit(Event.PROTOCOL_ERROR, error, this.peer) + this.config.events.emit(Event.PROTOCOL_ERROR, error, this.peer), ) } @@ -137,7 +137,7 @@ export class BoundProtocol { Event.PROTOCOL_MESSAGE, { name: message.name, data }, this.protocol.name, - this.peer + this.peer, ) } } diff --git a/packages/client/src/net/protocol/ethprotocol.ts b/packages/client/src/net/protocol/ethprotocol.ts index 681091c691..be102adea8 100644 --- a/packages/client/src/net/protocol/ethprotocol.ts +++ b/packages/client/src/net/protocol/ethprotocol.ts @@ -1,18 +1,14 @@ -import { - BlockHeader, - createBlockFromValuesArray, - getDifficulty, - valuesArrayToHeaderData, -} from '@ethereumjs/block' -import { Hardfork } from '@ethereumjs/common' +import { createBlockFromBytesArray, createBlockHeaderFromBytesArray } from '@ethereumjs/block' import { RLP } from '@ethereumjs/rlp' import { - BlobEIP4844Transaction, - TransactionFactory, - isAccessListEIP2930Tx, - isBlobEIP4844Tx, - isEOACodeEIP7702Tx, - isFeeMarketEIP1559Tx, + Blob4844Tx, + createBlob4844TxFromSerializedNetworkWrapper, + createTxFromBlockBodyData, + createTxFromSerializedData, + isAccessList2930Tx, + isBlob4844Tx, + isEOACode7702Tx, + isFeeMarket1559Tx, isLegacyTx, } from '@ethereumjs/tx' import { @@ -32,10 +28,16 @@ import { Protocol } from './protocol.js' import type { Chain } from '../../blockchain/index.js' import type { TxReceiptWithType } from '../../execution/receipt.js' import type { Message, ProtocolOptions } from './protocol.js' -import type { Block, BlockBodyBytes, BlockBytes, BlockHeaderBytes } from '@ethereumjs/block' +import type { + Block, + BlockBodyBytes, + BlockBytes, + BlockHeader, + BlockHeaderBytes, +} from '@ethereumjs/block' import type { Log } from '@ethereumjs/evm' import type { TypedTransaction } from '@ethereumjs/tx' -import type { BigIntLike, PrefixedHexString } from '@ethereumjs/util' +import type { PrefixedHexString } from '@ethereumjs/util' import type { PostByzantiumTxReceipt, 
PreByzantiumTxReceipt, TxReceipt } from '@ethereumjs/vm' interface EthProtocolOptions extends ProtocolOptions { @@ -99,7 +101,6 @@ function exhaustiveTypeGuard(_value: never, errorMsg: string): never { export class EthProtocol extends Protocol { private chain: Chain private nextReqId = BIGINT_0 - private chainTTD?: BigIntLike /* eslint-disable no-invalid-this */ private protocolMessages: Message[] = [ @@ -116,7 +117,7 @@ export class EthProtocol extends Protocol { const serializedTxs = [] for (const tx of txs) { // Don't automatically broadcast blob transactions - they should only be announced using NewPooledTransactionHashes - if (tx instanceof BlobEIP4844Transaction) continue + if (tx instanceof Blob4844Tx) continue serializedTxs.push(tx.serialize()) } return serializedTxs @@ -132,7 +133,7 @@ export class EthProtocol extends Protocol { BIGINT_0, // Use chainstart, timestamp: this.chain.headers.latest?.timestamp ?? Math.floor(Date.now() / 1000), }) - return txs.map((txData) => TransactionFactory.fromSerializedData(txData, { common })) + return txs.map((txData) => createTxFromSerializedData(txData, { common })) }, }, { @@ -166,15 +167,8 @@ export class EthProtocol extends Protocol { decode: ([reqId, headers]: [Uint8Array, BlockHeaderBytes[]]) => [ bytesToBigInt(reqId), headers.map((h) => { - const headerData = valuesArrayToHeaderData(h) - const difficulty = getDifficulty(headerData)! const common = this.config.chainCommon - // If this is a post merge block, we can still send chainTTD since it would still lead - // to correct hardfork choice - const header = BlockHeader.fromValuesArray( - h, - difficulty > 0 ? { common, setHardfork: true } : { common, setHardfork: this.chainTTD } - ) + const header = createBlockHeaderFromBytesArray(h, { common, setHardfork: true }) return header }), ], @@ -206,7 +200,7 @@ export class EthProtocol extends Protocol { code: 0x07, encode: ([block, td]: [Block, bigint]) => [block.raw(), bigIntToUnpaddedBytes(td)], decode: ([block, td]: [BlockBytes, Uint8Array]) => [ - createBlockFromValuesArray(block, { + createBlockFromBytesArray(block, { common: this.config.chainCommon, setHardfork: true, }), @@ -229,7 +223,7 @@ export class EthProtocol extends Protocol { ] }, decode: ( - params: Uint8Array[] | [types: PrefixedHexString, sizes: number[], hashes: Uint8Array[]] + params: Uint8Array[] | [types: PrefixedHexString, sizes: number[], hashes: Uint8Array[]], ) => { if (isNestedUint8Array(params) === true) { return params @@ -259,13 +253,9 @@ export class EthProtocol extends Protocol { const serializedTxs = [] for (const tx of txs) { // serialize txs as per type - if (isBlobEIP4844Tx(tx)) { + if (isBlob4844Tx(tx)) { serializedTxs.push(tx.serializeNetworkWrapper()) - } else if ( - isFeeMarketEIP1559Tx(tx) || - isAccessListEIP2930Tx(tx) || - isEOACodeEIP7702Tx(tx) - ) { + } else if (isFeeMarket1559Tx(tx) || isAccessList2930Tx(tx) || isEOACode7702Tx(tx)) { serializedTxs.push(tx.serialize()) } else if (isLegacyTx(tx)) { serializedTxs.push(tx.raw()) @@ -294,9 +284,9 @@ export class EthProtocol extends Protocol { txs.map((txData) => { // Blob transactions are deserialized with network wrapper if (txData[0] === 3) { - return BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper(txData, { common }) + return createBlob4844TxFromSerializedNetworkWrapper(txData, { common }) } else { - return TransactionFactory.fromBlockBodyData(txData, { common }) + return createTxFromBlockBodyData(txData, { common }) } }), ] @@ -335,7 +325,7 @@ export class EthProtocol extends Protocol { 
Uint8Array, Uint8Array, Uint8Array, - Log[] + Log[], ] const receipt = { cumulativeBlockGasUsed: bytesToBigInt(cumulativeGasUsed), @@ -360,10 +350,6 @@ export class EthProtocol extends Protocol { super(options) this.chain = options.chain - const chainTTD = this.config.chainCommon.hardforkTTD(Hardfork.Paris) - if (chainTTD !== null && chainTTD !== undefined) { - this.chainTTD = chainTTD - } } /** @@ -403,7 +389,7 @@ export class EthProtocol extends Protocol { */ encodeStatus(): any { return { - networkId: bigIntToUnpaddedBytes(this.chain.networkId), + chainId: bigIntToUnpaddedBytes(this.chain.chainId), td: bigIntToUnpaddedBytes(this.chain.blocks.td), bestHash: this.chain.blocks.latest!.hash(), genesisHash: this.chain.genesis.hash(), @@ -417,7 +403,7 @@ export class EthProtocol extends Protocol { */ decodeStatus(status: any): any { return { - networkId: bytesToBigInt(status.networkId), + chainId: bytesToBigInt(status.chainId), td: bytesToBigInt(status.td), bestHash: status.bestHash, genesisHash: status.genesisHash, diff --git a/packages/client/src/net/protocol/flowcontrol.ts b/packages/client/src/net/protocol/flowcontrol.ts index 4330244e76..54414961dd 100644 --- a/packages/client/src/net/protocol/flowcontrol.ts +++ b/packages/client/src/net/protocol/flowcontrol.ts @@ -27,7 +27,7 @@ export class FlowControl { readonly bl: number readonly mrc: Mrc readonly mrr: number - readonly out: Map; + readonly out: Map readonly in: Map constructor(options?: FlowControlOptions) { diff --git a/packages/client/src/net/protocol/lesprotocol.ts b/packages/client/src/net/protocol/lesprotocol.ts index 97f4400cb9..7235951898 100644 --- a/packages/client/src/net/protocol/lesprotocol.ts +++ b/packages/client/src/net/protocol/lesprotocol.ts @@ -1,4 +1,4 @@ -import { BlockHeader } from '@ethereumjs/block' +import { createBlockHeaderFromBytesArray } from '@ethereumjs/block' import { BIGINT_0, bigIntToUnpaddedBytes, @@ -13,7 +13,7 @@ import { Protocol } from './protocol.js' import type { Chain } from '../../blockchain/index.js' import type { FlowControl } from './flowcontrol.js' import type { Message, ProtocolOptions } from './protocol.js' -import type { BlockHeaderBytes } from '@ethereumjs/block' +import type { BlockHeader, BlockHeaderBytes } from '@ethereumjs/block' export interface LesProtocolOptions extends ProtocolOptions { /* Blockchain */ @@ -41,7 +41,7 @@ type GetBlockHeadersOpts = { */ export interface LesProtocolMethods { getBlockHeaders: ( - opts: GetBlockHeadersOpts + opts: GetBlockHeadersOpts, ) => Promise<{ reqId: bigint; bv: bigint; headers: BlockHeader[] }> } @@ -109,10 +109,10 @@ export class LesProtocol extends Protocol { reqId: bytesToBigInt(reqId), bv: bytesToBigInt(bv), headers: headers.map((h: BlockHeaderBytes) => - BlockHeader.fromValuesArray(h, { + createBlockHeaderFromBytesArray(h, { setHardfork: true, common: this.config.chainCommon, // eslint-disable-line no-invalid-this - }) + }), ), }), }, @@ -186,15 +186,15 @@ export class LesProtocol extends Protocol { const forkHash = this.config.chainCommon.forkHash( this.config.chainCommon.hardfork(), - this.chain.genesis.hash() + this.chain.genesis.hash(), ) const nextFork = this.config.chainCommon.nextHardforkBlockOrTimestamp( - this.config.chainCommon.hardfork() + this.config.chainCommon.hardfork(), ) const forkID = [hexToBytes(forkHash), bigIntToUnpaddedBytes(nextFork ?? 
0n)] return { - networkId: bigIntToUnpaddedBytes(this.chain.networkId), + chainId: bigIntToUnpaddedBytes(this.chain.chainId), headTd: bigIntToUnpaddedBytes(this.chain.headers.td), headHash: this.chain.headers.latest?.hash(), headNum: bigIntToUnpaddedBytes(this.chain.headers.height), @@ -223,7 +223,7 @@ export class LesProtocol extends Protocol { } } return { - networkId: bytesToBigInt(status.networkId), + chainId: bytesToBigInt(status.chainId), headTd: bytesToBigInt(status.headTd), headHash: status.headHash, headNum: bytesToBigInt(status.headNum), diff --git a/packages/client/src/net/protocol/snapprotocol.ts b/packages/client/src/net/protocol/snapprotocol.ts index db70510457..614ae8aaa7 100644 --- a/packages/client/src/net/protocol/snapprotocol.ts +++ b/packages/client/src/net/protocol/snapprotocol.ts @@ -81,7 +81,7 @@ type GetTrieNodesOpts = { */ export interface SnapProtocolMethods { getAccountRange: ( - opts: GetAccountRangeOpts + opts: GetAccountRangeOpts, ) => Promise<{ reqId: bigint; accounts: AccountData[]; proof: Uint8Array[] }> getStorageRanges: (opts: GetStorageRangesOpts) => Promise<{ reqId: bigint @@ -158,7 +158,7 @@ export class SnapProtocol extends Protocol { ({ hash, body: this.convertSlimBody === true ? accountBodyFromSlim(body) : body, - } as AccountData) + }) as AccountData, ), proof, } @@ -206,7 +206,7 @@ export class SnapProtocol extends Protocol { return [ bigIntToUnpaddedBytes(reqId ?? ++this.nextReqId), slots.map((accSlots) => - accSlots.map((slotData) => [setLengthLeft(slotData.hash, 32), slotData.body]) + accSlots.map((slotData) => [setLengthLeft(slotData.hash, 32), slotData.body]), ), proof, ] @@ -215,7 +215,7 @@ export class SnapProtocol extends Protocol { return { reqId: bytesToBigInt(reqId), slots: slots.map((accSlots: any) => - accSlots.map(([hash, body]: any) => ({ hash, body } as StorageData)) + accSlots.map(([hash, body]: any) => ({ hash, body }) as StorageData), ), proof, } diff --git a/packages/client/src/net/server/rlpxserver.ts b/packages/client/src/net/server/rlpxserver.ts index 11de958b9a..1b02ce48c5 100644 --- a/packages/client/src/net/server/rlpxserver.ts +++ b/packages/client/src/net/server/rlpxserver.ts @@ -41,7 +41,7 @@ const ignoredErrors = new RegExp( // Client 'Handshake timed out', // Protocol handshake 'Server already destroyed', // Bootstrap retrigger - ].join('|') + ].join('|'), ) /** @@ -72,11 +72,11 @@ export class RlpxServer extends Server { 'go1.6', 'go1.7', 'quorum', - 'pirl', - 'ubiq', - 'gmc', - 'gwhale', - 'prichain', + 'pirl', // cspell:disable-line + 'ubiq', // cspell:disable-line + 'gmc', // cspell:disable-line + 'gwhale', // cspell:disable-line + 'prichain', // cspell:disable-line ] } @@ -243,7 +243,7 @@ export class RlpxServer extends Server { this.dpt.bind(this.config.port, '0.0.0.0') } this.config.logger.info( - `Started discovery service discV4=${this.config.discV4} dns=${this.config.discDns} refreshInterval=${this.refreshInterval}` + `Started discovery service discV4=${this.config.discV4} dns=${this.config.discDns} refreshInterval=${this.refreshInterval}`, ) }) } @@ -291,14 +291,14 @@ export class RlpxServer extends Server { if (peer) { this.peers.delete(peer.id) this.config.logger.debug( - `Peer disconnected (${rlpxPeer.getDisconnectPrefix(reason)}): ${peer}` + `Peer disconnected (${rlpxPeer.getDisconnectPrefix(reason)}): ${peer}`, ) this.config.events.emit(Event.PEER_DISCONNECTED, peer) } }) this.rlpx.events.on('peer:error', (rlpxPeer: Devp2pRLPxPeer, error: Error) => - this.error(error) + this.error(error), ) 
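// A minimal sketch of the ETH/LES status handshake change above, where the field keyed
// as networkId is replaced by chainId. It assumes the bigIntToUnpaddedBytes/bytesToBigInt
// helpers from '@ethereumjs/util' shown in the diff; the chain fields are placeholders.
import { bigIntToUnpaddedBytes, bytesToBigInt } from '@ethereumjs/util'

interface StatusChain {
  chainId: bigint
  td: bigint
  bestHash: Uint8Array
  genesisHash: Uint8Array
}

// Outgoing status: unpadded big-endian byte fields, keyed by chainId instead of networkId.
function encodeStatus(chain: StatusChain) {
  return {
    chainId: bigIntToUnpaddedBytes(chain.chainId),
    td: bigIntToUnpaddedBytes(chain.td),
    bestHash: chain.bestHash,
    genesisHash: chain.genesisHash,
  }
}

// Incoming status: convert the byte fields back to bigints for comparison.
function decodeStatus(status: {
  chainId: Uint8Array
  td: Uint8Array
  bestHash: Uint8Array
  genesisHash: Uint8Array
}) {
  return {
    chainId: bytesToBigInt(status.chainId),
    td: bytesToBigInt(status.td),
    bestHash: status.bestHash,
    genesisHash: status.genesisHash,
  }
}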
this.rlpx.events.on('error', (e: Error) => { diff --git a/packages/client/src/rpc/helpers.ts b/packages/client/src/rpc/helpers.ts index 99528f452a..75f282dbdf 100644 --- a/packages/client/src/rpc/helpers.ts +++ b/packages/client/src/rpc/helpers.ts @@ -10,6 +10,7 @@ type RpcError = { code: number message: string trace?: string + data?: string } export function callWithStackTrace(handler: Function, debug: boolean) { @@ -21,6 +22,7 @@ export function callWithStackTrace(handler: Function, debug: boolean) { const e: RpcError = { code: error.code ?? INTERNAL_ERROR, message: error.message, + data: error.data, } if (debug === true) { e['trace'] = error.stack diff --git a/packages/client/src/rpc/index.ts b/packages/client/src/rpc/index.ts index fc18973587..ced41f3981 100644 --- a/packages/client/src/rpc/index.ts +++ b/packages/client/src/rpc/index.ts @@ -32,7 +32,7 @@ export class RPCManager { getMethods(engine = false, rpcDebug = false) { const methods: { [key: string]: Function } = {} const mods = modules.list.filter((name: string) => - engine ? name === 'Engine' : name !== 'Engine' + engine ? name === 'Engine' : name !== 'Engine', ) for (const modName of mods) { const mod = new (modules as any)[modName](this._client, rpcDebug) @@ -42,8 +42,8 @@ export class RPCManager { if (!this._config.saveReceipts && saveReceiptsMethods.includes(methodName)) { continue } - const concatedMethodName = `${modName.toLowerCase()}_${methodName}` - methods[concatedMethodName] = mod[methodName].bind((...params: any[]) => { + const concatenatedMethodName = `${modName.toLowerCase()}_${methodName}` + methods[concatenatedMethodName] = mod[methodName].bind((...params: any[]) => { try { mod(...params) } catch (error: any) { @@ -64,7 +64,7 @@ export class RPCManager { */ static getMethodNames(mod: Object): string[] { const methodNames = Object.getOwnPropertyNames((mod as any).prototype).filter( - (methodName: string) => methodName !== 'constructor' + (methodName: string) => methodName !== 'constructor', ) return methodNames } diff --git a/packages/client/src/rpc/modules/admin.ts b/packages/client/src/rpc/modules/admin.ts index 2da9a3c846..34460f785c 100644 --- a/packages/client/src/rpc/modules/admin.ts +++ b/packages/client/src/rpc/modules/admin.ts @@ -6,6 +6,7 @@ import { middleware } from '../validation.js' import type { Chain } from '../../blockchain/index.js' import type { EthereumClient } from '../../client.js' +import type { RlpxPeer } from '../../net/peer/rlpxpeer.js' import type { Service } from '../../service/index.js' /** @@ -28,14 +29,14 @@ export class Admin { this._rpcDebug = rpcDebug this.nodeInfo = middleware(callWithStackTrace(this.nodeInfo.bind(this), this._rpcDebug), 0, []) + this.peers = middleware(callWithStackTrace(this.peers.bind(this), this._rpcDebug), 0, []) } /** * Returns information about the currently running node. 
- * see for reference: https://geth.ethereum.org/docs/rpc/ns-admin#admin_nodeinfo - * @param params An empty array + * see for reference: https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-admin#admin_peers */ - async nodeInfo(_params: []) { + async nodeInfo() { const rlpxInfo = this._client.config.server!.getRlpxInfo() const { enode, id, ip, listenAddr, ports } = rlpxInfo const { discovery, listener } = ports @@ -45,7 +46,7 @@ export class Admin { const difficulty = latestHeader.difficulty.toString() const genesis = bytesToHex(this._chain.genesis.hash()) const head = bytesToHex(latestHeader.mixHash) - const network = this._chain.networkId.toString() + const network = this._chain.chainId.toString() const nodeInfo = { name: clientName, @@ -68,4 +69,33 @@ export class Admin { } return nodeInfo } + + /** + * Returns information about currently connected peers + * @returns an array of objects containing information about peers (including id, eth protocol versions supported, client name, etc.) + */ + async peers() { + const peers = this._client.services.filter((service) => service.name === 'eth')[0]?.pool + .peers as RlpxPeer[] + + return peers?.map((peer) => { + return { + id: peer.id, + name: peer.rlpxPeer?.['_hello']?.clientId ?? null, + protocols: { + eth: { + head: peer.eth?.updatedBestHeader + ? bytesToHex(peer.eth.updatedBestHeader?.hash()) + : bytesToHex(peer.eth?.status.bestHash), + difficulty: peer.eth?.status.td.toString(10), + version: peer.eth?.['versions'].slice(-1)[0] ?? null, + }, + }, + caps: peer.eth?.['versions'].map((ver) => 'eth/' + ver), + network: { + remoteAddress: peer.address, + }, + } + }) + } } diff --git a/packages/client/src/rpc/modules/debug.ts b/packages/client/src/rpc/modules/debug.ts index abfd4eaffa..f118b561a9 100644 --- a/packages/client/src/rpc/modules/debug.ts +++ b/packages/client/src/rpc/modules/debug.ts @@ -1,5 +1,12 @@ -import { Address, TypeOutput, bigIntToHex, bytesToHex, hexToBytes, toType } from '@ethereumjs/util' -import { type VM, encodeReceipt } from '@ethereumjs/vm' +import { + TypeOutput, + bigIntToHex, + bytesToHex, + createAddressFromString, + hexToBytes, + toType, +} from '@ethereumjs/util' +import { type VM, encodeReceipt, runTx } from '@ethereumjs/vm' import { INTERNAL_ERROR, INVALID_PARAMS } from '../error-code.js' import { callWithStackTrace, getBlockByOption } from '../helpers.js' @@ -94,7 +101,7 @@ export class Debug { this.traceTransaction = middleware( callWithStackTrace(this.traceTransaction.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.traceCall = middleware(callWithStackTrace(this.traceCall.bind(this), this._rpcDebug), 2, [ [validators.transaction()], @@ -109,27 +116,27 @@ export class Debug { [validators.address], [validators.uint256], [validators.unsignedInteger], - ] + ], ) this.getRawBlock = middleware( callWithStackTrace(this.getRawBlock.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getRawHeader = middleware( callWithStackTrace(this.getRawHeader.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getRawReceipts = middleware( callWithStackTrace(this.getRawReceipts.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getRawTransaction = middleware( callWithStackTrace(this.getRawTransaction.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) } @@ -153,7 +160,7 @@ export class Debug { const opts = 
validateTracerConfig(config) const result = await this.service.execution.receiptsManager.getReceiptByTxHash( - hexToBytes(txHash) + hexToBytes(txHash), ) if (!result) return null const [_, blockHash, txIndex] = result @@ -166,7 +173,7 @@ export class Debug { await vmCopy.stateManager.setStateRoot(parentBlock.header.stateRoot) for (let x = 0; x < txIndex; x++) { // Run all txns in the block prior to the traced transaction - await vmCopy.runTx({ tx: block.transactions[x], block }) + await runTx(vmCopy, { tx: block.transactions[x], block }) } const trace = { @@ -179,7 +186,13 @@ export class Debug { const memory = [] let storage = {} if (opts.disableStorage === false) { - storage = await vmCopy.stateManager.dumpStorage(step.address) + if (!('dumpStorage' in vmCopy.stateManager)) { + throw { + message: 'stateManager has no dumpStorage implementation', + code: INTERNAL_ERROR, + } + } + storage = await vmCopy.stateManager.dumpStorage!(step.address) } if (opts.enableMemory === true) { for (let x = 0; x < step.memoryWordCount; x++) { @@ -210,7 +223,7 @@ export class Debug { } next?.() }) - const res = await vmCopy.runTx({ tx, block }) + const res = await runTx(vmCopy, { tx, block }) trace.gas = bigIntToHex(res.totalGasSpent) trace.failed = res.execResult.exceptionError !== undefined trace.returnValue = bytesToHex(res.execResult.returnValue) @@ -260,7 +273,13 @@ export class Debug { const memory = [] let storage = {} if (opts.disableStorage === false) { - storage = await vm.stateManager.dumpStorage(step.address) + if (!('dumpStorage' in vm.stateManager)) { + throw { + message: 'stateManager has no dumpStorage implementation', + code: INTERNAL_ERROR, + } + } + storage = await vm.stateManager.dumpStorage!(step.address) } if (opts.enableMemory === true) { for (let x = 0; x < step.memoryWordCount; x++) { @@ -292,8 +311,8 @@ export class Debug { next?.() }) const runCallOpts = { - caller: from !== undefined ? Address.fromString(from) : undefined, - to: to !== undefined ? Address.fromString(to) : undefined, + caller: from !== undefined ? createAddressFromString(from) : undefined, + to: to !== undefined ? createAddressFromString(to) : undefined, gasLimit: toType(gasLimit, TypeOutput.BigInt), gasPrice: toType(gasPrice, TypeOutput.BigInt), value: toType(value, TypeOutput.BigInt), @@ -318,7 +337,7 @@ export class Debug { * The object will also contain `nextKey`, the next (hashed) storage key after the range included in `storage`. */ async storageRangeAt( - params: [PrefixedHexString, number, PrefixedHexString, PrefixedHexString, number] + params: [PrefixedHexString, number, PrefixedHexString, PrefixedHexString, number], ) { const [blockHash, txIndex, account, startKey, limit] = params @@ -347,17 +366,23 @@ export class Debug { const parentBlock = await this.chain.getBlock(block.header.parentHash) // Copy the VM and run transactions including the relevant transaction. 
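// A minimal sketch of the replay pattern used by storageRangeAt here: shallow-copy the
// VM, reset to the parent state root, re-run the block's transactions up to the target
// index with the standalone runTx() helper, then dump a storage range behind the same
// dumpStorageRange guard as above. The function name and arguments are hypothetical.
import { runTx } from '@ethereumjs/vm'
import { createAddressFromString } from '@ethereumjs/util'
import type { VM } from '@ethereumjs/vm'
import type { Block } from '@ethereumjs/block'

async function replayAndDumpStorage(
  vm: VM,
  block: Block,
  parentStateRoot: Uint8Array,
  txIndex: number,
  account: string,
  startKey: bigint,
  limit: number,
) {
  const vmCopy = await vm.shallowCopy()
  if (!('dumpStorageRange' in vmCopy.stateManager)) {
    throw new Error('stateManager has no dumpStorageRange implementation')
  }
  // Execute against the parent block's post-state so the replayed txs see the correct state
  await vmCopy.stateManager.setStateRoot(parentStateRoot)
  for (let i = 0; i <= txIndex; i++) {
    await runTx(vmCopy, { tx: block.transactions[i], block })
  }
  return vmCopy.stateManager.dumpStorageRange!(createAddressFromString(account), startKey, limit)
}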
const vmCopy = await this.vm.shallowCopy() + if (!('dumpStorageRange' in vmCopy.stateManager)) { + throw { + code: INTERNAL_ERROR, + message: 'stateManager has no dumpStorageRange implementation', + } + } await vmCopy.stateManager.setStateRoot(parentBlock.header.stateRoot) for (let i = 0; i <= txIndex; i++) { - await vmCopy.runTx({ tx: block.transactions[i], block }) + await runTx(vmCopy, { tx: block.transactions[i], block }) } // await here so that any error can be handled in the catch below for proper response - return vmCopy.stateManager.dumpStorageRange( + return vmCopy.stateManager.dumpStorageRange!( // Validator already verified that `account` and `startKey` are properly formatted. - Address.fromString(account), + createAddressFromString(account), BigInt(startKey), - limit + limit, ) } /** @@ -390,7 +415,7 @@ export class Debug { const receipts = await this.service.execution.receiptsManager.getReceipts( block.hash(), true, - true + true, ) return receipts.map((r) => bytesToHex(encodeReceipt(r, r.txType))) } @@ -398,11 +423,11 @@ export class Debug { * Returns the bytes of the transaction. * @param blockOpt Block number or tag */ - async getRawTransaction(params: [string]) { + async getRawTransaction(params: [PrefixedHexString]) { const [txHash] = params if (!this.service.execution.receiptsManager) throw new Error('missing receiptsManager') const result = await this.service.execution.receiptsManager.getReceiptByTxHash( - hexToBytes(txHash) + hexToBytes(txHash), ) if (!result) return null const [_receipt, blockHash, txIndex] = result diff --git a/packages/client/src/rpc/modules/engine/CLConnectionManager.ts b/packages/client/src/rpc/modules/engine/CLConnectionManager.ts index 148772f1e0..da6e5c0776 100644 --- a/packages/client/src/rpc/modules/engine/CLConnectionManager.ts +++ b/packages/client/src/rpc/modules/engine/CLConnectionManager.ts @@ -137,15 +137,15 @@ export class CLConnectionManager { this._connectionCheckInterval = setInterval( // eslint-disable @typescript-eslint/await-thenable this.connectionCheck.bind(this), - this.DEFAULT_CONNECTION_CHECK_INTERVAL + this.DEFAULT_CONNECTION_CHECK_INTERVAL, ) this._payloadLogInterval = setInterval( this.lastPayloadLog.bind(this), - this.DEFAULT_PAYLOAD_LOG_INTERVAL + this.DEFAULT_PAYLOAD_LOG_INTERVAL, ) this._forkchoiceLogInterval = setInterval( this.lastForkchoiceLog.bind(this), - this.DEFAULT_FORKCHOICE_LOG_INTERVAL + this.DEFAULT_FORKCHOICE_LOG_INTERVAL, ) } @@ -166,11 +166,11 @@ export class CLConnectionManager { private _getPayloadLogMsg(payload: NewPayload) { let msg = `number=${Number(payload.payload.blockNumber)} hash=${short( - payload.payload.blockHash + payload.payload.blockHash, )} parentHash=${short(payload.payload.parentHash)} status=${ payload.response ? payload.response.status : '-' } gasUsed=${this.compactNum(Number(payload.payload.gasUsed))} baseFee=${Number( - payload.payload.baseFeePerGas + payload.payload.baseFeePerGas, )} txs=${payload.payload.transactions.length}` if ('withdrawals' in payload.payload && payload.payload.withdrawals !== null) { @@ -190,7 +190,7 @@ export class CLConnectionManager { msg += `number=${Number(update.headBlock.header.number)} ` } msg += `head=${short(update.state.headBlockHash)} finalized=${short( - update.state.finalizedBlockHash + update.state.finalizedBlockHash, )} response=${update.response ? 
update.response.payloadStatus.status : '-'}` if (update.headBlock) { msg += ` timestampDiff=${this.timeDiffStr(update.headBlock)}` @@ -217,7 +217,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `Initial consensus forkchoice update ${this._getForkchoiceUpdateLogMsg(update)}`, - logLevel.INFO + logLevel.INFO, ) } this._lastForkchoiceUpdate = update @@ -230,7 +230,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `Initial consensus payload received ${this._getPayloadLogMsg(payload)}`, - logLevel.INFO + logLevel.INFO, ) } this._lastPayload = payload @@ -319,12 +319,12 @@ export class CLConnectionManager { logCLStatus( this.config.logger, 'CL client connection is needed, Merge HF happening soon', - logLevel.WARN + logLevel.WARN, ) logCLStatus( this.config.logger, '(no CL <-> EL communication yet, connection might be in a workable state though)', - logLevel.WARN + logLevel.WARN, ) } } @@ -337,12 +337,12 @@ export class CLConnectionManager { logCLStatus( this.config.logger, 'Paris (Merge) HF activated, CL client connection is needed for continued block processing', - logLevel.INFO + logLevel.INFO, ) logCLStatus( this.config.logger, '(note that CL client might need to be synced up to beacon chain Merge transition slot until communication starts)', - logLevel.INFO + logLevel.INFO, ) } this.oneTimeMergeCLConnectionCheck = true @@ -365,7 +365,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `Last consensus payload received ${payloadMsg}`, - logLevel.INFO + logLevel.INFO, ) const count = this._payloadToPayloadStats['blockCount'] const min = this._payloadToPayloadStats['minBlockNumber'] @@ -381,7 +381,7 @@ export class CLConnectionManager { `Payload stats blocks count=${count} minBlockNum=${min} maxBlockNum=${max} txsPerType=${ txsMsg.length > 0 ? 
txsMsg.join('|') : '0' }`, - logLevel.DEBUG + logLevel.DEBUG, ) this.clearPayloadStats() } @@ -398,7 +398,7 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `New consensus payload received ${payloadMsg}`, - logLevel.INFO + logLevel.INFO, ) } } @@ -415,15 +415,15 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `No consensus forkchoice update received yet`, - logLevel.INFO + logLevel.INFO, ) } else { logCLStatus( this.config.logger, `Last consensus forkchoice update ${this._getForkchoiceUpdateLogMsg( - this._lastForkchoiceUpdate + this._lastForkchoiceUpdate, )}`, - logLevel.INFO + logLevel.INFO, ) } } @@ -437,9 +437,9 @@ export class CLConnectionManager { logCLStatus( this.config.logger, `New chain head set (forkchoice update) ${this._getForkchoiceUpdateLogMsg( - this._lastForkchoiceUpdate + this._lastForkchoiceUpdate, )}`, - logLevel.INFO + logLevel.INFO, ) } } @@ -451,7 +451,7 @@ export class CLConnectionManager { */ export function middleware( methodFn: (params: any[]) => Promise, - handler: (params: any[], response: any, errormsg: any) => void + handler: (params: any[], response: any, errormsg: any) => void, ): any { return function (params: any[] = []) { return methodFn(params) diff --git a/packages/client/src/rpc/modules/engine/engine.ts b/packages/client/src/rpc/modules/engine/engine.ts index d9094eba22..47d9abe161 100644 --- a/packages/client/src/rpc/modules/engine/engine.ts +++ b/packages/client/src/rpc/modules/engine/engine.ts @@ -1,6 +1,5 @@ import { Hardfork } from '@ethereumjs/common' import { - BIGINT_0, BIGINT_1, bytesToHex, bytesToUnprefixedHex, @@ -36,7 +35,6 @@ import { validHash, validate4844BlobVersionedHashes, validateHardforkRange, - validateTerminalBlock, } from './util/index.js' import { executionPayloadV1FieldValidators, @@ -68,7 +66,6 @@ import type { PayloadAttributesV1, PayloadAttributesV2, PayloadAttributesV3, - TransitionConfigurationV1, } from './types.js' import type { Block, ExecutionPayload } from '@ethereumjs/block' import type { PrefixedHexString } from '@ethereumjs/util' @@ -178,7 +175,7 @@ export class Engine { middleware(callWithStackTrace(this.newPayloadV1.bind(this), this._rpcDebug), 1, [ [validators.object(executionPayloadV1FieldValidators)], ]), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) this.newPayloadV2 = cmMiddleware( @@ -186,11 +183,11 @@ export class Engine { [ validators.either( validators.object(executionPayloadV1FieldValidators), - validators.object(executionPayloadV2FieldValidators) + validators.object(executionPayloadV2FieldValidators), ), ], ]), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) this.newPayloadV3 = cmMiddleware( @@ -202,9 +199,9 @@ export class Engine { [validators.array(validators.bytes32)], [validators.bytes32], ], - ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'] + ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'], ), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) this.newPayloadV4 = cmMiddleware( @@ -216,9 +213,9 @@ export class Engine { [validators.array(validators.bytes32)], [validators.bytes32], ], - ['executionPayload', 'blobVersionedHashes', 
'parentBeaconBlockRoot'] + ['executionPayload', 'blobVersionedHashes', 'parentBeaconBlockRoot'], ), - ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }) + ([payload], response) => this.connectionManager.lastNewPayload({ payload, response }), ) /** @@ -227,7 +224,7 @@ export class Engine { const forkchoiceUpdatedResponseCMHandler = ( [state]: ForkchoiceStateV1[], response?: ForkchoiceResponseV1 & { headBlock?: Block }, - error?: string + error?: string, ) => { this.connectionManager.lastForkchoiceUpdate({ state, @@ -244,21 +241,21 @@ export class Engine { [validators.object(forkchoiceFieldValidators)], [validators.optional(validators.object(payloadAttributesFieldValidatorsV1))], ]), - forkchoiceUpdatedResponseCMHandler + forkchoiceUpdatedResponseCMHandler, ) this.forkchoiceUpdatedV2 = cmMiddleware( middleware(callWithStackTrace(this.forkchoiceUpdatedV2.bind(this), this._rpcDebug), 1, [ [validators.object(forkchoiceFieldValidators)], [validators.optional(validators.object(payloadAttributesFieldValidatorsV2))], ]), - forkchoiceUpdatedResponseCMHandler + forkchoiceUpdatedResponseCMHandler, ) this.forkchoiceUpdatedV3 = cmMiddleware( middleware(callWithStackTrace(this.forkchoiceUpdatedV3.bind(this), this._rpcDebug), 1, [ [validators.object(forkchoiceFieldValidators)], [validators.optional(validators.object(payloadAttributesFieldValidatorsV3))], ]), - forkchoiceUpdatedResponseCMHandler + forkchoiceUpdatedResponseCMHandler, ) /** @@ -268,48 +265,28 @@ export class Engine { middleware(callWithStackTrace(this.getPayloadV1.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) this.getPayloadV2 = cmMiddleware( middleware(callWithStackTrace(this.getPayloadV2.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) this.getPayloadV3 = cmMiddleware( middleware(callWithStackTrace(this.getPayloadV3.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) this.getPayloadV4 = cmMiddleware( middleware(callWithStackTrace(this.getPayloadV4.bind(this), this._rpcDebug), 1, [ [validators.bytes8], ]), - () => this.connectionManager.updateStatus() - ) - - /** - * exchangeTransitionConfiguration - */ - this.exchangeTransitionConfigurationV1 = cmMiddleware( - middleware( - callWithStackTrace(this.exchangeTransitionConfigurationV1.bind(this), this._rpcDebug), - 1, - [ - [ - validators.object({ - terminalTotalDifficulty: validators.uint256, - terminalBlockHash: validators.bytes32, - terminalBlockNumber: validators.uint64, - }), - ], - ] - ), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -317,7 +294,7 @@ export class Engine { */ this.exchangeCapabilities = cmMiddleware( middleware(callWithStackTrace(this.exchangeCapabilities.bind(this), this._rpcDebug), 0, []), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -327,7 +304,7 @@ export class Engine { middleware(callWithStackTrace(this.getPayloadBodiesByHashV1.bind(this), this._rpcDebug), 1, [ [validators.array(validators.bytes32)], ]), - () => this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) /** @@ -338,7 +315,7 @@ export class Engine { [validators.bytes8], [validators.bytes8], ]), - () => 
this.connectionManager.updateStatus() + () => this.connectionManager.updateStatus(), ) } @@ -360,7 +337,7 @@ export class Engine { * 3. validationError: String|null - validation error message */ private async newPayload( - params: [ExecutionPayload, (Bytes32[] | null)?, (Bytes32 | null)?] + params: [ExecutionPayload, (Bytes32[] | null)?, (Bytes32 | null)?], ): Promise { const [payload, blobVersionedHashes, parentBeaconBlockRoot] = params if (this.config.synchronized) { @@ -386,7 +363,7 @@ export class Engine { parentBeaconBlockRoot: parentBeaconBlockRoot ?? undefined, }, this.chain, - this.chainCache + this.chainCache, ) if (headBlock === undefined || error !== undefined) { let response = error @@ -396,7 +373,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) response = { status: Status.INVALID, latestValidHash, validationError } } @@ -421,7 +398,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) const response = { status: Status.INVALID, latestValidHash, validationError } // skip marking the block invalid as this is more of a data issue from CL @@ -432,7 +409,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) const response = { status: Status.INVALID, latestValidHash, validationError } // skip marking the block invalid as this is more of a data issue from CL @@ -447,8 +424,8 @@ export class Engine { if (hardfork !== this.lastNewPayloadHF && this.lastNewPayloadHF !== '') { this.config.logger.info( `Hardfork change along new payload block number=${headBlock.header.number} hash=${short( - headBlock.hash() - )} old=${this.lastNewPayloadHF} new=${hardfork}` + headBlock.hash(), + )} old=${this.lastNewPayloadHF} new=${hardfork}`, ) } this.lastNewPayloadHF = hardfork @@ -463,23 +440,6 @@ export class Engine { this.remoteBlocks.get(parentHash.slice(2)) ?? (await this.chain.getBlock(hexToBytes(parentHash as PrefixedHexString))) - // Validations with parent - if (!parent.common.gteHardfork(Hardfork.Paris)) { - const validTerminalBlock = await validateTerminalBlock(parent, this.chain) - if (!validTerminalBlock) { - const response = { - status: Status.INVALID, - validationError: null, - latestValidHash: bytesToHex(zeros(32)), - } - this.invalidBlocks.set( - blockHash.slice(2), - new Error(response.validationError ?? 'Terminal block validation failed') - ) - return response - } - } - /** * validate 4844 transactions and fields as these validations generally happen on putBlocks * when parent is confirmed to be in the chain. 
But we can do it here early @@ -492,7 +452,7 @@ export class Engine { const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, - this.chainCache + this.chainCache, ) const response = { status: Status.INVALID, latestValidHash, validationError } // skip marking the block invalid as this is more of a data issue from CL @@ -546,17 +506,17 @@ export class Engine { // if the invalid block is canonical along the current chain return invalid const invalidBlock = await this.skeleton.getBlockByHash( this.execution.chainStatus.hash, - true + true, ) if (invalidBlock !== undefined) { // hard luck: block along canonical chain is invalid const latestValidHash = await validHash( invalidBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const validationError = `Block number=${invalidBlock.header.number} hash=${short( - invalidBlock.hash() + invalidBlock.hash(), )} root=${short(invalidBlock.header.stateRoot)} along the canonical chain is invalid` const response = { @@ -633,10 +593,10 @@ export class Engine { const latestValidHash = await validHash( invalidBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const validationError = `Block number=${invalidBlock.header.number} hash=${short( - invalidBlock.hash() + invalidBlock.hash(), )} root=${short(invalidBlock.header.stateRoot)} along the canonical chain is invalid` const response = { @@ -695,13 +655,13 @@ export class Engine { const blockParent = i > 0 ? blocks[i - 1] - : this.chainCache.remoteBlocks.get( - bytesToHex(block.header.parentHash).slice(2) - ) ?? (await this.chain.getBlock(block.header.parentHash)) + : (this.chainCache.remoteBlocks.get( + bytesToHex(block.header.parentHash).slice(2), + ) ?? (await this.chain.getBlock(block.header.parentHash))) const blockExecuted = await this.execution.runWithoutSetHead({ block, root: blockParent.header.stateRoot, - setHardfork: this.chain.headers.td, + setHardfork: true, parentBlock: blockParent, }) return blockExecuted @@ -711,14 +671,14 @@ export class Engine { if (!executed) { this.config.logger.debug( `Skipping block(s) execution for headBlock=${headBlock.header.number} hash=${short( - headBlock.hash() + headBlock.hash(), )} : pendingBlocks=${blocks.length - i}(limit=${ this.chain.config.engineNewpayloadMaxExecute }) transactions=${block.transactions.length}(limit=${ this.chain.config.engineNewpayloadMaxTxsExecute - }) executionBusy=${this.execution.running}` + }) executionBusy=${this.execution.running}`, ) - // determind status to be returned depending on if block could extend chain or not + // determined status to be returned depending on if block could extend chain or not const status = optimisticLookup === true ? Status.SYNCING : Status.ACCEPTED const response = { status, latestValidHash: null, validationError: null } return response @@ -731,7 +691,7 @@ export class Engine { const latestValidHash = await validHash( headBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const errorMsg = `${error}`.toLowerCase() @@ -922,7 +882,7 @@ export class Engine { * 3. 
headBlock: Block|undefined - Block corresponding to headBlockHash if found */ private async forkchoiceUpdated( - params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributes | undefined] + params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributes | undefined], ): Promise { const { headBlockHash, finalizedBlockHash, safeBlockHash } = params[0] const payloadAttributes = params[1] @@ -974,7 +934,7 @@ export class Engine { (await this.chain.getBlock(head)) } catch (error) { this.config.logger.debug( - `Forkchoice announced head block unknown to EL hash=${short(headBlockHash)}` + `Forkchoice announced head block unknown to EL hash=${short(headBlockHash)}`, ) const payloadStatus = { status: Status.SYNCING, @@ -993,7 +953,7 @@ export class Engine { this.config.logger.info( `Hardfork change along forkchoice head block update number=${ headBlock.header.number - } hash=${short(headBlock.hash())} old=${this.lastForkchoiceUpdatedHF} new=${hardfork}` + } hash=${short(headBlock.hash())} old=${this.lastForkchoiceUpdatedHF} new=${hardfork}`, ) } this.lastForkchoiceUpdatedHF = hardfork @@ -1002,8 +962,8 @@ export class Engine { // requirements that might come later because of reorg or CL restarts this.config.logger.debug( `Forkchoice requested update to new head number=${headBlock.header.number} hash=${short( - headBlock.hash() - )}` + headBlock.hash(), + )}`, ) /** @@ -1032,27 +992,6 @@ export class Engine { if (reorged) await this.service.beaconSync?.reorged(headBlock) - /** - * Terminal block validation - */ - // Only validate this as terminal block if this block's difficulty is non-zero, - // else this is a PoS block but its hardfork could be indeterminable if the skeleton - // is not yet connected. - if (!headBlock.common.gteHardfork(Hardfork.Paris) && headBlock.header.difficulty > BIGINT_0) { - const validTerminalBlock = await validateTerminalBlock(headBlock, this.chain) - if (!validTerminalBlock) { - const response = { - payloadStatus: { - status: Status.INVALID, - validationError: 'Invalid terminal block', - latestValidHash: bytesToHex(zeros(32)), - }, - payloadId: null, - } - return response - } - } - /** * Check execution status */ @@ -1064,17 +1003,17 @@ export class Engine { // see if the invalid block is canonical along the current skeleton/chain return invalid const invalidBlock = await this.skeleton.getBlockByHash( this.execution.chainStatus.hash, - true + true, ) if (invalidBlock !== undefined) { // hard luck: block along canonical chain is invalid const latestValidHash = await validHash( invalidBlock.header.parentHash, this.chain, - this.chainCache + this.chainCache, ) const validationError = `Block number=${invalidBlock.header.number} hash=${short( - invalidBlock.hash() + invalidBlock.hash(), )} root=${short(invalidBlock.header.stateRoot)} along the canonical chain is invalid` const payloadStatus = { @@ -1094,11 +1033,11 @@ export class Engine { ) { // jump the vm head to failing block so that next block can be executed this.config.logger.debug( - `Jumping the stalled vmHead forward to hash=${this.execution.chainStatus.hash} height=${this.execution.chainStatus.height} to continue the execution` + `Jumping the stalled vmHead forward to hash=${this.execution.chainStatus.hash} height=${this.execution.chainStatus.height} to continue the execution`, ) await this.execution.jumpVmHead( this.execution.chainStatus.hash, - this.execution.chainStatus.height + this.execution.chainStatus.height, ) } @@ -1129,7 +1068,7 @@ export class Engine { parentBlocks = 
await recursivelyFindParents( vmHeadHash, headBlock.header.parentHash, - this.chain + this.chain, ) } catch (error) { const payloadStatus = { @@ -1213,7 +1152,7 @@ export class Engine { coinbase: suggestedFeeRecipient, parentBeaconBlockRoot, }, - withdrawals + withdrawals, ) const latestValidHash = await validHash(headBlock.hash(), this.chain, this.chainCache) const payloadStatus = { status: Status.VALID, latestValidHash, validationError: null } @@ -1240,7 +1179,10 @@ export class Engine { * @returns */ private async forkchoiceUpdatedV1( - params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributesV1 | undefined] + params: [ + forkchoiceState: ForkchoiceStateV1, + payloadAttributes: PayloadAttributesV1 | undefined, + ], ): Promise { const payloadAttributes = params[1] if (payloadAttributes !== undefined && payloadAttributes !== null) { @@ -1258,7 +1200,7 @@ export class Engine { 1, null, Hardfork.Paris, - BigInt(payloadAttributes.timestamp) + BigInt(payloadAttributes.timestamp), ) } @@ -1274,8 +1216,8 @@ export class Engine { private async forkchoiceUpdatedV2( params: [ forkchoiceState: ForkchoiceStateV1, - payloadAttributes: PayloadAttributesV1 | PayloadAttributesV2 | undefined - ] + payloadAttributes: PayloadAttributesV1 | PayloadAttributesV2 | undefined, + ], ): Promise { const payloadAttributes = params[1] if (payloadAttributes !== undefined && payloadAttributes !== null) { @@ -1294,7 +1236,7 @@ export class Engine { 2, null, Hardfork.Shanghai, - BigInt(payloadAttributes.timestamp) + BigInt(payloadAttributes.timestamp), ) const shanghaiTimestamp = this.chain.config.chainCommon.hardforkTimestamp(Hardfork.Shanghai) @@ -1335,7 +1277,10 @@ export class Engine { * @returns */ private async forkchoiceUpdatedV3( - params: [forkchoiceState: ForkchoiceStateV1, payloadAttributes: PayloadAttributesV3 | undefined] + params: [ + forkchoiceState: ForkchoiceStateV1, + payloadAttributes: PayloadAttributesV3 | undefined, + ], ): Promise { const payloadAttributes = params[1] if (payloadAttributes !== undefined && payloadAttributes !== null) { @@ -1355,7 +1300,7 @@ export class Engine { Hardfork.Cancun, // this could be valid post cancun as well, if not then update the valid till hf here null, - BigInt(payloadAttributes.timestamp) + BigInt(payloadAttributes.timestamp), ) } @@ -1432,7 +1377,7 @@ export class Engine { payloadVersion, checkNotBeforeHf, checkNotAfterHf, - BigInt(executionPayload.executionPayload.timestamp) + BigInt(executionPayload.executionPayload.timestamp), ) return executionPayload } catch (error: any) { @@ -1479,41 +1424,6 @@ export class Engine { async getPayloadV4(params: [Bytes8]) { return this.getPayload(params, 4) } - /** - * Compare transition configuration parameters. - * - * V1 (Paris HF), see: - * https://github.com/ethereum/execution-apis/blob/main/src/engine/paris.md#engine_exchangetransitionconfigurationv1 - * - * Note: This method is deprecated starting with the Cancun HF - * - * @param params An array of one parameter: - * 1. 
transitionConfiguration: Object - instance of {@link TransitionConfigurationV1} - * @returns Instance of {@link TransitionConfigurationV1} or an error - */ - async exchangeTransitionConfigurationV1( - params: [TransitionConfigurationV1] - ): Promise { - const { terminalTotalDifficulty, terminalBlockHash, terminalBlockNumber } = params[0] - const ttd = this.chain.config.chainCommon.hardforkTTD(Hardfork.Paris) - if (ttd === undefined || ttd === null) { - throw { - code: INTERNAL_ERROR, - message: 'terminalTotalDifficulty not set internally', - } - } - if (ttd !== BigInt(terminalTotalDifficulty)) { - throw { - code: INVALID_PARAMS, - message: `terminalTotalDifficulty set to ${ttd}, received ${parseInt( - terminalTotalDifficulty - )}`, - } - } - // Note: our client does not yet support block whitelisting (terminalBlockHash/terminalBlockNumber) - // since we are not yet fast enough to run along tip-of-chain mainnet execution - return { terminalTotalDifficulty, terminalBlockHash, terminalBlockNumber } - } /** * Returns a list of engine API endpoints supported by the client @@ -1535,7 +1445,7 @@ export class Engine { * @returns an array of ExecutionPayloadBodyV1 objects or null if a given execution payload isn't stored locally */ private async getPayloadBodiesByHashV1( - params: [[Bytes32]] + params: [[Bytes32]], ): Promise<(ExecutionPayloadBodyV1 | null)[]> { if (params[0].length > 32) { throw { @@ -1567,7 +1477,7 @@ export class Engine { * @returns an array of ExecutionPayloadBodyV1 objects or null if a given execution payload isn't stored locally */ private async getPayloadBodiesByRangeV1( - params: [Bytes8, Bytes8] + params: [Bytes8, Bytes8], ): Promise<(ExecutionPayloadBodyV1 | null)[]> { const start = BigInt(params[0]) let count = BigInt(params[1]) diff --git a/packages/client/src/rpc/modules/engine/types.ts b/packages/client/src/rpc/modules/engine/types.ts index c8be49cef5..bfab9b9750 100644 --- a/packages/client/src/rpc/modules/engine/types.ts +++ b/packages/client/src/rpc/modules/engine/types.ts @@ -70,12 +70,6 @@ export type ForkchoiceResponseV1 = { payloadId: Bytes8 | null } -export type TransitionConfigurationV1 = { - terminalTotalDifficulty: Uint256 - terminalBlockHash: Bytes32 - terminalBlockNumber: Uint64 -} - export type BlobsBundleV1 = { commitments: Bytes48[] blobs: Blob[] diff --git a/packages/client/src/rpc/modules/engine/util/generic.ts b/packages/client/src/rpc/modules/engine/util/generic.ts index f36d5805c8..ad231ea6dc 100644 --- a/packages/client/src/rpc/modules/engine/util/generic.ts +++ b/packages/client/src/rpc/modules/engine/util/generic.ts @@ -1,12 +1,11 @@ import { Block } from '@ethereumjs/block' -import { Hardfork } from '@ethereumjs/common' -import { BIGINT_1, bytesToHex, bytesToUnprefixedHex, equalsBytes } from '@ethereumjs/util' +import { bytesToHex, bytesToUnprefixedHex, equalsBytes } from '@ethereumjs/util' import { UNSUPPORTED_FORK } from '../../../error-code.js' import { type ChainCache } from '../types.js' import type { Chain } from '../../../../blockchain/index.js' -import type { Common } from '@ethereumjs/common' +import type { Common, Hardfork } from '@ethereumjs/common' import type { PrefixedHexString } from '@ethereumjs/util' /** @@ -15,7 +14,7 @@ import type { PrefixedHexString } from '@ethereumjs/util' export const recursivelyFindParents = async ( vmHeadHash: Uint8Array, parentHash: Uint8Array, - chain: Chain + chain: Chain, ) => { if (equalsBytes(parentHash, vmHeadHash) || equalsBytes(parentHash, new Uint8Array(32))) { return [] @@ -28,7 +27,7 @@ 
export const recursivelyFindParents = async ( while (!equalsBytes(parentBlocks[parentBlocks.length - 1].hash(), vmHeadHash)) { const block: Block = await chain.getBlock( - parentBlocks[parentBlocks.length - 1].header.parentHash + parentBlocks[parentBlocks.length - 1].header.parentHash, ) parentBlocks.push(block) @@ -50,7 +49,7 @@ export const recursivelyFindParents = async ( */ export const validExecutedChainBlock = async ( blockOrHash: Uint8Array | Block, - chain: Chain + chain: Chain, ): Promise => { try { const block = blockOrHash instanceof Block ? blockOrHash : await chain.getBlock(blockOrHash) @@ -77,7 +76,7 @@ export const validExecutedChainBlock = async ( export const validHash = async ( hash: Uint8Array, chain: Chain, - chainCache: ChainCache + chainCache: ChainCache, ): Promise => { const { remoteBlocks, executedBlocks, invalidBlocks, skeleton } = chainCache const maxDepth = chain.config.engineParentLookupMaxDepth @@ -115,28 +114,12 @@ export const validHash = async ( return null } -/** - * Validates that the block satisfies post-merge conditions. - */ -export const validateTerminalBlock = async (block: Block, chain: Chain): Promise => { - const ttd = chain.config.chainCommon.hardforkTTD(Hardfork.Paris) - if (ttd === null) return false - const blockTd = await chain.getTd(block.hash(), block.header.number) - - // Block is terminal if its td >= ttd and its parent td < ttd. - // In case the Genesis block has td >= ttd it is the terminal block - if (block.isGenesis()) return blockTd >= ttd - - const parentBlockTd = await chain.getTd(block.header.parentHash, block.header.number - BIGINT_1) - return blockTd >= ttd && parentBlockTd < ttd -} - export function validateHardforkRange( chainCommon: Common, methodVersion: number, checkNotBeforeHf: Hardfork | null, checkNotAfterHf: Hardfork | null, - timestamp: bigint + timestamp: bigint, ) { if (checkNotBeforeHf !== null) { const hfTimeStamp = chainCommon.hardforkTimestamp(checkNotBeforeHf) diff --git a/packages/client/src/rpc/modules/engine/util/getPayload.ts b/packages/client/src/rpc/modules/engine/util/getPayload.ts index 9cad1d79fb..18da12f188 100644 --- a/packages/client/src/rpc/modules/engine/util/getPayload.ts +++ b/packages/client/src/rpc/modules/engine/util/getPayload.ts @@ -21,7 +21,7 @@ export const blockToExecutionPayload = (block: Block, value: bigint, bundle?: Bl } : undefined - // ethereumjs doesnot provide any transaction censoring detection (yet) to suggest + // ethereumjs does not provide any transaction censoring detection (yet) to suggest // overriding builder/mev-boost blocks const shouldOverrideBuilder = false return { executionPayload, blockValue: bigIntToHex(value), blobsBundle, shouldOverrideBuilder } diff --git a/packages/client/src/rpc/modules/engine/util/newPayload.ts b/packages/client/src/rpc/modules/engine/util/newPayload.ts index f566fb2361..ad1b0cb665 100644 --- a/packages/client/src/rpc/modules/engine/util/newPayload.ts +++ b/packages/client/src/rpc/modules/engine/util/newPayload.ts @@ -1,6 +1,5 @@ import { createBlockFromExecutionPayload } from '@ethereumjs/block' -import { Hardfork } from '@ethereumjs/common' -import { BlobEIP4844Transaction } from '@ethereumjs/tx' +import { Blob4844Tx } from '@ethereumjs/tx' import { equalsBytes, hexToBytes } from '@ethereumjs/util' import { short } from '../../../../util/index.js' @@ -20,17 +19,13 @@ import type { PrefixedHexString } from '@ethereumjs/util' export const assembleBlock = async ( payload: ExecutionPayload, chain: Chain, - chainCache: ChainCache + chainCache: 
ChainCache, ): Promise<{ block?: Block; error?: PayloadStatusV1 }> => { const { blockNumber, timestamp } = payload const { config } = chain const common = config.chainCommon.copy() - // This is a post merge block, so set its common accordingly - // Can't use setHardfork flag, as the transactions will need to be deserialized - // first before the header can be constucted with their roots - const ttd = common.hardforkTTD(Hardfork.Paris) - common.setHardforkBy({ blockNumber, td: ttd !== null ? ttd : undefined, timestamp }) + common.setHardforkBy({ blockNumber, timestamp }) try { const block = await createBlockFromExecutionPayload(payload, { common }) @@ -44,7 +39,7 @@ export const assembleBlock = async ( const latestValidHash = await validHash( hexToBytes(payload.parentHash as PrefixedHexString), chain, - chainCache + chainCache, ) const response = { status: `${error}`.includes('Invalid blockHash') ? Status.INVALID_BLOCK_HASH : Status.INVALID, @@ -57,14 +52,14 @@ export const assembleBlock = async ( export const validate4844BlobVersionedHashes = ( headBlock: Block, - blobVersionedHashes: PrefixedHexString[] + blobVersionedHashes: PrefixedHexString[], ): string | null => { let validationError: string | null = null // Collect versioned hashes in the flat array `txVersionedHashes` to match with received const txVersionedHashes = [] for (const tx of headBlock.transactions) { - if (tx instanceof BlobEIP4844Transaction) { + if (tx instanceof Blob4844Tx) { for (const vHash of tx.blobVersionedHashes) { txVersionedHashes.push(vHash) } @@ -79,7 +74,7 @@ export const validate4844BlobVersionedHashes = ( // if mismatch, record error and break if (!equalsBytes(hexToBytes(blobVersionedHashes[vIndex]), txVersionedHashes[vIndex])) { validationError = `Error verifying blobVersionedHashes: mismatch at index=${vIndex} expected=${short( - txVersionedHashes[vIndex] + txVersionedHashes[vIndex], )} received=${short(blobVersionedHashes[vIndex])}` break } diff --git a/packages/client/src/rpc/modules/eth.ts b/packages/client/src/rpc/modules/eth.ts index 0a6bb135a6..603d726d97 100644 --- a/packages/client/src/rpc/modules/eth.ts +++ b/packages/client/src/rpc/modules/eth.ts @@ -1,8 +1,12 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { Hardfork } from '@ethereumjs/common' -import { BlobEIP4844Transaction, Capability, TransactionFactory } from '@ethereumjs/tx' import { - Address, + Capability, + createBlob4844TxFromSerializedNetworkWrapper, + createTxFromSerializedData, + createTxFromTxData, +} from '@ethereumjs/tx' +import { BIGINT_0, BIGINT_1, BIGINT_100, @@ -11,12 +15,24 @@ import { bigIntMax, bigIntToHex, bytesToHex, + createAddressFromString, + createZeroAddress, equalsBytes, hexToBytes, intToHex, + isHexString, setLengthLeft, toType, } from '@ethereumjs/util' +import { + type EIP4844BlobTxReceipt, + type PostByzantiumTxReceipt, + type PreByzantiumTxReceipt, + type TxReceipt, + type VM, + runBlock, + runTx, +} from '@ethereumjs/vm' import { INTERNAL_ERROR, INVALID_HEX_STRING, INVALID_PARAMS, PARSE_ERROR } from '../error-code.js' import { callWithStackTrace, getBlockByOption, jsonRpcTx } from '../helpers.js' @@ -31,19 +47,8 @@ import type { RpcTx } from '../types.js' import type { Block, JsonRpcBlock } from '@ethereumjs/block' import type { Log } from '@ethereumjs/evm' import type { Proof } from '@ethereumjs/statemanager' -import type { - FeeMarketEIP1559Transaction, - LegacyTransaction, - TypedTransaction, -} from '@ethereumjs/tx' -import type { 
PrefixedHexString } from '@ethereumjs/util' -import type { - EIP4844BlobTxReceipt, - PostByzantiumTxReceipt, - PreByzantiumTxReceipt, - TxReceipt, - VM, -} from '@ethereumjs/vm' +import type { FeeMarket1559Tx, LegacyTx, TypedTransaction } from '@ethereumjs/tx' +import type { Address, PrefixedHexString } from '@ethereumjs/util' const EMPTY_SLOT = `0x${'00'.repeat(32)}` @@ -100,12 +105,12 @@ type JsonRpcLog = { const jsonRpcBlock = async ( block: Block, chain: Chain, - includeTransactions: boolean + includeTransactions: boolean, ): Promise => { const json = block.toJSON() const header = json!.header! const transactions = block.transactions.map((tx, txIndex) => - includeTransactions ? jsonRpcTx(tx, block, txIndex) : bytesToHex(tx.hash()) + includeTransactions ? jsonRpcTx(tx, block, txIndex) : bytesToHex(tx.hash()), ) const withdrawalsAttr = header.withdrawalsRoot !== undefined @@ -154,7 +159,7 @@ const jsonRpcLog = async ( block?: Block, tx?: TypedTransaction, txIndex?: number, - logIndex?: number + logIndex?: number, ): Promise => ({ removed: false, // TODO implement logIndex: logIndex !== undefined ? intToHex(logIndex) : null, @@ -180,7 +185,7 @@ const jsonRpcReceipt = async ( logIndex: number, contractAddress?: Address, blobGasUsed?: bigint, - blobGasPrice?: bigint + blobGasPrice?: bigint, ): Promise => ({ transactionHash: bytesToHex(tx.hash()), transactionIndex: intToHex(txIndex), @@ -193,7 +198,7 @@ const jsonRpcReceipt = async ( gasUsed: bigIntToHex(gasUsed), contractAddress: contractAddress?.toString() ?? null, logs: await Promise.all( - receipt.logs.map((l, i) => jsonRpcLog(l, block, tx, txIndex, logIndex + i)) + receipt.logs.map((l, i) => jsonRpcLog(l, block, tx, txIndex, logIndex + i)), ), logsBloom: bytesToHex(receipt.bitvector), root: @@ -212,7 +217,7 @@ const jsonRpcReceipt = async ( const calculateRewards = async ( block: Block, receiptsManager: ReceiptsManager, - priorityFeePercentiles: number[] + priorityFeePercentiles: number[], ) => { if (priorityFeePercentiles.length === 0) { return [] @@ -307,7 +312,7 @@ export class Eth { this.blockNumber = middleware( callWithStackTrace(this.blockNumber.bind(this), this._rpcDebug), - 0 + 0, ) this.call = middleware(callWithStackTrace(this.call.bind(this), this._rpcDebug), 2, [ @@ -320,13 +325,13 @@ export class Eth { this.estimateGas = middleware( callWithStackTrace(this.estimateGas.bind(this), this._rpcDebug), 1, - [[validators.transaction()], [validators.blockOption]] + [[validators.transaction()], [validators.blockOption]], ) this.getBalance = middleware( callWithStackTrace(this.getBalance.bind(this), this._rpcDebug), 2, - [[validators.address], [validators.blockOption]] + [[validators.address], [validators.blockOption]], ) this.coinbase = middleware(callWithStackTrace(this.coinbase.bind(this), this._rpcDebug), 0, []) @@ -334,19 +339,19 @@ export class Eth { this.getBlockByNumber = middleware( callWithStackTrace(this.getBlockByNumber.bind(this), this._rpcDebug), 2, - [[validators.blockOption], [validators.bool]] + [[validators.blockOption], [validators.bool]], ) this.getBlockByHash = middleware( callWithStackTrace(this.getBlockByHash.bind(this), this._rpcDebug), 2, - [[validators.hex, validators.blockHash], [validators.bool]] + [[validators.hex, validators.blockHash], [validators.bool]], ) this.getBlockTransactionCountByHash = middleware( callWithStackTrace(this.getBlockTransactionCountByHash.bind(this), this._rpcDebug), 1, - [[validators.hex, validators.blockHash]] + [[validators.hex, validators.blockHash]], ) this.getCode = 
middleware(callWithStackTrace(this.getCode.bind(this), this._rpcDebug), 2, [ @@ -357,54 +362,54 @@ export class Eth { this.getUncleCountByBlockNumber = middleware( callWithStackTrace(this.getUncleCountByBlockNumber.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getStorageAt = middleware( callWithStackTrace(this.getStorageAt.bind(this), this._rpcDebug), 3, - [[validators.address], [validators.hex], [validators.blockOption]] + [[validators.address], [validators.hex], [validators.blockOption]], ) this.getTransactionByBlockHashAndIndex = middleware( callWithStackTrace(this.getTransactionByBlockHashAndIndex.bind(this), this._rpcDebug), 2, - [[validators.hex, validators.blockHash], [validators.hex]] + [[validators.hex, validators.blockHash], [validators.hex]], ) this.getTransactionByBlockNumberAndIndex = middleware( callWithStackTrace(this.getTransactionByBlockNumberAndIndex.bind(this), this._rpcDebug), 2, - [[validators.hex, validators.blockOption], [validators.hex]] + [[validators.hex, validators.blockOption], [validators.hex]], ) this.getTransactionByHash = middleware( callWithStackTrace(this.getTransactionByHash.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getTransactionCount = middleware( callWithStackTrace(this.getTransactionCount.bind(this), this._rpcDebug), 2, - [[validators.address], [validators.blockOption]] + [[validators.address], [validators.blockOption]], ) this.getBlockReceipts = middleware( callWithStackTrace(this.getBlockReceipts.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.getTransactionReceipt = middleware( callWithStackTrace(this.getTransactionReceipt.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getUncleCountByBlockNumber = middleware( callWithStackTrace(this.getUncleCountByBlockNumber.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.getLogs = middleware(callWithStackTrace(this.getLogs.bind(this), this._rpcDebug), 1, [ @@ -413,14 +418,14 @@ export class Eth { fromBlock: validators.optional(validators.blockOption), toBlock: validators.optional(validators.blockOption), address: validators.optional( - validators.either(validators.array(validators.address), validators.address) + validators.either(validators.array(validators.address), validators.address), ), topics: validators.optional( validators.array( validators.optional( - validators.either(validators.hex, validators.array(validators.hex)) - ) - ) + validators.either(validators.hex, validators.array(validators.hex)), + ), + ), ), blockHash: validators.optional(validators.blockHash), }), @@ -430,13 +435,13 @@ export class Eth { this.sendRawTransaction = middleware( callWithStackTrace(this.sendRawTransaction.bind(this), this._rpcDebug), 1, - [[validators.hex]] + [[validators.hex]], ) this.protocolVersion = middleware( callWithStackTrace(this.protocolVersion.bind(this), this._rpcDebug), 0, - [] + [], ) this.syncing = middleware(callWithStackTrace(this.syncing.bind(this), this._rpcDebug), 0, []) @@ -450,7 +455,7 @@ export class Eth { this.getBlockTransactionCountByNumber = middleware( callWithStackTrace(this.getBlockTransactionCountByNumber.bind(this), this._rpcDebug), 1, - [[validators.blockOption]] + [[validators.blockOption]], ) this.gasPrice = middleware(callWithStackTrace(this.gasPrice.bind(this), this._rpcDebug), 0, []) @@ -462,13 +467,13 @@ export class Eth { [validators.either(validators.hex, validators.integer)], 
[validators.either(validators.hex, validators.blockOption)], [validators.rewardPercentiles], - ] + ], ) this.blobBaseFee = middleware( callWithStackTrace(this.blobBaseFee.bind(this), this._rpcDebug), 0, - [] + [], ) } @@ -509,8 +514,8 @@ export class Eth { const data = transaction.data ?? transaction.input const runCallOpts = { - caller: from !== undefined ? Address.fromString(from) : undefined, - to: to !== undefined ? Address.fromString(to) : undefined, + caller: from !== undefined ? createAddressFromString(from) : undefined, + to: to !== undefined ? createAddressFromString(to) : undefined, gasLimit: toType(gasLimit, TypeOutput.BigInt), gasPrice: toType(gasPrice, TypeOutput.BigInt), value: toType(value, TypeOutput.BigInt), @@ -518,6 +523,13 @@ export class Eth { block, } const { execResult } = await vm.evm.runCall(runCallOpts) + if (execResult.exceptionError !== undefined) { + throw { + code: 3, + data: bytesToHex(execResult.returnValue), + message: execResult.exceptionError.error, + } + } return bytesToHex(execResult.returnValue) } @@ -577,7 +589,7 @@ export class Eth { gasLimit: transaction.gas, } - const blockToRunOn = createBlockFromBlockData( + const blockToRunOn = createBlock( { header: { parentHash: block.hash(), @@ -588,7 +600,7 @@ export class Eth { : undefined, }, }, - { common: vm.common, setHardfork: true } + { common: vm.common, setHardfork: true }, ) vm.common.setHardforkBy({ @@ -596,16 +608,18 @@ export class Eth { blockNumber: blockToRunOn.header.number, }) - const tx = TransactionFactory.fromTxData(txData, { common: vm.common, freeze: false }) + const tx = createTxFromTxData(txData, { common: vm.common, freeze: false }) // set from address const from = - transaction.from !== undefined ? Address.fromString(transaction.from) : Address.zero() + transaction.from !== undefined + ? createAddressFromString(transaction.from) + : createZeroAddress() tx.getSenderAddress = () => { return from } - const { totalGasSpent } = await vm.runTx({ + const { totalGasSpent } = await runTx(vm, { tx, skipNonce: true, skipBalance: true, @@ -623,7 +637,7 @@ export class Eth { */ async getBalance(params: [string, string]) { const [addressHex, blockOpt] = params - const address = Address.fromString(addressHex) + const address = createAddressFromString(addressHex) const block = await getBlockByOption(blockOpt, this._chain) if (this._vm === undefined) { @@ -729,8 +743,8 @@ export class Eth { const vm = await this._vm.shallowCopy() await vm.stateManager.setStateRoot(block.header.stateRoot) - const address = Address.fromString(addressHex) - const code = await vm.stateManager.getContractCode(address) + const address = createAddressFromString(addressHex) + const code = await vm.stateManager.getCode(address) return bytesToHex(code) } @@ -770,13 +784,13 @@ export class Eth { const block = await getBlockByOption(blockOpt, this._chain) await vm.stateManager.setStateRoot(block.header.stateRoot) - const address = Address.fromString(addressHex) + const address = createAddressFromString(addressHex) const account = await vm.stateManager.getAccount(address) if (account === undefined) { return EMPTY_SLOT } const key = setLengthLeft(hexToBytes(keyHex), 32) - const storage = await vm.stateManager.getContractStorage(address, key) + const storage = await vm.stateManager.getStorage(address, key) return storage !== null && storage !== undefined ? 
bytesToHex(setLengthLeft(Uint8Array.from(storage) as Uint8Array, 32)) : EMPTY_SLOT @@ -867,7 +881,7 @@ export class Eth { const vm = await this._vm.shallowCopy() await vm.stateManager.setStateRoot(block.header.stateRoot) - const address = Address.fromString(addressHex) + const address = createAddressFromString(addressHex) const account = await vm.stateManager.getAccount(address) if (account === undefined) { return '0x0' @@ -878,7 +892,7 @@ export class Eth { // Add pending txns to nonce if blockOpt is 'pending' if (blockOpt === 'pending') { pendingTxsCount = BigInt( - (this.service as FullEthereumService).txPool.pool.get(addressHex.slice(2))?.length ?? 0 + (this.service as FullEthereumService).txPool.pool.get(addressHex.slice(2))?.length ?? 0, ) } return bigIntToHex(account.nonce + pendingTxsCount) @@ -918,7 +932,7 @@ export class Eth { const [blockOpt] = params let block: Block try { - if (blockOpt.length === 66) { + if (isHexString(blockOpt, 64)) { block = await this._chain.getBlock(hexToBytes(blockOpt)) } else { block = await getBlockByOption(blockOpt, this._chain) @@ -934,7 +948,7 @@ export class Eth { const vmCopy = await this._vm!.shallowCopy() vmCopy.common.setHardfork(block.common.hardfork()) // Run tx through copied vm to get tx gasUsed and createdAddress - const runBlockResult = await vmCopy.runBlock({ + const runBlockResult = await runBlock(vmCopy, { block, root: parentBlock.header.stateRoot, skipBlockValidation: true, @@ -947,13 +961,13 @@ export class Eth { const { blobGasPrice, blobGasUsed } = runBlockResult.receipts[i] as EIP4844BlobTxReceipt const effectiveGasPrice = tx.supports(Capability.EIP1559FeeMarket) === true - ? (tx as FeeMarketEIP1559Transaction).maxPriorityFeePerGas < - (tx as FeeMarketEIP1559Transaction).maxFeePerGas - block.header.baseFeePerGas! - ? (tx as FeeMarketEIP1559Transaction).maxPriorityFeePerGas - : (tx as FeeMarketEIP1559Transaction).maxFeePerGas - + ? (tx as FeeMarket1559Tx).maxPriorityFeePerGas < + (tx as FeeMarket1559Tx).maxFeePerGas - block.header.baseFeePerGas! + ? (tx as FeeMarket1559Tx).maxPriorityFeePerGas + : (tx as FeeMarket1559Tx).maxFeePerGas - block.header.baseFeePerGas! + block.header.baseFeePerGas! - : (tx as LegacyTransaction).gasPrice + : (tx as LegacyTx).gasPrice return jsonRpcReceipt( r, @@ -965,9 +979,9 @@ export class Eth { i, createdAddress, blobGasUsed, - blobGasPrice + blobGasPrice, ) - }) + }), ) return receipts } @@ -998,18 +1012,18 @@ export class Eth { const parentBlock = await this._chain.getBlock(block.header.parentHash) const tx = block.transactions[txIndex] const effectiveGasPrice = tx.supports(Capability.EIP1559FeeMarket) - ? (tx as FeeMarketEIP1559Transaction).maxPriorityFeePerGas < - (tx as FeeMarketEIP1559Transaction).maxFeePerGas - block.header.baseFeePerGas! - ? (tx as FeeMarketEIP1559Transaction).maxPriorityFeePerGas - : (tx as FeeMarketEIP1559Transaction).maxFeePerGas - + ? (tx as FeeMarket1559Tx).maxPriorityFeePerGas < + (tx as FeeMarket1559Tx).maxFeePerGas - block.header.baseFeePerGas! + ? (tx as FeeMarket1559Tx).maxPriorityFeePerGas + : (tx as FeeMarket1559Tx).maxFeePerGas - block.header.baseFeePerGas! + block.header.baseFeePerGas! 
- : (tx as LegacyTransaction).gasPrice + : (tx as LegacyTx).gasPrice const vmCopy = await this._vm!.shallowCopy() vmCopy.common.setHardfork(tx.common.hardfork()) // Run tx through copied vm to get tx gasUsed and createdAddress - const runBlockResult = await vmCopy.runBlock({ + const runBlockResult = await runBlock(vmCopy, { block, root: parentBlock.header.stateRoot, skipBlockValidation: true, @@ -1027,7 +1041,7 @@ export class Eth { logIndex, createdAddress, blobGasUsed, - blobGasPrice + blobGasPrice, ) } @@ -1116,8 +1130,8 @@ export class Eth { const logs = await this.receiptsManager.getLogs(from, to, addressBytes, formattedTopics) return Promise.all( logs.map(({ log, block, tx, txIndex, logIndex }) => - jsonRpcLog(log, block, tx, txIndex, logIndex) - ) + jsonRpcLog(log, block, tx, txIndex, logIndex), + ), ) } @@ -1154,20 +1168,20 @@ export class Eth { const txBuf = hexToBytes(serializedTx) if (txBuf[0] === 0x03) { // Blob Transactions sent over RPC are expected to be in Network Wrapper format - tx = BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper(txBuf, { common }) + tx = createBlob4844TxFromSerializedNetworkWrapper(txBuf, { common }) - const blobGasLimit = common.param('gasConfig', 'maxblobGasPerBlock') - const blobGasPerBlob = common.param('gasConfig', 'blobGasPerBlob') + const blobGasLimit = tx.common.param('maxblobGasPerBlock') + const blobGasPerBlob = tx.common.param('blobGasPerBlob') if (BigInt((tx.blobs ?? []).length) * blobGasPerBlob > blobGasLimit) { throw Error( `tx blobs=${(tx.blobs ?? []).length} exceeds block limit=${ blobGasLimit / blobGasPerBlob - }` + }`, ) } } else { - tx = TransactionFactory.fromSerializedData(txBuf, { common }) + tx = createTxFromSerializedData(txBuf, { common }) } } catch (e: any) { throw { @@ -1221,7 +1235,7 @@ export class Eth { * @returns The {@link Proof} */ async getProof( - params: [PrefixedHexString, PrefixedHexString[], PrefixedHexString] + params: [PrefixedHexString, PrefixedHexString[], PrefixedHexString], ): Promise { const [addressHex, slotsHex, blockOpt] = params const block = await getBlockByOption(blockOpt, this._chain) @@ -1237,7 +1251,7 @@ export class Eth { } await vm.stateManager.setStateRoot(block.header.stateRoot) - const address = Address.fromString(addressHex) + const address = createAddressFromString(addressHex) const slots = slotsHex.map((slotHex) => setLengthLeft(hexToBytes(slotHex), 32)) const proof = await vm.stateManager.getProof!(address, slots) for (const p of proof.storageProof) { @@ -1312,7 +1326,10 @@ export class Eth { * @returns a hex code of an integer representing the suggested gas price in wei. */ async gasPrice() { - const minGasPrice: bigint = this._chain.config.chainCommon.param('gasConfig', 'minPrice') + // TODO: going more strict on parameter accesses in Common (PR #3532) revealed that this line had + // absolutely no effect by accessing a non-present gas parameter. Someone familiar with the RPC method + // implementation should look over it and recall what was meant to be accomplished here. 
+ const minGasPrice = BIGINT_0 //: bigint = this._chain.config.chainCommon.param('minPrice') let gasPrice = BIGINT_0 const latest = await this._chain.getCanonicalHeadHeader() if (this._vm !== undefined && this._vm.common.isActivatedEIP(1559)) { @@ -1320,7 +1337,7 @@ export class Eth { let priorityFee = BIGINT_0 const block = await this._chain.getBlock(latest.number) for (const tx of block.transactions) { - const maxPriorityFeePerGas = (tx as FeeMarketEIP1559Transaction).maxPriorityFeePerGas + const maxPriorityFeePerGas = (tx as FeeMarket1559Tx).maxPriorityFeePerGas priorityFee += maxPriorityFeePerGas } @@ -1339,7 +1356,7 @@ export class Eth { } for (const tx of block.transactions) { - const txGasPrice = (tx as LegacyTransaction).gasPrice + const txGasPrice = (tx as LegacyTx).gasPrice gasPrice += txGasPrice txCount++ } @@ -1375,11 +1392,11 @@ export class Eth { const requestedBlockNumbers = Array.from( { length: Number(blockCount) }, - (_, i) => oldestBlockNumber + BigInt(i) + (_, i) => oldestBlockNumber + BigInt(i), ) const requestedBlocks = await Promise.all( - requestedBlockNumbers.map((n) => getBlockByOption(n.toString(), this._chain)) + requestedBlockNumbers.map((n) => getBlockByOption(n.toString(), this._chain)), ) const [baseFees, gasUsedRatios, baseFeePerBlobGas, blobGasUsedRatio] = requestedBlocks.reduce( @@ -1391,7 +1408,7 @@ export class Eth { let blobGasUsedRatio = 0 if (b.header.excessBlobGas !== undefined) { baseFeePerBlobGas = b.header.getBlobGasPrice() - const max = b.common.param('gasConfig', 'maxblobGasPerBlock') + const max = b.common.param('maxblobGasPerBlock') blobGasUsedRatio = Number(blobGasUsed) / Number(max) } @@ -1403,7 +1420,7 @@ export class Eth { return [prevBaseFees, prevGasUsedRatios, prevBaseFeesPerBlobGas, prevBlobGasUsedRatio] }, - [[], [], [], []] as [bigint[], number[], bigint[], number[]] + [[], [], [], []] as [bigint[], number[], bigint[], number[]], ) const londonHardforkBlockNumber = this._chain.blockchain.common.hardforkBlock(Hardfork.London)! 
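To make the eth_gasPrice change above easier to follow, here is a minimal TypeScript sketch of the suggestion heuristic it implements: when EIP-1559 is active, the latest block's base fee plus the average priority fee paid in that block; otherwise the average legacy gas price. The function name and plain-bigint inputs are assumptions for illustration, not part of this diff.

// Minimal sketch, not from this diff: the fee-suggestion arithmetic behind eth_gasPrice.
// Inputs are assumed to come from the latest canonical block's header and transactions.
function suggestGasPrice(
  baseFeePerGas: bigint | undefined, // defined once EIP-1559 is active
  priorityFees: bigint[], // maxPriorityFeePerGas of each tx in the latest block
  legacyGasPrices: bigint[], // gasPrice of each tx in the latest block (pre-1559)
): bigint {
  const avg = (xs: bigint[]) => (xs.length > 0 ? xs.reduce((a, b) => a + b, 0n) / BigInt(xs.length) : 0n)
  return baseFeePerGas !== undefined ? baseFeePerGas + avg(priorityFees) : avg(legacyGasPrices)
}

Note that with the minPrice lookup removed (see the TODO above), this sketch applies no lower bound to the suggested price.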
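The eth_feeHistory reduce above collects per-block utilization ratios, including the EIP-4844 blob gas ratio. A small sketch of that arithmetic, with a hypothetical function name and plain-bigint inputs (not part of this diff):

// Minimal sketch, not from this diff: per-block ratios reported by eth_feeHistory.
// blobGasUsedRatio mirrors the Number(blobGasUsed) / Number(max) computation above.
function feeHistoryRatios(gasUsed: bigint, gasLimit: bigint, blobGasUsed: bigint, maxBlobGasPerBlock: bigint) {
  return {
    gasUsedRatio: Number(gasUsed) / Number(gasLimit),
    blobGasUsedRatio: maxBlobGasPerBlock > 0n ? Number(blobGasUsed) / Number(maxBlobGasPerBlock) : 0,
  }
}
// e.g. feeHistoryRatios(15_000_000n, 30_000_000n, 393216n, 786432n)
//      returns { gasUsedRatio: 0.5, blobGasUsedRatio: 0.5 }

When rewardPercentiles are supplied (e.g. [25, 50, 75]), calculateRewards additionally determines, per block, the effective priority fee at each requested percentile, with transactions sorted by effective tip per gas and weighted by gas consumed.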
@@ -1415,7 +1432,7 @@ export class Eth { if (this._chain.blockchain.common.isActivatedEIP(4844)) { baseFeePerBlobGas.push( - requestedBlocks[requestedBlocks.length - 1].header.calcNextBlobGasPrice() + requestedBlocks[requestedBlocks.length - 1].header.calcNextBlobGasPrice(), ) } else { // TODO (?): known bug @@ -1429,8 +1446,8 @@ export class Eth { if (this.receiptsManager && priorityFeePercentiles) { rewards = await Promise.all( requestedBlocks.map((b) => - calculateRewards(b, this.receiptsManager!, priorityFeePercentiles) - ) + calculateRewards(b, this.receiptsManager!, priorityFeePercentiles), + ), ) } diff --git a/packages/client/src/rpc/modules/net.ts b/packages/client/src/rpc/modules/net.ts index 54639921af..f53c10ca5f 100644 --- a/packages/client/src/rpc/modules/net.ts +++ b/packages/client/src/rpc/modules/net.ts @@ -33,12 +33,12 @@ export class Net { this.listening = middleware( callWithStackTrace(this.listening.bind(this), this._rpcDebug), 0, - [] + [], ) this.peerCount = middleware( callWithStackTrace(this.peerCount.bind(this), this._rpcDebug), 0, - [] + [], ) } diff --git a/packages/client/src/rpc/validation.ts b/packages/client/src/rpc/validation.ts index af2e2f30fe..cc929dfe4e 100644 --- a/packages/client/src/rpc/validation.ts +++ b/packages/client/src/rpc/validation.ts @@ -11,7 +11,7 @@ export function middleware( method: any, requiredParamsCount: number, validators: any[] = [], - names: string[] = [] + names: string[] = [], ): any { return function (params: any[] = []) { return new Promise((resolve, reject) => { @@ -379,7 +379,7 @@ export const validators = { }, /** - * validator to ensure required withdawal fields are present, and checks for valid address and hex values + * validator to ensure required withdrawal fields are present, and checks for valid address and hex values * for the other quantity based fields * @param requiredFields array of required fields * @returns validator function with params: @@ -430,7 +430,13 @@ export const validators = { get depositRequest() { return ( - requiredFields: string[] = ['pubkey', 'withdrawalCredentials', 'amount', 'signature', 'index'] + requiredFields: string[] = [ + 'pubkey', + 'withdrawalCredentials', + 'amount', + 'signature', + 'index', + ], ) => { return (params: any[], index: number) => { if (typeof params[index] !== 'object') { @@ -671,7 +677,7 @@ export const validators = { /** * Verification of rewardPercentiles array * - * description: A monotonically increasing list of percentile values. For each block in the requested range, the transactions will be sorted in ascending order by effective tip per gas and the coresponding effective tip for the percentile will be determined, accounting for gas consumed. + * description: A monotonically increasing list of percentile values. For each block in the requested range, the transactions will be sorted in ascending order by effective tip per gas and the corresponding effective tip for the percentile will be determined, accounting for gas consumed. 
* type: array * items: rewardPercentile value * diff --git a/packages/client/src/service/fullethereumservice.ts b/packages/client/src/service/fullethereumservice.ts index 94268bd9d4..ebbca4958b 100644 --- a/packages/client/src/service/fullethereumservice.ts +++ b/packages/client/src/service/fullethereumservice.ts @@ -19,7 +19,7 @@ import { TxPool } from './txpool.js' import type { Peer } from '../net/peer/peer.js' import type { Protocol } from '../net/protocol/index.js' import type { Block } from '@ethereumjs/block' -import type { BlobEIP4844Transaction } from '@ethereumjs/tx' +import type { Blob4844Tx } from '@ethereumjs/tx' interface FullEthereumServiceOptions extends ServiceOptions { /** Serve LES requests (default: false) */ @@ -159,7 +159,7 @@ export class FullEthereumService extends Service { this.synchronizer instanceof BeaconSynchronizer ? 'BeaconSynchronizer' : 'FullSynchronizer' - }.` + }.`, ) } else { this.config.logger.info('Starting FullEthereumService with no syncing.') @@ -175,7 +175,7 @@ export class FullEthereumService extends Service { if (rawTx.type !== TransactionType.BlobEIP4844) { txs[1].push(rawTx.serialize().byteLength) } else { - txs[1].push((rawTx as BlobEIP4844Transaction).serializeNetworkWrapper().byteLength) + txs[1].push((rawTx as Blob4844Tx).serializeNetworkWrapper().byteLength) } txs[2].push(hexToBytes(`0x${tx.hash}`)) } @@ -197,13 +197,13 @@ export class FullEthereumService extends Service { throw Error(`Currently stateful verkle execution not supported`) } this.execution.config.logger.info( - `Skipping VM verkle statemanager genesis hardfork=${this.execution.hardfork}` + `Skipping VM verkle statemanager genesis hardfork=${this.execution.hardfork}`, ) await this.execution.setupVerkleVM() this.execution.vm = this.execution.verkleVM! } else { this.execution.config.logger.info( - `Initializing VM merkle statemanager genesis hardfork=${this.execution.hardfork}` + `Initializing VM merkle statemanager genesis hardfork=${this.execution.hardfork}`, ) await this.execution.setupMerkleVM() this.execution.vm = this.execution.merkleVM! 
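// [Illustrative sketch, not part of the patch above] The FullEthereumService hunk above sizes
// announced transactions differently for blob transactions: blob txs are measured by their
// network-wrapper encoding (which includes blobs, commitments and proofs), all other txs by
// their plain serialization. A minimal sketch of that branch follows, using a hypothetical
// minimal tx shape rather than the real @ethereumjs/tx classes.

interface AnnouncableTx {
  type: number
  serialize(): Uint8Array
  serializeNetworkWrapper?(): Uint8Array // present on blob (type 3) transactions
}

const BLOB_TX_TYPE = 3 // EIP-4844 transaction type

function announcedSize(tx: AnnouncableTx): number {
  return tx.type === BLOB_TX_TYPE && tx.serializeNetworkWrapper !== undefined
    ? tx.serializeNetworkWrapper().byteLength
    : tx.serialize().byteLength
}

// e.g. an eth/68 NewPooledTransactionHashes announcement carries [types, sizes, hashes],
// with each size computed as above.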
@@ -262,12 +262,12 @@ export class FullEthereumService extends Service { } } else { this.config.logger.debug( - `skipping snapsync since cl (skeleton) synchronized=${this.skeleton?.synchronized}` + `skipping snapsync since cl (skeleton) synchronized=${this.skeleton?.synchronized}`, ) } } else { this.config.logger.warn( - 'skipping building head state as neither execution is started nor snapsync is available' + 'skipping building head state as neither execution is started nor snapsync is available', ) } } catch (error) { @@ -329,7 +329,7 @@ export class FullEthereumService extends Service { chain: this.chain, flow: this.flow, timeout: this.timeout, - }) + }), ) } return protocols @@ -379,7 +379,7 @@ export class FullEthereumService extends Service { case 'GetBlockBodies': { const { reqId, hashes } = message.data const blocks: Block[] = await Promise.all( - hashes.map((hash: Uint8Array) => this.chain.getBlock(hash)) + hashes.map((hash: Uint8Array) => this.chain.getBlock(hash)), ) const bodies = blocks.map((block) => block.raw().slice(1)) peer.eth!.send('BlockBodies', { reqId, bodies }) @@ -388,7 +388,7 @@ export class FullEthereumService extends Service { case 'NewBlockHashes': { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { this.config.logger.debug( - `Dropping peer ${peer.id} for sending NewBlockHashes after merge (EIP-3675)` + `Dropping peer ${peer.id} for sending NewBlockHashes after merge (EIP-3675)`, ) this.pool.ban(peer, 9000000) } else if (this.synchronizer instanceof FullSynchronizer) { @@ -403,7 +403,7 @@ export class FullEthereumService extends Service { case 'NewBlock': { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { this.config.logger.debug( - `Dropping peer ${peer.id} for sending NewBlock after merge (EIP-3675)` + `Dropping peer ${peer.id} for sending NewBlock after merge (EIP-3675)`, ) this.pool.ban(peer, 9000000) } else if (this.synchronizer instanceof FullSynchronizer) { diff --git a/packages/client/src/service/service.ts b/packages/client/src/service/service.ts index aa1a9da8d3..5b9aee525a 100644 --- a/packages/client/src/service/service.ts +++ b/packages/client/src/service/service.ts @@ -86,7 +86,7 @@ export class Service { await this.handle(message, protocol, peer) } catch (error: any) { this.config.logger.debug( - `Error handling message (${protocol}:${message.name}): ${error.message}` + `Error handling message (${protocol}:${message.name}): ${error.message}`, ) } } @@ -126,13 +126,13 @@ export class Service { this.config.server && this.config.server.addProtocols(protocols) this.config.events.on(Event.POOL_PEER_BANNED, (peer) => - this.config.logger.debug(`Peer banned: ${peer}`) + this.config.logger.debug(`Peer banned: ${peer}`), ) this.config.events.on(Event.POOL_PEER_ADDED, (peer) => - this.config.logger.debug(`Peer added: ${peer}`) + this.config.logger.debug(`Peer added: ${peer}`), ) this.config.events.on(Event.POOL_PEER_REMOVED, (peer) => - this.config.logger.debug(`Peer removed: ${peer}`) + this.config.logger.debug(`Peer removed: ${peer}`), ) await this.pool.open() @@ -168,7 +168,7 @@ export class Service { this._statsInterval = setInterval( // eslint-disable-next-line @typescript-eslint/await-thenable await this.stats.bind(this), - this.STATS_INTERVAL + this.STATS_INTERVAL, ) this.running = true this.config.logger.info(`Started ${this.name} service.`) diff --git a/packages/client/src/service/skeleton.ts b/packages/client/src/service/skeleton.ts index e31815d85c..859a6454b3 100644 --- a/packages/client/src/service/skeleton.ts +++ 
b/packages/client/src/service/skeleton.ts @@ -131,9 +131,9 @@ export class Skeleton extends MetaDBManager { public safeBlock?: Block public finalizedBlock?: Block - // to track if we have cl FCUs close to the clockhead + // to track if we have cl FCUs close to the clock head synchronized = false - private lastsyncronized = false + private lastSynchronized = false private lastSyncDate = 0 constructor(opts: MetaDBManagerOptions) { @@ -216,7 +216,7 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `Canonical subchain linked with main, removing junked chains ${junkedSubChains .map((s) => `[tail=${s.tail} head=${s.head} next=${short(s.next)}]`) - .join(',')}` + .join(',')}`, ) await this.writeSyncStatus() } @@ -233,7 +233,7 @@ export class Skeleton extends MetaDBManager { return this.started > 0 } - async isLastAnnoucement(): Promise { + async isLastAnnouncement(): Promise { const subchain0 = this.status.progress.subchains[0] if (subchain0 !== undefined) { return this.getBlock(subchain0.head + BIGINT_1) !== undefined @@ -262,7 +262,7 @@ export class Skeleton extends MetaDBManager { } lastchain.head = headBlock.header.number this.config.logger.debug( - `lastchain head fast forwarded from=${head} to=${lastchain.head} tail=${lastchain.tail}` + `lastchain head fast forwarded from=${head} to=${lastchain.head} tail=${lastchain.tail}`, ) } @@ -282,8 +282,8 @@ export class Skeleton extends MetaDBManager { if (!equalsBytes(this.chain.genesis.hash(), head.hash())) { throw Error( `Invalid genesis setHead announcement number=${number} hash=${short( - head.hash() - )} genesisHash=${short(this.chain.genesis.hash())}` + head.hash(), + )} genesisHash=${short(this.chain.genesis.hash())}`, ) } // genesis announcement @@ -302,14 +302,14 @@ export class Skeleton extends MetaDBManager { // Not a noop / double head announce, abort with a reorg if (force) { this.config.logger.warn( - `Skeleton setHead before tail, resetting skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}` + `Skeleton setHead before tail, resetting skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}`, ) lastchain.head = number lastchain.tail = number lastchain.next = head.header.parentHash } else { this.config.logger.debug( - `Skeleton announcement before tail, will reset skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}` + `Skeleton announcement before tail, will reset skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}`, ) } return true @@ -321,7 +321,7 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `Skeleton duplicate ${force ? 'setHead' : 'announcement'} tail=${lastchain.tail} head=${ lastchain.head - } number=${number} hash=${short(head.hash())}` + } number=${number} hash=${short(head.hash())}`, ) return false } else { @@ -332,12 +332,12 @@ export class Skeleton extends MetaDBManager { `Skeleton head reorg tail=${lastchain.tail} head=${ lastchain.head } number=${number} expected=${short( - mayBeDupBlock?.hash() ?? zeroBlockHash - )} actual=${short(head.hash())}` + mayBeDupBlock?.hash() ?? 
zeroBlockHash, + )} actual=${short(head.hash())}`, ) } else { this.config.logger.debug( - `Skeleton differing announcement tail=${lastchain.tail} head=${lastchain.head} number=${number}` + `Skeleton differing announcement tail=${lastchain.tail} head=${lastchain.head} number=${number}`, ) } return true @@ -348,13 +348,13 @@ export class Skeleton extends MetaDBManager { // If its still less than number then its gapped head if (lastchain.head + BIGINT_1 < number) { this.config.logger.debug( - `Beacon chain gapped setHead head=${lastchain.head} newHead=${number}` + `Beacon chain gapped setHead head=${lastchain.head} newHead=${number}`, ) return true } } else { this.config.logger.debug( - `Beacon chain gapped announcement head=${lastchain.head} newHead=${number}` + `Beacon chain gapped announcement head=${lastchain.head} newHead=${number}`, ) return true } @@ -364,8 +364,8 @@ export class Skeleton extends MetaDBManager { if (force) { this.config.logger.warn( `Beacon chain forked ancestor=${parent?.header.number} hash=${short( - parent?.hash() ?? 'NA' - )} want=${short(head.header.parentHash)}` + parent?.hash() ?? 'NA', + )} want=${short(head.header.parentHash)}`, ) } return true @@ -380,8 +380,8 @@ export class Skeleton extends MetaDBManager { } this.config.logger.debug( `Beacon chain extended new head=${lastchain.head} tail=${lastchain.tail} next=${short( - lastchain.next - )}` + lastchain.next, + )}`, ) } return false @@ -392,12 +392,12 @@ export class Skeleton extends MetaDBManager { * @params head - The block being attempted as a new head * @params force - Flag to indicate if this is just a check of worthiness or a actually new head * @params init - Flag this is the first time since the beacon sync start to perform additional tasks - * @params reorgthrow - Flag to indicate if we would actually like to throw if there is a reorg + * @params reorgThrow - Flag to indicate if we would actually like to throw if there is a reorg * instead of just returning the boolean * * @returns True if the head (will) cause a reorg in the canonical skeleton subchain */ - async setHead(head: Block, force = true, init = false, reorgthrow = false): Promise { + async setHead(head: Block, force = true, init = false, reorgThrow = false): Promise { if ( this.config.syncTargetHeight === undefined || this.config.syncTargetHeight < head.header.number @@ -415,8 +415,8 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `New skeleton head announced number=${head.header.number} hash=${short( - head.hash() - )} force=${force}` + head.hash(), + )} force=${force}`, ) let [lastchain] = this.status.progress.subchains @@ -432,7 +432,7 @@ export class Skeleton extends MetaDBManager { this.config.logger.debug( `Initing empty skeleton with current chain head tail=${lastchain.tail} head=${ lastchain.head - } next=${short(lastchain.next)}` + } next=${short(lastchain.next)}`, ) this.status.progress.subchains.push(lastchain) } @@ -455,30 +455,30 @@ export class Skeleton extends MetaDBManager { parent.header.number < subchain.tail ) { // truncate subchain 0 before inserting a new chain so that this chain can be merged into new - // one without issues if the opportunity arrises + // one without issues if the opportunity arises if ( subchain !== undefined && this.status.linked && this.status.canonicalHeadReset === false && this.chain.blocks.height >= subchain.tail ) { - const trucateTailToNumber = this.chain.blocks.height + BIGINT_1 - const trucateTailTo = - trucateTailToNumber <= subchain.head - ? 
await this.getBlock(trucateTailToNumber, true) + const truncateTailToNumber = this.chain.blocks.height + BIGINT_1 + const truncateTailTo = + truncateTailToNumber <= subchain.head + ? await this.getBlock(truncateTailToNumber, true) : undefined - if (trucateTailTo !== undefined) { - subchain.tail = trucateTailTo.header.number - subchain.next = trucateTailTo.header.parentHash + if (truncateTailTo !== undefined) { + subchain.tail = truncateTailTo.header.number + subchain.next = truncateTailTo.header.parentHash this.config.logger.info( `Truncated subchain0 with head=${subchain.head} to a new tail=${ subchain.tail - } next=${short(subchain.next)} before overlaying a new subchain` + } next=${short(subchain.next)} before overlaying a new subchain`, ) } else { // clear out this subchain this.config.logger.info( - `Dropping subchain0 with head=${subchain.head} before overlaying a new subchain as trucateTailToNumber=${trucateTailToNumber} block not available ` + `Dropping subchain0 with head=${subchain.head} before overlaying a new subchain as truncateTailToNumber=${truncateTailToNumber} block not available `, ) this.status.progress.subchains.splice(0, 1) } @@ -514,13 +514,13 @@ export class Skeleton extends MetaDBManager { !this.status.canonicalHeadReset && this.chain.blocks.height >= subchain.tail ) { - let trucateTailTo - const trucateTailToNumber = this.chain.blocks.height + BIGINT_1 - if (trucateTailToNumber < head.header.number) { - trucateTailTo = await this.getBlock(trucateTailToNumber, true) + let truncateTailTo + const truncateTailToNumber = this.chain.blocks.height + BIGINT_1 + if (truncateTailToNumber < head.header.number) { + truncateTailTo = await this.getBlock(truncateTailToNumber, true) } - if (trucateTailTo === undefined) { + if (truncateTailTo === undefined) { subchain.tail = head.header.number subchain.next = head.header.parentHash // reset canonical head, don't change linked status because parent was @@ -531,18 +531,18 @@ export class Skeleton extends MetaDBManager { subchain.tail } next=${short(subchain.next)} linked=${this.status.linked} canonicalHeadReset=${ this.status.canonicalHeadReset - }` + }`, ) } else { - subchain.tail = trucateTailTo.header.number - subchain.next = trucateTailTo.header.parentHash + subchain.tail = truncateTailTo.header.number + subchain.next = truncateTailTo.header.parentHash // just reset tail and no need to modify linked status this.config.logger.info( `Truncated subchain with head=${subchain.head} to a new tail=${ subchain.tail } next=${short(subchain.next)} linked=${this.status.linked} canonicalHeadReset=${ this.status.canonicalHeadReset - }` + }`, ) } } @@ -577,7 +577,7 @@ export class Skeleton extends MetaDBManager { // Earlier we were throwing on reorg, essentially for the purposes for killing the reverse fetcher // but it can be handled properly in the calling fn without erroring - if (reorg && reorgthrow) { + if (reorg && reorgThrow) { if (force) { throw errSyncReorged } else { @@ -617,7 +617,7 @@ export class Skeleton extends MetaDBManager { this.synchronized = true // Log to console the sync status this.config.superMsg( - `Synchronized cl (skeleton) at height=${height} hash=${short(latest.hash())} 🎉` + `Synchronized cl (skeleton) at height=${height} hash=${short(latest.hash())} 🎉`, ) } } @@ -629,22 +629,22 @@ export class Skeleton extends MetaDBManager { this.synchronized = false this.config.logger.info( `Cl (skeleton) sync status reset (no chain updates for ${Math.round( - diff / 1000 - )} seconds).` + diff / 1000, + )} seconds).`, ) } } } - 
if (this.synchronized !== this.lastsyncronized) { + if (this.synchronized !== this.lastSynchronized) { this.config.logger.debug( `Cl (skeleton) synchronized=${this.synchronized}${ latest !== null && latest !== undefined ? ' height=' + latest.number : '' } syncTargetHeight=${this.config.syncTargetHeight} lastSyncDate=${ (Date.now() - this.lastSyncDate) / 1000 - } secs ago` + } secs ago`, ) - this.lastsyncronized = this.synchronized + this.lastSynchronized = this.synchronized } } @@ -653,7 +653,7 @@ export class Skeleton extends MetaDBManager { { safeBlockHash, finalizedBlockHash, - }: { safeBlockHash?: Uint8Array; finalizedBlockHash?: Uint8Array } = {} + }: { safeBlockHash?: Uint8Array; finalizedBlockHash?: Uint8Array } = {}, ): Promise<{ reorged: boolean; safeBlock?: Block; finalizedBlock?: Block }> { // setHead locks independently and between setHead unlocking and locking below there should // be no injected code as each of the async ops take the lock. so once setHead takes the @@ -665,7 +665,7 @@ export class Skeleton extends MetaDBManager { await this.blockingTailBackfillWithCutoff(this.chain.config.engineParentLookupMaxDepth).catch( (e) => { this.config.logger.debug(`blockingTailBackfillWithCutoff exited with error=${e}`) - } + }, ) } @@ -804,7 +804,7 @@ export class Skeleton extends MetaDBManager { // blocks if there are executed blocks to fill with. This blocking causes it to not interfere // with the setHead mechanism. This is however a hack and a better solution needs to be devised // to handle it blockchain level as because of async nature of new payloads and fcUs and the skeleton - // there is always a chance for uncordinated put blocks unless they are all cordinated through skeleton + // there is always a chance for uncoordinated put blocks unless they are all coordinated through skeleton // which might also be a valid await this.blockingFillWithCutoff(this.chain.config.engineParentLookupMaxDepth) @@ -816,7 +816,7 @@ export class Skeleton extends MetaDBManager { return this.runWithLock(async () => { // check if the synced state's block is canonical and <= current safe and chain has synced till const syncedBlock = await this.getBlock( - syncedHeight + syncedHeight, // need to debug why this flag causes to return undefined when chain gets synced //, true ) @@ -840,13 +840,13 @@ export class Skeleton extends MetaDBManager { /** * Setup the skeleton to init sync with head * @params head - The block with which we want to init the skeleton head - * @params reorgthrow - If we would like the function to throw instead of silently + * @params reorgThrow - If we would like the function to throw instead of silently * return if there is reorg of the skeleton head * * @returns True if the skeleton was reorged trying to init else false */ - async initSync(head: Block, reorgthrow = false): Promise { - return this.setHead(head, true, true, reorgthrow) + async initSync(head: Block, reorgThrow = false): Promise { + return this.setHead(head, true, true, reorgThrow) } /** @@ -883,7 +883,7 @@ export class Skeleton extends MetaDBManager { if (tail >= this.status.progress.subchains[0].tail) { // Fully overwritten, get rid of the subchain as a whole this.config.logger.debug( - `Previous subchain fully overwritten tail=${tail} head=${head} next=${short(next)}` + `Previous subchain fully overwritten tail=${tail} head=${head} next=${short(next)}`, ) this.status.progress.subchains.splice(1, 1) edited = true @@ -893,8 +893,8 @@ export class Skeleton extends MetaDBManager { 
this.status.progress.subchains[1].head = this.status.progress.subchains[0].tail - BIGINT_1 this.config.logger.debug( `Previous subchain partially overwritten tail=${tail} head=${head} next=${short( - next - )} with newHead=${this.status.progress.subchains[1].head}` + next, + )} with newHead=${this.status.progress.subchains[1].head}`, ) edited = true } @@ -913,7 +913,7 @@ export class Skeleton extends MetaDBManager { // if subChain1Head is not in the skeleton then all previous subchains are not useful // and better to junk this.config.logger.debug( - `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head} or its tail=${this.status.progress.subchains[1].tail}` + `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head} or its tail=${this.status.progress.subchains[1].tail}`, ) this.status.progress.subchains.splice(1, this.status.progress.subchains.length - 1) } else if ( @@ -923,7 +923,7 @@ export class Skeleton extends MetaDBManager { // to disruption of the block fetcher to start a fresh if (head - tail > this.config.skeletonSubchainMergeMinimum) { this.config.logger.debug( - `Previous subchain merged tail=${tail} head=${head} next=${short(next)}` + `Previous subchain merged tail=${tail} head=${head} next=${short(next)}`, ) this.status.progress.subchains[0].tail = tail this.status.progress.subchains[0].next = next @@ -933,7 +933,7 @@ export class Skeleton extends MetaDBManager { merged = true } else { this.config.logger.debug( - `Subchain ignored for merge tail=${tail} head=${head} count=${head - tail}` + `Subchain ignored for merge tail=${tail} head=${head} count=${head - tail}`, ) this.status.progress.subchains.splice(1, 1) } @@ -962,12 +962,12 @@ export class Skeleton extends MetaDBManager { let tailUpdated = false this.config.logger.debug( `Skeleton putBlocks start=${blocks[0]?.header.number} hash=${short( - blocks[0]?.hash() + blocks[0]?.hash(), )} fork=${blocks[0].common.hardfork()} end=${ blocks[blocks.length - 1]?.header.number } count=${blocks.length}, subchain head=${this.status.progress.subchains[0]?.head} tail = ${ this.status.progress.subchains[0].tail - } next=${short(this.status.progress.subchains[0]?.next)}` + } next=${short(this.status.progress.subchains[0]?.next)}`, ) for (const block of blocks) { const { number } = block.header @@ -979,8 +979,8 @@ export class Skeleton extends MetaDBManager { if (!equalsBytes(this.chain.genesis.hash(), block.hash())) { throw Error( `Skeleton pubBlocks with invalid genesis block number=${number} hash=${short( - block.hash() - )} genesisHash=${short(this.chain.genesis.hash())}` + block.hash(), + )} genesisHash=${short(this.chain.genesis.hash())}`, ) } continue @@ -1003,12 +1003,12 @@ export class Skeleton extends MetaDBManager { `Blocks don't extend canonical subchain tail=${ this.status.progress.subchains[0].tail } head=${this.status.progress.subchains[0].head} next=${short( - this.status.progress.subchains[0].next + this.status.progress.subchains[0].next, )} tailHash=${short( - tailBlock?.hash() ?? zeroBlockHash + tailBlock?.hash() ?? zeroBlockHash, )} tailFork=${tailBlock?.common.hardfork()}, block number=${number} tailparent=${short( - tailBlock?.header.parentHash ?? zeroBlockHash - )} hash=${short(block.hash())} fork=${block.common.hardfork()}` + tailBlock?.header.parentHash ?? 
zeroBlockHash, + )} hash=${short(block.hash())} fork=${block.common.hardfork()}`, ) throw Error(`Blocks don't extend canonical subchain`) } @@ -1035,7 +1035,7 @@ export class Skeleton extends MetaDBManager { // If the sync is finished, start filling the canonical chain. if (this.status.linked) { this.config.superMsg( - `Backfilling subchain completed, filling canonical chain=${!skipForwardFill}` + `Backfilling subchain completed, filling canonical chain=${!skipForwardFill}`, ) if (!skipForwardFill) { void this.fillCanonicalChain() @@ -1075,7 +1075,7 @@ export class Skeleton extends MetaDBManager { this.status.progress.subchains = [] await this.writeSyncStatus() this.config.logger.warn( - `Couldn't backStep subchain 0, dropping subchains for new head signal` + `Couldn't backStep subchain 0, dropping subchains for new head signal`, ) return null } @@ -1168,7 +1168,7 @@ export class Skeleton extends MetaDBManager { if (this.status.canonicalHeadReset) { if (subchain.tail > canonicalHead + BIGINT_1) { throw Error( - `Canonical head should already be on or ahead subchain tail canonicalHead=${canonicalHead} tail=${subchain.tail}` + `Canonical head should already be on or ahead subchain tail canonicalHead=${canonicalHead} tail=${subchain.tail}`, ) } let newHead = subchain.tail - BIGINT_1 @@ -1178,12 +1178,12 @@ export class Skeleton extends MetaDBManager { if (canonicalHead > BIGINT_0) { this.config.logger.debug( - `Resetting canonicalHead for fillCanonicalChain from=${canonicalHead} to=${newHead}` + `Resetting canonicalHead for fillCanonicalChain from=${canonicalHead} to=${newHead}`, ) canonicalHead = newHead await this.chain.resetCanonicalHead(canonicalHead) } - // update in lock so as to not conflict/overwrite sethead/putblock updates + // update in lock so as to not conflict/overwrite setHead/putBlock updates await this.runWithLock(async () => { this.status.canonicalHeadReset = false }) @@ -1192,7 +1192,7 @@ export class Skeleton extends MetaDBManager { const start = canonicalHead // This subchain is a reference to update the tail for the very subchain we are filling the data for this.config.logger.debug( - `Starting canonical chain fill canonicalHead=${canonicalHead} subchainHead=${subchain.head}` + `Starting canonical chain fill canonicalHead=${canonicalHead} subchainHead=${subchain.head}`, ) // run till it has not been determined that tail reset is required by concurrent setHead calls @@ -1210,7 +1210,7 @@ export class Skeleton extends MetaDBManager { // Else we should back step and fetch again as it indicates some concurrency/db errors if (!this.status.canonicalHeadReset) { this.config.logger.debug( - `fillCanonicalChain block number=${number} not found, backStepping...` + `fillCanonicalChain block number=${number} not found, backStepping...`, ) await this.runWithLock(async () => { // backstep the subchain from the block that was not found only if the canonicalHeadReset @@ -1219,7 +1219,7 @@ export class Skeleton extends MetaDBManager { }) } else { this.config.logger.debug( - `fillCanonicalChain block number=${number} not found canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...` + `fillCanonicalChain block number=${number} not found canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...`, ) } break @@ -1231,7 +1231,7 @@ export class Skeleton extends MetaDBManager { // chain height has to be <= block number as we will skip putting this block as it might currently // cause chain reset. 
This can happen if any other async process added a batch of blocks like // execution's setHead. If that caused this chain to be not canonical anymore than the next - // putblocks should fail causing the fill to exit with skeleton stepback + // putBlocks should fail causing the fill to exit with skeleton stepback if (this.chain.blocks.height <= block.header.number) { try { numBlocksInserted = await this.chain.putBlocks([block], true) @@ -1251,12 +1251,12 @@ export class Skeleton extends MetaDBManager { await this.runWithLock(async () => { if (!this.status.canonicalHeadReset) { this.config.logger.debug( - `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, backStepping...` + `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, backStepping...`, ) await this.backStep(number) } else { this.config.logger.debug( - `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...` + `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...`, ) } }) @@ -1274,8 +1274,8 @@ export class Skeleton extends MetaDBManager { if (numBlocksInserted !== 1) { this.config.logger.error( `Failed to put block number=${number} fork=${block.common.hardfork()} hash=${short( - block.hash() - )} parentHash=${short(block.header.parentHash)}from skeleton chain to canonical` + block.hash(), + )} parentHash=${short(block.header.parentHash)}from skeleton chain to canonical`, ) // Lets log some parent by number and parent by hash, that may help to understand whats going on let parent = null @@ -1283,8 +1283,8 @@ export class Skeleton extends MetaDBManager { parent = await this.chain.getBlock(number - BIGINT_1) this.config.logger.info( `ParentByNumber number=${parent?.header.number}, hash=${short( - parent?.hash() ?? 'undefined' - )} hf=${parent?.common.hardfork()}` + parent?.hash() ?? 'undefined', + )} hf=${parent?.common.hardfork()}`, ) } catch (e) { this.config.logger.error(`Failed to fetch parent of number=${number}`) @@ -1295,12 +1295,12 @@ export class Skeleton extends MetaDBManager { parentWithHash = await this.chain.getBlock(block.header.parentHash) this.config.logger.info( `parentByHash number=${parentWithHash?.header.number}, hash=${short( - parentWithHash?.hash() ?? 'undefined' - )} hf=${parentWithHash?.common.hardfork()} ` + parentWithHash?.hash() ?? 
'undefined', + )} hf=${parentWithHash?.common.hardfork()} `, ) } catch (e) { this.config.logger.error( - `Failed to fetch parent with parentWithHash=${short(block.header.parentHash)}` + `Failed to fetch parent with parentWithHash=${short(block.header.parentHash)}`, ) } break @@ -1330,14 +1330,14 @@ export class Skeleton extends MetaDBManager { }) if (fillLogIndex >= this.config.numBlocksPerIteration) { this.config.logger.debug( - `Skeleton canonical chain fill status: canonicalHead=${canonicalHead} chainHead=${this.chain.blocks.height} subchainHead=${subchain.head}` + `Skeleton canonical chain fill status: canonicalHead=${canonicalHead} chainHead=${this.chain.blocks.height} subchainHead=${subchain.head}`, ) fillLogIndex = 0 } } this.filling = false this.config.logger.debug( - `Successfully put=${fillLogIndex} skipped (because already inserted)=${skippedLogIndex} blocks start=${start} end=${canonicalHead} skeletonHead=${subchain.head} from skeleton chain to canonical syncTargetHeight=${this.config.syncTargetHeight}` + `Successfully put=${fillLogIndex} skipped (because already inserted)=${skippedLogIndex} blocks start=${start} end=${canonicalHead} skeletonHead=${subchain.head} from skeleton chain to canonical syncTargetHeight=${this.config.syncTargetHeight}`, ) } @@ -1373,7 +1373,7 @@ export class Skeleton extends MetaDBManager { await this.put( DBKey.SkeletonBlockHashToNumber, block.hash(), - bigIntToBytes(block.header.number) + bigIntToBytes(block.header.number), ) } @@ -1418,7 +1418,7 @@ export class Skeleton extends MetaDBManager { */ async getBlockByHash( hash: Uint8Array, - onlyCanonical: boolean = false + onlyCanonical: boolean = false, ): Promise { const number = await this.get(DBKey.SkeletonBlockHashToNumber, hash) if (number) { @@ -1505,7 +1505,7 @@ export class Skeleton extends MetaDBManager { fetching?: boolean snapsync?: SnapFetcherDoneFlags peers?: number | string - } = {} + } = {}, ): string { const vmHead = this.chain.blocks.vm const subchain0 = this.status.progress.subchains[0] @@ -1534,10 +1534,10 @@ export class Skeleton extends MetaDBManager { const status = isValid ? 'VALID' : isSynced - ? vmexecution?.running === true - ? `EXECUTING` - : `SYNCED` - : `SYNCING` + ? vmexecution?.running === true + ? `EXECUTING` + : `SYNCED` + : `SYNCING` if (peers === undefined || peers === 0) { this.lastsyncedAt = 0 @@ -1645,7 +1645,7 @@ export class Skeleton extends MetaDBManager { extraStatus = '' } const chainHead = `el=${this.chain.blocks.latest?.header.number ?? 'na'} hash=${short( - this.chain.blocks.latest?.hash() ?? 'na' + this.chain.blocks.latest?.hash() ?? 'na', )}` forceShowInfo = forceShowInfo ?? false @@ -1662,7 +1662,7 @@ export class Skeleton extends MetaDBManager { const sinceStarted = (new Date().getTime() - this.started) / 1000 beaconSyncETA = `${timeDuration((sinceStarted / Number(this.pulled)) * Number(left))}` this.config.logger.debug( - `Syncing beacon headers downloaded=${this.pulled} left=${left} eta=${beaconSyncETA}` + `Syncing beacon headers downloaded=${this.pulled} left=${left} eta=${beaconSyncETA}`, ) } } @@ -1687,23 +1687,23 @@ export class Skeleton extends MetaDBManager { const { snapTargetHeight, snapTargetRoot, snapTargetHash } = snapsync if (snapsync.done === true) { snapLogInfo = `snapsync=synced height=${snapTargetHeight} hash=${short( - snapTargetHash ?? 'na' + snapTargetHash ?? 'na', )} root=${short(snapTargetRoot ?? 
'na')}` } else if (snapsync.syncing) { const accountsDone = formatBigDecimal( snapsync.accountFetcher.first * BIGINT_100, BIGINT_2EXP256, - BIGINT_100 + BIGINT_100, ) const storageReqsDone = formatBigDecimal( snapsync.storageFetcher.first * BIGINT_100, snapsync.storageFetcher.count, - BIGINT_100 + BIGINT_100, ) const codeReqsDone = formatBigDecimal( snapsync.byteCodeFetcher.first * BIGINT_100, snapsync.byteCodeFetcher.count, - BIGINT_100 + BIGINT_100, ) const snapprogress = `accounts=${accountsDone}% storage=${storageReqsDone}% of ${snapsync.storageFetcher.count} codes=${codeReqsDone}% of ${snapsync.byteCodeFetcher.count}` @@ -1722,7 +1722,7 @@ export class Skeleton extends MetaDBManager { } snapLogInfo = `${stage} ${snapprogress} (hash=${short( - snapTargetHash ?? 'na' + snapTargetHash ?? 'na', )} root=${short(snapTargetRoot ?? 'na')})` } else { if (this.synchronized) { @@ -1760,7 +1760,7 @@ export class Skeleton extends MetaDBManager { } else { // else break into two this.config.logger.info( - `${logPrefix} ${status}${extraStatus} synchronized=${this.config.synchronized} peers=${peers}` + `${logPrefix} ${status}${extraStatus} synchronized=${this.config.synchronized} peers=${peers}`, ) if (snapLogInfo !== undefined && snapLogInfo !== '') { this.config.logger.info(`${logPrefix} ${snapLogInfo}`) @@ -1778,7 +1778,7 @@ export class Skeleton extends MetaDBManager { this.status.linked } subchains=${this.status.progress.subchains .map((s) => `[tail=${s.tail} head=${s.head} next=${short(s.next)}]`) - .join(',')} reset=${this.status.canonicalHeadReset} ${chainHead}` + .join(',')} reset=${this.status.canonicalHeadReset} ${chainHead}`, ) } return status @@ -1823,7 +1823,7 @@ export class Skeleton extends MetaDBManager { subchains, // linked intToBytes(this.status.linked ? 1 : 0), - // canonocalHeadReset + // canonicalHeadReset intToBytes(this.status.canonicalHeadReset ? 
1 : 0), // safe and finalized bigIntToBytes(this.status.safe), @@ -1848,7 +1848,7 @@ export class Skeleton extends MetaDBManager { Uint8Array, // safe and finalized Uint8Array, - Uint8Array + Uint8Array, ] const subchains: SkeletonSubchain[] = rawStatus[0].map((raw) => ({ head: bytesToBigInt(raw[0]), diff --git a/packages/client/src/service/txpool.ts b/packages/client/src/service/txpool.ts index 693b5e9231..ec9dba5412 100644 --- a/packages/client/src/service/txpool.ts +++ b/packages/client/src/service/txpool.ts @@ -1,9 +1,9 @@ import { - BlobEIP4844Transaction, + Blob4844Tx, Capability, - isAccessListEIP2930Tx, - isBlobEIP4844Tx, - isFeeMarketEIP1559Tx, + isAccessList2930Tx, + isBlob4844Tx, + isFeeMarket1559Tx, isLegacyTx, } from '@ethereumjs/tx' import { @@ -25,11 +25,7 @@ import type { Peer } from '../net/peer/peer.js' import type { PeerPool } from '../net/peerpool.js' import type { FullEthereumService } from './fullethereumservice.js' import type { Block } from '@ethereumjs/block' -import type { - FeeMarketEIP1559Transaction, - LegacyTransaction, - TypedTransaction, -} from '@ethereumjs/tx' +import type { FeeMarket1559Tx, LegacyTx, TypedTransaction } from '@ethereumjs/tx' import type { VM } from '@ethereumjs/vm' // Configuration constants @@ -200,7 +196,7 @@ export class TxPool { } this._cleanupInterval = setInterval( this.cleanup.bind(this), - this.POOLED_STORAGE_TIME_LIMIT * 1000 * 60 + this.POOLED_STORAGE_TIME_LIMIT * 1000 * 60, ) if (this.config.logger.isInfoEnabled()) { @@ -242,17 +238,17 @@ export class TxPool { (existingTxGasPrice.maxFee * BigInt(MIN_GAS_PRICE_BUMP_PERCENT)) / BigInt(100) if (newGasPrice.tip < minTipCap || newGasPrice.maxFee < minFeeCap) { throw new Error( - `replacement gas too low, got tip ${newGasPrice.tip}, min: ${minTipCap}, got fee ${newGasPrice.maxFee}, min: ${minFeeCap}` + `replacement gas too low, got tip ${newGasPrice.tip}, min: ${minTipCap}, got fee ${newGasPrice.maxFee}, min: ${minFeeCap}`, ) } - if (addedTx instanceof BlobEIP4844Transaction && existingTx instanceof BlobEIP4844Transaction) { + if (addedTx instanceof Blob4844Tx && existingTx instanceof Blob4844Tx) { const minblobGasFee = existingTx.maxFeePerBlobGas + (existingTx.maxFeePerBlobGas * BigInt(MIN_GAS_PRICE_BUMP_PERCENT)) / BigInt(100) if (addedTx.maxFeePerBlobGas < minblobGasFee) { throw new Error( - `replacement blob gas too low, got: ${addedTx.maxFeePerBlobGas}, min: ${minblobGasFee}` + `replacement blob gas too low, got: ${addedTx.maxFeePerBlobGas}, min: ${minblobGasFee}`, ) } } @@ -268,7 +264,7 @@ export class TxPool { } if (tx.data.length > TX_MAX_DATA_SIZE) { throw new Error( - `Tx is too large (${tx.data.length} bytes) and exceeds the max data size of ${TX_MAX_DATA_SIZE} bytes` + `Tx is too large (${tx.data.length} bytes) and exceeds the max data size of ${TX_MAX_DATA_SIZE} bytes`, ) } const currentGasPrice = this.txGasPrice(tx) @@ -291,7 +287,7 @@ export class TxPool { if (inPool) { if (!isLocalTransaction && inPool.length >= MAX_TXS_PER_ACCOUNT) { throw new Error( - `Cannot add tx for ${senderAddress}: already have max amount of txs for this account` + `Cannot add tx for ${senderAddress}: already have max amount of txs for this account`, ) } // Replace pooled txs with the same nonce @@ -307,13 +303,13 @@ export class TxPool { if (typeof block.baseFeePerGas === 'bigint' && block.baseFeePerGas !== BIGINT_0) { if (currentGasPrice.maxFee < block.baseFeePerGas / BIGINT_2 && !isLocalTransaction) { throw new Error( - `Tx cannot pay basefee of ${block.baseFeePerGas}, have 
${currentGasPrice.maxFee} (not within 50% range of current basefee)` + `Tx cannot pay basefee of ${block.baseFeePerGas}, have ${currentGasPrice.maxFee} (not within 50% range of current basefee)`, ) } } if (tx.gasLimit > block.gasLimit) { throw new Error( - `Tx gaslimit of ${tx.gasLimit} exceeds block gas limit of ${block.gasLimit} (exceeds last block gas limit)` + `Tx gaslimit of ${tx.gasLimit} exceeds block gas limit of ${block.gasLimit} (exceeds last block gas limit)`, ) } @@ -327,13 +323,13 @@ export class TxPool { } if (account.nonce > tx.nonce) { throw new Error( - `0x${sender} tries to send a tx with nonce ${tx.nonce}, but account has nonce ${account.nonce} (tx nonce too low)` + `0x${sender} tries to send a tx with nonce ${tx.nonce}, but account has nonce ${account.nonce} (tx nonce too low)`, ) } const minimumBalance = tx.value + currentGasPrice.maxFee * tx.gasLimit if (account.balance < minimumBalance) { throw new Error( - `0x${sender} does not have enough balance to cover transaction costs, need ${minimumBalance}, but have ${account.balance} (insufficient balance)` + `0x${sender} does not have enough balance to cover transaction costs, need ${minimumBalance}, but have ${account.balance} (insufficient balance)`, ) } } @@ -368,13 +364,13 @@ export class TxPool { if (isLegacyTx(tx)) { this.config.metrics?.legacyTxGauge?.inc() } - if (isAccessListEIP2930Tx(tx)) { + if (isAccessList2930Tx(tx)) { this.config.metrics?.accessListEIP2930TxGauge?.inc() } - if (isFeeMarketEIP1559Tx(tx)) { + if (isFeeMarket1559Tx(tx)) { this.config.metrics?.feeMarketEIP1559TxGauge?.inc() } - if (isBlobEIP4844Tx(tx)) { + if (isBlob4844Tx(tx)) { this.config.metrics?.blobEIP4844TxGauge?.inc() } } catch (e) { @@ -419,13 +415,13 @@ export class TxPool { if (isLegacyTx(tx)) { this.config.metrics?.legacyTxGauge?.dec() } - if (isAccessListEIP2930Tx(tx)) { + if (isAccessList2930Tx(tx)) { this.config.metrics?.accessListEIP2930TxGauge?.dec() } - if (isFeeMarketEIP1559Tx(tx)) { + if (isFeeMarket1559Tx(tx)) { this.config.metrics?.feeMarketEIP1559TxGauge?.dec() } - if (isBlobEIP4844Tx(tx)) { + if (isBlob4844Tx(tx)) { this.config.metrics?.blobEIP4844TxGauge?.dec() } @@ -571,7 +567,7 @@ export class TxPool { this.config.logger.debug(`TxPool: received new transactions number=${txs.length}`) this.addToKnownByPeer( txs.map((tx) => tx.hash()), - peer + peer, ) const newTxHashes: [number[], number[], Uint8Array[]] = [] as any @@ -583,7 +579,7 @@ export class TxPool { newTxHashes[2].push(tx.hash()) } catch (error: any) { this.config.logger.debug( - `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})` + `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})`, ) } } @@ -621,7 +617,7 @@ export class TxPool { const reqHashesStr: UnprefixedHash[] = reqHashes.map(bytesToUnprefixedHex) this.pending = this.pending.concat(reqHashesStr) this.config.logger.debug( - `TxPool: requesting txs number=${reqHashes.length} pending=${this.pending.length}` + `TxPool: requesting txs number=${reqHashes.length} pending=${this.pending.length}`, ) const getPooledTxs = await peer.eth?.getPooledTransactions({ hashes: reqHashes.slice(0, this.TX_RETRIEVAL_LIMIT), @@ -642,7 +638,7 @@ export class TxPool { await this.add(tx) } catch (error: any) { this.config.logger.debug( - `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})` + `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})`, ) } newTxHashes[0].push(tx.type) @@ -707,15 +703,15 @@ export class 
TxPool { const supports1559 = tx.supports(Capability.EIP1559FeeMarket) if (typeof baseFee === 'bigint' && baseFee !== BIGINT_0) { if (supports1559) { - return (tx as FeeMarketEIP1559Transaction).maxPriorityFeePerGas + return (tx as FeeMarket1559Tx).maxPriorityFeePerGas } else { - return (tx as LegacyTransaction).gasPrice - baseFee + return (tx as LegacyTx).gasPrice - baseFee } } else { if (supports1559) { - return (tx as FeeMarketEIP1559Transaction).maxFeePerGas + return (tx as FeeMarket1559Tx).maxFeePerGas } else { - return (tx as LegacyTransaction).gasPrice + return (tx as LegacyTx).gasPrice } } } @@ -732,14 +728,14 @@ export class TxPool { } } - if (isAccessListEIP2930Tx(tx)) { + if (isAccessList2930Tx(tx)) { return { maxFee: tx.gasPrice, tip: tx.gasPrice, } } - if (isFeeMarketEIP1559Tx(tx) || isBlobEIP4844Tx(tx)) { + if (isFeeMarket1559Tx(tx) || isBlob4844Tx(tx)) { return { maxFee: tx.maxFeePerGas, tip: tx.maxPriorityFeePerGas, @@ -767,7 +763,7 @@ export class TxPool { */ async txsByPriceAndNonce( vm: VM, - { baseFee, allowedBlobs }: { baseFee?: bigint; allowedBlobs?: number } = {} + { baseFee, allowedBlobs }: { baseFee?: bigint; allowedBlobs?: number } = {}, ) { const txs: TypedTransaction[] = [] // Separate the transactions by account and sort by nonce @@ -825,9 +821,9 @@ export class TxPool { // ii) or there is no blobs limit provided // iii) or blobs are still within limit if this best tx's blobs are included if ( - !(best instanceof BlobEIP4844Transaction) || + !(best instanceof Blob4844Tx) || allowedBlobs === undefined || - ((best as BlobEIP4844Transaction).blobs ?? []).length + blobsCount <= allowedBlobs + ((best as Blob4844Tx).blobs ?? []).length + blobsCount <= allowedBlobs ) { if (accTxs.length > 0) { byPrice.insert(accTxs[0]) @@ -835,8 +831,8 @@ export class TxPool { } // Accumulate the best priced transaction and increment blobs count txs.push(best) - if (best instanceof BlobEIP4844Transaction) { - blobsCount += ((best as BlobEIP4844Transaction).blobs ?? []).length + if (best instanceof Blob4844Tx) { + blobsCount += ((best as Blob4844Tx).blobs ?? 
[]).length } } else { // Since no more blobs can fit in the block, not only skip inserting in byPrice but also remove all other @@ -846,7 +842,7 @@ export class TxPool { } } this.config.logger.info( - `txsByPriceAndNonce selected txs=${txs.length}, skipped byNonce=${skippedStats.byNonce} byPrice=${skippedStats.byPrice} byBlobsLimit=${skippedStats.byBlobsLimit}` + `txsByPriceAndNonce selected txs=${txs.length}, skipped byNonce=${skippedStats.byNonce} byPrice=${skippedStats.byPrice} byBlobsLimit=${skippedStats.byBlobsLimit}`, ) return txs } @@ -888,7 +884,7 @@ export class TxPool { broadcasterrors += sendobjects.filter((sendobject) => sendobject.error !== undefined).length knownpeers++ } - // Get avergae + // Get average if (knownpeers > 0) { broadcasts = broadcasts / knownpeers broadcasterrors = broadcasterrors / knownpeers @@ -908,13 +904,13 @@ export class TxPool { } } this.config.logger.info( - `TxPool Statistics txs=${this.txsInPool} senders=${this.pool.size} peers=${this.service.pool.peers.length}` + `TxPool Statistics txs=${this.txsInPool} senders=${this.pool.size} peers=${this.service.pool.peers.length}`, ) this.config.logger.info( - `TxPool Statistics broadcasts=${broadcasts}/tx/peer broadcasterrors=${broadcasterrors}/tx/peer knownpeers=${knownpeers} since minutes=${this.POOLED_STORAGE_TIME_LIMIT}` + `TxPool Statistics broadcasts=${broadcasts}/tx/peer broadcasterrors=${broadcasterrors}/tx/peer knownpeers=${knownpeers} since minutes=${this.POOLED_STORAGE_TIME_LIMIT}`, ) this.config.logger.info( - `TxPool Statistics successfuladds=${handledadds} failedadds=${handlederrors} since minutes=${this.HANDLED_CLEANUP_TIME_LIMIT}` + `TxPool Statistics successfuladds=${handledadds} failedadds=${handlederrors} since minutes=${this.HANDLED_CLEANUP_TIME_LIMIT}`, ) } } diff --git a/packages/client/src/sync/beaconsync.ts b/packages/client/src/sync/beaconsync.ts index c852723022..f419b698b1 100644 --- a/packages/client/src/sync/beaconsync.ts +++ b/packages/client/src/sync/beaconsync.ts @@ -77,12 +77,12 @@ export class BeaconSynchronizer extends Synchronizer { const hash = this.chain.blocks.latest!.hash() this.startingBlock = number const timestamp = this.chain.blocks.latest?.header.timestamp - this.config.chainCommon.setHardforkBy({ blockNumber: number, td, timestamp }) + this.config.chainCommon.setHardforkBy({ blockNumber: number, timestamp }) this.config.logger.info( `Latest local block number=${Number(number)} td=${td} hash=${bytesToHex( - hash - )} hardfork=${this.config.chainCommon.hardfork()}` + hash, + )} hardfork=${this.config.chainCommon.hardfork()}`, ) const subchain = this.skeleton.bounds() @@ -165,8 +165,8 @@ export class BeaconSynchronizer extends Synchronizer { await this.stop() this.config.logger.debug( `Beacon sync reorged, new head number=${block.header.number} hash=${short( - block.header.hash() - )}` + block.header.hash(), + )}`, ) void this.start() } @@ -251,7 +251,7 @@ export class BeaconSynchronizer extends Synchronizer { this.fetcher === null ? 
'' : 'previous fetcher errored=' + this.fetcher.syncErrored?.message - }` + }`, ) this.fetcher = new ReverseBlockFetcher({ config: this.config, @@ -281,7 +281,7 @@ export class BeaconSynchronizer extends Synchronizer { const hash = short(blocks[0].hash()) this.config.logger.debug( - `Imported skeleton blocks count=${blocks.length} first=${first} last=${last} hash=${hash} peers=${this.pool.size}` + `Imported skeleton blocks count=${blocks.length} first=${first} last=${last} hash=${hash} peers=${this.pool.size}`, ) } diff --git a/packages/client/src/sync/fetcher/accountfetcher.ts b/packages/client/src/sync/fetcher/accountfetcher.ts index 7a5e888443..197aae3659 100644 --- a/packages/client/src/sync/fetcher/accountfetcher.ts +++ b/packages/client/src/sync/fetcher/accountfetcher.ts @@ -1,5 +1,5 @@ import { DefaultStateManager } from '@ethereumjs/statemanager' -import { Trie } from '@ethereumjs/trie' +import { verifyTrieRangeProof } from '@ethereumjs/trie' import { BIGINT_0, BIGINT_1, @@ -26,13 +26,14 @@ import { ByteCodeFetcher } from './bytecodefetcher.js' import { Fetcher } from './fetcher.js' import { StorageFetcher } from './storagefetcher.js' import { TrieNodeFetcher } from './trienodefetcher.js' -import { getInitFecherDoneFlags } from './types.js' +import { getInitFetcherDoneFlags } from './types.js' import type { Peer } from '../../net/peer/index.js' import type { AccountData } from '../../net/protocol/snapprotocol.js' import type { FetcherOptions } from './fetcher.js' import type { StorageRequest } from './storagefetcher.js' import type { Job, SnapFetcherDoneFlags } from './types.js' +import type { Trie } from '@ethereumjs/trie' import type { Debugger } from 'debug' type AccountDataResponse = AccountData[] & { completed?: boolean } @@ -89,7 +90,7 @@ export class AccountFetcher extends Fetcher */ constructor(options: AccountFetcherOptions) { super(options) - this.fetcherDoneFlags = options.fetcherDoneFlags ?? getInitFecherDoneFlags() + this.fetcherDoneFlags = options.fetcherDoneFlags ?? 
getInitFetcherDoneFlags() this.root = options.root this.first = options.first @@ -137,8 +138,8 @@ export class AccountFetcher extends Fetcher this.debug( `Account fetcher instantiated root=${short(this.root)} origin=${short(origin)} limit=${short( - limit - )} destroyWhenDone=${this.destroyWhenDone}` + limit, + )} destroyWhenDone=${this.destroyWhenDone}`, ) } @@ -170,7 +171,7 @@ export class AccountFetcher extends Fetcher () => this.snapFetchersCompleted(StorageFetcher), () => { throw Error('Snap fetcher failed to exit') - } + }, ) : null const codeFetch = !this.fetcherDoneFlags.byteCodeFetcher.done @@ -178,12 +179,12 @@ export class AccountFetcher extends Fetcher () => this.snapFetchersCompleted(ByteCodeFetcher), () => { throw Error('Snap fetcher failed to exit') - } + }, ) : null this.config.superMsg( - `Snapsync: running storageFetch=${storageFetch !== null} codeFetch=${codeFetch !== null}` + `Snapsync: running storageFetch=${storageFetch !== null} codeFetch=${codeFetch !== null}`, ) this.storageFetcher.setDestroyWhenDone() @@ -195,7 +196,7 @@ export class AccountFetcher extends Fetcher this.fetcherDoneFlags.byteCodeFetcher.done !== true ) { throw Error( - `storageFetch or codeFetch didn't complete storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done}` + `storageFetch or codeFetch didn't complete storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done}`, ) } @@ -208,7 +209,7 @@ export class AccountFetcher extends Fetcher }, () => { throw Error('Snap fetcher failed to exit') - } + }, ) this.config.superMsg(`Snapsync: running trieNodeFetch=${trieNodeFetch !== null}`) this.trieNodeFetcher.setDestroyWhenDone() @@ -238,10 +239,10 @@ export class AccountFetcher extends Fetcher const fetcherProgress = formatBigDecimal( fetcherDoneFlags.accountFetcher.first * BIGINT_100, BIGINT_2EXP256, - BIGINT_100 + BIGINT_100, ) this.config.logger.warn( - `accountFetcher completed with pending range done=${fetcherProgress}%` + `accountFetcher completed with pending range done=${fetcherProgress}%`, ) } break @@ -252,10 +253,10 @@ export class AccountFetcher extends Fetcher const reqsDone = formatBigDecimal( fetcherDoneFlags.storageFetcher.first * BIGINT_100, fetcherDoneFlags.storageFetcher.count, - BIGINT_100 + BIGINT_100, ) this.config.logger.warn( - `storageFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.storageFetcher.count} queued=${this.storageFetcher.storageRequests.length}` + `storageFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.storageFetcher.count} queued=${this.storageFetcher.storageRequests.length}`, ) } @@ -267,10 +268,10 @@ export class AccountFetcher extends Fetcher const reqsDone = formatBigDecimal( fetcherDoneFlags.byteCodeFetcher.first * BIGINT_100, fetcherDoneFlags.byteCodeFetcher.count, - BIGINT_100 + BIGINT_100, ) this.config.logger.warn( - `byteCodeFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.byteCodeFetcher.count}` + `byteCodeFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.byteCodeFetcher.count}`, ) } break @@ -285,10 +286,10 @@ export class AccountFetcher extends Fetcher this.config.superMsg( `snapFetchersCompletion root=${short(this.root)} accountsRoot=${short( - fetcherDoneFlags.stateRoot ?? 'na' + fetcherDoneFlags.stateRoot ?? 
'na', )} done=${this.fetcherDoneFlags.done} accountsDone=${accountFetcher.done} storageDone=${ storageFetcher.done - } byteCodesDone=${byteCodeFetcher.done} trieNodesDone=${trieNodeFetcher.done}` + } byteCodesDone=${byteCodeFetcher.done} trieNodesDone=${trieNodeFetcher.done}`, ) if (this.fetcherDoneFlags.done) { @@ -299,12 +300,12 @@ export class AccountFetcher extends Fetcher private async verifyRangeProof( stateRoot: Uint8Array, origin: Uint8Array, - { accounts, proof }: { accounts: AccountData[]; proof: Uint8Array[] } + { accounts, proof }: { accounts: AccountData[]; proof: Uint8Array[] }, ): Promise { this.debug( `verifyRangeProof accounts:${accounts.length} first=${bytesToHex( - accounts[0].hash - )} last=${short(accounts[accounts.length - 1].hash)}` + accounts[0].hash, + )} last=${short(accounts[accounts.length - 1].hash)}`, ) for (let i = 0; i < accounts.length - 1; i++) { @@ -313,7 +314,7 @@ export class AccountFetcher extends Fetcher throw Error( `Account hashes not monotonically increasing: ${i} ${accounts[i].hash} vs ${i + 1} ${ accounts[i + 1].hash - }` + }`, ) } } @@ -321,7 +322,7 @@ export class AccountFetcher extends Fetcher const keys = accounts.map((acc: any) => acc.hash) const values = accounts.map((acc: any) => accountBodyToRLP(acc.body)) // convert the request to the right values - return Trie.verifyRangeProof(stateRoot, origin, keys[keys.length - 1], keys, values, proof, { + return verifyTrieRangeProof(stateRoot, origin, keys[keys.length - 1], keys, values, proof, { common: this.config.chainCommon, useKeyHashingFunction: this.config.chainCommon?.customCrypto?.keccak256 ?? keccak256, }) @@ -346,7 +347,7 @@ export class AccountFetcher extends Fetcher private isMissingRightRange( limit: Uint8Array, - { accounts, proof: _proof }: { accounts: AccountData[]; proof: Uint8Array[] } + { accounts, proof: _proof }: { accounts: AccountData[]; proof: Uint8Array[] }, ): boolean { if ( accounts.length > 0 && @@ -368,7 +369,7 @@ export class AccountFetcher extends Fetcher * @param peer */ async request( - job: Job + job: Job, ): Promise { const { peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -402,14 +403,14 @@ export class AccountFetcher extends Fetcher // check zero-element proof if (rangeResult.proof.length > 0) { try { - const isMissingRightRange = await Trie.verifyRangeProof( + const isMissingRightRange = await verifyTrieRangeProof( this.root, origin, null, [], [], rangeResult.proof, - { useKeyHashingFunction: keccak256 } + { useKeyHashingFunction: keccak256 }, ) // if proof is false, reject corrupt peer if (isMissingRightRange !== false) return undefined @@ -437,8 +438,8 @@ export class AccountFetcher extends Fetcher if (isMissingRightRange && this.isMissingRightRange(limit, rangeResult)) { this.debug( `Peer ${peerInfo} returned missing right range account=${bytesToHex( - rangeResult.accounts[rangeResult.accounts.length - 1].hash - )} limit=${bytesToHex(limit)}` + rangeResult.accounts[rangeResult.accounts.length - 1].hash, + )} limit=${bytesToHex(limit)}`, ) completed = false } else { @@ -459,7 +460,7 @@ export class AccountFetcher extends Fetcher */ process( job: Job, - result: AccountDataResponse + result: AccountDataResponse, ): AccountData[] | undefined { const fullResult = (job.partialResult ?? 
[]).concat(result) @@ -532,11 +533,11 @@ export class AccountFetcher extends Fetcher if (storageFetchRequests.size > 0) this.storageFetcher.enqueueByStorageRequestList( - Array.from(storageFetchRequests) as StorageRequest[] + Array.from(storageFetchRequests) as StorageRequest[], ) if (byteCodeFetchRequests.size > 0) this.byteCodeFetcher.enqueueByByteCodeRequestList( - Array.from(byteCodeFetchRequests) as Uint8Array[] + Array.from(byteCodeFetchRequests) as Uint8Array[], ) } @@ -545,7 +546,7 @@ export class AccountFetcher extends Fetcher * remaining items apart from the tasks it pushes in the queue * * Divides the full 256-bit range of hashes into ranges of @maxAccountRange - * size and turnes each range into a task for the fetcher + * size and turns each range into a task for the fetcher */ tasks(first = this.first, count = this.count, maxTasks = this.config.maxFetcherJobs): JobTask[] { @@ -576,7 +577,7 @@ export class AccountFetcher extends Fetcher } debugStr += ` limit=${short( - setLengthLeft(bigIntToBytes(startedWith + pushedCount - BIGINT_1), 32) + setLengthLeft(bigIntToBytes(startedWith + pushedCount - BIGINT_1), 32), )}` this.debug(`Created new tasks num=${tasks.length} ${debugStr}`) return tasks @@ -625,7 +626,7 @@ export class AccountFetcher extends Fetcher processStoreError( error: Error, - _task: JobTask + _task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { const stepBack = BIGINT_0 const destroyFetcher = diff --git a/packages/client/src/sync/fetcher/blockfetcher.ts b/packages/client/src/sync/fetcher/blockfetcher.ts index 7568dcd259..30b302027b 100644 --- a/packages/client/src/sync/fetcher/blockfetcher.ts +++ b/packages/client/src/sync/fetcher/blockfetcher.ts @@ -1,4 +1,4 @@ -import { createBlockFromValuesArray } from '@ethereumjs/block' +import { createBlockFromBytesArray } from '@ethereumjs/block' import { KECCAK256_RLP, KECCAK256_RLP_ARRAY, equalsBytes } from '@ethereumjs/util' import { Event } from '../../types.js' @@ -69,7 +69,7 @@ export class BlockFetcher extends BlockFetcherBase { } const bodies = bodiesResult[1] this.debug( - `Requested blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)` + `Requested blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)`, ) const blocks: Block[] = [] for (const [i, [txsData, unclesData, withdrawalsData]] of bodies.entries()) { @@ -82,7 +82,7 @@ export class BlockFetcher extends BlockFetcherBase { (withdrawalsData?.length ?? 
0) === 0) ) { this.debug( - `Requested block=${headers[i].number}} from peer ${peerInfo} missing non-empty txs=${txsData.length} or uncles=${unclesData.length} or withdrawals=${withdrawalsData?.length}` + `Requested block=${headers[i].number}} from peer ${peerInfo} missing non-empty txs=${txsData.length} or uncles=${unclesData.length} or withdrawals=${withdrawalsData?.length}`, ) return [] } @@ -91,7 +91,7 @@ export class BlockFetcher extends BlockFetcherBase { values.push(withdrawalsData) } // Supply the common from the corresponding block header already set on correct fork - const block = createBlockFromValuesArray(values, { common: headers[i].common }) + const block = createBlockFromBytesArray(values, { common: headers[i].common }) // Only validate the data integrity // Upon putting blocks into blockchain (for BlockFetcher), `validateData` is called again // In ReverseBlockFetcher we do not need to validate the entire block, since CL @@ -100,7 +100,7 @@ export class BlockFetcher extends BlockFetcherBase { blocks.push(block) } this.debug( - `Returning blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)` + `Returning blocks=${blocksRange} from ${peerInfo} (received: ${headers.length} headers / ${bodies.length} bodies)`, ) return blocks } @@ -136,14 +136,14 @@ export class BlockFetcher extends BlockFetcherBase { this.debug( `Fetcher results stored in blockchain (blocks num=${blocks.length} first=${ blocks[0]?.header.number - } last=${blocks[blocks.length - 1]?.header.number})` + } last=${blocks[blocks.length - 1]?.header.number})`, ) this.config.events.emit(Event.SYNC_FETCHED_BLOCKS, blocks.slice(0, num)) } catch (e: any) { this.debug( `Error storing fetcher results in blockchain (blocks num=${blocks.length} first=${ blocks[0]?.header.number - } last=${blocks[blocks.length - 1]?.header.number}): ${e}` + } last=${blocks[blocks.length - 1]?.header.number}): ${e}`, ) throw e } diff --git a/packages/client/src/sync/fetcher/blockfetcherbase.ts b/packages/client/src/sync/fetcher/blockfetcherbase.ts index c84760d0bd..fc12a0d243 100644 --- a/packages/client/src/sync/fetcher/blockfetcherbase.ts +++ b/packages/client/src/sync/fetcher/blockfetcherbase.ts @@ -56,7 +56,7 @@ export abstract class BlockFetcherBase extends Fetcher< this.count = options.count this.reverse = options.reverse ?? 
false this.debug( - `Block fetcher instantiated interval=${this.interval} first=${this.first} count=${this.count} reverse=${this.reverse} destroyWhenDone=${this.destroyWhenDone}` + `Block fetcher instantiated interval=${this.interval} first=${this.first} count=${this.count} reverse=${this.reverse} destroyWhenDone=${this.destroyWhenDone}`, ) } @@ -105,7 +105,7 @@ export abstract class BlockFetcherBase extends Fetcher< this.processed - this.finished < this.config.maxFetcherRequests ) { this.debug( - `Fetcher pending with first=${this.first} count=${this.count} reverse=${this.reverse}` + `Fetcher pending with first=${this.first} count=${this.count} reverse=${this.reverse}`, ) const tasks = this.tasks(this.first, this.count) for (const task of tasks) { @@ -114,7 +114,7 @@ export abstract class BlockFetcherBase extends Fetcher< this.debug(`Enqueued num=${tasks.length} tasks`) } else { this.debug( - `No new tasks enqueued in=${this.in.length} count=${this.count} processed=${this.processed} finished=${this.finished}` + `No new tasks enqueued in=${this.in.length} count=${this.count} processed=${this.processed} finished=${this.finished}`, ) } } @@ -185,7 +185,7 @@ export abstract class BlockFetcherBase extends Fetcher< first: min, count: numBlocks, }, - true + true, ) } else { for (const first of numberList) { @@ -194,12 +194,12 @@ export abstract class BlockFetcherBase extends Fetcher< first, count: 1, }, - true + true, ) } } this.debug( - `Enqueued tasks by number list num=${numberList.length} min=${min} bulkRequest=${bulkRequest} ${updateHeightStr}` + `Enqueued tasks by number list num=${numberList.length} min=${min} bulkRequest=${bulkRequest} ${updateHeightStr}`, ) if (this.in.length === 0) { this.nextTasks() @@ -208,7 +208,7 @@ export abstract class BlockFetcherBase extends Fetcher< processStoreError( error: Error, - task: JobTask + task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { let stepBack = BIGINT_0 const destroyFetcher = !(error.message as string).includes('could not find parent header') diff --git a/packages/client/src/sync/fetcher/bytecodefetcher.ts b/packages/client/src/sync/fetcher/bytecodefetcher.ts index e0fdb957bc..a5868d92e9 100644 --- a/packages/client/src/sync/fetcher/bytecodefetcher.ts +++ b/packages/client/src/sync/fetcher/bytecodefetcher.ts @@ -10,7 +10,7 @@ import debug from 'debug' import { keccak256 } from 'ethereum-cryptography/keccak' import { Fetcher } from './fetcher.js' -import { getInitFecherDoneFlags } from './types.js' +import { getInitFetcherDoneFlags } from './types.js' import type { Peer } from '../../net/peer/index.js' import type { FetcherOptions } from './fetcher.js' @@ -55,7 +55,7 @@ export class ByteCodeFetcher extends Fetcher super(options) this.hashes = options.hashes ?? [] this.stateManager = options.stateManager ?? new DefaultStateManager() - this.fetcherDoneFlags = options.fetcherDoneFlags ?? getInitFecherDoneFlags() + this.fetcherDoneFlags = options.fetcherDoneFlags ?? 
getInitFetcherDoneFlags() this.fetcherDoneFlags.byteCodeFetcher.count = BigInt(this.hashes.length) this.codeDB = this.stateManager['_getCodeDB']() @@ -65,7 +65,7 @@ export class ByteCodeFetcher extends Fetcher if (this.hashes.length > 0) { const fullJob = { task: { hashes: this.hashes } } as Job this.debug( - `Bytecode fetcher instantiated ${fullJob.task.hashes.length} hash requests destroyWhenDone=${this.destroyWhenDone}` + `Bytecode fetcher instantiated ${fullJob.task.hashes.length} hash requests destroyWhenDone=${this.destroyWhenDone}`, ) } } @@ -82,7 +82,7 @@ export class ByteCodeFetcher extends Fetcher * @param peer */ async request( - job: Job + job: Job, ): Promise { const { task, peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -113,7 +113,7 @@ export class ByteCodeFetcher extends Fetcher // While results are in the same order as requested hashes but there could be gaps/misses in the results // if the node doesn't has the bytecode. We need an index to move forward through the hashes which are - // absent in the receieved responses + // absent in the received responses let requestedHashIndex = 0 for (let i = 0; i < rangeResult.codes.length; i++) { const receivedCode = rangeResult.codes[i] @@ -155,7 +155,7 @@ export class ByteCodeFetcher extends Fetcher */ process( job: Job, - result: ByteCodeDataResponse + result: ByteCodeDataResponse, ): Uint8Array[] | undefined { const fullResult = (job.partialResult ?? []).concat(result) job.partialResult = undefined @@ -187,7 +187,7 @@ export class ByteCodeFetcher extends Fetcher } await this.codeDB.batch(ops as BatchDBOp[]) this.fetcherDoneFlags.byteCodeFetcher.first += BigInt(codeHashToByteCode.size) - // no idea why first starts exceeding count, may be because of missed hashesh thing, so resort to this + // no idea why first starts exceeding count, may be because of missed hashes thing, so resort to this // weird method of tracking the count this.fetcherDoneFlags.byteCodeFetcher.count = this.fetcherDoneFlags.byteCodeFetcher.first + BigInt(this.hashes.length) @@ -208,12 +208,12 @@ export class ByteCodeFetcher extends Fetcher */ enqueueByByteCodeRequestList(byteCodeRequestList: Uint8Array[]) { this.hashes.push(...byteCodeRequestList) - // no idea why first starts exceeding count, may be because of missed hashesh thing, so resort to this + // no idea why first starts exceeding count, may be because of missed hashes thing, so resort to this // weird method of tracking the count this.fetcherDoneFlags.byteCodeFetcher.count = this.fetcherDoneFlags.byteCodeFetcher.first + BigInt(this.hashes.length) this.debug( - `Number of bytecode fetch requests added to fetcher queue: ${byteCodeRequestList.length}` + `Number of bytecode fetch requests added to fetcher queue: ${byteCodeRequestList.length}`, ) this.nextTasks() } @@ -269,7 +269,7 @@ export class ByteCodeFetcher extends Fetcher processStoreError( error: Error, - _task: JobTask + _task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { const stepBack = BIGINT_0 const destroyFetcher = diff --git a/packages/client/src/sync/fetcher/fetcher.ts b/packages/client/src/sync/fetcher/fetcher.ts index 6bc1e92d90..de527769ff 100644 --- a/packages/client/src/sync/fetcher/fetcher.ts +++ b/packages/client/src/sync/fetcher/fetcher.ts @@ -87,19 +87,19 @@ export abstract class Fetcher extends Readable this.maxQueue = options.maxQueue ?? 
4 this.debug( - `Fetcher initialized timeout=${this.timeout} interval=${this.interval} banTime=${this.banTime} maxQueue=${this.maxQueue}` + `Fetcher initialized timeout=${this.timeout} interval=${this.interval} banTime=${this.banTime} maxQueue=${this.maxQueue}`, ) this.in = new Heap({ comparBefore: ( a: Job, - b: Job + b: Job, ) => a.index < b.index, }) as QHeap> this.out = new Heap({ comparBefore: ( a: Job, - b: Job + b: Job, ) => a.index < b.index, }) as QHeap> this.total = 0 @@ -119,7 +119,7 @@ export abstract class Fetcher extends Readable */ abstract request( _job?: Job, - _peer?: Peer + _peer?: Peer, ): Promise /** @@ -131,7 +131,7 @@ export abstract class Fetcher extends Readable */ abstract process( _job?: Job, - _result?: JobResult + _result?: JobResult, ): StorageItem[] | undefined /** @@ -146,7 +146,7 @@ export abstract class Fetcher extends Readable */ abstract processStoreError( _error: Error, - _task: JobTask | BlockFetcherJobTask + _task: JobTask | BlockFetcherJobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } abstract jobStr(job: Job, withIndex?: boolean): string @@ -259,8 +259,8 @@ export abstract class Fetcher extends Readable this.debug( `Re-enqueuing job ${jobStr} from peer id=${job.peer?.id?.substr( 0, - 8 - )} (${resultSet} result set returned).` + 8, + )} (${resultSet} result set returned).`, ) this.enqueue(job) void this.wait().then(() => { @@ -278,8 +278,8 @@ export abstract class Fetcher extends Readable this.debug( `Re-enqueuing job ${jobStr} from peer id=${job.peer?.id?.substr( 0, - 8 - )} (reply contains unexpected data).` + 8, + )} (reply contains unexpected data).`, ) this.enqueue(job) } @@ -297,7 +297,7 @@ export abstract class Fetcher extends Readable error?: Error, irrecoverable?: boolean, dequeued?: boolean, - banPeer?: boolean + banPeer?: boolean, ) { const jobItems = job instanceof Array ? 
job : [job] if (irrecoverable === true || banPeer === true) { @@ -314,8 +314,8 @@ export abstract class Fetcher extends Readable this.debug( `Failure - Re-enqueuing job ${jobStr} from peer id=${jobItem.peer?.id?.substr( 0, - 8 - )} (error: ${error}).` + 8, + )} (error: ${error}).`, ) // If the job has been dequeued, then the processed count needs to be decreased this.enqueue(jobItem, dequeued) @@ -339,7 +339,7 @@ export abstract class Fetcher extends Readable if (this.finished !== this.total) { // There are still jobs waiting to be processed out in the writer pipe this.debug( - `No job found as next task, skip next job execution processed=${this.processed} finished=${this.finished} total=${this.total}` + `No job found as next task, skip next job execution processed=${this.processed} finished=${this.finished} total=${this.total}`, ) } else { // There are no more jobs in the fetcher, so its better to resolve @@ -354,7 +354,7 @@ export abstract class Fetcher extends Readable this.debug( `Readable state length=${this._readableState!.length} exceeds max queue size=${ this.maxQueue - }, skip job ${jobStr} execution.` + }, skip job ${jobStr} execution.`, ) return false } @@ -403,7 +403,7 @@ export abstract class Fetcher extends Readable this.in.remove() } this.debug( - `Cleared out fetcher total=${this.total} processed=${this.processed} finished=${this.finished}` + `Cleared out fetcher total=${this.total} processed=${this.processed} finished=${this.finished}`, ) } @@ -435,7 +435,7 @@ export abstract class Fetcher extends Readable const _write = async ( job: Job | Job[], encoding: string | null, - cb: Function + cb: Function, ) => { const jobItems = job instanceof Array ? job : [job] this.debug(`Starting write for ${jobItems.length} jobs...`) @@ -449,7 +449,7 @@ export abstract class Fetcher extends Readable this.config.logger.warn(`Error storing received block or header result: ${error}`) const { destroyFetcher, banPeer, stepBack } = this.processStoreError( error, - jobItems[0].task + jobItems[0].task, ) if (!destroyFetcher) { // Non-fatal error: ban peer and re-enqueue job. 
@@ -475,12 +475,12 @@ export abstract class Fetcher extends Readable write: _write, writev: ( many: { chunk: Job; encoding: string }[], - cb: Function + cb: Function, ) => { const items = ([]>[]).concat( ...many.map( - (x: { chunk: Job; encoding: string }) => x.chunk - ) + (x: { chunk: Job; encoding: string }) => x.chunk, + ), ) return _write(items, null, cb) }, diff --git a/packages/client/src/sync/fetcher/headerfetcher.ts b/packages/client/src/sync/fetcher/headerfetcher.ts index 4c7aa0a281..0d75348487 100644 --- a/packages/client/src/sync/fetcher/headerfetcher.ts +++ b/packages/client/src/sync/fetcher/headerfetcher.ts @@ -92,14 +92,14 @@ export class HeaderFetcher extends BlockFetcherBase { try { this.debug( `verifyRangeProof slots:${slots.length} first=${short(slots[0].hash)} last=${short( - slots[slots.length - 1].hash - )}` + slots[slots.length - 1].hash, + )}`, ) const keys = slots.map((slot: any) => slot.hash) const values = slots.map((slot: any) => slot.body) - return await Trie.verifyRangeProof( + return await verifyTrieRangeProof( stateRoot, origin, keys[keys.length - 1], @@ -136,7 +136,7 @@ export class StorageFetcher extends Fetcher 0 && @@ -214,7 +214,7 @@ export class StorageFetcher extends Fetcher + job: Job, ): Promise { const { task, peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -230,7 +230,7 @@ export class StorageFetcher extends Fetcher bytesToHex(req.accountHash))}` + `requested account hashes: ${task.storageRequests.map((req) => bytesToHex(req.accountHash))}`, ) this.debug(`request is multi: ${job.task.multi}`) @@ -239,7 +239,7 @@ export class StorageFetcher extends Fetcher 0) { try { - const isMissingRightRange = await Trie.verifyRangeProof( + const isMissingRightRange = await verifyTrieRangeProof( task.storageRequests[0].storageRoot, origin, null, [], [], rangeResult.proof, - { useKeyHashingFunction: keccak256 } + { useKeyHashingFunction: keccak256 }, ) // if proof is false, reject corrupt peer @@ -324,7 +324,7 @@ export class StorageFetcher extends Fetcher 0) { this.debug( - `Number of ignored account requests due to fragmentation: ${ignoredRequests.length}` + `Number of ignored account requests due to fragmentation: ${ignoredRequests.length}`, ) this.storageRequests.push(...ignoredRequests) } @@ -415,7 +415,7 @@ export class StorageFetcher extends Fetcher, - result: StorageDataResponse + result: StorageDataResponse, ): StorageData[][] | undefined { const accountSlots = (result[0] as any)[0] const highestReceivedhash = accountSlots[accountSlots.length - 1].hash @@ -442,7 +442,7 @@ export class StorageFetcher extends Fetcher { try { if (JSON.stringify(result[0]) === JSON.stringify({ skipped: true })) { @@ -468,7 +468,7 @@ export class StorageFetcher extends Fetcher 0) { this.debug( - `Number of accounts requested as a part of a multi-account request: ${this.storageRequests.length}` + `Number of accounts requested as a part of a multi-account request: ${this.storageRequests.length}`, ) tasks.unshift({ storageRequests: this.storageRequests, // TODO limit max number of accounts per single fetch request multi: true, }) - this.storageRequests = [] // greedilly request as many account slots by requesting all known ones + this.storageRequests = [] // greedily request as many account slots by requesting all known ones return tasks } else if (this.fragmentedRequests.length > 0) { this.debug('Single account request is being initiated') storageRequest = this.fragmentedRequests.shift() - whereFirstwas = 
storageRequest!.first + whereFirstWas = storageRequest!.first startedWith = storageRequest!.first myFirst = storageRequest!.first myCount = storageRequest!.count @@ -602,7 +602,7 @@ export class StorageFetcher extends Fetcher constructor(options: TrieNodeFetcherOptions) { super(options) this.root = options.root - this.fetcherDoneFlags = options.fetcherDoneFlags ?? getInitFecherDoneFlags() + this.fetcherDoneFlags = options.fetcherDoneFlags ?? getInitFetcherDoneFlags() this.pathToNodeRequestData = new OrderedMap() this.requestedNodeToPath = new Map() this.fetchedAccountNodes = new Map() @@ -123,7 +123,7 @@ export class TrieNodeFetcher extends Fetcher this.debug( `Trie node fetcher instantiated with ${this.pathToNodeRequestData.size()} node requests destroyWhenDone=${ this.destroyWhenDone - }` + }`, ) } @@ -139,7 +139,7 @@ export class TrieNodeFetcher extends Fetcher * @param peer */ async request( - job: Job + job: Job, ): Promise { const { task, peer } = job // Currently this is the only safe place to call peer.latest() without interfering with the fetcher @@ -169,7 +169,7 @@ export class TrieNodeFetcher extends Fetcher try { // While results are in the same order as requested hashes but there could be gaps/misses in the results // if the node doesn't has all of the requested trie nodes. We need an index to move forward through the hashes which are - // absent in the receieved responses + // absent in the received responses const receivedNodes: Uint8Array[] = [] for (let i = 0; i < rangeResult.nodes.length; i++) { const receivedNode = rangeResult.nodes[i] @@ -193,7 +193,7 @@ export class TrieNodeFetcher extends Fetcher */ process( job: Job, - result: TrieNodesResponse + result: TrieNodesResponse, ): Uint8Array[] | undefined { const fullResult = (job.partialResult ?? 
[]).concat(result) job.partialResult = undefined @@ -253,7 +253,7 @@ export class TrieNodeFetcher extends Fetcher this.debug('leaf node found') if (storagePath === undefined) { this.debug('account leaf node found') - const account = Account.fromRlpSerializedAccount(node.value()) + const account = createAccountFromRLP(node.value()) const storageRoot: Uint8Array = account.storageRoot if (equalsBytes(storageRoot, KECCAK256_RLP) === false) { this.debug('storage component found') @@ -295,7 +295,7 @@ export class TrieNodeFetcher extends Fetcher // if error is thrown, than the node is unknown and should be queued for fetching unknownChildNodeCount++ const { parentAccountHash } = this.pathToNodeRequestData.getElementByKey( - pathString + pathString, ) as NodeRequestData this.pathToNodeRequestData.setElement(childNode.path, { nodeHash: bytesToHex(childNode.nodeHash as Uint8Array), @@ -307,13 +307,13 @@ export class TrieNodeFetcher extends Fetcher // record new node for batched storing after all subtrie nodes have been received const { nodeParentHash, parentAccountHash } = this.pathToNodeRequestData.getElementByKey( - pathString + pathString, ) as NodeRequestData if (storagePath !== undefined) { // if fetched node has a storagePath, it's storage node data and should be stored with // account leaf node data from where it originates const { pathToStorageNode } = this.fetchedAccountNodes.get( - parentAccountHash as string + parentAccountHash as string, ) as unknown as FetchedNodeData pathToStorageNode!.set(storagePath, nodeData as unknown as Uint8Array) } else { @@ -331,7 +331,7 @@ export class TrieNodeFetcher extends Fetcher this.pathToNodeRequestData.eraseElementByKey(pathString) } - // for an initial implementation, just put nodes into trie and see if root maches stateRoot + // for an initial implementation, just put nodes into trie and see if root matches stateRoot if (this.pathToNodeRequestData.length === 0) { this.debug('All requests for current heal phase have been filled') const ops: BatchDBOp[] = [] @@ -366,11 +366,11 @@ export class TrieNodeFetcher extends Fetcher } await storageTrie.batch(storageTrieOps, true) await storageTrie.persistRoot() - const a = Account.fromRlpSerializedAccount(node.value()) + const a = createAccountFromRLP(node.value()) this.debug( `Stored storageTrie with root actual=${bytesToHex( - storageTrie.root() - )} expected=${bytesToHex(a.storageRoot)}` + storageTrie.root(), + )} expected=${bytesToHex(a.storageRoot)}`, ) } } @@ -379,8 +379,8 @@ export class TrieNodeFetcher extends Fetcher await this.accountTrie.persistRoot() this.debug( `Stored accountTrie with root actual=${bytesToHex( - this.accountTrie.root() - )} expected=${bytesToHex(this.root)}` + this.accountTrie.root(), + )} expected=${bytesToHex(this.root)}`, ) } } catch (e) { @@ -468,7 +468,7 @@ export class TrieNodeFetcher extends Fetcher processStoreError( error: Error, - _task: JobTask + _task: JobTask, ): { destroyFetcher: boolean; banPeer: boolean; stepBack: bigint } { const stepBack = BIGINT_0 const destroyFetcher = diff --git a/packages/client/src/sync/fetcher/types.ts b/packages/client/src/sync/fetcher/types.ts index 8004360450..db348db0fb 100644 --- a/packages/client/src/sync/fetcher/types.ts +++ b/packages/client/src/sync/fetcher/types.ts @@ -43,7 +43,7 @@ export type SnapFetcherDoneFlags = { stateRoot?: Uint8Array } -export function getInitFecherDoneFlags(): SnapFetcherDoneFlags { +export function getInitFetcherDoneFlags(): SnapFetcherDoneFlags { return { done: false, syncing: false, diff --git 
a/packages/client/src/sync/fullsync.ts b/packages/client/src/sync/fullsync.ts index 53f3452368..e08ef25e5c 100644 --- a/packages/client/src/sync/fullsync.ts +++ b/packages/client/src/sync/fullsync.ts @@ -100,12 +100,12 @@ export class FullSynchronizer extends Synchronizer { const hash = this.chain.blocks.latest!.hash() this.startingBlock = number const timestamp = this.chain.blocks.latest?.header.timestamp - this.config.chainCommon.setHardforkBy({ blockNumber: number, td, timestamp }) + this.config.chainCommon.setHardforkBy({ blockNumber: number, timestamp }) this.config.logger.info( `Latest local block number=${Number(number)} td=${td} hash=${short( - hash - )} hardfork=${this.config.chainCommon.hardfork()}` + hash, + )} hardfork=${this.config.chainCommon.hardfork()}`, ) } @@ -240,18 +240,6 @@ export class FullSynchronizer extends Synchronizer { const nextHF = this.config.chainCommon.getHardforkBy({ blockNumber: nextHFBlockNum }) attentionHF = `${nextHF} HF in ${remaining} blocks` } - } else { - if ( - this.config.chainCommon.hardfork() === Hardfork.MergeForkIdTransition && - !this.config.chainCommon.gteHardfork(Hardfork.Paris) - ) { - const mergeTTD = this.config.chainCommon.hardforkTTD(Hardfork.Paris)! - const td = this.chain.blocks.td - const remaining = mergeTTD - td - if (remaining <= mergeTTD / BigInt(10)) { - attentionHF = `Paris (Merge) HF in ${remaining} TD` - } - } } this.config.logger.info( @@ -260,7 +248,7 @@ export class FullSynchronizer extends Synchronizer { } first=${first} last=${last} hash=${hash} ${baseFeeAdd}hardfork=${this.config.chainCommon.hardfork()} peers=${ this.pool.size }`, - { attentionHF } + { attentionHF }, ) this.txPool.removeNewBlockTxs(blocks) @@ -320,7 +308,7 @@ export class FullSynchronizer extends Synchronizer { this.config.logger.debug( `Error processing new block from peer ${ peer ? `id=${peer.id.slice(0, 8)}` : '(no peer)' - } hash=${short(block.hash())}` + } hash=${short(block.hash())}`, ) this.config.logger.debug(err) return diff --git a/packages/client/src/sync/lightsync.ts b/packages/client/src/sync/lightsync.ts index 946ce20058..2b487080ea 100644 --- a/packages/client/src/sync/lightsync.ts +++ b/packages/client/src/sync/lightsync.ts @@ -146,7 +146,7 @@ export class LightSynchronizer extends Synchronizer { ? 
`baseFee=${headers[0].baseFeePerGas} ` : '' this.config.logger.info( - `Imported headers count=${headers.length} number=${first} hash=${hash} ${baseFeeAdd}peers=${this.pool.size}` + `Imported headers count=${headers.length} number=${first} hash=${hash} ${baseFeeAdd}peers=${this.pool.size}`, ) } diff --git a/packages/client/src/sync/snapsync.ts b/packages/client/src/sync/snapsync.ts index 69162ed194..529bd59a0a 100644 --- a/packages/client/src/sync/snapsync.ts +++ b/packages/client/src/sync/snapsync.ts @@ -4,7 +4,7 @@ import { Event } from '../types.js' import { short } from '../util/index.js' import { AccountFetcher } from './fetcher/index.js' -import { getInitFecherDoneFlags } from './fetcher/types.js' +import { getInitFetcherDoneFlags } from './fetcher/types.js' import { Synchronizer } from './sync.js' import type { VMExecution } from '../execution/index.js' @@ -26,7 +26,7 @@ export class SnapSynchronizer extends Synchronizer { public running = false skeleton?: Skeleton private execution: VMExecution - readonly fetcherDoneFlags: SnapFetcherDoneFlags = getInitFecherDoneFlags() + readonly fetcherDoneFlags: SnapFetcherDoneFlags = getInitFetcherDoneFlags() constructor(options: SnapSynchronizerOptions) { super(options) @@ -62,7 +62,7 @@ export class SnapSynchronizer extends Synchronizer { await this.pool.open() this.config.logger.info( - `Opened SnapSynchronizer syncTargetHeight=${this.config.syncTargetHeight ?? 'NA'}` + `Opened SnapSynchronizer syncTargetHeight=${this.config.syncTargetHeight ?? 'NA'}`, ) } @@ -132,7 +132,7 @@ export class SnapSynchronizer extends Synchronizer { if (!this.fetcherDoneFlags.done) { throw Error( - `snap sync fetchers didn't sync complete state accountFetcherDone=${this.fetcherDoneFlags.accountFetcher.done} storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done} trieNodeFetcherDone=${this.fetcherDoneFlags.trieNodeFetcher.done}` + `snap sync fetchers didn't sync complete state accountFetcherDone=${this.fetcherDoneFlags.accountFetcher.done} storageFetcherDone=${this.fetcherDoneFlags.storageFetcher.done} byteCodeFetcherDone=${this.fetcherDoneFlags.byteCodeFetcher.done} trieNodeFetcherDone=${this.fetcherDoneFlags.trieNodeFetcher.done}`, ) } @@ -144,8 +144,8 @@ export class SnapSynchronizer extends Synchronizer { ) { throw Error( `Invalid synced data by snapsync snapTargetHeight=${snapTargetHeight} snapTargetRoot=${short( - snapTargetRoot ?? 'na' - )} snapTargetHash=${short(snapTargetHash ?? 'na')}` + snapTargetRoot ?? 'na', + )} snapTargetHash=${short(snapTargetHash ?? 'na')}`, ) } @@ -154,8 +154,8 @@ export class SnapSynchronizer extends Synchronizer { if (!equalsBytes(syncedRoot, snapTargetRoot)) { throw Error( `Invalid snap syncedRoot=${short(syncedRoot)} targetRoot=${short( - snapTargetRoot - )} for target height=${snapTargetHeight} hash=${short(snapTargetHash)}` + snapTargetRoot, + )} for target height=${snapTargetHeight} hash=${short(snapTargetHash)}`, ) // TODO: figure out what needs to be reinited // this.fetcherDoneFlags.accountFetcher.done = false; @@ -165,7 +165,7 @@ export class SnapSynchronizer extends Synchronizer { } const snapDoneMsg = `snapsync complete!!! height=${snapTargetHeight} root=${short( - snapTargetRoot + snapTargetRoot, )} hash=${short(snapTargetHash)}` if (fetchingAlreadyDone) { this.config.logger.debug(snapDoneMsg) @@ -223,7 +223,7 @@ export class SnapSynchronizer extends Synchronizer { this.fetcher === null ? 
'' : 'previous fetcher errored=' + this.fetcher.syncErrored?.message - }` + }`, ) this.fetcher = new AccountFetcher({ config: this.config, diff --git a/packages/client/src/sync/sync.ts b/packages/client/src/sync/sync.ts index 65741f07e9..53ce2d018c 100644 --- a/packages/client/src/sync/sync.ts +++ b/packages/client/src/sync/sync.ts @@ -123,7 +123,7 @@ export abstract class Synchronizer { this._syncedStatusCheckInterval = setInterval( this._syncedStatusCheck.bind(this), - this.SYNCED_STATE_REMOVAL_PERIOD + this.SYNCED_STATE_REMOVAL_PERIOD, ) const timeout = setTimeout(() => { @@ -161,7 +161,7 @@ export abstract class Synchronizer { return this.resolveSync() } catch (error: any) { this.config.logger.error( - `Received sync error, stopping sync and clearing fetcher: ${error.message ?? error}` + `Received sync error, stopping sync and clearing fetcher: ${error.message ?? error}`, ) this.clearFetcher() throw error diff --git a/packages/client/src/types.ts b/packages/client/src/types.ts index bbebaabb05..0098d534f8 100644 --- a/packages/client/src/types.ts +++ b/packages/client/src/types.ts @@ -98,6 +98,8 @@ export type DnsNetwork = string export interface ClientOpts { network?: string + chainId?: number + // Deprecated, use chainId instead networkId?: number sync?: SyncMode lightServe?: boolean diff --git a/packages/client/src/util/debug.ts b/packages/client/src/util/debug.ts index 3c01e39507..2226c849cc 100644 --- a/packages/client/src/util/debug.ts +++ b/packages/client/src/util/debug.ts @@ -28,7 +28,7 @@ export async function debugCodeReplayBlock(execution: VMExecution, block: Block) import { Level } from 'level'; import { Common } from '@ethereumjs/common' import { Block } from '@ethereumjs/block' -import { VM } from './src' +import { VM, runBlock } from './src' import { Trie } from '@ethereumjs/trie' import { DefaultStateManager } from './src/state' import { Blockchain } from '@ethereumjs/blockchain' @@ -38,7 +38,7 @@ const main = async () => { execution.hardfork }' }) const block = createBlockFromRLPSerializedBlock(hexToBytes('${bytesToHex( - block.serialize() + block.serialize(), )}'), { common }) const stateDB = new Level('${execution.config.getDataDirectory(DataDirectory.State)}') @@ -46,7 +46,7 @@ const main = async () => { const stateManager = new DefaultStateManager({ trie, common }) // Ensure we run on the right root stateManager.setStateRoot(hexToBytes('${bytesToHex( - await execution.vm.stateManager.getStateRoot() + await execution.vm.stateManager.getStateRoot(), )}')) @@ -59,11 +59,10 @@ const main = async () => { }) const vm = await VM.create({ stateManager, blockchain, common }) - await vm.runBlock({ block }) + await runBlock({ block }) } main() ` - execution.config.logger.info(code) } diff --git a/packages/client/src/util/index.ts b/packages/client/src/util/index.ts index cf0326fb53..a3e42ade2d 100644 --- a/packages/client/src/util/index.ts +++ b/packages/client/src/util/index.ts @@ -22,8 +22,8 @@ export function getClientVersion() { const packageJson = JSON.parse( readFileSync( '/' + import.meta.url.split('client')[0].split('file:///')[1] + 'client/package.json', - 'utf-8' - ) + 'utf-8', + ), ) const { version } = process return `EthereumJS/${packageJson.version}/${platform()}/node${version.substring(1)}` diff --git a/packages/client/src/util/parse.ts b/packages/client/src/util/parse.ts index 28d27ac6dc..a62f8c26d8 100644 --- a/packages/client/src/util/parse.ts +++ b/packages/client/src/util/parse.ts @@ -7,7 +7,7 @@ import type { Multiaddr } from '@multiformats/multiaddr' // 
From: https://community.fortra.com/forums/intermapper/miscellaneous-topics/5acc4fcf-fa83-e511-80cf-0050568460e4 const ip6RegExp = new RegExp( - /((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))/ + /((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))/, ) /** @@ -59,7 +59,7 @@ export function parseMultiaddrs(input: MultiaddrLike): Multiaddr[] { const { ip6, port } = matchip6.groups! 
return multiaddr(`/ip6/${ip6}/tcp/${port}`) } - // parse using WHATWG URL API + // parse using WHATWG URL API // cspell:disable-line const { hostname: ip, port } = new URL(s) if (ip && port) { return multiaddr(`/ip4/${ip}/tcp/${port}`) diff --git a/packages/client/src/util/rpc.ts b/packages/client/src/util/rpc.ts index 9eb48cfc89..77d2d52baa 100644 --- a/packages/client/src/util/rpc.ts +++ b/packages/client/src/util/rpc.ts @@ -86,7 +86,7 @@ export function inspectParams(params: any, shorten?: number) { export function createRPCServer( manager: RPCManager, - opts: CreateRPCServerOpts + opts: CreateRPCServerOpts, ): CreateRPCServerReturn { const { methodConfig, rpcDebug, rpcDebugVerbose, logger } = opts const onRequest = (request: any) => { @@ -102,7 +102,7 @@ export function createRPCServer( logger?.info(`${request.method}${batchAddOn} responded with:\n${inspectParams(response)}`) } else if (checkFilter(request.method, rpcDebug)) { logger?.info( - `${request.method}${batchAddOn} responded with:\n${inspectParams(response, 125)}` + `${request.method}${batchAddOn} responded with:\n${inspectParams(response, 125)}`, ) } } diff --git a/packages/client/test/blockchain/chain.spec.ts b/packages/client/test/blockchain/chain.spec.ts index 981573a9ab..5297024af2 100644 --- a/packages/client/test/blockchain/chain.spec.ts +++ b/packages/client/test/blockchain/chain.spec.ts @@ -1,4 +1,4 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' import { KeyEncoding, ValueEncoding, bytesToHex, equalsBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -33,17 +33,17 @@ describe('[Chain]', () => { it('should retrieve chain properties', async () => { const chain = await Chain.create({ config }) await chain.open() - assert.equal(chain.networkId, BigInt(1), 'get chain.networkId') + assert.equal(chain.chainId, BigInt(1), 'get chain.chainId') assert.equal(chain.blocks.td.toString(10), '17179869184', 'get chain.blocks.td') assert.equal(chain.blocks.height.toString(10), '0', 'get chain.blocks.height') assert.equal( bytesToHex(chain.genesis.hash()), '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', - 'get chain.genesis' + 'get chain.genesis', ) assert.ok( equalsBytes(chain.genesis.hash(), chain.blocks.latest!.hash()), - 'get chain.block.latest' + 'get chain.block.latest', ) await chain.close() }) @@ -59,7 +59,7 @@ describe('[Chain]', () => { difficulty: BigInt(0xabcdffff), parentHash: chain.genesis.hash(), } - const block = createBlockFromBlockData({ header: headerData } as BlockData, { + const block = createBlock({ header: headerData } as BlockData, { common: config.chainCommon, }) @@ -133,7 +133,7 @@ describe('[Chain]', () => { difficulty: BigInt(0xabcdffff), parentHash: chain.genesis.hash(), } - const block = createBlockFromBlockData({ header: headerData } as BlockData, { + const block = createBlock({ header: headerData } as BlockData, { common: config.chainCommon, }) await chain.putBlocks([block]) diff --git a/packages/client/test/cli/cli.spec.ts b/packages/client/test/cli/cli.spec.ts index 2fc1987eda..4dc19f483e 100644 --- a/packages/client/test/cli/cli.spec.ts +++ b/packages/client/test/cli/cli.spec.ts @@ -11,7 +11,7 @@ import type { ChildProcessWithoutNullStreams } from 'child_process' export function clientRunHelper( cliArgs: string[], onData: (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => void, - shouldError = false + 
shouldError = false, ) { const file = require.resolve('../../bin/cli.ts') const child = spawn('tsx', [file, ...cliArgs]) @@ -31,19 +31,33 @@ export function clientRunHelper( describe('[CLI]', () => { // chain network tests it('should successfully start client with a custom network and network id', async () => { - const cliArgs = ['--network=sepolia', '--networkId=11155111'] + const cliArgs = ['--network=sepolia', '--chainId=11155111'] const onData = (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => { if (message.includes('Initializing Ethereumjs client')) { assert.ok( message.includes('network=sepolia chainId=11155111'), - 'client is using custom inputs for network and network ID' + 'client is using custom inputs for network and network ID', ) - child.kill(9) + child.kill() resolve(undefined) } } await clientRunHelper(cliArgs, onData) }, 30000) + it('should successfully start client with non-lower case network name', async () => { + const cliArgs = ['--network=Kaustinen6'] + const onData = (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => { + if (message.includes('Initializing Ethereumjs client')) { + assert.ok( + message.includes('network=kaustinen6'), + 'client is using custom inputs for network and network ID', + ) + child.kill() + resolve(undefined) + } + } + await clientRunHelper(cliArgs, onData) + }, 5000) it('should successfully start client with custom inputs for PoA network', async () => { const cliArgs = [ '--rpc', @@ -59,7 +73,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { // if http endpoint startup message detected, call http endpoint with RPC method @@ -71,7 +85,7 @@ describe('[CLI]', () => { const res = await client.request('eth_coinbase', [], 2.0) assert.ok( res.result === '0x7e5f4552091a69125d5dfcb7b8c2659029395bdf', - 'correct coinbase address set' + 'correct coinbase address set', ) count -= 1 } @@ -80,7 +94,7 @@ describe('[CLI]', () => { count -= 1 } if (count === 0) { - child.kill(15) + child.kill() resolve(undefined) } } @@ -91,15 +105,15 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Invalid values')) { assert.ok( true, - 'client correctly throws error when "dev" option is passed in without a value' + 'client correctly throws error when "dev" option is passed in without a value', ) } - child.kill(15) + child.kill() resolve(undefined) } await clientRunHelper(cliArgs, onData, true) @@ -109,12 +123,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('cannot reuse')) { assert.ok(true, 'cannot reuse ports between HTTP and WS RPCs') } - child.kill(15) + child.kill() resolve(undefined) } await clientRunHelper(cliArgs, onData, true) @@ -125,7 +139,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { // if http endpoint startup message detected, call http endpoint with RPC method @@ -137,10 +151,10 @@ describe('[CLI]', () => { } catch (e: any) { assert( e.message.includes('Unauthorized: Error: Missing auth header'), - 'authentication failure shows that auth is defaulting to active' + 
'authentication failure shows that auth is defaulting to active', ) } - child.kill(15) + child.kill() resolve(undefined) } } @@ -157,19 +171,19 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { assert.ok(message.includes('engine'), 'engine rpc started') assert.ok( message.includes('rpcEngineAuth=false'), - 'auth is disabled according to client logs' + 'auth is disabled according to client logs', ) await wait(600) const client = Client.http({ port: 8553 }) const res = await client.request('engine_exchangeCapabilities', [], 2.0) assert.ok(res.result.length > 0, 'engine api is responsive without need for auth header') - child.kill(15) + child.kill() resolve(undefined) } } @@ -187,20 +201,20 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { assert.ok(message.includes('engine'), 'engine rpc started') assert.ok(message.includes(customPort), 'custom port is being used') assert.ok( message.includes('rpcEngineAuth=false'), - 'auth is disabled according to client logs' + 'auth is disabled according to client logs', ) await wait(600) const client = Client.http({ port: Number(customPort) }) const res = await client.request('engine_exchangeCapabilities', [], 2.0) assert.ok(res.result.length > 0, 'engine api is responsive without need for auth header') - child.kill(15) + child.kill() resolve(undefined) } } @@ -219,20 +233,20 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { assert.ok(message.includes('engine'), 'engine rpc started') assert.ok(message.includes('0.0.0.0'), 'custom address is being used') assert.ok( message.includes('rpcEngineAuth=false'), - 'auth is disabled according to client logs' + 'auth is disabled according to client logs', ) await wait(600) const client = Client.http({ hostname: '0.0.0.0', port: Number(customPort) }) const res = await client.request('engine_exchangeCapabilities', [], 2.0) assert.ok(res.result.length > 0, 'engine api is responsive on custom address') - child.kill(15) + child.kill() resolve(undefined) } } @@ -252,12 +266,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('ws://') && message.includes('engine')) { assert.ok( message.includes('0.0.0.0:' + customPort), - 'client logs show correct custom address and port being used' + 'client logs show correct custom address and port being used', ) assert.ok(message.includes('engine'), 'engine ws started') await wait(600) @@ -265,7 +279,7 @@ describe('[CLI]', () => { ;(client as any).ws.on('open', async function () { const res = await client.request('engine_exchangeCapabilities', [], 2.0) assert.ok(res.result.length > 0, 'read from WS RPC on custom address and port') - child.kill(15) + child.kill() resolve(undefined) }) } @@ -286,7 +300,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('ws://')) { // if ws endpoint startup message detected, call ws endpoint with RPC method @@ -295,7 +309,7 @@ describe('[CLI]', () => { ;(client as any).ws.on('open', async 
function () { const res = await client.request('web3_clientVersion', [], 2.0) assert.ok(res.result.includes('EthereumJS'), 'read from WS RPC') - child.kill(15) + child.kill() resolve(undefined) }) } @@ -315,7 +329,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('http://')) { // if http endpoint startup message detected, call http endpoint with RPC method @@ -335,7 +349,7 @@ describe('[CLI]', () => { assert.fail('should have thrown on invalid client address') } catch (e: any) { assert.ok(e !== undefined, 'failed to connect to RPC on invalid address') - child.kill(15) + child.kill() resolve(undefined) } } @@ -346,14 +360,14 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('address=http://')) { - child.kill(15) + child.kill() assert.fail('http endpoint should not be enabled') } if (message.includes('address=ws://')) { - child.kill(15) + child.kill() assert.fail('ws endpoint should not be enabled') } if (message.includes('Miner: Assembling block')) { @@ -369,11 +383,11 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('JSON-RPC: Supported Methods')) { assert.ok(message, 'logged out supported RPC methods') - child.kill(15) + child.kill() resolve(undefined) } } @@ -397,11 +411,11 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('DEBUG')) { assert.ok(message, 'debug logging is enabled') - child.kill(15) + child.kill() resolve(undefined) } } @@ -413,11 +427,11 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('account cache')) { assert.ok(message.includes('2000'), 'account cache option works') - child.kill(15) + child.kill() resolve(undefined) } } @@ -428,11 +442,11 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('storage cache')) { assert.ok(message.includes('2000'), 'storage cache option works') - child.kill(15) + child.kill() resolve(undefined) } } @@ -444,11 +458,11 @@ describe('[CLI]', () => { message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('code cache')) { assert.ok(message.includes('2000'), 'code cache option works') - child.kill(15) + child.kill() resolve(undefined) } } @@ -459,11 +473,11 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('trie cache')) { assert.ok(message.includes('2000'), 'trie cache option works') - child.kill(15) + child.kill() resolve(undefined) } } @@ -474,11 +488,11 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Reading bootnodes')) { assert.ok(message.includes('num=2'), 'passing bootnode.txt URL for bootnodes option works') - child.kill(15) + child.kill() resolve(undefined) } } @@ 
-490,14 +504,14 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Clients started with experimental feature options' + 'Clients started with experimental feature options', ) - child.kill(15) + child.kill() resolve(undefined) } } @@ -520,14 +534,14 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Clients starts with client execution limits' + 'Clients starts with client execution limits', ) - child.kill(15) + child.kill() resolve(undefined) } } @@ -547,7 +561,7 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Server listener up transport=rlpx')) { const [ip, port] = message @@ -563,7 +577,7 @@ describe('[CLI]', () => { const client = Client.http({ port: 8573 }) const res = await client.request('web3_clientVersion', [], 2.0) assert.ok(res.result.includes('EthereumJS'), 'read from HTTP RPC') - child.kill(15) + child.kill() resolve(undefined) } } @@ -584,18 +598,18 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Clients starts with custom network options' + 'Clients starts with custom network options', ) await wait(600) const client = Client.http({ port: 8593 }) const res = await client.request('web3_clientVersion', [], 2.0) assert.ok(res.result.includes('EthereumJS'), 'read from HTTP RPC') - child.kill(15) + child.kill() resolve(undefined) } } @@ -616,12 +630,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Serving light peer requests')) { assert.ok( message.includes('Serving light peer requests'), - 'client respects custom light-mode option' + 'client respects custom light-mode option', ) } if (message.includes('Starting FullEthereumService')) { @@ -630,13 +644,13 @@ describe('[CLI]', () => { if (message.includes('Client started successfully')) { assert.ok( message.includes('Client started successfully'), - 'Client starts with custom sync options' + 'Client starts with custom sync options', ) await wait(600) const client = Client.http({ port: 8548 }) const res = await client.request('web3_clientVersion', [], 2.0) assert.ok(res.result.includes('EthereumJS'), 'read from HTTP RPC') - child.kill(15) + child.kill() resolve(undefined) } } @@ -648,7 +662,6 @@ describe('[CLI]', () => { const customChainJson = `{ "name": "customChain", "chainId": 11155111, - "networkId": 11155111, "defaultHardfork": "shanghai", "consensus": { "type": "pow", @@ -736,12 +749,12 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Reading custom genesis state')) { assert.ok( message.includes('Reading custom genesis state'), - 'client respects custom genesis state file option' + 
'client respects custom genesis state file option', ) } if (message.includes('Data directory')) { @@ -750,7 +763,7 @@ describe('[CLI]', () => { if (message.includes('Initializing Ethereumjs client')) { assert.ok( message.includes('network=customChain'), - 'Client respects custom chain parameters json file option' + 'Client respects custom chain parameters json file option', ) } if (message.includes('Client started successfully')) { @@ -758,7 +771,7 @@ describe('[CLI]', () => { const client = Client.http({ port: 8549 }) const res = await client.request('web3_clientVersion', [], 2.0) assert.ok(res.result.includes('EthereumJS'), 'read from HTTP RPC') - child.kill(15) + child.kill() fs.rmSync(dir, { recursive: true, force: true }) resolve(undefined) } @@ -771,29 +784,29 @@ describe('[CLI]', () => { const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { if (message.includes('Unknown argument: datadir')) { assert.ok(true, 'correctly errors on unknown arguments') } - child.kill(15) + child.kill() resolve(undefined) } await clientRunHelper(cliArgs, onData, true) }, 5000) it('should not start client with conflicting parameters', async () => { - const cliArgs = ['--networkId', '--gethGenesis'] + const cliArgs = ['--chainId', '--gethGenesis'] const onData = async ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { - if (message.includes('Arguments networkId and gethGenesis are mutually exclusive')) { + if (message.includes('Arguments chainId and gethGenesis are mutually exclusive')) { assert.ok(true, 'correctly errors on conflicting arguments') } - child.kill(15) + child.kill() resolve(undefined) } await clientRunHelper(cliArgs, onData, true) }, 5000) -}) +}, 180000) diff --git a/packages/client/test/config.spec.ts b/packages/client/test/config.spec.ts index 14dea935ac..eb80d9505d 100644 --- a/packages/client/test/config.spec.ts +++ b/packages/client/test/config.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Goerli, Mainnet } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' import { Config, DataDirectory } from '../src/config.js' @@ -30,36 +30,32 @@ describe('[Config]', () => { }) it('peer discovery default mainnet setting', () => { - const common = new Common({ chain: Chain.Mainnet }) + const common = new Common({ chain: Mainnet }) const config = new Config({ common, accountCache: 10000, storageCache: 1000 }) assert.equal(config.discDns, false, 'disables DNS peer discovery for mainnet') assert.equal(config.discV4, true, 'enables DNS peer discovery for mainnet') }) it('--discDns=true/false', () => { - let common, config, chain + let common, config - chain = Chain.Mainnet - common = new Common({ chain }) + common = new Common({ chain: Mainnet }) config = new Config({ common, discDns: true }) assert.equal(config.discDns, true, `default discDns setting can be overridden to true`) - chain = Chain.Goerli - common = new Common({ chain }) + common = new Common({ chain: Goerli }) config = new Config({ common, discDns: false }) assert.equal(config.discDns, false, `default discDns setting can be overridden to false`) }) it('--discV4=true/false', () => { - let common, config, chain + let common, config - chain = Chain.Mainnet - common = new Common({ chain }) + common = new Common({ chain: Mainnet }) config = new Config({ common, discV4: false }) assert.equal(config.discV4, false, `default discV4 setting can be 
overridden to false`) - chain = Chain.Goerli - common = new Common({ chain }) + common = new Common({ chain: Goerli }) config = new Config({ common, discV4: true }) assert.equal(config.discV4, true, `default discV4 setting can be overridden to true`) }) diff --git a/packages/client/test/execution/vmexecution.spec.ts b/packages/client/test/execution/vmexecution.spec.ts index 72f7095631..2d604d1906 100644 --- a/packages/client/test/execution/vmexecution.spec.ts +++ b/packages/client/test/execution/vmexecution.spec.ts @@ -1,6 +1,6 @@ import { createBlockFromExecutionPayload } from '@ethereumjs/block' import { createBlockchain, createBlockchainFromBlocksData } from '@ethereumjs/blockchain' -import { Chain as ChainEnum, Common, Hardfork } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' import { bytesToHex } from '@ethereumjs/util' import { VM } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' @@ -16,7 +16,6 @@ import shanghaiJSON from '../testdata/geth-genesis/withdrawals.json' import type { BlockData, ExecutionPayload } from '@ethereumjs/block' import type { Blockchain } from '@ethereumjs/blockchain' -import type { ChainConfig } from '@ethereumjs/common' const shanghaiPayload = { blockNumber: '0x1', @@ -124,7 +123,8 @@ describe('[VMExecution]', () => { newHead = await exec.vm.blockchain.getIteratorHead!() assert.equal(newHead.header.number, BigInt(5), 'should run all blocks') - const common = new Common({ chain: 'testnet', customChains: [testnet] as ChainConfig[] }) + // @ts-ignore PrefixedHexString type is too strict + const common = createCustomCommon(testnet, Mainnet) exec = await testSetup(blockchain, common) await exec.run() assert.equal(exec.hardfork, 'byzantium', 'should update HF on block run') @@ -143,13 +143,8 @@ describe('[VMExecution]', () => { }) exec = await testSetup(blockchain) - await exec.run() - - assert.doesNotThrow( - async () => exec.executeBlocks(1, 5, []), - 'blocks should execute without error' - ) + await exec.executeBlocks(1, 5, []) }) it('Should fail opening if vmPromise already assigned', async () => { @@ -174,7 +169,7 @@ describe('[VMExecution]', () => { }) it('Block execution / Hardforks PoA (goerli)', async () => { - const common = new Common({ chain: ChainEnum.Goerli, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) let blockchain = await createBlockchain({ validateBlocks: true, validateConsensus: false, @@ -202,7 +197,9 @@ describe('[VMExecution]', () => { engine: true, }) - const block = await createBlockFromExecutionPayload(shanghaiPayload as ExecutionPayload) + const block = await createBlockFromExecutionPayload(shanghaiPayload as ExecutionPayload, { + common: new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }), + }) const oldHead = await blockchain.getIteratorHead() const parentStateRoot = oldHead.header.stateRoot @@ -215,7 +212,7 @@ describe('[VMExecution]', () => { assert.equal( bytesToHex(block.hash()), bytesToHex(newHead.hash()), - 'vmHead should be on the latest block' + 'vmHead should be on the latest block', ) // reset head and run again @@ -224,7 +221,7 @@ describe('[VMExecution]', () => { assert.equal( bytesToHex(oldHead.hash()), bytesToHex(newHead.hash()), - 'vmHead should be on the latest block' + 'vmHead should be on the latest block', ) await execution.run() @@ -232,7 +229,7 @@ describe('[VMExecution]', () => { assert.equal( bytesToHex(block.hash()), bytesToHex(newHead.hash()), - 'vmHead 
should be on the latest block' + 'vmHead should be on the latest block', ) closeRPC(server) diff --git a/packages/client/test/ext/jwt-simple.spec.ts b/packages/client/test/ext/jwt-simple.spec.ts index 3fa9197a2f..c0af86a255 100644 --- a/packages/client/test/ext/jwt-simple.spec.ts +++ b/packages/client/test/ext/jwt-simple.spec.ts @@ -94,7 +94,7 @@ describe('decode', function () { const obj2 = jwt.decode(token, key, false, 'HS512') expect(obj2).to.eql(obj) expect(jwt.decode.bind(null, token, key, false, 'HS256')).toThrowError( - /Signature verification failed/ + /Signature verification failed/, ) }) diff --git a/packages/client/test/integration/fullethereumservice.spec.ts b/packages/client/test/integration/fullethereumservice.spec.ts index 8a8f0b1612..d72de32016 100644 --- a/packages/client/test/integration/fullethereumservice.spec.ts +++ b/packages/client/test/integration/fullethereumservice.spec.ts @@ -1,8 +1,8 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' +import { createFeeMarket1559TxFromRLP } from '@ethereumjs/tx' import { Account, bytesToHex, equalsBytes, hexToBytes, toBytes } from '@ethereumjs/util' import * as td from 'testdouble' import { assert, describe, it } from 'vitest' @@ -81,28 +81,28 @@ describe( }) peer.eth!.send('NewBlockHashes', [[hash, BigInt(2)]]) - const block = createBlockFromBlockData( + const block = createBlock( { header: { number: 1, difficulty: 1, }, }, - { common: config.chainCommon } + { common: config.chainCommon }, ) peer.eth!.send('NewBlock', [block, BigInt(1)]) const txData = '0x02f901100180843b9aca00843b9aca008402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const tx = FeeMarketEIP1559Transaction.fromSerializedTx(toBytes(txData)) + const tx = createFeeMarket1559TxFromRLP(toBytes(txData)) await service.execution.vm.stateManager.putAccount( tx.getSenderAddress(), - new Account(BigInt(0), BigInt('40000000000100000')) + new Account(BigInt(0), BigInt('40000000000100000')), ) await service.txPool.add(tx) service.config.chainCommon.getHardforkBy = td.func() td.when(service.config.chainCommon.getHardforkBy(td.matchers.anything())).thenReturn( - Hardfork.London + Hardfork.London, ) const [_, txs] = await peer.eth!.getPooledTransactions({ hashes: [tx.hash()] }) it('should handle GetPooledTransactions', async () => { @@ -111,7 +111,7 @@ describe( peer.eth!.send('Transactions', [tx]) }, - { timeout: 30000 } + { timeout: 30000 }, ) describe('should handle LES requests', async () => { @@ -122,7 +122,7 @@ describe('should handle LES requests', async () => { assert.equal( bytesToHex(headers[1].hash()), '0xa321d27cd2743617c1c1b0d7ecb607dd14febcdfca8f01b79c3f0249505ea069', - 'handled GetBlockHeaders' + 'handled GetBlockHeaders', ) }) await destroy(server, service) diff --git 
a/packages/client/test/integration/lightsync.spec.backup.ts b/packages/client/test/integration/lightsync.spec.backup.ts index 4ed1149672..1c1f3cb862 100644 --- a/packages/client/test/integration/lightsync.spec.backup.ts +++ b/packages/client/test/integration/lightsync.spec.backup.ts @@ -29,7 +29,7 @@ describe( }) await localService.synchronizer!.start() }, - { timeout: 30000 } + { timeout: 30000 }, ) describe( @@ -56,7 +56,7 @@ describe( assert.ok('did not sync') }) }, - { timeout: 30000 } + { timeout: 30000 }, ) describe( @@ -90,5 +90,5 @@ describe( }) await localService.synchronizer!.start() }, - { timeout: 30000 } + { timeout: 30000 }, ) diff --git a/packages/client/test/integration/merge.spec.ts b/packages/client/test/integration/merge.spec.ts deleted file mode 100644 index 5e07abd0f0..0000000000 --- a/packages/client/test/integration/merge.spec.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { BlockHeader } from '@ethereumjs/block' -import { createBlockchain } from '@ethereumjs/blockchain' -import { - Chain as ChainCommon, - ConsensusAlgorithm, - ConsensusType, - Hardfork, - createCustomCommon, -} from '@ethereumjs/common' -import { Address, hexToBytes } from '@ethereumjs/util' -import { assert, describe, it } from 'vitest' - -import { Chain } from '../../src/blockchain/index.js' -import { Config } from '../../src/config.js' -import { FullEthereumService } from '../../src/service/index.js' -import { Event } from '../../src/types.js' - -import { MockServer } from './mocks/mockserver.js' -import { destroy, setup } from './util.js' - -import type { CliqueConsensus } from '@ethereumjs/blockchain' -import type { Common } from '@ethereumjs/common' - -const commonPoA = createCustomCommon( - { - consensus: { - type: ConsensusType.ProofOfAuthority, - algorithm: ConsensusAlgorithm.Clique, - clique: { - period: 1, // use 1s period for quicker test execution - epoch: 30000, - }, - }, - hardforks: [ - { name: 'chainstart', block: 0 }, - { name: 'london', block: 0 }, - { - name: 'paris', - block: null, - forkHash: null, - ttd: BigInt(5), - }, - ], - }, - { baseChain: ChainCommon.Goerli, hardfork: Hardfork.London } -) -const commonPoW = createCustomCommon( - { - genesis: { - gasLimit: 16777216, - difficulty: 1, - nonce: '0x0000000000000042', - extraData: '0x3535353535353535353535353535353535353535353535353535353535353535', - }, - hardforks: [ - { name: 'chainstart', block: 0 }, - { name: 'london', block: 0 }, - { - name: 'paris', - block: null, - forkHash: null, - ttd: BigInt(1000), - }, - ], - }, - { baseChain: ChainCommon.Mainnet, hardfork: Hardfork.London } -) -const accounts: [Address, Uint8Array][] = [ - [ - new Address(hexToBytes('0x0b90087d864e82a284dca15923f3776de6bb016f')), - hexToBytes('0x64bf9cc30328b0e42387b3c82c614e6386259136235e20c1357bd11cdee86993'), - ], -] -async function minerSetup(common: Common): Promise<[MockServer, FullEthereumService]> { - const config = new Config({ common, accountCache: 10000, storageCache: 1000 }) - const server = new MockServer({ config }) as any - const blockchain = await createBlockchain({ - common, - validateBlocks: false, - validateConsensus: false, - }) - ;(blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [accounts[0][0]] // stub - const serviceConfig = new Config({ - common, - server, - mine: true, - accounts, - }) - const chain = await Chain.create({ config: serviceConfig, blockchain }) - // attach server to centralized event bus - ;(server.config as any).events = serviceConfig.events - const service = new FullEthereumService({ - 
config: serviceConfig, - chain, - }) - await service.open() - await server.start() - await service.start() - return [server, service] -} - -describe('should mine and stop at the merge (PoA)', async () => { - const [server, service] = await minerSetup(commonPoA) - const [remoteServer, remoteService] = await setup({ - location: '127.0.0.2', - height: 0, - common: commonPoA, - }) - ;(remoteService.chain.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [ - accounts[0][0], - ] // stub - BlockHeader.prototype['_consensusFormatValidation'] = () => {} //stub - await server.discover('remotePeer1', '127.0.0.2') - const targetTTD = BigInt(5) - const _td: Promise = new Promise((resolve) => { - remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async () => { - resolve(remoteService.chain.headers.td) - }) - }) - await remoteService.synchronizer!.start() - const td: bigint = await _td - it('should sync', async () => { - if (td === targetTTD) { - assert.equal( - remoteService.chain.headers.td, - targetTTD, - 'synced blocks to the merge successfully' - ) - // Make sure the miner has stopped - assert.notOk(service.miner!.running, 'miner should not be running') - } - if (td > targetTTD) { - assert.fail('chain should not exceed merge TTD') - } - assert.ok('synced') - }) - await destroy(server, service) - await destroy(remoteServer, remoteService) -}, 60000) - -describe('should mine and stop at the merge (PoW)', async () => { - const [server, service] = await minerSetup(commonPoW) - const [remoteServer, remoteService] = await setup({ - location: '127.0.0.2', - height: 0, - common: commonPoW, - }) - await server.discover('remotePeer1', '127.0.0.2') - const targetTTD = BigInt(1000) - let terminalHeight: bigint | undefined - const res: Promise<{ height: bigint; td: bigint }> = new Promise((resolve) => { - remoteService.config.events.on(Event.CHAIN_UPDATED, async () => { - const { height, td } = remoteService.chain.headers - resolve({ height, td }) - }) - }) - await remoteService.synchronizer!.start() - const { height, td } = await res - it('should sync', async () => { - if (td > targetTTD) { - if (terminalHeight === undefined || terminalHeight === BigInt(0)) { - terminalHeight = height - } - assert.equal( - remoteService.chain.headers.height, - terminalHeight, - 'synced blocks to the merge successfully' - ) - // Make sure the miner has stopped - assert.notOk(service.miner!.running, 'miner should not be running') - await destroy(server, service) - await destroy(remoteServer, remoteService) - } - if ( - typeof terminalHeight === 'bigint' && - terminalHeight !== BigInt(0) && - terminalHeight < height - ) { - assert.fail('chain should not exceed merge terminal block') - } - }) -}, 120000) diff --git a/packages/client/test/integration/miner.spec.ts b/packages/client/test/integration/miner.spec.ts index 64d7dd98d7..f9dd101829 100644 --- a/packages/client/test/integration/miner.spec.ts +++ b/packages/client/test/integration/miner.spec.ts @@ -1,9 +1,9 @@ -import { createBlockchain } from '@ethereumjs/blockchain' +import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' import { - Chain as ChainCommon, Common, ConsensusAlgorithm, ConsensusType, + Goerli, Hardfork, createCustomCommon, } from '@ethereumjs/common' @@ -18,15 +18,15 @@ import { Event } from '../../src/types.js' import { MockServer } from './mocks/mockserver.js' import { destroy, setup } from './util.js' -import type { CliqueConsensus } from '@ethereumjs/blockchain' +import type { ConsensusDict } from 
'@ethereumjs/blockchain' // Schedule london at 0 and also unset any past scheduled timestamp hardforks that might collide with test -const hardforks = new Common({ chain: ChainCommon.Goerli }) +const hardforks = new Common({ chain: Goerli }) .hardforks() .map((h) => h.name === Hardfork.London ? { ...h, block: 0, timestamp: undefined } - : { ...h, timestamp: undefined } + : { ...h, timestamp: undefined }, ) const common = createCustomCommon( { @@ -40,7 +40,8 @@ const common = createCustomCommon( }, }, }, - { baseChain: ChainCommon.Goerli, hardfork: Hardfork.London } + Goerli, + { hardfork: Hardfork.London }, ) const accounts: [Address, Uint8Array][] = [ [ @@ -52,10 +53,13 @@ async function minerSetup(): Promise<[MockServer, FullEthereumService]> { const config = new Config({ common, accountCache: 10000, storageCache: 1000 }) const server = new MockServer({ config }) as any + const consensusDict: ConsensusDict = {} + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() const blockchain = await createBlockchain({ common, validateBlocks: false, validateConsensus: false, + consensusDict, }) ;(blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [accounts[0][0]] // stub const chain = await Chain.create({ config, blockchain }) @@ -78,39 +82,40 @@ async function minerSetup(): Promise<[MockServer, FullEthereumService]> { return [server, service] } -describe( - 'should mine blocks while a peer stays connected to tip of chain', - async () => { - const [server, service] = await minerSetup() - const [remoteServer, remoteService] = await setup({ - location: '127.0.0.2', - height: 0, - common, - }) - ;(remoteService.chain.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [ - accounts[0][0], - ] // stub - ;(remoteService as FullEthereumService).execution.run = async () => 1 // stub - await server.discover('remotePeer1', '127.0.0.2') - const targetHeight = BigInt(5) - await new Promise((resolve) => { - remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async (chainHeight) => { - if (chainHeight === targetHeight) { - it('should sync blocks', () => { +describe('should mine blocks while a peer stays connected to tip of chain', () => { + it( + 'should work', + async () => { + const [server, service] = await minerSetup() + const [remoteServer, remoteService] = await setup({ + location: '127.0.0.2', + height: 0, + common, + }) + ;(remoteService.chain.blockchain.consensus as CliqueConsensus).cliqueActiveSigners = () => [ + accounts[0][0], + ] // stub + ;(remoteService as FullEthereumService).execution.run = async () => 1 // stub + await server.discover('remotePeer1', '127.0.0.2') + const targetHeight = BigInt(5) + await new Promise((resolve) => { + remoteService.config.events.on(Event.SYNC_SYNCHRONIZED, async (chainHeight) => { + if (chainHeight === targetHeight) { assert.equal( remoteService.chain.blocks.height, targetHeight, - 'synced blocks successfully' + 'synced blocks successfully', ) - }) - await destroy(server, service) - await destroy(remoteServer, remoteService) - resolve(undefined) - void remoteService.synchronizer!.start() - } + await destroy(server, service) + await destroy(remoteServer, remoteService) + resolve(undefined) + + void remoteService.synchronizer!.start() + } + }) }) - }) - }, - { timeout: 25000 } -) + }, + { timeout: 25000 }, + ) +}) diff --git a/packages/client/test/integration/mocks/mockchain.ts b/packages/client/test/integration/mocks/mockchain.ts index d0c888d4d1..e05af7c5c9 100644 --- 
a/packages/client/test/integration/mocks/mockchain.ts +++ b/packages/client/test/integration/mocks/mockchain.ts @@ -1,4 +1,4 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { Hardfork } from '@ethereumjs/common' import { Chain } from '../../../src/blockchain/index.js' @@ -30,7 +30,7 @@ export class MockChain extends Chain { const common = this.config.chainCommon const blocks: Block[] = [] for (let number = 0; number < this.height; number++) { - const block = createBlockFromBlockData( + const block = createBlock( { header: { number: number + 1, @@ -38,7 +38,7 @@ export class MockChain extends Chain { parentHash: number ? blocks[number - 1].hash() : this.genesis.hash(), }, }, - { common } + { common }, ) blocks.push(block) } diff --git a/packages/client/test/integration/mocks/mockpeer.ts b/packages/client/test/integration/mocks/mockpeer.ts index 75d3461fd5..fbaf0f5d04 100644 --- a/packages/client/test/integration/mocks/mockpeer.ts +++ b/packages/client/test/integration/mocks/mockpeer.ts @@ -79,7 +79,7 @@ export class MockPeer extends Peer { if (!(stream.protocols as string[]).includes(`${p.name}/${p.versions[0]}`)) return await p.open() await this.addProtocol(new MockSender(p.name, pushableFn, receiver), p) - }) + }), ) this.connected = true } diff --git a/packages/client/test/integration/mocks/network.ts b/packages/client/test/integration/mocks/network.ts index db67d2477d..1bc30a5583 100644 --- a/packages/client/test/integration/mocks/network.ts +++ b/packages/client/test/integration/mocks/network.ts @@ -64,7 +64,7 @@ export function createStream(id: string, location: string, protocols: string[]) servers[location].streams[id] = stream setTimeout( () => servers[location].server.emit('connection', { id, stream: stream.local(id) }), - 10 + 10, ) return stream.remote(location) } diff --git a/packages/client/test/integration/peerpool.spec.ts b/packages/client/test/integration/peerpool.spec.ts index 583e9f425b..a875386595 100644 --- a/packages/client/test/integration/peerpool.spec.ts +++ b/packages/client/test/integration/peerpool.spec.ts @@ -81,7 +81,7 @@ describe('should handle peer messages', async () => { config.events.on(Event.POOL_PEER_ADDED, (peer: any) => it('should add peer', () => { assert.equal(peer.id, 'peer0', 'added peer') - }) + }), ) config.events.on(Event.PROTOCOL_MESSAGE, (msg: any, proto: any, peer: any) => { it('should get message', () => { diff --git a/packages/client/test/integration/pow.spec.ts b/packages/client/test/integration/pow.spec.ts index 51f0b3d4ba..c2af982121 100644 --- a/packages/client/test/integration/pow.spec.ts +++ b/packages/client/test/integration/pow.spec.ts @@ -1,13 +1,15 @@ import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { Address, hexToBytes, parseGethGenesisState } from '@ethereumjs/util' +import { createAddressFromPrivateKey, hexToBytes, parseGethGenesisState } from '@ethereumjs/util' import { rmSync } from 'fs' import { assert, describe, it } from 'vitest' import { Config } from '../../src/index.js' import { createInlineClient } from '../sim/simutils.js' +import type { Address } from '@ethereumjs/util' + const pk = hexToBytes('0x95a602ff1ae30a2243f400dcf002561b9743b2ae9827b1008e3714a5cc1c0cfe') -const minerAddress = Address.fromPrivateKey(pk) +const minerAddress = createAddressFromPrivateKey(pk) async function setupPowDevnet(prefundAddress: Address, cleanStart: boolean) { if (cleanStart) { diff --git a/packages/client/test/integration/util.ts 
b/packages/client/test/integration/util.ts index 16e9cf0e78..34abf6da7c 100644 --- a/packages/client/test/integration/util.ts +++ b/packages/client/test/integration/util.ts @@ -1,4 +1,5 @@ -import { createBlockchain } from '@ethereumjs/blockchain' +import { CliqueConsensus, createBlockchain } from '@ethereumjs/blockchain' +import { type Common, ConsensusAlgorithm } from '@ethereumjs/common' import { MemoryLevel } from 'memory-level' import { Config } from '../../src/config.js' @@ -9,7 +10,7 @@ import { MockChain } from './mocks/mockchain.js' import { MockServer } from './mocks/mockserver.js' import type { SyncMode } from '../../src/config.js' -import type { Common } from '@ethereumjs/common' +import type { ConsensusDict } from '@ethereumjs/blockchain' interface SetupOptions { location?: string @@ -21,7 +22,7 @@ interface SetupOptions { } export async function setup( - options: SetupOptions = {} + options: SetupOptions = {}, ): Promise<[MockServer, FullEthereumService | LightEthereumService]> { const { location, height, interval, syncmode } = options const minPeers = options.minPeers ?? 1 @@ -39,9 +40,12 @@ export async function setup( }) const server = new MockServer({ config, location }) as any + const consensusDict: ConsensusDict = {} + consensusDict[ConsensusAlgorithm.Clique] = new CliqueConsensus() const blockchain = await createBlockchain({ validateBlocks: false, validateConsensus: false, + consensusDict, common, }) @@ -82,7 +86,7 @@ export async function setup( export async function destroy( server: MockServer, - service: FullEthereumService | LightEthereumService + service: FullEthereumService | LightEthereumService, ): Promise<void> { service.config.events.emit(Event.CLIENT_SHUTDOWN) await server.stop() diff --git a/packages/client/test/logging.spec.ts b/packages/client/test/logging.spec.ts index 5f48ea5b5e..84438dc59f 100644 --- a/packages/client/test/logging.spec.ts +++ b/packages/client/test/logging.spec.ts @@ -9,11 +9,11 @@ describe('[Logging]', () => { it('should have correct transports', () => { assert.ok( logger.transports.find((t: any) => t.name === 'console') !== undefined, - 'should have stdout transport' + 'should have stdout transport', ) assert.ok( logger.transports.find((t: any) => t.name === 'file') !== undefined, - 'should have file transport' + 'should have file transport', ) }) @@ -24,11 +24,11 @@ describe('[Logging]', () => { e.level = 'error' assert.ok( /an error\n {4}at/.test((format.transform(e) as any).message), - 'log message should contain stack trace (1)' + 'log message should contain stack trace (1)', ) assert.ok( /an error\n {4}at/.test((format.transform({ level: 'error', message: e }) as any).message), - 'log message should contain stack trace (2)' + 'log message should contain stack trace (2)', ) } }) @@ -44,8 +44,8 @@ describe('[Logging]', () => { }) as any assert.equal( message, - 'test \x1B[38;2;0;128;0mkey\x1B[39m=value ', - 'key=value pairs should be colorized' + 'test \x1B[38;2;0;128;0mkey\x1B[39m=value ', // cspell:disable-line + 'key=value pairs should be colorized', ) }) }) diff --git a/packages/client/test/miner/miner.spec.ts b/packages/client/test/miner/miner.spec.ts index 5611ee520f..ec11116b48 100644 --- a/packages/client/test/miner/miner.spec.ts +++ b/packages/client/test/miner/miner.spec.ts @@ -1,13 +1,13 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' +import { BlockHeader, createBlock, createBlockHeader } from '@ethereumjs/block' import { Common, - Chain as CommonChain, + Goerli, Hardfork,
createCommonFromGethGenesis, createCustomCommon, } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' +import { createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { Address, equalsBytes, hexToBytes } from '@ethereumjs/util' import { AbstractLevel } from 'abstract-level' // import { keccak256 } from 'ethereum-cryptography/keccak' @@ -53,21 +53,21 @@ class FakeChain { update() {} get headers() { return { - latest: BlockHeader.fromHeaderData(), + latest: createBlockHeader(), height: BigInt(0), } } get blocks() { return { - latest: createBlockFromBlockData(), + latest: createBlock(), height: BigInt(0), } } getBlock() { - return BlockHeader.fromHeaderData() + return createBlockHeader() } getCanonicalHeadHeader() { - return BlockHeader.fromHeaderData() + return createBlockHeader() } blockchain: any = { putBlock: async () => {}, @@ -79,7 +79,7 @@ class FakeChain { }, validateHeader: () => {}, getIteratorHead: () => { - return createBlockFromBlockData({ header: { number: 1 } }) + return createBlock({ header: { number: 1 } }) }, getTotalDifficulty: () => { return 1n @@ -150,7 +150,7 @@ const customConfig = new Config({ }) customConfig.events.setMaxListeners(50) -const goerliCommon = new Common({ chain: CommonChain.Goerli, hardfork: Hardfork.Berlin }) +const goerliCommon = new Common({ chain: Goerli, hardfork: Hardfork.Berlin }) goerliCommon.events.setMaxListeners(50) const goerliConfig = new Config({ accountCache: 10000, @@ -168,7 +168,7 @@ const createTx = ( value = 1, gasPrice = 1000000000, gasLimit = 100000, - common = customCommon + common = customCommon, ) => { const txData = { nonce, @@ -177,7 +177,7 @@ const createTx = ( to: to.address, value, } - const tx = LegacyTransaction.fromTxData(txData, { common }) + const tx = createLegacyTx(txData, { common }) const signedTx = tx.sign(from.privateKey) return signedTx } @@ -191,7 +191,7 @@ const txA011 = createTx( 1, 1000000000, 100000, - goerliCommon + goerliCommon, ) // A -> B, nonce: 0, value: 1, normal gasPrice const txA02 = createTx(A, B, 1, 1, 2000000000) // A -> B, nonce: 1, value: 1, 2x gasPrice @@ -287,7 +287,7 @@ describe('assembleBlocks() -> with a hardfork mismatching tx', async () => { assert.equal( blocks[0].transactions.length, 0, - 'new block should not include tx due to hardfork mismatch' + 'new block should not include tx due to hardfork mismatch', ) assert.equal(txPool.txsInPool, 1, 'transaction should remain in pool') }) @@ -327,7 +327,7 @@ describe('assembleBlocks() -> with multiple txs, properly ordered by gasPrice an ;(vm.blockchain as any)._validateConsensus = false chain.putBlocks = (blocks: Block[]) => { - it('sholud be properly orded by gasPrice and nonce', () => { + it('should be properly ordered by gasPrice and nonce', () => { const msg = 'txs in block should be properly ordered by gasPrice and nonce' const expectedOrder = [txB01, txA01, txA02, txA03] for (const [index, tx] of expectedOrder.entries()) { @@ -382,7 +382,7 @@ describe('assembleBlocks() -> with saveReceipts', async () => { ;(vm.blockchain as any)._validateConsensus = false chain.putBlocks = async (blocks: Block[]) => { - it('should be properly orded by gasPrice and nonce', async () => { + it('should be properly ordered by gasPrice and nonce', async () => { const msg = 'txs in block should be properly ordered by gasPrice and nonce' const expectedOrder = [txB01, txA01, txA02, txA03] for (const [index, tx] of 
expectedOrder.entries()) { @@ -412,9 +412,13 @@ describe('assembleBlocks() -> with saveReceipts', async () => { }) describe('assembleBlocks() -> should not include tx under the baseFee', async () => { - const customChainParams = { hardforks: [{ name: 'london', block: 0 }] } - const common = createCustomCommon(customChainParams, { - baseChain: CommonChain.Goerli, + const customChainParams = { + hardforks: [ + { name: 'chainstart', block: 0 }, + { name: 'london', block: 0 }, + ], + } + const common = createCustomCommon(customChainParams, Goerli, { hardfork: Hardfork.London, }) const config = new Config({ @@ -425,7 +429,7 @@ describe('assembleBlocks() -> should not include tx under the baseFee', async () common, }) const chain = new FakeChain() as any - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) Object.defineProperty(chain, 'headers', { get() { return { latest: block.header, height: block.header.number } @@ -449,10 +453,9 @@ describe('assembleBlocks() -> should not include tx under the baseFee', async () // the default block baseFee will be 7 // add tx with maxFeePerGas of 6 - const tx = FeeMarketEIP1559Transaction.fromTxData( - { to: B.address, maxFeePerGas: 6 }, - { common } - ).sign(A.privateKey) + const tx = createFeeMarket1559Tx({ to: B.address, maxFeePerGas: 6 }, { common }).sign( + A.privateKey, + ) try { await txPool.add(tx, true) } catch { @@ -474,10 +477,7 @@ describe('assembleBlocks() -> should not include tx under the baseFee', async () describe("assembleBlocks() -> should stop assembling a block after it's full", async () => { const chain = new FakeChain() as any const gasLimit = 100000 - const block = createBlockFromBlockData( - { header: { gasLimit } }, - { common: customCommon, setHardfork: true } - ) + const block = createBlock({ header: { gasLimit } }, { common: customCommon, setHardfork: true }) Object.defineProperty(chain, 'headers', { get() { return { latest: block.header, height: BigInt(0) } @@ -503,13 +503,13 @@ describe("assembleBlocks() -> should stop assembling a block after it's full", a // add txs const data = '0xfe' // INVALID opcode, consumes all gas - const tx1FillsBlockGasLimit = LegacyTransaction.fromTxData( + const tx1FillsBlockGasLimit = createLegacyTx( { gasLimit: gasLimit - 1, data, gasPrice: BigInt('1000000000') }, - { common: customCommon } + { common: customCommon }, ).sign(A.privateKey) - const tx2ExceedsBlockGasLimit = LegacyTransaction.fromTxData( + const tx2ExceedsBlockGasLimit = createLegacyTx( { gasLimit: 21000, to: B.address, nonce: 1, gasPrice: BigInt('1000000000') }, - { common: customCommon } + { common: customCommon }, ).sign(A.privateKey) await txPool.add(tx1FillsBlockGasLimit) await txPool.add(tx2ExceedsBlockGasLimit) @@ -566,7 +566,7 @@ describe.skip('assembleBlocks() -> should stop assembling when a new block is re for (let i = 0; i < 1000; i++) { // In order not to pollute TxPool with too many txs from the same address // (or txs which are already known), keep generating a new address for each tx - const address = Address.fromPrivateKey(privateKey) + const address = createAddressFromPrivateKey(privateKey) await setBalance(vm, address, BigInt('200000000000001')) const tx = createTx({ address, privateKey }) await txPool.add(tx) @@ -647,7 +647,7 @@ describe.skip('should handle mining over the london hardfork block', async () => blockHeader3.gasLimit, 'gas limit should be double previous block' ) - const initialBaseFee = config.execCommon.paramByEIP('gasConfig', 'initialBaseFee', 1559)! 
+ const initialBaseFee = config.execCommon.paramByEIP('initialBaseFee', 1559)! assert.equal(blockHeader3.baseFeePerGas!, initialBaseFee, 'baseFee should be initial value') // block 4 diff --git a/packages/client/test/miner/pendingBlock.spec.ts b/packages/client/test/miner/pendingBlock.spec.ts index f280c8f653..f7ab2b5633 100644 --- a/packages/client/test/miner/pendingBlock.spec.ts +++ b/packages/client/test/miner/pendingBlock.spec.ts @@ -1,16 +1,7 @@ -import { Block, BlockHeader } from '@ethereumjs/block' -import { - Common, - Chain as CommonChain, - Hardfork, - createCommonFromGethGenesis, -} from '@ethereumjs/common' +import { Block, BlockHeader, createBlockHeader } from '@ethereumjs/block' +import { Common, Goerli, Hardfork, Mainnet, createCommonFromGethGenesis } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { - BlobEIP4844Transaction, - FeeMarketEIP1559Transaction, - LegacyTransaction, -} from '@ethereumjs/tx' +import { createBlob4844Tx, createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { Account, Address, @@ -52,7 +43,7 @@ const setBalance = async (vm: VM, address: Address, balance: bigint) => { await vm.stateManager.commit() } -const common = new Common({ chain: CommonChain.Goerli, hardfork: Hardfork.Berlin }) +const common = new Common({ chain: Goerli, hardfork: Hardfork.Berlin }) // Unschedule any timestamp since tests are not configured for timestamps common .hardforks() @@ -79,7 +70,7 @@ const setup = () => { const service: any = { chain: { headers: { height: BigInt(0) }, - getCanonicalHeadHeader: () => BlockHeader.fromHeaderData({}, { common }), + getCanonicalHeadHeader: () => createBlockHeader({}, { common }), }, execution: { vm: { @@ -87,7 +78,7 @@ const setup = () => { shallowCopy: () => service.execution.vm, setStateRoot: () => {}, blockchain: mockBlockchain({}), - common: new Common({ chain: 'mainnet' }), + common: new Common({ chain: Mainnet }), }, }, } @@ -111,7 +102,7 @@ describe('[PendingBlock]', async () => { nonce = 0, value = 1, gasPrice = 1000000000, - gasLimit = 100000 + gasLimit = 100000, ) => { const txData = { nonce, @@ -120,7 +111,7 @@ describe('[PendingBlock]', async () => { to: to.address, value, } - const tx = LegacyTransaction.fromTxData(txData, { common }) + const tx = createLegacyTx(txData, { common }) const signedTx = tx.sign(from.privateKey) return signedTx } @@ -154,7 +145,7 @@ describe('[PendingBlock]', async () => { assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -175,10 +166,10 @@ describe('[PendingBlock]', async () => { const payload = pendingBlock.pendingPayloads.get(bytesToHex(payloadId)) assert.equal( (payload as any).transactions.filter( - (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txA011.hash()) + (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txA011.hash()), ).length, 1, - 'txA011 should be in block' + 'txA011 should be in block', ) txB011.common.setHardfork(Hardfork.Paris) @@ -191,16 +182,16 @@ describe('[PendingBlock]', async () => { assert.equal(block?.transactions.length, 2, 'should include txs from pool') assert.equal( (payload as any).transactions.filter( - (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txB011.hash()) + (tx: TypedTransaction) => bytesToHex(tx.hash()) === bytesToHex(txB011.hash()), ).length, 1, - 'txB011 should be in block' + 'txB011 should be in block', ) pendingBlock.pruneSetToMax(0) 
assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -217,14 +208,14 @@ describe('[PendingBlock]', async () => { assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after stopping' + 'should reset the pending payload after stopping', ) }) it('should stop adding txs when block is full', async () => { const { txPool } = setup() - // set gas limit low so that can accomodate 2 txs + // set gas limit low so that it can accommodate 2 txs const prevGasLimit = common['_chainParams'].genesis.gasLimit common['_chainParams'].genesis.gasLimit = 50000 @@ -239,14 +230,14 @@ describe('[PendingBlock]', async () => { await txPool.add(txA022) // This tx will not be added since its too big to fit - const txA03 = LegacyTransaction.fromTxData( + const txA03 = createLegacyTx( { data: '0xFE', // INVALID opcode, uses all gas gasLimit: 10000000, gasPrice: 1000000000, nonce: 2, }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txA03) const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) @@ -263,14 +254,14 @@ describe('[PendingBlock]', async () => { assert.equal( block?.transactions.length, 2, - 'should include txs from pool that fit in the block' + 'should include txs from pool that fit in the block', ) assert.equal(receipts.length, 2, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) // reset gas Limit @@ -283,14 +274,14 @@ describe('[PendingBlock]', async () => { await setBalance(vm, A.address, BigInt(5000000000000000)) await txPool.add(txA01) await txPool.add(txA02) - const txA03 = LegacyTransaction.fromTxData( + const txA03 = createLegacyTx( { data: '0xFE', // INVALID opcode, uses all gas gasLimit: 10000000, gasPrice: 1000000000, nonce: 2, }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txA03) const pendingBlock = new PendingBlock({ config, txPool, skipHardForkValidation: true }) @@ -305,14 +296,14 @@ describe('[PendingBlock]', async () => { assert.equal( block?.transactions.length, 2, - 'should include txs from pool that fit in the block' + 'should include txs from pool that fit in the block', ) assert.equal(receipts.length, 2, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -331,14 +322,14 @@ describe('[PendingBlock]', async () => { assert.equal( block.transactions.length, 0, - 'should not include tx with sender that has insufficient funds' + 'should not include tx with sender that has insufficient funds', ) assert.equal(receipts.length, 0, 'receipts should match number of transactions') pendingBlock.pruneSetToMax(0) assert.equal( pendingBlock.pendingPayloads.size, 0, - 'should reset the pending payload after build' + 'should reset the pending payload after build', ) }) @@ -354,7 +345,7 @@ describe('[PendingBlock]', async () => { } catch (err: any) { assert.equal( err.message, - 'cannot get iterator head: blockchain has no getTotalDifficulty function' + 'cannot get iterator head: blockchain has no getTotalDifficulty function', ) } }) @@ -378,7 +369,7 @@ describe('[PendingBlock]', async () => { // Create 3 txs with 2 blobs each so that
only 2 of them can be included in a build for (let x = 0; x <= 2; x++) { - const txA01 = BlobEIP4844Transaction.fromTxData( + const txA01 = createBlob4844Tx( { blobVersionedHashes: [ ...blobVersionedHashes, @@ -395,13 +386,13 @@ describe('[PendingBlock]', async () => { to: randomBytes(20), nonce: BigInt(x), }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txA01) } // Add one other normal tx for nonce 3 which should also be not included in the build - const txNorm = FeeMarketEIP1559Transaction.fromTxData( + const txNorm = createFeeMarket1559Tx( { gasLimit: 0xffffffn, maxFeePerGas: 1000000000n, @@ -409,7 +400,7 @@ describe('[PendingBlock]', async () => { to: randomBytes(20), nonce: BigInt(3), }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(txNorm) @@ -456,7 +447,7 @@ describe('[PendingBlock]', async () => { const proofs = blobsToProofs(kzg, blobs, commitments) // create a tx with missing blob data which should be excluded from the build - const missingBlobTx = BlobEIP4844Transaction.fromTxData( + const missingBlobTx = createBlob4844Tx( { blobVersionedHashes, kzgCommitments: commitments, @@ -468,7 +459,7 @@ describe('[PendingBlock]', async () => { to: randomBytes(20), nonce: BigInt(0), }, - { common } + { common }, ).sign(A.privateKey) await txPool.add(missingBlobTx) diff --git a/packages/client/test/net/peer/peer.spec.ts b/packages/client/test/net/peer/peer.spec.ts index 54ff58a5b6..c703effd5b 100644 --- a/packages/client/test/net/peer/peer.spec.ts +++ b/packages/client/test/net/peer/peer.spec.ts @@ -28,13 +28,13 @@ describe('[Peer]', () => { assert.equal( peer.toString(true), 'id=0123456789abcdef address=address0 transport=transport0 inbound=true', - 'correct full id string' + 'correct full id string', ) peer.inbound = false assert.equal( peer.toString(), 'id=01234567 address=address0 transport=transport0 inbound=false', - 'correct short id string' + 'correct short id string', ) }) }) diff --git a/packages/client/test/net/peer/rlpxpeer.spec.ts b/packages/client/test/net/peer/rlpxpeer.spec.ts index e664039871..65525ffe18 100644 --- a/packages/client/test/net/peer/rlpxpeer.spec.ts +++ b/packages/client/test/net/peer/rlpxpeer.spec.ts @@ -52,7 +52,7 @@ describe('[RlpxPeer]', async () => { { name: 'les', version: 4, length: 23 }, { name: 'snap', version: 1, length: 8 }, ], - 'correct capabilities' + 'correct capabilities', ) }) @@ -95,10 +95,10 @@ describe('[RlpxPeer]', async () => { }) peer.config.events.on(Event.PEER_CONNECTED, (peer) => - assert.equal(peer.id, 'abcdef0123', 'got connected') + assert.equal(peer.id, 'abcdef0123', 'got connected'), ) peer.config.events.on(Event.PEER_DISCONNECTED, (rlpxPeer) => - assert.equal(rlpxPeer.pooled, false, 'got disconnected') + assert.equal(rlpxPeer.pooled, false, 'got disconnected'), ) peer.rlpx!.events.emit('peer:error', rlpxPeer, new Error('err0')) peer.rlpx!.events.emit('peer:added', rlpxPeer) diff --git a/packages/client/test/net/peerpool.spec.ts b/packages/client/test/net/peerpool.spec.ts index 70339910ec..a066c3531d 100644 --- a/packages/client/test/net/peerpool.spec.ts +++ b/packages/client/test/net/peerpool.spec.ts @@ -82,7 +82,7 @@ describe('should get idle peers', () => { assert.equal( pool.idle((p: any) => p.id > 1), peers[1], - 'correct idle peer with filter' + 'correct idle peer with filter', ) }) }) diff --git a/packages/client/test/net/protocol/ethprotocol.spec.ts b/packages/client/test/net/protocol/ethprotocol.spec.ts index 5801a62a05..e5c40fb060 100644 --- 
a/packages/client/test/net/protocol/ethprotocol.spec.ts +++ b/packages/client/test/net/protocol/ethprotocol.spec.ts @@ -1,7 +1,13 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { Common, Chain as CommonChain, Hardfork } from '@ethereumjs/common' -import { FeeMarketEIP1559Transaction, TransactionFactory, TransactionType } from '@ethereumjs/tx' -import { Address, bigIntToBytes, bytesToBigInt, hexToBytes, randomBytes } from '@ethereumjs/util' +import { createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Holesky } from '@ethereumjs/common' +import { TransactionType, createFeeMarket1559Tx, createTxFromTxData } from '@ethereumjs/tx' +import { + bigIntToBytes, + bytesToBigInt, + createZeroAddress, + hexToBytes, + randomBytes, +} from '@ethereumjs/util' import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' @@ -32,7 +38,7 @@ describe('[EthProtocol]', () => { const config = new Config({ accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) - Object.defineProperty(chain, 'networkId', { + Object.defineProperty(chain, 'chainId', { get: () => { return BigInt(1) }, @@ -53,26 +59,26 @@ describe('[EthProtocol]', () => { assert.deepEqual( p.encodeStatus(), { - networkId: hexToBytes('0x01'), + chainId: hexToBytes('0x01'), td: hexToBytes('0x64'), bestHash: '0xaa', genesisHash: '0xbb', latestBlock: hexToBytes('0x0a'), }, - 'encode status' + 'encode status', ) const status = p.decodeStatus({ - networkId: [0x01], + chainId: [0x01], td: hexToBytes('0x64'), bestHash: '0xaa', genesisHash: '0xbb', }) assert.ok( - status.networkId === BigInt(1) && + status.chainId === BigInt(1) && status.td === BigInt(100) && status.bestHash === '0xaa' && status.genesisHash === '0xbb', - 'decode status' + 'decode status', ) }) @@ -81,7 +87,7 @@ describe('[EthProtocol]', () => { const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) const td = BigInt(100) - const block = createBlockFromBlockData({}, { common: config.chainCommon }) + const block = createBlock({}, { common: config.chainCommon }) const res = p.decode(p.messages.filter((message) => message.name === 'NewBlock')[0], [ block.raw(), bigIntToBytes(td), @@ -98,7 +104,7 @@ describe('[EthProtocol]', () => { const config = new Config({ accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) - const block = createBlockFromBlockData({}) + const block = createBlock({}) const res = p.decode(p.messages.filter((message) => message.name === 'GetReceipts')[0], [ bigIntToBytes(1n), [block.hash()], @@ -122,14 +128,14 @@ describe('[EthProtocol]', () => { const p = new EthProtocol({ config, chain }) chain.config.chainCommon.setHardfork(Hardfork.London) - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { maxFeePerGas: 10, maxPriorityFeePerGas: 8, gasLimit: 100, value: 6, }, - { common: config.chainCommon } + { common: config.chainCommon }, ) const res = p.encode(p.messages.filter((message) => message.name === 'PooledTransactions')[0], { reqId: BigInt(1), @@ -184,10 +190,10 @@ describe('[EthProtocol]', () => { assert.equal(bytesToBigInt(res[0]), BigInt(1), 'correctly encoded reqId') const expectedSerializedReceipts = [ hexToBytes( - 
'0x02f9016d0164b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f866f864940000000000000000000000000000000000000000f842a00000000000000000000000000000000000000000000000000000000000000000a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000' + '0x02f9016d0164b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f866f864940000000000000000000000000000000000000000f842a00000000000000000000000000000000000000000000000000000000000000000a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000', ), hexToBytes( - '0xf9016f808203e8b9010001010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101f866f864940101010101010101010101010101010101010101f842a00101010101010101010101010101010101010101010101010101010101010101a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000' + '0xf9016f808203e8b9010001010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101f866f864940101010101010101010101010101010101010101f842a00101010101010101010101010101010101010101010101010101010101010101a001010101010101010101010101010101010101010101010101010101010101018a00000000000000000000', ), ] assert.deepEqual(res[1], expectedSerializedReceipts, 'correctly encoded receipts') @@ -206,7 +212,7 @@ describe('[EthProtocol]', () => { const kzg = await loadKZG() const config = new Config({ common: new Common({ - chain: CommonChain.Holesky, + chain: Holesky, hardfork: Hardfork.Paris, eips: [4895, 4844], customCrypto: { @@ -220,12 +226,16 @@ describe('[EthProtocol]', () => { const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) - const legacyTx = TransactionFactory.fromTxData({ type: 0 }, { common: 
config.chainCommon }) - const eip2929Tx = TransactionFactory.fromTxData({ type: 1 }, { common: config.chainCommon }) - const eip1559Tx = TransactionFactory.fromTxData({ type: 2 }, { common: config.chainCommon }) - const blobTx = TransactionFactory.fromTxData( - { type: 3, to: Address.zero(), blobVersionedHashes: [hexToBytes(`0x01${'00'.repeat(31)}`)] }, - { common: config.chainCommon } + const legacyTx = createTxFromTxData({ type: 0 }, { common: config.chainCommon }) + const eip2929Tx = createTxFromTxData({ type: 1 }, { common: config.chainCommon }) + const eip1559Tx = createTxFromTxData({ type: 2 }, { common: config.chainCommon }) + const blobTx = createTxFromTxData( + { + type: 3, + to: createZeroAddress(), + blobVersionedHashes: [hexToBytes(`0x01${'00'.repeat(31)}`)], + }, + { common: config.chainCommon }, ) const res = p.encode(p.messages.filter((message) => message.name === 'Transactions')[0], [ legacyTx, @@ -239,7 +249,7 @@ describe('[EthProtocol]', () => { const decoded = p.decode( p.messages.filter((message) => message.name === 'Transactions')[0], - res + res, ) assert.deepEqual(decoded[0].type, legacyTx.type, 'decoded legacy tx correctly') assert.deepEqual(decoded[1].type, eip2929Tx.type, 'decoded eip2929 tx correctly') @@ -255,27 +265,27 @@ describe('[EthProtocol]', () => { }) const chain = await Chain.create({ config }) const p = new EthProtocol({ config, chain }) - const fakeTx = TransactionFactory.fromTxData({}).sign(randomBytes(32)) + const fakeTx = createTxFromTxData({}).sign(randomBytes(32)) const fakeHash = fakeTx.hash() const encoded = p.encode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - [fakeHash] + [fakeHash], ) const encodedEth68 = p.encode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - [[fakeTx.type], [fakeTx.serialize().byteLength], [fakeHash]] + [[fakeTx.type], [fakeTx.serialize().byteLength], [fakeHash]], ) assert.deepEqual(encoded[0], fakeHash, 'encoded hash correctly with pre-eth/68 format') assert.deepEqual(encodedEth68[2][0], fakeHash, 'encoded hash correctly with eth/68 format') const decoded = p.decode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - encoded + encoded, ) const decodedEth68 = p.decode( p.messages.filter((message) => message.name === 'NewPooledTransactionHashes')[0], - encodedEth68 + encodedEth68, ) assert.deepEqual(decoded[0], fakeHash, 'decoded hash correctly with pre-eth/68 format') assert.deepEqual(decodedEth68[2][0], fakeHash, 'decoded hash correctly with eth/68 format') diff --git a/packages/client/test/net/protocol/lesprotocol.spec.ts b/packages/client/test/net/protocol/lesprotocol.spec.ts index 7d9719cb5a..e142c9cdf3 100644 --- a/packages/client/test/net/protocol/lesprotocol.spec.ts +++ b/packages/client/test/net/protocol/lesprotocol.spec.ts @@ -33,7 +33,7 @@ describe('[LesProtocol]', () => { mrc: { GetBlockHeaders: { base: 10, req: 10 } }, }) const p = new LesProtocol({ config, chain, flow }) - Object.defineProperty(chain, 'networkId', { + Object.defineProperty(chain, 'chainId', { get: () => { return BigInt(1) }, @@ -65,7 +65,7 @@ describe('[LesProtocol]', () => { }) let status = p.encodeStatus() assert.ok( - bytesToHex(status.networkId) === '0x01' && + bytesToHex(status.chainId) === '0x01' && bytesToHex(status.headTd) === '0x64' && status.headHash === '0xaa' && bytesToHex(status.headNum) === '0x64' && @@ -82,12 +82,12 @@ describe('[LesProtocol]', () => { bytesToHex(status['flowControl/MRC'][0][0]) === '0x02' && 
bytesToHex(status['flowControl/MRC'][0][1]) === '0x0a' && bytesToHex(status['flowControl/MRC'][0][2]) === '0x0a', - 'encode status' + 'encode status', ) - status = { ...status, networkId: [0x01] } + status = { ...status, chainId: [0x01] } status = p.decodeStatus(status) assert.ok( - status.networkId === BigInt(1) && + status.chainId === BigInt(1) && status.headTd === BigInt(100) && status.headHash === '0xaa' && status.headNum === BigInt(100) && @@ -105,7 +105,7 @@ describe('[LesProtocol]', () => { status.mrc['2'].req === 10 && status.mrc.GetBlockHeaders.base === 10 && status.mrc.GetBlockHeaders.req === 10, - 'decode status' + 'decode status', ) }) }) diff --git a/packages/client/test/net/protocol/snapprotocol.spec.ts b/packages/client/test/net/protocol/snapprotocol.spec.ts index bb7eef5e8a..1fe6ac8759 100644 --- a/packages/client/test/net/protocol/snapprotocol.spec.ts +++ b/packages/client/test/net/protocol/snapprotocol.spec.ts @@ -1,5 +1,5 @@ import { RLP } from '@ethereumjs/rlp' -import { Trie, decodeNode } from '@ethereumjs/trie' +import { decodeNode, verifyTrieRangeProof } from '@ethereumjs/trie' import { KECCAK256_NULL, KECCAK256_RLP, @@ -57,34 +57,34 @@ describe('[SnapProtocol]', () => { origin, limit, bytes, - } + }, ) assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok( JSON.stringify(payload[1]) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly encoded root' + 'correctly encoded root', ) assert.ok(JSON.stringify(payload[2]) === JSON.stringify(origin), 'correctly encoded origin') assert.ok(JSON.stringify(payload[3]) === JSON.stringify(limit), 'correctly encoded limit') assert.ok( JSON.stringify(payload[4]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetAccountRange')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') assert.ok( JSON.stringify(res.root) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly decoded root' + 'correctly decoded root', ) assert.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') assert.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') @@ -100,7 +100,7 @@ describe('[SnapProtocol]', () => { const data = RLP.decode(hexToBytes(contractAccountRangeRLP)) as unknown const { reqId, accounts, proof } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - data + data, ) assert.ok(reqId === BigInt(1), 'reqId should be 1') assert.ok(accounts.length === 2, 'accounts should be 2') @@ -114,23 +114,23 @@ describe('[SnapProtocol]', () => { assert.ok( bytesToHex(secondAccount[2]) === '0x3dc6d3cfdc6210b8591ea852961d880821298c7891dea399e02d87550af9d40e', - 'storageHash of the second account' + 'storageHash of the second account', ) assert.ok( bytesToHex(secondAccount[3]) === '0xe68fe0bb7c4a483affd0f19cc2b989105242bd6b256c6de3afd738f8acd80c66', - 'codeHash of the second account' + 'codeHash of the second account', ) const payload = RLP.encode( p.encode(p.messages.filter((message) => message.name === 'AccountRange')[0], { reqId, accounts, proof, - }) + }), ) assert.ok( contractAccountRangeRLP === bytesToHex(payload), - 'Re-encoded payload should match with original' + 'Re-encoded payload should match with original', ) }) @@ -144,7 +144,7 @@ 
describe('[SnapProtocol]', () => { const fullData = pFull.decode( pFull.messages.filter((message) => message.name === 'AccountRange')[0], - resData + resData, ) const { accounts: accountsFull } = fullData assert.ok(accountsFull.length === 3, '3 accounts should be decoded in accountsFull') @@ -153,14 +153,14 @@ describe('[SnapProtocol]', () => { assert.ok(equalsBytes(accountFull[3], KECCAK256_NULL), 'codeHash should be KECCAK256_NULL') // Lets encode fullData as it should be encoded in slim format and upon decoding - // we shpuld get slim format + // we should get slim format const slimPayload = pFull.encode( pFull.messages.filter((message) => message.name === 'AccountRange')[0], - fullData + fullData, ) const { accounts: accountsSlim } = pSlim.decode( pSlim.messages.filter((message) => message.name === 'AccountRange')[0], - slimPayload + slimPayload, ) // 3 accounts are there in accountRangeRLP @@ -179,19 +179,19 @@ describe('[SnapProtocol]', () => { const reqData = RLP.decode(hexToBytes(getAccountRangeRLP)) const { root: stateRoot } = p.decode( p.messages.filter((message) => message.name === 'GetAccountRange')[0], - reqData + reqData, ) // accountRangeRLP is the corresponding response to getAccountRangeRLP const resData = RLP.decode(hexToBytes(accountRangeRLP)) const { accounts, proof } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - resData + resData, ) try { const keys = accounts.map((acc: any) => acc.hash) const values = accounts.map((acc: any) => accountBodyToRLP(acc.body)) - await Trie.verifyRangeProof(stateRoot, keys[0], keys[keys.length - 1], keys, values, proof, { + await verifyTrieRangeProof(stateRoot, keys[0], keys[keys.length - 1], keys, values, proof, { useKeyHashingFunction: keccak256, }) } catch (e) { @@ -199,7 +199,7 @@ describe('[SnapProtocol]', () => { } assert.ok( equalsBytes(keccak256(proof[0]), stateRoot), - 'Proof should link to the requested stateRoot' + 'Proof should link to the requested stateRoot', ) }) @@ -226,38 +226,38 @@ describe('[SnapProtocol]', () => { origin, limit, bytes, - } + }, ) assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok( JSON.stringify(payload[1]) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly encoded root' + 'correctly encoded root', ) assert.ok(JSON.stringify(payload[2]) === JSON.stringify(accounts), 'correctly encoded accounts') assert.ok(JSON.stringify(payload[3]) === JSON.stringify(origin), 'correctly encoded origin') assert.ok(JSON.stringify(payload[4]) === JSON.stringify(limit), 'correctly encoded limit') assert.ok( JSON.stringify(payload[5]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetStorageRanges')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') assert.ok( JSON.stringify(res.root) === JSON.stringify(setLengthLeft(root, 32)), - 'correctly decoded root' + 'correctly decoded root', ) assert.ok( JSON.stringify(res.accounts) === JSON.stringify(accounts), - 'correctly decoded accounts' + 'correctly decoded accounts', ) assert.ok(JSON.stringify(res.origin) === JSON.stringify(origin), 'correctly decoded origin') assert.ok(JSON.stringify(res.limit) === JSON.stringify(limit), 'correctly decoded limit') @@ -274,14 +274,14 @@ describe('[SnapProtocol]', () => { const data = 
RLP.decode(hexToBytes(storageRangesRLP)) as unknown const { reqId, slots, proof } = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - data + data, ) assert.ok(reqId === BigInt(1), 'correctly decoded reqId') assert.ok(slots.length === 1 && slots[0].length === 3, 'correctly decoded slots') const { hash, body } = slots[0][2] assert.ok( bytesToHex(hash) === '0x60264186ee63f748d340388f07b244d96d007fff5cbc397bbd69f8747c421f79', - 'Slot 3 key' + 'Slot 3 key', ) assert.ok(bytesToHex(body) === '0x8462b66ae7', 'Slot 3 value') @@ -290,11 +290,11 @@ describe('[SnapProtocol]', () => { reqId, slots, proof, - }) + }), ) assert.ok( storageRangesRLP === bytesToHex(payload), - 'Re-encoded payload should match with original' + 'Re-encoded payload should match with original', ) }) @@ -307,7 +307,7 @@ describe('[SnapProtocol]', () => { const accountsData = RLP.decode(hexToBytes(contractAccountRangeRLP)) const { accounts } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - accountsData + accountsData, ) const lastAccount = accounts[accounts.length - 1] @@ -315,7 +315,7 @@ describe('[SnapProtocol]', () => { const data = RLP.decode(hexToBytes(storageRangesRLP)) const { proof, slots } = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - data + data, ) // storageRangesRLP response is to the lastAccount's slots so slots[0] are the slots of // lastAccount @@ -324,7 +324,7 @@ describe('[SnapProtocol]', () => { try { const keys = lastAccountSlots.map((acc: any) => acc.hash) const values = lastAccountSlots.map((acc: any) => acc.body) - await Trie.verifyRangeProof( + await verifyTrieRangeProof( lastAccountStorageRoot, keys[0], keys[keys.length - 1], @@ -333,14 +333,14 @@ describe('[SnapProtocol]', () => { proof, { useKeyHashingFunction: keccak256, - } + }, ) } catch (e) { assert.fail(`StorageRange proof verification failed with message=${(e as Error).message}`) } assert.ok( equalsBytes(keccak256(proof[0]), lastAccountStorageRoot), - 'Proof should link to the accounts storageRoot' + 'Proof should link to the accounts storageRoot', ) }) @@ -363,18 +363,18 @@ describe('[SnapProtocol]', () => { assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(BigInt(1))), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok(JSON.stringify(payload[1]) === JSON.stringify(hashes), 'correctly encoded hashes') assert.ok( JSON.stringify(payload[2]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetByteCodes')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') @@ -391,7 +391,7 @@ describe('[SnapProtocol]', () => { const codesRes = RLP.decode(hexToBytes(byteCodesRLP)) const { reqId, codes } = p.decode( p.messages.filter((message) => message.name === 'ByteCodes')[0], - codesRes + codesRes, ) assert.ok(reqId === BigInt(1), 'reqId should be 1') @@ -401,7 +401,7 @@ describe('[SnapProtocol]', () => { p.encode(p.messages.filter((message) => message.name === 'ByteCodes')[0], { reqId, codes, - }) + }), ) assert.ok(byteCodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') }) @@ -415,13 +415,13 @@ describe('[SnapProtocol]', () => { const codesReq = RLP.decode(hexToBytes(getByteCodesRLP)) const { hashes } = p.decode( p.messages.filter((message) => message.name === 'GetByteCodes')[0], - codesReq + 
codesReq, ) const codeHash = hashes[0] const codesRes = RLP.decode(hexToBytes(byteCodesRLP)) const { codes } = p.decode( p.messages.filter((message) => message.name === 'ByteCodes')[0], - codesRes + codesRes, ) const code = codes[0] assert.ok(equalsBytes(keccak256(code), codeHash), 'Code should match the requested codeHash') @@ -446,19 +446,19 @@ describe('[SnapProtocol]', () => { assert.ok( JSON.stringify(payload[0]) === JSON.stringify(bigIntToBytes(reqId)), - 'correctly encoded reqId' + 'correctly encoded reqId', ) assert.ok(JSON.stringify(payload[1]) === JSON.stringify(root), 'correctly encoded root') assert.ok(JSON.stringify(payload[2]) === JSON.stringify(paths), 'correctly encoded paths') assert.ok( JSON.stringify(payload[3]) === JSON.stringify(bigIntToBytes(bytes)), - 'correctly encoded bytes' + 'correctly encoded bytes', ) assert.ok(payload) const res = p.decode( p.messages.filter((message) => message.name === 'GetTrieNodes')[0], - payload + payload, ) assert.ok(JSON.stringify(res.reqId) === JSON.stringify(reqId), 'correctly decoded reqId') @@ -476,7 +476,7 @@ describe('[SnapProtocol]', () => { const nodesRes = RLP.decode(hexToBytes(trieNodesRLP)) as unknown const { reqId, nodes } = p.decode( p.messages.filter((message) => message.name === 'TrieNodes')[0], - nodesRes + nodesRes, ) assert.ok(reqId === BigInt(1), 'reqId should be 1') @@ -494,7 +494,7 @@ describe('[SnapProtocol]', () => { p.encode(p.messages.filter((message) => message.name === 'TrieNodes')[0], { reqId, nodes, - }) + }), ) assert.ok(trieNodesRLP === bytesToHex(payload), 'Re-encoded payload should match with original') }) diff --git a/packages/client/test/net/server/rlpxserver.spec.ts b/packages/client/test/net/server/rlpxserver.spec.ts index f00b0cd07a..4db0ab7e59 100644 --- a/packages/client/test/net/server/rlpxserver.spec.ts +++ b/packages/client/test/net/server/rlpxserver.spec.ts @@ -46,7 +46,7 @@ vi.doMock('@ethereumjs/devp2p', () => { } }) -const { RlpxServer } = await import('../../../src/net/server/rlpxserver') +const { RlpxServer } = await import('../../../src/net/server/rlpxserver.js') describe('[RlpxServer]', async () => { it('should initialize correctly', async () => { const config = new Config({ accountCache: 10000, storageCache: 1000 }) @@ -60,7 +60,7 @@ describe('[RlpxServer]', async () => { assert.deepEqual( server.bootnodes, [multiaddr('/ip4/10.0.0.1/tcp/1234'), multiaddr('/ip4/10.0.0.2/tcp/1234')], - 'bootnodes split' + 'bootnodes split', ) }) @@ -89,7 +89,7 @@ describe('[RlpxServer]', async () => { } server.rlpx = { destroy: vi.fn() } server.config.events.on(Event.PEER_ERROR, (err: any) => - assert.equal(err.message, 'err0', 'got error') + assert.equal(err.message, 'err0', 'got error'), ) await server.start() expect((server as any).initDpt).toHaveBeenCalled() @@ -172,7 +172,7 @@ describe('should return rlpx server info with ip4 as default', async () => { listenAddr: '0.0.0.0:30303', ports: { discovery: 30303, listener: 30303 }, }, - 'get nodeInfo' + 'get nodeInfo', ) }) await server.stop() @@ -227,7 +227,7 @@ describe('should return rlpx server info with ip6', async () => { listenAddr: '[::]:30303', ports: { discovery: 30303, listener: 30303 }, }, - 'get nodeInfo' + 'get nodeInfo', ) }) await server.stop() @@ -278,7 +278,7 @@ describe('should init dpt', async () => { config.events.on(Event.SERVER_ERROR, (err) => it('should throw', async () => { assert.equal(err.message, 'err0', 'got error') - }) + }), ) server['dpt']?.events.emit('error', new Error('err0')) }) @@ -323,22 +323,22 @@ describe('should 
init rlpx', async () => { config.events.on(Event.PEER_CONNECTED, (peer) => it('should connect', async () => { assert.ok(peer instanceof RlpxPeer, 'connected') - }) + }), ) config.events.on(Event.PEER_DISCONNECTED, (peer) => it('should disconnect', async () => { assert.equal(peer.id, '01', 'disconnected') - }) + }), ) config.events.on(Event.SERVER_ERROR, (err) => it('should throw error', async () => { assert.equal(err.message, 'err0', 'got error') - }) + }), ) config.events.on(Event.SERVER_LISTENING, (info) => it('should listen', async () => { assert.deepEqual(info, { transport: 'rlpx', url: 'enode://ff@0.0.0.0:30303' }, 'listening') - }) + }), ) server.rlpx!.events.emit('peer:added', rlpxPeer) ;(server as any).peers.set('01', { id: '01' } as any) diff --git a/packages/client/test/rpc/admin/peers.spec.ts b/packages/client/test/rpc/admin/peers.spec.ts new file mode 100644 index 0000000000..c7bf6f40f8 --- /dev/null +++ b/packages/client/test/rpc/admin/peers.spec.ts @@ -0,0 +1,38 @@ +import { randomBytes } from 'crypto' +import { assert, describe, it } from 'vitest' + +import { createClient, createManager, getRpcClient, startRPC } from '../helpers.js' + +const method = 'admin_peers' + +describe(method, () => { + it('works', async () => { + const manager = createManager(await createClient({ opened: true, noPeers: true })) + const rpc = getRpcClient(startRPC(manager.getMethods())) + + console.log(manager['_client'].services[0].pool) + //@ts-ignore + manager['_client'].services[0].pool.peers = [ + { + id: 'abcd', + eth: { + versions: ['68'], + status: { + td: 1n, + bestHash: randomBytes(32), + }, + }, + rlpxPeer: { + _hello: { + clientId: 'fakeClient', + }, + }, + address: '127.0.0.1:8545', + }, + ] + const res = await rpc.request(method, []) + const { result } = res + console.log(res) + assert.notEqual(result, undefined, 'admin_peers returns a value') + }) +}) diff --git a/packages/client/test/rpc/debug/getRawBlock.spec.ts b/packages/client/test/rpc/debug/getRawBlock.spec.ts index b4d32790e1..ccf4310b56 100644 --- a/packages/client/test/rpc/debug/getRawBlock.spec.ts +++ b/packages/client/test/rpc/debug/getRawBlock.spec.ts @@ -1,7 +1,7 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { createCustomCommon } from '@ethereumjs/common' -import { BlobEIP4844Transaction, LegacyTransaction } from '@ethereumjs/tx' -import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { createBlock, createBlockHeader } from '@ethereumjs/block' +import { Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createBlob4844Tx, createLegacyTx } from '@ethereumjs/tx' +import { bytesToHex, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' @@ -10,14 +10,14 @@ import { createClient, createManager, dummy, getRpcClient, startRPC } from '../h const kzg = await loadKZG() -const common = createCustomCommon({ chainId: 1 }, { customCrypto: { kzg } }) +const common = createCustomCommon({ chainId: 1 }, Mainnet, { customCrypto: { kzg } }) common.setHardfork('cancun') -const mockedTx1 = LegacyTransaction.fromTxData({}).sign(dummy.privKey) -const mockedTx2 = LegacyTransaction.fromTxData({ nonce: 1 }).sign(dummy.privKey) -const mockedBlobTx3 = BlobEIP4844Transaction.fromTxData( - { nonce: 2, blobsData: ['0x1234'], to: Address.zero() }, - { common } +const mockedTx1 = createLegacyTx({}).sign(dummy.privKey) +const mockedTx2 = createLegacyTx({ nonce: 1 }).sign(dummy.privKey) +const 
mockedBlobTx3 = createBlob4844Tx( + { nonce: 2, blobsData: ['0x1234'], to: createZeroAddress() }, + { common }, ).sign(dummy.privKey) const blockHash = hexToBytes('0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5') const transactions = [mockedTx1] @@ -28,28 +28,27 @@ const block = { header: { number: BigInt(1), hash: () => blockHash, - serialize: () => BlockHeader.fromHeaderData({ number: 1 }).serialize(), + serialize: () => createBlockHeader({ number: 1 }).serialize(), }, toJSON: () => ({ - ...createBlockFromBlockData({ header: { number: 1 } }).toJSON(), + ...createBlock({ header: { number: 1 } }).toJSON(), transactions: transactions2, }), - serialize: () => - createBlockFromBlockData({ header: { number: 1 }, transactions: transactions2 }).serialize(), + serialize: () => createBlock({ header: { number: 1 }, transactions: transactions2 }).serialize(), transactions: transactions2, uncleHeaders: [], } const genesisBlockHash = hexToBytes( - '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5' + '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5', ) const genesisBlock = { hash: () => genesisBlockHash, header: { number: BigInt(0), }, - toJSON: () => ({ ...createBlockFromBlockData({ header: { number: 0 } }).toJSON(), transactions }), - serialize: () => createBlockFromBlockData({ header: { number: 0 }, transactions }).serialize(), + toJSON: () => ({ ...createBlock({ header: { number: 0 } }).toJSON(), transactions }), + serialize: () => createBlock({ header: { number: 0 }, transactions }).serialize(), transactions, uncleHeaders: [], } @@ -83,7 +82,7 @@ describe(method, async () => { assert.equal( res.result, bytesToHex(genesisBlock.serialize()), - 'should return the genesis block as earliest' + 'should return the genesis block as earliest', ) }) @@ -118,20 +117,20 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"', + ), ) }) }) describe('call with block with blob txs', () => { it('retrieves a block with a blob tx in it', async () => { - const genesisBlock = createBlockFromBlockData({ header: { number: 0 } }) - const block1 = createBlockFromBlockData( + const genesisBlock = createBlock({ header: { number: 0 } }) + const block1 = createBlock( { header: { number: 1, parentHash: genesisBlock.header.hash() }, transactions: [mockedBlobTx3], }, - { common } + { common }, ) const manager = createManager(await createClient({ chain: createChain(block1 as any) })) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -140,7 +139,7 @@ describe('call with block with blob txs', () => { assert.equal( res.result, bytesToHex(block1.serialize()), - 'block body contains a transaction with the blobVersionedHashes field' + 'block body contains a transaction with the blobVersionedHashes field', ) }) }) diff --git a/packages/client/test/rpc/debug/getRawHeader.spec.ts b/packages/client/test/rpc/debug/getRawHeader.spec.ts index 409423edad..cc3065e437 100644 --- a/packages/client/test/rpc/debug/getRawHeader.spec.ts +++ b/packages/client/test/rpc/debug/getRawHeader.spec.ts @@ -1,7 +1,7 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { createCustomCommon } from '@ethereumjs/common' -import { 
BlobEIP4844Transaction, LegacyTransaction } from '@ethereumjs/tx' -import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { createBlock, createBlockHeader } from '@ethereumjs/block' +import { Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createBlob4844Tx, createLegacyTx } from '@ethereumjs/tx' +import { bytesToHex, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' @@ -10,14 +10,14 @@ import { createClient, createManager, dummy, getRpcClient, startRPC } from '../h const kzg = await loadKZG() -const common = createCustomCommon({ chainId: 1 }, { customCrypto: { kzg } }) +const common = createCustomCommon({ chainId: 1 }, Mainnet, { customCrypto: { kzg } }) common.setHardfork('cancun') -const mockedTx1 = LegacyTransaction.fromTxData({}).sign(dummy.privKey) -const mockedTx2 = LegacyTransaction.fromTxData({ nonce: 1 }).sign(dummy.privKey) -const mockedBlobTx3 = BlobEIP4844Transaction.fromTxData( - { nonce: 2, blobsData: ['0x1234'], to: Address.zero() }, - { common } +const mockedTx1 = createLegacyTx({}).sign(dummy.privKey) +const mockedTx2 = createLegacyTx({ nonce: 1 }).sign(dummy.privKey) +const mockedBlobTx3 = createBlob4844Tx( + { nonce: 2, blobsData: ['0x1234'], to: createZeroAddress() }, + { common }, ).sign(dummy.privKey) const blockHash = hexToBytes('0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5') const transactions = [mockedTx1] @@ -28,29 +28,28 @@ const block = { header: { number: BigInt(1), hash: () => blockHash, - serialize: () => BlockHeader.fromHeaderData({ number: 1 }).serialize(), + serialize: () => createBlockHeader({ number: 1 }).serialize(), }, toJSON: () => ({ - ...createBlockFromBlockData({ header: { number: 1 } }).toJSON(), + ...createBlock({ header: { number: 1 } }).toJSON(), transactions: transactions2, }), - serialize: () => - createBlockFromBlockData({ header: { number: 1 }, transactions: transactions2 }).serialize(), + serialize: () => createBlock({ header: { number: 1 }, transactions: transactions2 }).serialize(), transactions: transactions2, uncleHeaders: [], } const genesisBlockHash = hexToBytes( - '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5' + '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5', ) const genesisBlock = { hash: () => genesisBlockHash, header: { number: BigInt(0), - serialize: () => BlockHeader.fromHeaderData({ number: 0 }).serialize(), + serialize: () => createBlockHeader({ number: 0 }).serialize(), }, - toJSON: () => ({ ...createBlockFromBlockData({ header: { number: 0 } }).toJSON(), transactions }), - serialize: () => createBlockFromBlockData({ header: { number: 0 }, transactions }).serialize(), + toJSON: () => ({ ...createBlock({ header: { number: 0 } }).toJSON(), transactions }), + serialize: () => createBlock({ header: { number: 0 }, transactions }).serialize(), transactions, uncleHeaders: [], } @@ -76,7 +75,7 @@ describe(method, async () => { assert.equal( res.result, bytesToHex(genesisBlock.header.serialize()), - 'should return a valid block' + 'should return a valid block', ) }) @@ -88,7 +87,7 @@ describe(method, async () => { assert.equal( res.result, bytesToHex(genesisBlock.header.serialize()), - 'should return the genesis block as earliest' + 'should return the genesis block as earliest', ) }) @@ -123,20 +122,20 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'invalid argument 0: block 
option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"', + ), ) }) }) describe('call with block with blob txs', () => { it('retrieves a block with a blob tx in it', async () => { - const genesisBlock = createBlockFromBlockData({ header: { number: 0 } }) - const block1 = createBlockFromBlockData( + const genesisBlock = createBlock({ header: { number: 0 } }) + const block1 = createBlock( { header: { number: 1, parentHash: genesisBlock.header.hash() }, transactions: [mockedBlobTx3], }, - { common } + { common }, ) const manager = createManager(await createClient({ chain: createChain(block1 as any) })) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -145,7 +144,7 @@ describe('call with block with blob txs', () => { assert.equal( res.result, bytesToHex(block1.header.serialize()), - 'block body contains a transaction with the blobVersionedHashes field' + 'block body contains a transaction with the blobVersionedHashes field', ) }) }) diff --git a/packages/client/test/rpc/debug/getRawReceipts.spec.ts b/packages/client/test/rpc/debug/getRawReceipts.spec.ts index 87159a06cc..ef641c7614 100644 --- a/packages/client/test/rpc/debug/getRawReceipts.spec.ts +++ b/packages/client/test/rpc/debug/getRawReceipts.spec.ts @@ -1,9 +1,5 @@ import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { - BlobEIP4844Transaction, - FeeMarketEIP1559Transaction, - LegacyTransaction, -} from '@ethereumjs/tx' +import { createBlob4844Tx, createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { bigIntToHex, blobsToCommitments, @@ -36,13 +32,13 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain(pow, 'pow') const rpc = getRpcClient(server) // construct tx - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx]) const res0 = await rpc.request(method, [bytesToHex(tx.hash())]) @@ -60,18 +56,18 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 2000000, maxFeePerGas: 975000000, maxPriorityFeePerGas: 10, to: '0x1230000000000000000000000000000000000321', }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx]) @@ -125,7 +121,7 @@ describe(method, () => { const commitments = blobsToCommitments(kzg, blobs) const blobVersionedHashes = commitmentsToVersionedHashes(commitments) const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -138,7 +134,7 @@ describe(method, () => { to: randomBytes(20), nonce: 0n, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/debug/getRawTransaction.spec.ts b/packages/client/test/rpc/debug/getRawTransaction.spec.ts index 8811fec759..aae404c711 100644 --- 
a/packages/client/test/rpc/debug/getRawTransaction.spec.ts +++ b/packages/client/test/rpc/debug/getRawTransaction.spec.ts @@ -1,4 +1,4 @@ -import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' +import { createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -18,9 +18,9 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain(pow, 'pow', { txLookupLimit: 1 }) const rpc = getRpcClient(server) // construct tx - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -38,18 +38,18 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 2000000, maxFeePerGas: 975000000, maxPriorityFeePerGas: 10, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) diff --git a/packages/client/test/rpc/debug/storageRangeAt.spec.ts b/packages/client/test/rpc/debug/storageRangeAt.spec.ts index 6be7b12877..17eb9e18b0 100644 --- a/packages/client/test/rpc/debug/storageRangeAt.spec.ts +++ b/packages/client/test/rpc/debug/storageRangeAt.spec.ts @@ -1,5 +1,6 @@ -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromTxData } from '@ethereumjs/tx' import { bigIntToHex, bytesToBigInt, bytesToHex, hexToBytes, setLengthLeft } from '@ethereumjs/util' +import { buildBlock } from '@ethereumjs/vm' import { keccak256 } from 'ethereum-cryptography/keccak.js' import { assert, beforeEach, describe, it } from 'vitest' @@ -88,7 +89,7 @@ describe(method, () => { txLookupLimit: 0, }) const rpc = getRpcClient(server) - const firstTx = TransactionFactory.fromTxData( + const firstTx = createTxFromTxData( { type: 0x2, gasLimit: 10000000, @@ -97,12 +98,12 @@ describe(method, () => { value: 0, data: storageBytecode, }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) const vmCopy = await execution.vm.shallowCopy() const parentBlock = await chain.getCanonicalHeadBlock() - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { timestamp: parentBlock.header.timestamp + BigInt(1), @@ -115,7 +116,7 @@ describe(method, () => { const result = await blockBuilder.addTransaction(firstTx, { skipHardForkValidation: true }) - const secondTx = TransactionFactory.fromTxData( + const secondTx = createTxFromTxData( { to: result.createdAddress, type: 0x2, @@ -126,12 +127,12 @@ describe(method, () => { nonce: 1, data: updateBytecode, }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) await blockBuilder.addTransaction(secondTx, { skipHardForkValidation: true }) - const thirdTx = TransactionFactory.fromTxData( + const thirdTx = createTxFromTxData( { type: 0x2, gasLimit: 10000000, @@ -141,7 +142,7 @@ describe(method, () => { nonce: 2, data: noStorageBytecode, }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) const thirdResult = await 
blockBuilder.addTransaction(thirdTx, { skipHardForkValidation: true }) @@ -174,27 +175,27 @@ describe(method, () => { assert.equal( storageRange.storage[bytesToHex(firstVariableHash)].value, '0x43', - 'First variable correctly included.' + 'First variable correctly included.', ) const secondVariableHash = keccak256(setLengthLeft(hexToBytes('0x01'), 32)) assert.equal( storageRange.storage[bytesToHex(secondVariableHash)].value, '0x01', - 'Second variable correctly included.' + 'Second variable correctly included.', ) const thirdVariableHash = keccak256(setLengthLeft(hexToBytes('0x02'), 32)) assert.equal( storageRange.storage[bytesToHex(thirdVariableHash)].value, '0x02', - 'Third variable correctly included.' + 'Third variable correctly included.', ) assert.equal( Object.keys(storageRange.storage).length, 3, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) }) @@ -218,7 +219,7 @@ describe(method, () => { assert.equal( storageRange.storage[bytesToHex(hashedKey)].value, '0x42', - 'Old value was correctly reported.' + 'Old value was correctly reported.', ) }) @@ -240,7 +241,7 @@ describe(method, () => { assert.equal( Object.keys(storageRange.storage).length, 2, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) }) @@ -262,7 +263,7 @@ describe(method, () => { assert.equal( Object.keys(storageRange.storage).length, 0, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) assert.isNull(storageRange.nextKey, 'nextKey was correctly set to null.') @@ -290,12 +291,12 @@ describe(method, () => { assert.equal( Object.keys(storageRange.storage).length, 2, - 'Call returned the correct number of key value pairs.' + 'Call returned the correct number of key value pairs.', ) assert.isUndefined( storageRange.storage[bytesToHex(smallestHashedKey)], - 'Smallest hashed key was correctly excluded from result.' + 'Smallest hashed key was correctly excluded from result.', ) }) @@ -398,8 +399,8 @@ describe(method, () => { assert.equal(res.error.code, INTERNAL_ERROR) assert.ok( res.error.message.includes( - 'txIndex cannot be larger than the number of transactions in the block.' 
- ) + 'txIndex cannot be larger than the number of transactions in the block.', + ), ) }) diff --git a/packages/client/test/rpc/debug/traceCall.spec.ts b/packages/client/test/rpc/debug/traceCall.spec.ts index 06ca9d8b13..2680f1f98b 100644 --- a/packages/client/test/rpc/debug/traceCall.spec.ts +++ b/packages/client/test/rpc/debug/traceCall.spec.ts @@ -1,5 +1,5 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { TransactionFactory } from '@ethereumjs/tx' +import { createBlock } from '@ethereumjs/block' +import { createTxFromTxData } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' import { assert, describe, expect, expectTypeOf, it } from 'vitest' @@ -51,7 +51,7 @@ describe('trace a call', async () => { }) const rpc = getRpcClient(server) // construct block with tx - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x2, gasLimit: 0xfffff, @@ -60,12 +60,12 @@ describe('trace a call', async () => { value: 10000, data: '0x60AA', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr } - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) block.transactions[0] = tx await runBlockWithTxs(chain, execution, [tx], true) @@ -103,7 +103,7 @@ describe('trace a call', async () => { }, ], }, - 'produced a correct trace' + 'produced a correct trace', ) }) }) diff --git a/packages/client/test/rpc/debug/traceTransaction.spec.ts b/packages/client/test/rpc/debug/traceTransaction.spec.ts index 135c43e584..a97e182873 100644 --- a/packages/client/test/rpc/debug/traceTransaction.spec.ts +++ b/packages/client/test/rpc/debug/traceTransaction.spec.ts @@ -1,5 +1,5 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { TransactionFactory } from '@ethereumjs/tx' +import { createBlock } from '@ethereumjs/block' +import { createTxFromTxData } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -32,7 +32,7 @@ describe(method, () => { res = await rpc.request(method, ['0xabcd', { tracerConfig: { some: 'value' } }]) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('custom tracers and tracer configurations are not implemented') + res.error.message.includes('custom tracers and tracer configurations are not implemented'), ) res = await rpc.request(method, ['0xabcd', { tracer: 'someTracer' }]) @@ -50,7 +50,7 @@ describe(method, () => { }) const rpc = getRpcClient(server) // construct block with tx - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x2, gasLimit: 0xfffff, @@ -59,12 +59,12 @@ describe(method, () => { value: 10000, data: '0x60AA', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr } - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) block.transactions[0] = tx await runBlockWithTxs(chain, execution, [tx], true) @@ -79,7 +79,7 @@ describe(method, () => { }) const rpc = getRpcClient(server) // construct block with tx - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x2, gasLimit: 0xfffff, @@ -88,12 +88,12 @@ describe(method, () => { value: 10000, data: '0x560FAA', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr } - const block = 
createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) block.transactions[0] = tx await runBlockWithTxs(chain, execution, [tx], true) @@ -108,7 +108,7 @@ describe(method, () => { }) const rpc = getRpcClient(server) // construct block with tx - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x2, gasLimit: 0xfffff, @@ -117,12 +117,12 @@ describe(method, () => { value: 10000, data: '0x604260005260206000F3', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr } - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) block.transactions[0] = tx await runBlockWithTxs(chain, execution, [tx], true) @@ -131,7 +131,7 @@ describe(method, () => { assert.equal( res.result.structLogs[5].memory[0], '0x0000000000000000000000000000000000000000000000000000000000000042', - 'produced a trace with correct memory value returned' + 'produced a trace with correct memory value returned', ) }) @@ -141,7 +141,7 @@ describe(method, () => { }) const rpc = getRpcClient(server) // construct block with tx - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x2, gasLimit: 0xfffff, @@ -150,12 +150,12 @@ describe(method, () => { value: 10000, data: '0x600F6000', }, - { common, freeze: false } + { common, freeze: false }, ).sign(dummy.privKey) tx.getSenderAddress = () => { return dummy.addr } - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) block.transactions[0] = tx await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/engine/CLConnectionManager.spec.ts b/packages/client/test/rpc/engine/CLConnectionManager.spec.ts index 49e0d67ff2..3462e55dca 100644 --- a/packages/client/test/rpc/engine/CLConnectionManager.spec.ts +++ b/packages/client/test/rpc/engine/CLConnectionManager.spec.ts @@ -1,5 +1,5 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { Common, parseGethGenesis } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { createCommonFromGethGenesis, parseGethGenesis } from '@ethereumjs/common' import { assert, describe, expect, it, vi } from 'vitest' import { Config } from '../../../src/index.js' @@ -59,10 +59,7 @@ describe('starts and stops connection manager', () => { describe('hardfork MergeForkBlock', () => { ;(genesisJSON.config as any).mergeForkBlock = 0 const params = parseGethGenesis(genesisJSON, 'post-merge', false) - const common = new Common({ - chain: params.name, - customChains: [params], - }) + const common = createCommonFromGethGenesis(genesisJSON, { chain: params.name }) common.setHardforkBy({ blockNumber: 0 }) const config = new Config({ common }) it('instantiates with config', () => { @@ -76,9 +73,8 @@ describe('postmerge hardfork', () => { ;(genesisJSON.config as any).mergeForkBlock = 10 const params = parseGethGenesis(genesisJSON, 'post-merge', false) - const common = new Common({ + const common = createCommonFromGethGenesis(genesisJSON, { chain: params.name, - customChains: [params], }) common.setHardforkBy({ blockNumber: 11 }) const config = new Config({ common }) @@ -122,7 +118,7 @@ describe('updates stats when a new block is processed', () => { const manager = new CLConnectionManager({ config }) manager.lastForkchoiceUpdate(update) manager.lastNewPayload(payload) - const block = createBlockFromBlockData({ + const block = createBlock({ header: 
{ parentHash: payload.payload.blockHash, number: payload.payload.blockNumber, @@ -149,7 +145,7 @@ describe('updates status correctly', async () => { assert.equal( manager['connectionStatus'], ConnectionStatus.Connected, - 'connection status updated correctly' + 'connection status updated correctly', ) }) }) @@ -165,7 +161,7 @@ describe('updates connection status correctly', async () => { assert.equal( manager['connectionStatus'], ConnectionStatus.Disconnected, - 'should disconnect from CL' + 'should disconnect from CL', ) }) it('should change status to uncertain', () => { @@ -175,7 +171,7 @@ describe('updates connection status correctly', async () => { assert.equal( manager['connectionStatus'], ConnectionStatus.Uncertain, - 'should update status to uncertain' + 'should update status to uncertain', ) }) diff --git a/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts b/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts index 358a1f0c60..e1a2db4656 100644 --- a/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts +++ b/packages/client/test/rpc/engine/exchangeCapabilities.spec.ts @@ -14,7 +14,7 @@ describe(method, () => { assert.equal( res.result.findIndex((el: string) => el === 'engine_exchangeCapabilities'), -1, - 'should not include engine_exchangeCapabilities in response' + 'should not include engine_exchangeCapabilities in response', ) }) }) diff --git a/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts b/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts deleted file mode 100644 index ba87b658b5..0000000000 --- a/packages/client/test/rpc/engine/exchangeTransitionConfigurationV1.spec.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { assert, describe, it } from 'vitest' - -import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' -import genesisJSON from '../../testdata/geth-genesis/post-merge.json' -import { getRpcClient, setupChain } from '../helpers.js' - -const method = 'engine_exchangeTransitionConfigurationV1' - -const validConfig = { - terminalTotalDifficulty: '0x0', - terminalBlockHash: '0x1d93f244823f80efbd9292a0d0d72a2b03df8cd5a9688c6c3779d26a7cc5009c', - terminalBlockNumber: '0x0', -} - -const invalidConfig = { - terminalTotalDifficulty: '0x100', - terminalBlockHash: '0x1d93f244823f80efbd9292a0d0d72a2b03df8cd5a9688c6c3779d26a7cc5009c', - terminalBlockNumber: '0x0', -} - -describe(method, () => { - it('call with valid config', async () => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const rpc = getRpcClient(server) - const res = await rpc.request(method, [validConfig]) - assert.deepEqual(res.result, validConfig) - }) - - it('call with invalid config', async () => { - const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) - const rpc = getRpcClient(server) - const res = await rpc.request(method, [invalidConfig]) - assert.equal(res.error.code, INVALID_PARAMS) - assert.ok(res.error.message.includes('terminalTotalDifficulty set to 0, received 256')) - }) -}) diff --git a/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts b/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts index 3b3a14f777..735ec5a708 100644 --- a/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts +++ b/packages/client/test/rpc/engine/forkchoiceUpdatedV1.spec.ts @@ -1,6 +1,6 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { bytesToHex, randomBytes, 
zeros } from '@ethereumjs/util' +import { BlockHeader, createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { bytesToHex, randomBytes } from '@ethereumjs/util' import { assert, describe, it, vi } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -9,7 +9,7 @@ import blocks from '../../testdata/blocks/beacon.json' import genesisJSON from '../../testdata/geth-genesis/post-merge.json' import { baseSetup, batchBlocks, getRpcClient, setupChain } from '../helpers.js' -import type { Block, BlockData } from '@ethereumjs/block' +import type { Block } from '@ethereumjs/block' const method = 'engine_forkchoiceUpdatedV1' @@ -27,11 +27,11 @@ const validPayloadAttributes = { suggestedFeeRecipient: '0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b', } -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Paris }) -function createBlock(parentBlock: Block) { +function createBlockFromParent(parentBlock: Block) { const prevRandao = randomBytes(32) - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parentBlock.hash(), @@ -42,7 +42,7 @@ function createBlock(parentBlock: Block) { gasLimit: parentBlock.header.gasLimit, }, }, - { common } + { common }, ) return block } @@ -60,8 +60,8 @@ describe(method, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'headBlockHash': hex string without 0x prefix" - ) + "invalid argument 0 for key 'headBlockHash': hex string without 0x prefix", + ), ) }) @@ -70,14 +70,14 @@ describe(method, () => { const invalidForkChoiceState = { ...validForkChoiceState, - finalizedBlockHash: '0xinvalid', + finalizedBlockHash: '0xinvalid', // cspell:disable-line } const res = await rpc.request(method, [invalidForkChoiceState, validPayloadAttributes]) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'finalizedBlockHash': invalid block hash" - ) + "invalid argument 0 for key 'finalizedBlockHash': invalid block hash", + ), ) }) @@ -103,7 +103,7 @@ describe(method, () => { assert.equal(res.result.payloadStatus.status, 'VALID') assert.equal( res.result.payloadStatus.latestValidHash, - '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a' + '0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a', ) assert.equal(res.result.payloadStatus.validationError, null) assert.notEqual(res.result.payloadId, null) @@ -118,7 +118,7 @@ describe(method, () => { const res = await rpc.request(method, invalidTimestampPayload) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('invalid timestamp in payloadAttributes, got 0, need at least 1') + res.error.message.includes('invalid timestamp in payloadAttributes, got 0, need at least 1'), ) }) @@ -133,66 +133,6 @@ describe(method, () => { assert.equal(res.result.payloadId, null) }) - it('invalid terminal block with only genesis block', async () => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - }, - } - - const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, - }) - const rpc = getRpcClient(server) - const res = await rpc.request(method, [validForkChoiceState, null]) - assert.equal(res.result.payloadStatus.status, 'INVALID') - 
assert.equal(res.result.payloadStatus.latestValidHash, bytesToHex(zeros(32))) - }) - - it('invalid terminal block with 1+ blocks', async () => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - clique: undefined, - ethash: {}, - }, - } - - const { server, chain, common } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, - }) - const rpc = getRpcClient(server) - const newBlock = createBlockFromBlockData( - { - header: { - number: blocks[0].blockNumber, - parentHash: blocks[0].parentHash, - difficulty: 1, - extraData: new Uint8Array(97), - }, - } as BlockData, - { common, skipConsensusFormatValidation: true } - ) - - await chain.putBlocks([newBlock]) - const newBlockHashHex = bytesToHex(newBlock.hash()) - const res = await rpc.request(method, [ - { - safeBlockHash: newBlockHashHex, - finalizedBlockHash: newBlockHashHex, - headBlockHash: newBlockHashHex, - }, - null, - ]) - - assert.equal(res.result.payloadStatus.status, 'INVALID') - assert.equal(res.result.payloadStatus.latestValidHash, bytesToHex(zeros(32))) - }) - it('call with deep parent lookup', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) @@ -307,17 +247,17 @@ describe(method, () => { const canonical = [genesis] for (let i = 0; i < 2; i++) { - canonical.push(createBlock(canonical[canonical.length - 1])) + canonical.push(createBlockFromParent(canonical[canonical.length - 1])) } // Build an alternative payload const reorg = [genesis] for (let i = 0; i < 2; i++) { - reorg.push(createBlock(reorg[reorg.length - 1])) + reorg.push(createBlockFromParent(reorg[reorg.length - 1])) } const canonicalPayload = canonical.map( - (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload + (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload, ) const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) @@ -347,17 +287,17 @@ describe(method, () => { const canonical = [genesis] for (let i = 0; i < 2; i++) { - canonical.push(createBlock(canonical[canonical.length - 1])) + canonical.push(createBlockFromParent(canonical[canonical.length - 1])) } // Build an alternative payload const reorg = [genesis] for (let i = 0; i < 2; i++) { - reorg.push(createBlock(reorg[reorg.length - 1])) + reorg.push(createBlockFromParent(reorg[reorg.length - 1])) } const canonicalPayload = canonical.map( - (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload + (e) => blockToExecutionPayload(e, BigInt(0)).executionPayload, ) const reorgPayload = reorg.map((e) => blockToExecutionPayload(e, BigInt(0)).executionPayload) diff --git a/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts b/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts index ff1644de20..06fdc93a21 100644 --- a/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadBodiesByHashV1.spec.ts @@ -1,8 +1,14 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock, createBlockHeader } from '@ethereumjs/block' import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { TransactionFactory } from '@ethereumjs/tx' -import { Account, Address, bytesToHex, hexToBytes, randomBytes } from '@ethereumjs/util' +import { createTxFromTxData } from '@ethereumjs/tx' +import { + Account, + bytesToHex, + 
createAddressFromPrivateKey, + hexToBytes, + randomBytes, +} from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { TOO_LARGE_REQUEST } from '../../../src/rpc/error-code.js' @@ -39,13 +45,13 @@ describe(method, () => { const rpc = getRpcClient(server) common.setHardfork(Hardfork.Cancun) const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) + const address = createAddressFromPrivateKey(pkey) await service.execution.vm.stateManager.putAccount(address, new Account()) const account = await service.execution.vm.stateManager.getAccount(address) account!.balance = 0xfffffffffffffffn await service.execution.vm.stateManager.putAccount(address, account!) - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -53,9 +59,9 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( + const tx2 = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -64,27 +70,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) - const block = createBlockFromBlockData( + const block = createBlock( { transactions: [tx], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { transactions: [tx2], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) @@ -96,7 +102,7 @@ describe(method, () => { assert.equal( res.result[0].transactions[0], bytesToHex(tx.serialize()), - 'got expected transaction from first payload' + 'got expected transaction from first payload', ) assert.equal(res.result[1], null, 'got null for block not found in chain') assert.equal(res.result.length, 3, 'length of response matches number of block hashes sent') @@ -120,18 +126,18 @@ describe(method, () => { { engine: true, hardfork: Hardfork.London, - } + }, ) const rpc = getRpcClient(server) common.setHardfork(Hardfork.London) const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) + const address = createAddressFromPrivateKey(pkey) await service.execution.vm.stateManager.putAccount(address, new Account()) const account = await service.execution.vm.stateManager.getAccount(address) account!.balance = 0xfffffffffffffffn await service.execution.vm.stateManager.putAccount(address, account!) 
- const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -139,9 +145,9 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( + const tx2 = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -150,27 +156,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) - const block = createBlockFromBlockData( + const block = createBlock( { transactions: [tx], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { transactions: [tx2], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) @@ -182,7 +188,7 @@ describe(method, () => { assert.equal( res.result[0].withdrawals, null, - 'got null for withdrawals field on pre-Shanghai block' + 'got null for withdrawals field on pre-Shanghai block', ) // Restore setStateRoot diff --git a/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts b/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts index 466b891493..5719f74e02 100644 --- a/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadBodiesByRangeV1.spec.ts @@ -1,8 +1,8 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock, createBlockHeader } from '@ethereumjs/block' import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { TransactionFactory } from '@ethereumjs/tx' -import { Account, Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { createTxFromTxData } from '@ethereumjs/tx' +import { Account, bytesToHex, createAddressFromPrivateKey, hexToBytes } from '@ethereumjs/util' import { assert, describe, it, vi } from 'vitest' import { INVALID_PARAMS, TOO_LARGE_REQUEST } from '../../../src/rpc/error-code.js' @@ -41,13 +41,13 @@ describe(method, () => { const rpc = getRpcClient(server) common.setHardfork(Hardfork.Cancun) const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) + const address = createAddressFromPrivateKey(pkey) await service.execution.vm.stateManager.putAccount(address, new Account()) const account = await service.execution.vm.stateManager.getAccount(address) account!.balance = 0xfffffffffffffffn await service.execution.vm.stateManager.putAccount(address, account!) 
- const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -55,9 +55,9 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( + const tx2 = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -66,27 +66,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) - const block = createBlockFromBlockData( + const block = createBlock( { transactions: [tx], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { transactions: [tx2], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) @@ -95,19 +95,19 @@ describe(method, () => { assert.equal( res.result[0].transactions[0], bytesToHex(tx.serialize()), - 'got expected transaction from first payload' + 'got expected transaction from first payload', ) assert.equal( res.result.length, 2, - 'length of response matches start of range up to highest known block' + 'length of response matches start of range up to highest known block', ) const res2 = await rpc.request(method, ['0x3', '0x2']) assert.equal( res2.result.length, 0, - 'got empty array when start of requested range is beyond current chain head' + 'got empty array when start of requested range is beyond current chain head', ) }) @@ -123,13 +123,13 @@ describe(method, () => { const rpc = getRpcClient(server) common.setHardfork(Hardfork.London) const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) + const address = createAddressFromPrivateKey(pkey) await service.execution.vm.stateManager.putAccount(address, new Account()) const account = await service.execution.vm.stateManager.getAccount(address) account!.balance = 0xfffffffffffffffn await service.execution.vm.stateManager.putAccount(address, account!) 
- const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -137,9 +137,9 @@ describe(method, () => { maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, }, - { common } + { common }, ).sign(pkey) - const tx2 = TransactionFactory.fromTxData( + const tx2 = createTxFromTxData( { type: 0x01, maxFeePerBlobGas: 1n, @@ -148,27 +148,27 @@ describe(method, () => { gasLimit: 30000000n, nonce: 1n, }, - { common } + { common }, ).sign(pkey) - const block = createBlockFromBlockData( + const block = createBlock( { transactions: [tx], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: chain.genesis.hash(), number: 1n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { transactions: [tx2], - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { parentHash: block.hash(), number: 2n }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ), }, - { common, skipConsensusFormatValidation: true } + { common, skipConsensusFormatValidation: true }, ) await chain.putBlocks([block, block2], true) diff --git a/packages/client/test/rpc/engine/getPayloadV3.spec.ts b/packages/client/test/rpc/engine/getPayloadV3.spec.ts index 239eae1030..e300aa19d6 100644 --- a/packages/client/test/rpc/engine/getPayloadV3.spec.ts +++ b/packages/client/test/rpc/engine/getPayloadV3.spec.ts @@ -1,13 +1,14 @@ import { Hardfork } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromTxData } from '@ethereumjs/tx' import { Account, - Address, blobsToCommitments, blobsToProofs, bytesToHex, commitmentsToVersionedHashes, + createAddressFromPrivateKey, + createZeroAddress, getBlobs, hexToBytes, } from '@ethereumjs/util' @@ -78,7 +79,7 @@ describe(method, () => { const rpc = getRpcClient(server) common.setHardfork(Hardfork.Cancun) const pkey = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const address = Address.fromPrivateKey(pkey) + const address = createAddressFromPrivateKey(pkey) await service.execution.vm.stateManager.putAccount(address, new Account()) const account = await service.execution.vm.stateManager.getAccount(address) @@ -93,7 +94,7 @@ describe(method, () => { const txVersionedHashes = commitmentsToVersionedHashes(txCommitments) const txProofs = blobsToProofs(kzg, txBlobs, txCommitments) - const tx = TransactionFactory.fromTxData( + const tx = createTxFromTxData( { type: 0x03, blobVersionedHashes: txVersionedHashes, @@ -104,9 +105,9 @@ describe(method, () => { maxFeePerGas: 10000000000n, maxPriorityFeePerGas: 100000000n, gasLimit: 30000000n, - to: Address.zero(), + to: createZeroAddress(), }, - { common } + { common }, ).sign(pkey) await service.txPool.add(tx, true) @@ -116,14 +117,14 @@ describe(method, () => { assert.equal( executionPayload.blockHash, '0x8c71ad199a3dda94de6a1c31cc50a26b1f03a8a4924e9ea3fd7420c6411cac42', - 'built expected block' + 'built expected block', ) - assert.equal(executionPayload.excessBlobGas, '0x0', 'correct execess blob gas') + assert.equal(executionPayload.excessBlobGas, '0x0', 'correct excess blob gas') assert.equal(executionPayload.blobGasUsed, '0x20000', 'correct blob gas 
used') const { commitments, proofs, blobs } = blobsBundle assert.ok( commitments.length === proofs.length && commitments.length === blobs.length, - 'equal commitments, proofs and blobs' + 'equal commitments, proofs and blobs', ) assert.equal(blobs.length, 1, '1 blob should be returned') assert.equal(proofs[0], bytesToHex(txProofs[0]), 'proof should match') diff --git a/packages/client/test/rpc/engine/kaustinen6.spec.ts b/packages/client/test/rpc/engine/kaustinen6.spec.ts index 4ad0f3b2b2..4a5e6310ab 100644 --- a/packages/client/test/rpc/engine/kaustinen6.spec.ts +++ b/packages/client/test/rpc/engine/kaustinen6.spec.ts @@ -32,14 +32,14 @@ const genesisVerkleBlockHash = '0x3fe165c03e7a77d1e3759362ebeeb16fd964cb411ce11f * a. On the saved blocks, comma separated (were produced for kaustinen4 ) * `TEST_SAVED_NUMBERS=353,368,374,467 npx vitest run test/rpc/engine/kaustinen5.spec.ts` * b. Geth produced testvectors (were produced for kaustinen5) - * `TEST_GETH_VEC_DIR=test/testdata/gethk5vecs DEBUG=ethjs,vm:*,evm:*,statemanager:verkle* npx vitest run test/rpc/engine/kaustinen6.spec.ts` + * `TEST_GETH_VEC_DIR=test/testdata/gethk5vecs DEBUG=ethjs,vm:*,evm:*,statemanager:verkle* npx vitest run test/rpc/engine/kaustinen6.spec.ts` // cspell:disable-line */ const originalValidate = (BlockHeader as any).prototype._consensusFormatValidation async function fetchExecutionPayload( peerBeaconUrl: string, - slot: number | string + slot: number | string, ): Promise { let beaconPayload: BeaconPayloadJson | undefined = undefined try { @@ -55,7 +55,7 @@ async function runBlock( { chain, rpc, common }: { chain: Chain; rpc: HttpClient; common: Common }, { execute, parent }: { execute: any; parent: any }, isBeaconData: boolean, - context: any + context: any, ) { const blockCache = chain.blockCache @@ -146,10 +146,10 @@ describe(`valid verkle network setup`, async () => { if (process.env.TEST_GETH_VEC_DIR !== undefined) { // eslint-disable-next-line @typescript-eslint/no-use-before-define - const gethVecs = await loadGethVectors(process.env.TEST_GETH_VEC_DIR, { common }) - let parent = gethVecs[0] - for (let i = 1; i < gethVecs.length; i++) { - const execute = gethVecs[i] + const gethVectors = await loadGethVectors(process.env.TEST_GETH_VEC_DIR, { common }) + let parent = gethVectors[0] + for (let i = 1; i < gethVectors.length; i++) { + const execute = gethVectors[i] it(`run geth vector: ${execute.blockNumber}`, async (context) => { await runBlock({ common, chain, rpc }, { parent, execute }, false, context) parent = execute diff --git a/packages/client/test/rpc/engine/newPayloadV1.spec.ts b/packages/client/test/rpc/engine/newPayloadV1.spec.ts index de3a725a33..052f2df6e8 100644 --- a/packages/client/test/rpc/engine/newPayloadV1.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV1.spec.ts @@ -1,7 +1,11 @@ -import { BlockHeader } from '@ethereumjs/block' -import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' -import { Address, bytesToHex, hexToBytes, zeros } from '@ethereumjs/util' -import { assert, describe, it, vi } from 'vitest' +import { createFeeMarket1559Tx } from '@ethereumjs/tx' +import { + bytesToHex, + createAddressFromPrivateKey, + createAddressFromString, + hexToBytes, +} from '@ethereumjs/util' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' import blocks from '../../testdata/blocks/beacon.json' @@ -27,8 +31,8 @@ describe(method, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid 
argument 0 for key 'parentHash': hex string without 0x prefix" - ) + "invalid argument 0 for key 'parentHash': hex string without 0x prefix", + ), ) }) @@ -39,7 +43,7 @@ describe(method, () => { const res = await rpc.request(method, blockDataWithInvalidBlockHash) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash") + res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash"), ) }) @@ -99,28 +103,6 @@ describe(method, () => { assert.equal(res.result.status, 'VALID') }) - it('invalid terminal block', async () => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - }, - } - - BlockHeader.prototype['_consensusFormatValidation'] = vi.fn() - vi.doMock('@ethereumjs/block', () => BlockHeader) - - const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, - }) - const rpc = getRpcClient(server) - const res = await rpc.request(method, [blockData, null]) - - assert.equal(res.result.status, 'INVALID') - assert.equal(res.result.latestValidHash, bytesToHex(zeros(32))) - }) - it('call with valid data', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) @@ -145,7 +127,7 @@ describe(method, () => { const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' assert.ok( res.result.validationError.includes(expectedError), - `should error with - ${expectedError}` + `should error with - ${expectedError}`, ) }) @@ -155,14 +137,14 @@ describe(method, () => { chain.config.logger.silent = true // Let's mock a non-signed transaction so execution fails - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 21_000, maxFeePerGas: 10, value: 1, - to: Address.fromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), + to: createAddressFromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), }, - { common } + { common }, ) const transactions = [bytesToHex(tx.serialize())] @@ -180,9 +162,9 @@ describe(method, () => { it('call with valid data & valid transaction', async () => { const accountPk = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) - const accountAddress = Address.fromPrivateKey(accountPk) + const accountAddress = createAddressFromPrivateKey(accountPk) const newGenesisJSON = { ...genesisJSON, alloc: { @@ -195,13 +177,13 @@ describe(method, () => { const { server, common } = await setupChain(newGenesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { maxFeePerGas: '0x7', value: 6, gasLimit: 53_000, }, - { common } + { common }, ).sign(accountPk) const transactions = [bytesToHex(tx.serialize())] const blockDataWithValidTransaction = { @@ -220,9 +202,9 @@ describe(method, () => { it('call with too many transactions', async () => { const accountPk = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) - const accountAddress = Address.fromPrivateKey(accountPk) + const accountAddress = createAddressFromPrivateKey(accountPk) const newGenesisJSON = { ...genesisJSON, alloc: { @@ -238,14 +220,14 @@ describe(method, 
() => { }) const rpc = getRpcClient(server) const transactions = Array.from({ length: 101 }, (_v, i) => { - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { nonce: i, maxFeePerGas: '0x7', value: 6, gasLimit: 53_000, }, - { common } + { common }, ).sign(accountPk) return bytesToHex(tx.serialize()) diff --git a/packages/client/test/rpc/engine/newPayloadV2.spec.ts b/packages/client/test/rpc/engine/newPayloadV2.spec.ts index 40c0152e41..b9db309103 100644 --- a/packages/client/test/rpc/engine/newPayloadV2.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV2.spec.ts @@ -1,7 +1,11 @@ -import { BlockHeader } from '@ethereumjs/block' -import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' -import { Address, bytesToHex, hexToBytes, zeros } from '@ethereumjs/util' -import { assert, describe, it, vi } from 'vitest' +import { createFeeMarket1559Tx } from '@ethereumjs/tx' +import { + bytesToHex, + createAddressFromPrivateKey, + createAddressFromString, + hexToBytes, +} from '@ethereumjs/util' +import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' import blocks from '../../testdata/blocks/beacon.json' @@ -27,8 +31,8 @@ describe(`${method}: call with executionPayloadV1`, () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - "invalid argument 0 for key 'parentHash': hex string without 0x prefix" - ) + "invalid argument 0 for key 'parentHash': hex string without 0x prefix", + ), ) }) @@ -39,7 +43,7 @@ describe(`${method}: call with executionPayloadV1`, () => { const res = await rpc.request(method, blockDataWithInvalidBlockHash) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash") + res.error.message.includes("invalid argument 0 for key 'blockHash': invalid block hash"), ) }) @@ -98,28 +102,6 @@ describe(`${method}: call with executionPayloadV1`, () => { assert.equal(res.result.status, 'VALID') }) - it('invalid terminal block', async () => { - const genesisWithHigherTtd = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 17179869185, - }, - } - - ;(BlockHeader as any).prototype._consensusFormatValidation = vi.fn() - vi.doMock('@ethereumjs/block', () => BlockHeader) - - const { server } = await setupChain(genesisWithHigherTtd, 'post-merge', { - engine: true, - }) - const rpc = getRpcClient(server) - const res = await rpc.request(method, [blockData, null]) - - assert.equal(res.result.status, 'INVALID') - assert.equal(res.result.latestValidHash, bytesToHex(zeros(32))) - }) - it('call with valid data', async () => { const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) @@ -143,7 +125,7 @@ describe(`${method}: call with executionPayloadV1`, () => { const expectedError = 'Invalid tx at index 0: Error: Invalid serialized tx input: must be array' assert.ok( res.result.validationError.includes(expectedError), - `should error with - ${expectedError}` + `should error with - ${expectedError}`, ) }) @@ -153,14 +135,14 @@ describe(`${method}: call with executionPayloadV1`, () => { chain.config.logger.silent = true // Let's mock a non-signed transaction so execution fails - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 21_000, maxFeePerGas: 10, value: 1, - to: Address.fromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), + to: 
createAddressFromString('0x61FfE691821291D02E9Ba5D33098ADcee71a3a17'), }, - { common } + { common }, ) const transactions = [bytesToHex(tx.serialize())] @@ -177,9 +159,9 @@ describe(`${method}: call with executionPayloadV1`, () => { it('call with valid data & valid transaction', async () => { const accountPk = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) - const accountAddress = Address.fromPrivateKey(accountPk) + const accountAddress = createAddressFromPrivateKey(accountPk) const newGenesisJSON = { ...genesisJSON, alloc: { @@ -192,13 +174,13 @@ describe(`${method}: call with executionPayloadV1`, () => { const { server, common } = await setupChain(newGenesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { maxFeePerGas: '0x7', value: 6, gasLimit: 53_000, }, - { common } + { common }, ).sign(accountPk) const transactions = [bytesToHex(tx.serialize())] const blockDataWithValidTransaction = { diff --git a/packages/client/test/rpc/engine/newPayloadV3.spec.ts b/packages/client/test/rpc/engine/newPayloadV3.spec.ts index 33c98e9bbc..71355bd200 100644 --- a/packages/client/test/rpc/engine/newPayloadV3.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV3.spec.ts @@ -28,12 +28,12 @@ describe(`${method}: call with executionPayloadV3`, () => { const res = await rpc.request(method, [validBlock, [], parentBeaconBlockRoot]) assert.equal(res.error.code, UNSUPPORTED_FORK) assert.ok( - res.error.message.includes('NewPayloadV{1|2} MUST be used before Cancun is activated') + res.error.message.includes('NewPayloadV{1|2} MUST be used before Cancun is activated'), ) }) it('valid data', async () => { - // get the genesis json with late enougt date with respect to block data in batchBlocks + // get the genesis json with late enough date with respect to block data in batchBlocks const cancunTime = 1689945325 // deep copy json and add shanghai and cancun to genesis to avoid contamination const cancunJson = JSON.parse(JSON.stringify(genesisJSON)) @@ -71,7 +71,7 @@ describe(`${method}: call with executionPayloadV3`, () => { }) it('fcU and verify that no errors occur on new payload', async () => { - // get the genesis json with late enougt date with respect to block data in batchBlocks + // get the genesis json with late enough date with respect to block data in batchBlocks const cancunTime = 1689945325 // deep copy json and add shanghai and cancun to genesis to avoid contamination const cancunJson = JSON.parse(JSON.stringify(genesisJSON)) diff --git a/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts b/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts index 39451b9c32..4f4880a4d1 100644 --- a/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV3VersionedHashes.spec.ts @@ -42,7 +42,7 @@ describe(`${method}: Cancun validations`, () => { assert.equal(res.result.status, 'INVALID') assert.equal( res.result.validationError, - 'Error verifying blobVersionedHashes: expected=0 received=2' + 'Error verifying blobVersionedHashes: expected=0 received=2', ) const txString = @@ -83,7 +83,7 @@ describe(`${method}: Cancun validations`, () => { res = await rpc.request(method, blockDataMissingParentBeaconRoot) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('missing value 
for required argument parentBeaconBlockRoot') + res.error.message.includes('missing value for required argument parentBeaconBlockRoot'), ) const blockDataExtraMissingHashes1 = [ @@ -105,7 +105,7 @@ describe(`${method}: Cancun validations`, () => { assert.equal(res.result.status, 'INVALID') assert.equal( res.result.validationError, - 'Error verifying blobVersionedHashes: expected=2 received=1' + 'Error verifying blobVersionedHashes: expected=2 received=1', ) const blockDataExtraMisMatchingHashes1 = [ @@ -127,7 +127,7 @@ describe(`${method}: Cancun validations`, () => { assert.equal(res.result.status, 'INVALID') assert.equal( res.result.validationError, - 'Error verifying blobVersionedHashes: mismatch at index=1 expected=0x0131…52c5 received=0x3456…' + 'Error verifying blobVersionedHashes: mismatch at index=1 expected=0x0131…52c5 received=0x3456…', ) const blockDataMatchingVersionedHashes = [ diff --git a/packages/client/test/rpc/engine/newPayloadV4.spec.ts b/packages/client/test/rpc/engine/newPayloadV4.spec.ts index a78e57bfe0..f1b1b39890 100644 --- a/packages/client/test/rpc/engine/newPayloadV4.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV4.spec.ts @@ -1,4 +1,4 @@ -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromTxData } from '@ethereumjs/tx' import { bigIntToHex, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -45,7 +45,7 @@ function readyPragueGenesis(genesisJSON: any) { describe(`${method}: call with executionPayloadV4`, () => { it('valid data', async () => { - // get the genesis json with late enougt date with respect to block data in batchBlocks + // get the genesis json with late enough date with respect to block data in batchBlocks const { pragueJson, pragueTime } = readyPragueGenesis(genesisJSON) const { service, server } = await setupChain(pragueJson, 'post-merge', { engine: true }) @@ -93,7 +93,7 @@ describe(`${method}: call with executionPayloadV4`, () => { // address 0x610adc49ecd66cbf176a8247ebd59096c031bd9f has been sufficiently funded in genesis const pk = hexToBytes('0x9c9996335451aab4fc4eac58e31a8c300e095cdbcee532d53d09280e83360355') - const depositTx = TransactionFactory.fromTxData({ + const depositTx = createTxFromTxData({ data: '0x22895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001208cd4e5a69709cf8ee5b1b73d6efbf3f33bcac92fb7e4ce62b2467542fb50a72d0000000000000000000000000000000000000000000000000000000000000030ac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c3030000000000000000000000000000000000000000000000000000000000000060a747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769', value: 32000000000000000000n, gasLimit: 30000000n, @@ -107,15 +107,15 @@ describe(`${method}: call with executionPayloadV4`, () => { const { executionPayload } = res.result assert.ok( executionPayload.depositRequests?.length === 1, - 'depositRequests should have 1 deposit request' + 'depositRequests should have 1 deposit request', ) assert.ok( executionPayload.withdrawalRequests !== undefined, - 'depositRequests field should be received' + 
'depositRequests field should be received', ) assert.ok( executionPayload.consolidationRequests !== undefined, - 'consolidationRequests field should be received' + 'consolidationRequests field should be received', ) res = await rpc.request(method, [executionPayload, [], parentBeaconBlockRoot]) diff --git a/packages/client/test/rpc/engine/preimages.spec.ts b/packages/client/test/rpc/engine/preimages.spec.ts index 87a499866b..5b6b8d19b9 100644 --- a/packages/client/test/rpc/engine/preimages.spec.ts +++ b/packages/client/test/rpc/engine/preimages.spec.ts @@ -1,10 +1,10 @@ import { BlockHeader, - createBlockFromBlockData, + createBlock, genTransactionsTrieRoot, genWithdrawalsTrieRoot, } from '@ethereumjs/block' -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromSerializedData } from '@ethereumjs/tx' import { Withdrawal, bytesToHex, @@ -50,7 +50,7 @@ async function genBlockWithdrawals(blockNumber: number) { } }) const withdrawalsRoot = bytesToHex( - await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData)) + await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData)), ) return { withdrawals, withdrawalsRoot } @@ -66,14 +66,14 @@ async function runBlock( receiptTrie: PrefixedHexString gasUsed: PrefixedHexString coinbase: PrefixedHexString - } + }, ) { const { transactions, parentHash, blockNumber, stateRoot, receiptTrie, gasUsed, coinbase } = runData const txs = [] for (const [index, serializedTx] of transactions.entries()) { try { - const tx = TransactionFactory.fromSerializedData(hexToBytes(serializedTx), { + const tx = createTxFromSerializedData(hexToBytes(serializedTx), { common, }) txs.push(tx) @@ -97,7 +97,7 @@ async function runBlock( coinbase, } const blockData = { header: headerData, transactions: txs, withdrawals } - const executeBlock = createBlockFromBlockData(blockData, { common }) + const executeBlock = createBlock(blockData, { common }) const executePayload = blockToExecutionPayload(executeBlock, BigInt(0)).executionPayload const res = await rpc.request('engine_newPayloadV2', [executePayload]) assert.equal(res.result.status, 'VALID', 'valid status should be received') @@ -116,7 +116,7 @@ describe(`valid verkle network setup`, async () => { { engine: true, savePreimages: true, - } + }, ) ;(chain.blockchain as any).validateHeader = () => {} @@ -137,7 +137,7 @@ describe(`valid verkle network setup`, async () => { // and for block1 are coded to return no withdrawals // // third consideration is for feerecipient which are added here as random - // coinbase addrs + // coinbase addresses const testCases = [ { name: 'block 1 no txs', @@ -252,12 +252,12 @@ describe(`valid verkle network setup`, async () => { for (const preimage of preimages) { const preimageBytes = hexToBytes(preimage) const savedPreimage = await execution.preimagesManager!.getPreimage( - keccak256(preimageBytes) + keccak256(preimageBytes), ) assert.isNotNull(savedPreimage, `Missing preimage for ${preimage}`) assert.ok( savedPreimage !== null && equalsBytes(savedPreimage, preimageBytes), - `Incorrect preimage for ${preimage}` + `Incorrect preimage for ${preimage}`, ) } parentHash = blockHash diff --git a/packages/client/test/rpc/engine/withdrawals.spec.ts b/packages/client/test/rpc/engine/withdrawals.spec.ts index c2616a4437..768ca0deb4 100644 --- a/packages/client/test/rpc/engine/withdrawals.spec.ts +++ b/packages/client/test/rpc/engine/withdrawals.spec.ts @@ -105,12 +105,12 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { 
it(name, async () => { // check withdrawals root computation const computedWithdrawalsRoot = bytesToHex( - await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData), new Trie()) + await genWithdrawalsTrieRoot(withdrawals.map(Withdrawal.fromWithdrawalData), new Trie()), ) assert.equal( withdrawalsRoot, computedWithdrawalsRoot, - 'withdrawalsRoot compuation should match' + 'withdrawalsRoot computation should match', ) const { server } = await setupChain(genesisJSON, 'post-merge', { engine: true }) const rpc = getRpcClient(server) @@ -120,7 +120,7 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { ]) assert.equal(res.error.code, INVALID_PARAMS) assert.ok( - res.error.message.includes('PayloadAttributesV2 MUST be used after Shanghai is activated') + res.error.message.includes('PayloadAttributesV2 MUST be used after Shanghai is activated'), ) res = await rpc.request('engine_forkchoiceUpdatedV2', [ @@ -139,7 +139,7 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { assert.equal( executionPayload!.withdrawals!.length, withdrawals.length, - 'withdrawals should match' + 'withdrawals should match', ) assert.equal(blockValue, '0x0', 'No value should be returned') payload = executionPayload @@ -149,7 +149,7 @@ for (const { name, withdrawals, withdrawalsRoot, gethBlockRlp } of testCases) { assert.equal( payload!.stateRoot, '0x23eadd91fca55c0e14034e4d63b2b3ed43f2e807b6bf4d276b784ac245e7fa3f', - 'stateRoot should match' + 'stateRoot should match', ) } diff --git a/packages/client/test/rpc/eth/blobBaseFee.spec.ts b/packages/client/test/rpc/eth/blobBaseFee.spec.ts index f4e2a12b0f..5ca11d3b1f 100644 --- a/packages/client/test/rpc/eth/blobBaseFee.spec.ts +++ b/packages/client/test/rpc/eth/blobBaseFee.spec.ts @@ -1,14 +1,16 @@ import { Hardfork } from '@ethereumjs/common' -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromTxData } from '@ethereumjs/tx' import { - Address, BIGINT_0, BIGINT_256, blobsToCommitments, commitmentsToVersionedHashes, + createAddressFromPrivateKey, + createZeroAddress, getBlobs, hexToBytes, } from '@ethereumjs/util' +import { buildBlock } from '@ethereumjs/vm' import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' @@ -20,11 +22,11 @@ import type { VMExecution } from '../../../src/execution/vmexecution.js' const method = 'eth_blobBaseFee' const privateKey = hexToBytes('0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8') -const accountAddress = Address.fromPrivateKey(privateKey) +const accountAddress = createAddressFromPrivateKey(privateKey) const produceBlockWith4844Tx = async ( execution: VMExecution, chain: Chain, - blobsCount: number[] + blobsCount: number[], ) => { const kzg = await loadKZG() // 4844 sample blob @@ -38,7 +40,7 @@ const produceBlockWith4844Tx = async ( const parentBlock = await chain.getCanonicalHeadBlock() const vmCopy = await vm.shallowCopy() // Set block's gas used to max - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { timestamp: parentBlock.header.timestamp + BigInt(1), @@ -52,7 +54,7 @@ const produceBlockWith4844Tx = async ( const blobVersionedHashes = [] const blobs = [] const kzgCommitments = [] - const to = Address.zero() + const to = createZeroAddress() if (blobsCount[i] > 0) { for (let blob = 0; blob < blobsCount[i]; blob++) { blobVersionedHashes.push(...blobVersionedHash) @@ -61,7 +63,7 @@ const produceBlockWith4844Tx = async ( } } 
await blockBuilder.addTransaction( - TransactionFactory.fromTxData( + createTxFromTxData( { type: 3, gasLimit: 21000, @@ -74,8 +76,8 @@ const produceBlockWith4844Tx = async ( kzgCommitments, maxFeePerBlobGas: BigInt(1000), }, - { common: vmCopy.common } - ).sign(privateKey) + { common: vmCopy.common }, + ).sign(privateKey), ) nonce++ } diff --git a/packages/client/test/rpc/eth/call.spec.ts b/packages/client/test/rpc/eth/call.spec.ts index 4015e8a89f..99ac3bed49 100644 --- a/packages/client/test/rpc/eth/call.spec.ts +++ b/packages/client/test/rpc/eth/call.spec.ts @@ -1,8 +1,9 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address, bigIntToHex, bytesToHex } from '@ethereumjs/util' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createLegacyTx } from '@ethereumjs/tx' +import { bigIntToHex, bytesToHex, createAddressFromString } from '@ethereumjs/util' +import { runBlock, runTx } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -16,7 +17,7 @@ const method = 'eth_call' describe(method, () => { it('call with valid arguments', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const blockchain = await createBlockchain({ common, validateBlocks: false, @@ -32,7 +33,7 @@ describe(method, () => { const { vm } = execution // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // contract: /* @@ -50,12 +51,12 @@ describe(method, () => { // construct block with tx const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit, data }, { common, freeze: false }) tx.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -63,14 +64,14 @@ describe(method, () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx // deploy contract let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const result = await runBlock(vm, { block, generate: true, skipBlockValidation: true }) const { createdAddress } = result.results[0] await vm.blockchain.putBlock(ranBlock!) 
@@ -82,13 +83,12 @@ describe(method, () => { data: `0x${funcHash}` as PrefixedHexString, gasLimit: bigIntToHex(BigInt(53000)), } - const estimateTx = LegacyTransaction.fromTxData(estimateTxData, { freeze: false }) + const estimateTx = createLegacyTx(estimateTxData, { freeze: false }) estimateTx.getSenderAddress = () => { return address } - const { execResult } = await ( - await vm.shallowCopy() - ).runTx({ + const vmCopy = await vm.shallowCopy() + const { execResult } = await runTx(vmCopy, { tx: estimateTx, skipNonce: true, skipBalance: true, @@ -101,24 +101,26 @@ describe(method, () => { { ...estimateTxData, gas: estimateTxData.gasLimit }, 'latest', ]) + assert.equal(res.error.code, 3, 'should return the correct error code') assert.equal( - res.result, + res.error.data, bytesToHex(execResult.returnValue), - 'should return the correct return value' + 'should return the correct return value', ) res = await rpc.request(method, [{ ...estimateTxData }, 'latest']) + assert.equal(res.error.code, 3, 'should return the correct error code') assert.equal( - res.result, + res.error.data, bytesToHex(execResult.returnValue), - 'should return the correct return value with no gas limit provided' + 'should return the correct return value with no gas limit provided', ) res = await rpc.request(method, [{ gasLimit, data }, 'latest']) assert.equal( res.result, bytesToHex(result.results[0].execResult.returnValue), - `should let run call without 'to' for contract creation` + `should let run call without 'to' for contract creation`, ) }) @@ -130,7 +132,7 @@ describe(method, () => { const rpc = getRpcClient(startRPC(manager.getMethods())) // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') const funcHash = '26b85ee1' // borrowed from valid test above const estimateTxData = { @@ -156,7 +158,7 @@ describe(method, () => { const rpc = getRpcClient(startRPC(manager.getMethods())) // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') const estimateTxData = { to: address.toString(), from: address.toString(), diff --git a/packages/client/test/rpc/eth/chainId.spec.ts b/packages/client/test/rpc/eth/chainId.spec.ts index a827c875a2..cad0de0f30 100644 --- a/packages/client/test/rpc/eth/chainId.spec.ts +++ b/packages/client/test/rpc/eth/chainId.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Goerli } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' import { baseSetup, createClient, createManager, getRpcClient, startRPC } from '../helpers.js' @@ -21,9 +21,9 @@ describe(method, () => { assert.equal(res.result, '0x1', 'should return chainId 1') }) - it('returns 3 for Goerli', async () => { + it('returns 5 for Goerli', async () => { const manager = createManager( - await createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) + await createClient({ opened: true, commonChain: new Common({ chain: Goerli }) }), ) const rpc = getRpcClient(startRPC(manager.getMethods())) diff --git a/packages/client/test/rpc/eth/estimateGas.spec.ts b/packages/client/test/rpc/eth/estimateGas.spec.ts index f93461d301..b4a3b8ccb4 100644 --- a/packages/client/test/rpc/eth/estimateGas.spec.ts +++ b/packages/client/test/rpc/eth/estimateGas.spec.ts @@ -1,9 +1,10 @@ 
-import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock, createBlockHeader } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' import { createCommonFromGethGenesis } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address, bigIntToHex } from '@ethereumjs/util' +import { createLegacyTx } from '@ethereumjs/tx' +import { bigIntToHex, createAddressFromString } from '@ethereumjs/util' +import { runBlock, runTx } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -38,10 +39,10 @@ describe( const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService assert.notEqual(execution, undefined, 'should have valid execution') const { vm } = execution - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // contract: /* @@ -59,12 +60,12 @@ describe( // construct block with tx const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit, data }, { common, freeze: false }) tx.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -72,14 +73,14 @@ describe( gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx // deploy contract let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const result = await runBlock(vm, { block, generate: true, skipBlockValidation: true }) const { createdAddress } = result.results[0] await vm.blockchain.putBlock(ranBlock!) 
@@ -92,13 +93,12 @@ describe( gasLimit: bigIntToHex(BigInt(53000)), gasPrice: bigIntToHex(BigInt(1000000000)), } - const estimateTx = LegacyTransaction.fromTxData(estimateTxData, { freeze: false }) + const estimateTx = createLegacyTx(estimateTxData, { freeze: false }) estimateTx.getSenderAddress = () => { return address } - const { totalGasSpent } = await ( - await vm.shallowCopy() - ).runTx({ + const vmCopy = await vm.shallowCopy() + const { totalGasSpent } = await runTx(vmCopy, { tx: estimateTx, skipNonce: true, skipBalance: true, @@ -114,7 +114,7 @@ describe( assert.equal( res.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) // Test without blockopt as its optional and should default to latest @@ -122,16 +122,16 @@ describe( assert.equal( res2.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) // Setup chain to run an EIP1559 tx const service = client.services[0] as FullEthereumService service.execution.vm.common.setHardfork('london') service.chain.config.chainCommon.setHardfork('london') const headBlock = await service.chain.getCanonicalHeadBlock() - const londonBlock = createBlockFromBlockData( + const londonBlock = createBlock( { - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { baseFeePerGas: 1000000000n, number: 2n, @@ -141,14 +141,14 @@ describe( common: service.chain.config.chainCommon, skipConsensusFormatValidation: true, calcDifficultyFromHeader: headBlock.header, - } + }, ), }, - { common: service.chain.config.chainCommon } + { common: service.chain.config.chainCommon }, ) vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block: londonBlock, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block: londonBlock, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock!) 
// Test EIP1559 tx @@ -158,7 +158,7 @@ describe( assert.equal( EIP1559res.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate for EIP1559 tx' + 'should return the correct gas estimate for EIP1559 tx', ) // Test EIP1559 tx with no maxFeePerGas @@ -174,7 +174,7 @@ describe( assert.equal( EIP1559reqNoGas.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) // Test legacy tx with London head block @@ -184,7 +184,7 @@ describe( assert.equal( legacyTxNoGas.result, '0x' + totalGasSpent.toString(16), - 'should return the correct gas estimate' + 'should return the correct gas estimate', ) }) @@ -196,7 +196,7 @@ describe( const rpc = getRpcClient(startRPC(manager.getMethods())) // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') const funcHash = '26b85ee1' // borrowed from valid test above const estimateTxData = { @@ -214,5 +214,5 @@ describe( assert.ok(res.error.message.includes('"pending" is not yet supported')) }) }, - 20000 + 20000, ) diff --git a/packages/client/test/rpc/eth/gasPrice.spec.ts b/packages/client/test/rpc/eth/gasPrice.spec.ts index 9ff204ed90..031995887e 100644 --- a/packages/client/test/rpc/eth/gasPrice.spec.ts +++ b/packages/client/test/rpc/eth/gasPrice.spec.ts @@ -1,4 +1,4 @@ -import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' +import { createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { bigIntToHex, intToHex } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -11,6 +11,8 @@ import { setupChain, } from '../helpers.js' +import type { LegacyTx } from '@ethereumjs/tx' + const method = 'eth_gasPrice' describe(method, () => { @@ -19,9 +21,9 @@ describe(method, () => { const rpc = getRpcClient(server) const GAS_PRICE = 100 // construct tx - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { gasLimit: 21000, gasPrice: GAS_PRICE, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -30,7 +32,7 @@ describe(method, () => { assert.equal( res.result, intToHex(GAS_PRICE), - 'should return the correct suggested gas price with 1 legacy transaction' + 'should return the correct suggested gas price with 1 legacy transaction', ) }) @@ -42,9 +44,9 @@ describe(method, () => { for (let i = 0; i < iterations; i++) { const gasPrice = i * 100 averageGasPrice += BigInt(gasPrice) - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { nonce: i, gasLimit: 21000, gasPrice, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) } @@ -54,7 +56,7 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(averageGasPrice), - 'should return the correct gas price with multiple legacy transactions' + 'should return the correct gas price with multiple legacy transactions', ) }) @@ -64,13 +66,13 @@ describe(method, () => { const G1 = 100 const G2 = 1231231 - const tx1 = LegacyTransaction.fromTxData( + const tx1 = createLegacyTx( { gasLimit: 21000, gasPrice: G1, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) - const tx2 = LegacyTransaction.fromTxData( + const tx2 = createLegacyTx( { nonce: 1, gasLimit: 
21000, gasPrice: G2, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx1, tx2]) @@ -80,24 +82,24 @@ describe(method, () => { assert.equal( res.result, intToHex(Math.trunc(averageGasPrice)), - 'should return the correct gas price with multiple legacy transactions in a block' + 'should return the correct gas price with multiple legacy transactions in a block', ) }) it('call with 1559 transaction data', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 21000, maxPriorityFeePerGas: 10, maxFeePerGas: 975000000, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -108,28 +110,28 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(gasPrice), - 'should return the correct gas price with 1 1559 transaction' + 'should return the correct gas price with 1 1559 transaction', ) }) it('call with multiple 1559 transactions', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) const maxPriority1 = 10 const maxPriority2 = 1231231 - const tx1 = FeeMarketEIP1559Transaction.fromTxData( + const tx1 = createFeeMarket1559Tx( { gasLimit: 21000, maxPriorityFeePerGas: maxPriority1, maxFeePerGas: 975000000, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) - const tx2 = FeeMarketEIP1559Transaction.fromTxData( + const tx2 = createFeeMarket1559Tx( { nonce: 1, gasLimit: 21000, @@ -137,7 +139,7 @@ describe(method, () => { maxFeePerGas: 975000000, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx1, tx2]) @@ -149,7 +151,7 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(gasPrice), - 'should return the correct gas price with 1 1559 transaction' + 'should return the correct gas price with 1 1559 transaction', ) }) @@ -159,27 +161,27 @@ describe(method, () => { const iterations = BigInt(21) const gasPrice = BigInt(20) const firstBlockGasPrice = BigInt(11111111111111) - let tx: LegacyTransaction + let tx: LegacyTx for (let i = 0; i < iterations; i++) { if (i === 0) { - tx = LegacyTransaction.fromTxData( + tx = createLegacyTx( { nonce: i, gasLimit: 21000, gasPrice: firstBlockGasPrice, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) } else { - tx = LegacyTransaction.fromTxData( + tx = createLegacyTx( { nonce: i, gasLimit: 21000, gasPrice, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) } await runBlockWithTxs(chain, execution, [tx!]) @@ -195,7 +197,7 @@ describe(method, () => { assert.equal( res.result, bigIntToHex(gasPrice), - 'should return the correct gas price for 21 blocks' + 'should return the correct gas price for 21 blocks', ) }) }) diff --git a/packages/client/test/rpc/eth/getBalance.spec.ts b/packages/client/test/rpc/eth/getBalance.spec.ts index 57b4175155..6231d43d55 100644 --- a/packages/client/test/rpc/eth/getBalance.spec.ts +++ b/packages/client/test/rpc/eth/getBalance.spec.ts @@ -1,9 +1,10 @@ -import { 
createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address, bigIntToHex } from '@ethereumjs/util' +import { createLegacyTx } from '@ethereumjs/tx' +import { bigIntToHex, createAddressFromString } from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -17,7 +18,7 @@ describe( method, () => { it('ensure balance deducts after a tx', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const blockchain = await createBlockchain({ common }) const client = await createClient({ blockchain, commonChain: common, includeVM: true }) @@ -31,10 +32,10 @@ describe( // since synchronizer.run() is not executed in the mock setup, // manually run stateManager.generateCanonicalGenesis() - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // verify balance is genesis amount const genesisBalance = BigInt(0x15ac56edc4d12c0000) @@ -43,18 +44,18 @@ describe( assert.equal( res.result, bigIntToHex(genesisBalance), - 'should return the correct genesis balance' + 'should return the correct genesis balance', ) // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit: 53000 }, { common, freeze: false }) tx.getSenderAddress = () => { return address } - const block = createBlockFromBlockData({}, { common }) + const block = createBlock({}, { common }) block.transactions[0] = tx - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const result = await runBlock(vm, { block, generate: true, skipBlockValidation: true }) const { amountSpent } = result.results[0] // verify balance is genesis amount minus amountSpent @@ -63,7 +64,7 @@ describe( assert.equal( res.result, bigIntToHex(expectedNewBalance), - 'should return the correct balance after a tx' + 'should return the correct balance after a tx', ) // verify we can query with "earliest" @@ -71,7 +72,7 @@ describe( assert.equal( res.result, bigIntToHex(genesisBalance), - "should return the correct balance with 'earliest'" + "should return the correct balance with 'earliest'", ) // verify we can query with a past block number @@ -79,7 +80,7 @@ describe( assert.equal( res.result, bigIntToHex(genesisBalance), - 'should return the correct balance with a past block number' + 'should return the correct balance with a past block number', ) // call with height that exceeds chain height @@ -107,5 +108,5 @@ describe( assert.ok(res.error.message.includes('"pending" is not yet supported')) }) }, - 40000 + 40000, ) diff --git a/packages/client/test/rpc/eth/getBlockByHash.spec.ts b/packages/client/test/rpc/eth/getBlockByHash.spec.ts index 5416ada56d..f737b4f428 100644 --- 
a/packages/client/test/rpc/eth/getBlockByHash.spec.ts +++ b/packages/client/test/rpc/eth/getBlockByHash.spec.ts @@ -33,7 +33,7 @@ describe(method, () => { assert.equal( typeof res.result.transactions[0], 'string', - 'should return only the hashes of the transactions' + 'should return only the hashes of the transactions', ) }) diff --git a/packages/client/test/rpc/eth/getBlockByNumber.spec.ts b/packages/client/test/rpc/eth/getBlockByNumber.spec.ts index 7916783d26..3c8cff73e1 100644 --- a/packages/client/test/rpc/eth/getBlockByNumber.spec.ts +++ b/packages/client/test/rpc/eth/getBlockByNumber.spec.ts @@ -1,7 +1,7 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { createCustomCommon } from '@ethereumjs/common' -import { BlobEIP4844Transaction, LegacyTransaction } from '@ethereumjs/tx' -import { Address, hexToBytes } from '@ethereumjs/util' +import { createBlock } from '@ethereumjs/block' +import { Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createBlob4844Tx, createLegacyTx } from '@ethereumjs/tx' +import { createZeroAddress, hexToBytes } from '@ethereumjs/util' import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' @@ -10,14 +10,14 @@ import { createClient, createManager, dummy, getRpcClient, startRPC } from '../h const kzg = await loadKZG() -const common = createCustomCommon({ chainId: 1 }, { customCrypto: { kzg } }) +const common = createCustomCommon({ chainId: 1 }, Mainnet, { customCrypto: { kzg } }) common.setHardfork('cancun') -const mockedTx1 = LegacyTransaction.fromTxData({}).sign(dummy.privKey) -const mockedTx2 = LegacyTransaction.fromTxData({ nonce: 1 }).sign(dummy.privKey) -const mockedBlobTx3 = BlobEIP4844Transaction.fromTxData( - { nonce: 2, blobsData: ['0x1234'], to: Address.zero() }, - { common } +const mockedTx1 = createLegacyTx({}).sign(dummy.privKey) +const mockedTx2 = createLegacyTx({ nonce: 1 }).sign(dummy.privKey) +const mockedBlobTx3 = createBlob4844Tx( + { nonce: 2, blobsData: ['0x1234'], to: createZeroAddress() }, + { common }, ).sign(dummy.privKey) const blockHash = hexToBytes('0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5') const transactions = [mockedTx1] @@ -25,14 +25,13 @@ const transactions2 = [mockedTx2] const block = { hash: () => blockHash, - serialize: () => - createBlockFromBlockData({ header: { number: 1 }, transactions: transactions2 }).serialize(), + serialize: () => createBlock({ header: { number: 1 }, transactions: transactions2 }).serialize(), header: { number: BigInt(1), hash: () => blockHash, }, toJSON: () => ({ - ...createBlockFromBlockData({ header: { number: 1 } }).toJSON(), + ...createBlock({ header: { number: 1 } }).toJSON(), transactions: transactions2, }), transactions: transactions2, @@ -41,16 +40,16 @@ const block = { function createChain(headBlock = block) { const genesisBlockHash = hexToBytes( - '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5' + '0xdcf93da321b27bca12087d6526d2c10540a4c8dc29db1b36610c3004e0e5d2d5', ) const genesisBlock = { hash: () => genesisBlockHash, - serialize: () => createBlockFromBlockData({ header: { number: 0 }, transactions }).serialize(), + serialize: () => createBlock({ header: { number: 0 }, transactions }).serialize(), header: { number: BigInt(0), }, toJSON: () => ({ - ...createBlockFromBlockData({ header: { number: 0 } }).toJSON(), + ...createBlock({ header: { number: 0 } }).toJSON(), transactions, }), transactions, @@ -86,7 +85,7 @@ describe(method, async () => { assert.equal( typeof 
res.result.transactions[0], 'string', - 'should return only the hashes of the transactions' + 'should return only the hashes of the transactions', ) }) @@ -134,8 +133,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"' - ) + 'invalid argument 0: block option must be a valid 0x-prefixed block hash or hex integer, or "latest", "earliest" or "pending"', + ), ) }) @@ -166,13 +165,13 @@ describe(method, async () => { describe('call with block with blob txs', () => { it('retrieves a block with a blob tx in it', async () => { - const genesisBlock = createBlockFromBlockData({ header: { number: 0 } }) - const block1 = createBlockFromBlockData( + const genesisBlock = createBlock({ header: { number: 0 } }) + const block1 = createBlock( { header: { number: 1, parentHash: genesisBlock.header.hash() }, transactions: [mockedBlobTx3], }, - { common } + { common }, ) const manager = createManager(await createClient({ chain: createChain(block1 as any) })) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -181,7 +180,7 @@ describe(method, async () => { assert.equal( res.result.transactions[0].blobVersionedHashes.length, 1, - 'block body contains a transaction with the blobVersionedHashes field' + 'block body contains a transaction with the blobVersionedHashes field', ) }) }) diff --git a/packages/client/test/rpc/eth/getBlockReceipts.spec.ts b/packages/client/test/rpc/eth/getBlockReceipts.spec.ts index 62ce82605c..f42ced7c80 100644 --- a/packages/client/test/rpc/eth/getBlockReceipts.spec.ts +++ b/packages/client/test/rpc/eth/getBlockReceipts.spec.ts @@ -1,9 +1,5 @@ import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { - BlobEIP4844Transaction, - FeeMarketEIP1559Transaction, - LegacyTransaction, -} from '@ethereumjs/tx' +import { createBlob4844Tx, createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { bigIntToHex, blobsToCommitments, @@ -32,22 +28,22 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain(pow, 'pow') const rpc = getRpcClient(server) // construct tx - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) - const tx2 = LegacyTransaction.fromTxData( + const tx2 = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000', nonce: 1, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx, tx2]) const res0 = await rpc.request(method, [bytesToHex(tx.hash())]) @@ -59,20 +55,20 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 2000000, maxFeePerGas: 975000000, maxPriorityFeePerGas: 10, to: '0x1230000000000000000000000000000000000321', }, - { common } + { common }, ).sign(dummy.privKey) - const tx1 = FeeMarketEIP1559Transaction.fromTxData( + const tx1 = createFeeMarket1559Tx( { gasLimit: 2000000, maxFeePerGas: 975000000, @@ -80,7 +76,7 @@ describe(method, () => { to: 
'0x1230000000000000000000000000000000000321', nonce: 1, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx, tx1]) @@ -128,7 +124,7 @@ describe(method, () => { const commitments = blobsToCommitments(kzg, blobs) const blobVersionedHashes = commitmentsToVersionedHashes(commitments) const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -141,7 +137,7 @@ describe(method, () => { to: randomBytes(20), nonce: 0n, }, - { common } + { common }, ).sign(dummy.privKey) const block = await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts b/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts index 74c974f541..b58d9c50a0 100644 --- a/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts +++ b/packages/client/test/rpc/eth/getBlockTransactionCountByNumber.spec.ts @@ -1,9 +1,10 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address } from '@ethereumjs/util' +import { createLegacyTx } from '@ethereumjs/tx' +import { createAddressFromString } from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -14,7 +15,7 @@ import type { Block } from '@ethereumjs/block' const method = 'eth_getBlockTransactionCountByNumber' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) describe(method, () => { it('call with valid arguments', async () => { @@ -32,17 +33,17 @@ describe(method, () => { assert.notEqual(execution, undefined, 'should have valid execution') const { vm } = execution - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit: 53000 }, { common, freeze: false }) tx.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -50,13 +51,13 @@ describe(method, () => { gasLimit: 2000000, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock!) 
// verify that the transaction count is 1 @@ -79,32 +80,26 @@ describe(method, () => { assert.notEqual(execution, undefined, 'should have valid execution') const { vm } = execution - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit: 53000 }, { common, freeze: false }) tx.getSenderAddress = () => { return address } - const tx2 = LegacyTransaction.fromTxData( - { gasLimit: 53000, nonce: 1 }, - { common, freeze: false } - ) + const tx2 = createLegacyTx({ gasLimit: 53000, nonce: 1 }, { common, freeze: false }) tx2.getSenderAddress = () => { return address } - const tx3 = LegacyTransaction.fromTxData( - { gasLimit: 53000, nonce: 2 }, - { common, freeze: false } - ) + const tx3 = createLegacyTx({ gasLimit: 53000, nonce: 2 }, { common, freeze: false }) tx3.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -112,7 +107,7 @@ describe(method, () => { gasLimit: 2000000, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx block.transactions[1] = tx2 @@ -120,7 +115,7 @@ describe(method, () => { let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock!) 
// verify that the transaction count is 3 diff --git a/packages/client/test/rpc/eth/getCode.spec.ts b/packages/client/test/rpc/eth/getCode.spec.ts index 0e1419bb38..3ec030e32c 100644 --- a/packages/client/test/rpc/eth/getCode.spec.ts +++ b/packages/client/test/rpc/eth/getCode.spec.ts @@ -1,9 +1,10 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address } from '@ethereumjs/util' +import { createLegacyTx } from '@ethereumjs/tx' +import { createAddressFromString, createContractAddress } from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -14,7 +15,7 @@ import type { Block } from '@ethereumjs/block' const method = 'eth_getCode' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) describe(method, () => { it('call with valid arguments', async () => { @@ -27,10 +28,10 @@ describe(method, () => { const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService assert.notEqual(execution, undefined, 'should have valid execution') const { vm } = execution - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) // genesis address - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // verify code is null const res = await rpc.request(method, [address.toString(), 'latest']) @@ -53,7 +54,7 @@ describe(method, () => { const { vm } = execution // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // sample contract from https://ethereum.stackexchange.com/a/70791 const data = @@ -63,12 +64,12 @@ describe(method, () => { // construct block with tx const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit, data }, { common, freeze: false }) tx.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -76,21 +77,21 @@ describe(method, () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx // deploy contract let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const result = await runBlock(vm, { block, generate: true, skipBlockValidation: true }) const { createdAddress } = result.results[0] await vm.blockchain.putBlock(ranBlock!) 
- const expectedContractAddress = Address.generate(address, BigInt(0)) + const expectedContractAddress = createContractAddress(address, BigInt(0)) assert.ok( createdAddress!.equals(expectedContractAddress), - 'should match the expected contract address' + 'should match the expected contract address', ) // verify contract has code diff --git a/packages/client/test/rpc/eth/getFeeHistory.spec.ts b/packages/client/test/rpc/eth/getFeeHistory.spec.ts index 0de524c748..fd6d80f044 100644 --- a/packages/client/test/rpc/eth/getFeeHistory.spec.ts +++ b/packages/client/test/rpc/eth/getFeeHistory.spec.ts @@ -1,15 +1,18 @@ -import { Common, Chain as CommonChain, Hardfork } from '@ethereumjs/common' -import { TransactionFactory } from '@ethereumjs/tx' +import { paramsBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createTxFromTxData } from '@ethereumjs/tx' import { - Address, BIGINT_0, BIGINT_256, bigIntToHex, blobsToCommitments, bytesToBigInt, commitmentsToVersionedHashes, + createAddressFromPrivateKey, + createZeroAddress, getBlobs, } from '@ethereumjs/util' +import { buildBlock } from '@ethereumjs/vm' import { hexToBytes } from 'ethereum-cryptography/utils' import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' @@ -24,19 +27,19 @@ import type { VMExecution } from '../../../src/execution/index.js' const method = 'eth_feeHistory' const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109') -const pKeyAddress = Address.fromPrivateKey(privateKey) +const pKeyAddress = createAddressFromPrivateKey(privateKey) const privateKey4844 = hexToBytes( - '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8' + '0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8', ) -const p4844Address = Address.fromPrivateKey(privateKey4844) +const p4844Address = createAddressFromPrivateKey(privateKey4844) const produceFakeGasUsedBlock = async (execution: VMExecution, chain: Chain, gasUsed: bigint) => { const { vm } = execution const parentBlock = await chain.getCanonicalHeadBlock() const vmCopy = await vm.shallowCopy() // Set block's gas used to max - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { timestamp: parentBlock.header.timestamp + BigInt(1), @@ -67,7 +70,7 @@ const produceBlockWithTx = async ( execution: VMExecution, chain: Chain, maxPriorityFeesPerGas: bigint[] = [BigInt(0xff)], - gasLimits: bigint[] = [BigInt(0xfffff)] + gasLimits: bigint[] = [BigInt(0xfffff)], ) => { const { vm } = execution const account = await vm.stateManager.getAccount(pKeyAddress) @@ -75,7 +78,7 @@ const produceBlockWithTx = async ( const parentBlock = await chain.getCanonicalHeadBlock() const vmCopy = await vm.shallowCopy() // Set block's gas used to max - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { timestamp: parentBlock.header.timestamp + BigInt(1), @@ -89,7 +92,7 @@ const produceBlockWithTx = async ( const maxPriorityFeePerGas = maxPriorityFeesPerGas[i] const gasLimit = gasLimits[i] await blockBuilder.addTransaction( - TransactionFactory.fromTxData( + createTxFromTxData( { type: 2, gasLimit, @@ -98,8 +101,8 @@ const produceBlockWithTx = async ( nonce, data: '0xFE', }, - { common: vmCopy.common } - ).sign(privateKey) + { common: vmCopy.common }, + ).sign(privateKey), ) nonce++ } @@ -118,7 +121,7 @@ const produceBlockWithTx = async ( const 
produceBlockWith4844Tx = async ( execution: VMExecution, chain: Chain, - blobsCount: number[] + blobsCount: number[], ) => { const kzg = await loadKZG() // 4844 sample blob @@ -132,7 +135,7 @@ const produceBlockWith4844Tx = async ( const parentBlock = await chain.getCanonicalHeadBlock() const vmCopy = await vm.shallowCopy() // Set block's gas used to max - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { timestamp: parentBlock.header.timestamp + BigInt(1), @@ -146,7 +149,7 @@ const produceBlockWith4844Tx = async ( const blobVersionedHashes = [] const blobs = [] const kzgCommitments = [] - const to = Address.zero() + const to = createZeroAddress() if (blobsCount[i] > 0) { for (let blob = 0; blob < blobsCount[i]; blob++) { blobVersionedHashes.push(...blobVersionedHash) @@ -155,7 +158,7 @@ const produceBlockWith4844Tx = async ( } } await blockBuilder.addTransaction( - TransactionFactory.fromTxData( + createTxFromTxData( { type: 3, gasLimit: 21000, @@ -168,8 +171,8 @@ const produceBlockWith4844Tx = async ( kzgCommitments, maxFeePerBlobGas: BigInt(1000), }, - { common: vmCopy.common } - ).sign(privateKey4844) + { common: vmCopy.common }, + ).sign(privateKey4844), ) nonce++ } @@ -197,13 +200,13 @@ describe(method, () => { const [firstBaseFee, previousBaseFee, nextBaseFee] = res.result.baseFeePerGas as [ string, string, - string + string, ] const increase = Number( (1000n * (bytesToBigInt(hexToBytes(nextBaseFee)) - bytesToBigInt(hexToBytes(previousBaseFee)))) / - bytesToBigInt(hexToBytes(previousBaseFee)) + bytesToBigInt(hexToBytes(previousBaseFee)), ) / 1000 // Note: this also ensures that block 2,3 are returned, since gas of block 0 -> 1 and 1 -> 2 does not change @@ -238,7 +241,7 @@ describe(method, () => { Number( (1000n * (bytesToBigInt(hexToBytes(nextBaseFee)) - bytesToBigInt(hexToBytes(previousBaseFee)))) / - bytesToBigInt(hexToBytes(previousBaseFee)) + bytesToBigInt(hexToBytes(previousBaseFee)), ) / 1000 assert.equal(decrease, -0.125) @@ -247,11 +250,12 @@ describe(method, () => { it(`${method}: should return initial base fee if the block number is london hard fork`, async () => { const common = new Common({ eips: [1559], - chain: CommonChain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, + params: paramsBlock, }) - const initialBaseFee = common.param('gasConfig', 'initialBaseFee') + const initialBaseFee = common.param('initialBaseFee') const { server } = await setupChain(gethGenesisStartLondon(pow), 'powLondon') const rpc = getRpcClient(server) @@ -322,12 +326,12 @@ describe(method, () => { assert.equal( parseInt(res.result.reward[0][0]), 0, - 'Should return 0 for empty block reward percentiles' + 'Should return 0 for empty block reward percentiles', ) assert.equal( res.result.reward[0][1], '0x0', - 'Should return 0 for empty block reward percentiles' + 'Should return 0 for empty block reward percentiles', ) }) it(`${method}: should generate reward percentiles`, async () => { @@ -384,7 +388,7 @@ describe(method, () => { const res = await rpc.request(method, ['0x1', 'latest', [10, 20, 60, 100]]) const expected = [priorityFees[0], priorityFees[0], priorityFees[1], priorityFees[1]].map( - bigIntToHex + bigIntToHex, ) assert.deepEqual(res.result.reward[0], expected) @@ -441,6 +445,6 @@ describe(method, () => { }, { timeout: 60000, - } + }, ) }) diff --git a/packages/client/test/rpc/eth/getLogs.spec.ts b/packages/client/test/rpc/eth/getLogs.spec.ts index 8267907200..db1f6e6d91 100644 --- 
a/packages/client/test/rpc/eth/getLogs.spec.ts +++ b/packages/client/test/rpc/eth/getLogs.spec.ts @@ -1,5 +1,5 @@ -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { createLegacyTx } from '@ethereumjs/tx' +import { bytesToHex, createContractAddress, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -23,7 +23,7 @@ const method = 'eth_getLogs' ``` */ const logExampleBytecode = hexToBytes( - '0x608060405234801561001057600080fd5b50610257806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063aefb4f0a1461004d575b600080fd5b610067600480360381019061006291906100de565b610069565b005b60005b858110156100c1578284867fbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb856040516100a69190610168565b60405180910390a480806100b99061018d565b91505061006c565b505050505050565b6000813590506100d88161020a565b92915050565b600080600080600060a086880312156100fa576100f9610205565b5b6000610108888289016100c9565b9550506020610119888289016100c9565b945050604061012a888289016100c9565b935050606061013b888289016100c9565b925050608061014c888289016100c9565b9150509295509295909350565b61016281610183565b82525050565b600060208201905061017d6000830184610159565b92915050565b6000819050919050565b600061019882610183565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101cb576101ca6101d6565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600080fd5b61021381610183565b811461021e57600080fd5b5056fea2646970667358221220b98f45f4d4112e71fd287ab0ce7cc1872e53b463eb0abf1182b892192d3d8a1d64736f6c63430008070033' + '0x608060405234801561001057600080fd5b50610257806100206000396000f3fe608060405234801561001057600080fd5b5060043610610048576000357c010000000000000000000000000000000000000000000000000000000090048063aefb4f0a1461004d575b600080fd5b610067600480360381019061006291906100de565b610069565b005b60005b858110156100c1578284867fbf642f3055e2ef2589825c2c0dd4855c1137a63f6260d9d112629e5cd034a3eb856040516100a69190610168565b60405180910390a480806100b99061018d565b91505061006c565b505050505050565b6000813590506100d88161020a565b92915050565b600080600080600060a086880312156100fa576100f9610205565b5b6000610108888289016100c9565b9550506020610119888289016100c9565b945050604061012a888289016100c9565b935050606061013b888289016100c9565b925050608061014c888289016100c9565b9150509295509295909350565b61016281610183565b82525050565b600060208201905061017d6000830184610159565b92915050565b6000819050919050565b600061019882610183565b91507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8214156101cb576101ca6101d6565b5b600182019050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b600080fd5b61021381610183565b811461021e57600080fd5b5056fea2646970667358221220b98f45f4d4112e71fd287ab0ce7cc1872e53b463eb0abf1182b892192d3d8a1d64736f6c63430008070033', ) describe(method, async () => { @@ -32,47 +32,47 @@ describe(method, async () => { const rpc = getRpcClient(server) // deploy contracts at two different addresses const txData = { gasLimit: 2000000, gasPrice: 100 } - const tx1 = LegacyTransaction.fromTxData( + const tx1 = createLegacyTx( { ...txData, data: logExampleBytecode, nonce: 0, }, - { common } + { common }, ).sign(dummy.privKey) - const tx2 = LegacyTransaction.fromTxData( + const tx2 
= createLegacyTx( { ...txData, data: logExampleBytecode, nonce: 1, }, - { common } + { common }, ).sign(dummy.privKey) - const contractAddr1 = Address.generate(dummy.addr, BigInt(0)) - const contractAddr2 = Address.generate(dummy.addr, BigInt(1)) + const contractAddr1 = createContractAddress(dummy.addr, BigInt(0)) + const contractAddr2 = createContractAddress(dummy.addr, BigInt(1)) // construct txs to emit the logs // data calls log(logCount: 10, num1: 1, num2: 2, num3: 3, num4: 4) const data = hexToBytes( - '0xaefb4f0a000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004' + '0xaefb4f0a000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004', ) - const tx3 = LegacyTransaction.fromTxData( + const tx3 = createLegacyTx( { ...txData, data, to: contractAddr1, nonce: 2, }, - { common } + { common }, ).sign(dummy.privKey) - const tx4 = LegacyTransaction.fromTxData( + const tx4 = createLegacyTx( { ...txData, data, to: contractAddr2, nonce: 3, }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx1, tx2, tx3, tx4]) @@ -94,7 +94,7 @@ describe(method, async () => { ) { assert.ok( true, - `should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')` + `should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')`, ) } else { assert.fail(`should return the correct logs (fromBlock/toBlock as 'earliest' and 'latest')`) @@ -105,7 +105,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (fromBlock/toBlock as block numbers)' + 'should return the correct logs (fromBlock/toBlock as block numbers)', ) // test filtering by single address @@ -137,7 +137,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - empty means anything)' + 'should return the correct logs (filter by topic - empty means anything)', ) // test filtering by topics (exact match) @@ -147,7 +147,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - exact match)' + 'should return the correct logs (filter by topic - exact match)', ) // test filtering by topics (exact match for second topic) @@ -157,7 +157,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - exact match for second topic)' + 'should return the correct logs (filter by topic - exact match for second topic)', ) // test filtering by topics (A or B in first position) @@ -177,7 +177,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - A or B in first position)' + 'should return the correct logs (filter by topic - A or B in first position)', ) // test filtering by topics (null means anything) @@ -190,7 +190,7 @@ describe(method, async () => { assert.equal( res.result.length, 20, - 'should return the correct logs (filter by topic - null means anything)' + 
'should return the correct logs (filter by topic - null means anything)', ) // test filtering by blockHash @@ -234,8 +234,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'Can only specify a blockHash if fromBlock or toBlock are not provided' - ) + 'Can only specify a blockHash if fromBlock or toBlock are not provided', + ), ) res = await rpc.request(method, [ @@ -247,8 +247,8 @@ describe(method, async () => { assert.equal(res.error.code, INVALID_PARAMS) assert.ok( res.error.message.includes( - 'Can only specify a blockHash if fromBlock or toBlock are not provided' - ) + 'Can only specify a blockHash if fromBlock or toBlock are not provided', + ), ) // unknown address diff --git a/packages/client/test/rpc/eth/getProof.spec.ts b/packages/client/test/rpc/eth/getProof.spec.ts index 0c298ba2b7..667c42226e 100644 --- a/packages/client/test/rpc/eth/getProof.spec.ts +++ b/packages/client/test/rpc/eth/getProof.spec.ts @@ -1,8 +1,9 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Common } from '@ethereumjs/common' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address, bigIntToHex } from '@ethereumjs/util' +import { Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createLegacyTx } from '@ethereumjs/tx' +import { bigIntToHex, createAddressFromString } from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { createClient, createManager, getRpcClient, startRPC } from '../helpers.js' @@ -42,7 +43,6 @@ const expectedProof = { const testnetData = { name: 'testnet2', chainId: 12345, - networkId: 12345, defaultHardfork: 'istanbul', consensus: { type: 'pow', @@ -87,7 +87,7 @@ const testnetData = { bootstrapNodes: [], } -const common = new Common({ chain: 'testnet2', customChains: [testnetData] }) +const common = createCustomCommon({ ...testnetData }, Mainnet) describe(method, async () => { it('call with valid arguments', async () => { @@ -106,7 +106,7 @@ describe(method, async () => { const { vm } = execution // genesis address with balance - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // contract inspired from https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getstorageat/ /* @@ -127,12 +127,12 @@ describe(method, async () => { // construct block with tx const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit, data }, { common, freeze: false }) tx.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -140,14 +140,14 @@ describe(method, async () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx // deploy contract let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const result = await runBlock(vm, { block, generate: true, skipBlockValidation: true }) const { createdAddress } = 
result.results[0] await vm.blockchain.putBlock(ranBlock!) @@ -160,11 +160,11 @@ describe(method, async () => { gasLimit: bigIntToHex(BigInt(530000)), nonce: 1, } - const storeTx = LegacyTransaction.fromTxData(storeTxData, { common, freeze: false }) + const storeTx = createLegacyTx(storeTxData, { common, freeze: false }) storeTx.getSenderAddress = () => { return address } - const block2 = createBlockFromBlockData( + const block2 = createBlock( { header: { parentHash: ranBlock!.hash(), @@ -172,14 +172,14 @@ describe(method, async () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: block.header } + { common, calcDifficultyFromHeader: block.header }, ) block2.transactions[0] = storeTx // run block let ranBlock2: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock2 = result.block)) - await vm.runBlock({ block: block2, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block: block2, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock2!) // verify proof is accurate diff --git a/packages/client/test/rpc/eth/getStorageAt.spec.ts b/packages/client/test/rpc/eth/getStorageAt.spec.ts index 0dada977d2..39fd257111 100644 --- a/packages/client/test/rpc/eth/getStorageAt.spec.ts +++ b/packages/client/test/rpc/eth/getStorageAt.spec.ts @@ -1,6 +1,7 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address } from '@ethereumjs/util' +import { createBlock } from '@ethereumjs/block' +import { createLegacyTx } from '@ethereumjs/tx' +import { createAddressFromString } from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -13,7 +14,7 @@ const method = 'eth_getStorageAt' describe(method, async () => { it('call with valid arguments', async () => { - const address = Address.fromString(`0x${'11'.repeat(20)}`) + const address = createAddressFromString(`0x${'11'.repeat(20)}`) const emptySlotStr = `0x${'00'.repeat(32)}` const { execution, common, server, chain } = await setupChain(pow, 'pow') @@ -28,11 +29,11 @@ describe(method, async () => { // construct block with tx const gasLimit = 2000000 - const tx = LegacyTransaction.fromTxData({ gasLimit, data }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit, data }, { common, freeze: false }) const signedTx = tx.sign(tx.getHashedMessageToSign()) const parent = await chain.blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -40,14 +41,18 @@ describe(method, async () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = signedTx // deploy contract let ranBlock: Block | undefined = undefined execution.vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - const result = await execution.vm.runBlock({ block, generate: true, skipBlockValidation: true }) + const result = await runBlock(execution.vm, { + block, + generate: true, + skipBlockValidation: true, + }) const { createdAddress } = result.results[0] await chain.putBlocks([ranBlock as unknown as Block]) @@ -60,7 +65,7 @@ describe(method, async () => { assert.equal( res.result, emptySlotStr, - 'should not have new slot value for block that is addressed by "earliest" tag and is older than latest' + 'should not have new slot 
value for block that is addressed by "earliest" tag and is older than latest', ) // call with integer for block number to see if getStorageAt allows addressing blocks by number index @@ -68,7 +73,7 @@ describe(method, async () => { assert.equal( res.result, expectedSlotValue, - 'should return the correct slot value when addressing the latest block by integer index' + 'should return the correct slot value when addressing the latest block by integer index', ) // call with unsupported block argument diff --git a/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts b/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts index 47a4c00884..05ee246b57 100644 --- a/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionByBlockHashAndIndex.spec.ts @@ -1,4 +1,4 @@ -import { LegacyTransaction } from '@ethereumjs/tx' +import { createLegacyTx } from '@ethereumjs/tx' import { assert, describe, it } from 'vitest' import { INVALID_PARAMS } from '../../../src/rpc/error-code.js' @@ -11,18 +11,18 @@ const method = 'eth_getTransactionByBlockHashAndIndex' async function setUp() { const { common, execution, server, chain } = await setupChain(pow, 'pow') const txs = [ - LegacyTransaction.fromTxData( + createLegacyTx( { gasLimit: 21000, gasPrice: 100, nonce: 0, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey), - LegacyTransaction.fromTxData( + createLegacyTx( { gasLimit: 21000, gasPrice: 50, nonce: 1, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey), ] @@ -88,7 +88,7 @@ describe(method, async () => { const { rpc } = await baseSetup() const mockBlockHash = '0x572856aae9a653012a7df7aeb56bfb7fe77f5bcb4b69fd971c04e989f6ccf9b1' - const mockTxIndex = 'INVALIDA_TXINDEX' + const mockTxIndex = 'INVALID_TXINDEX' const res = await rpc.request(method, [mockBlockHash, mockTxIndex]) assert.equal(res.error.code, INVALID_PARAMS) diff --git a/packages/client/test/rpc/eth/getTransactionByHash.spec.ts b/packages/client/test/rpc/eth/getTransactionByHash.spec.ts index 6297d4cc38..405ef78192 100644 --- a/packages/client/test/rpc/eth/getTransactionByHash.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionByHash.spec.ts @@ -1,4 +1,4 @@ -import { FeeMarketEIP1559Transaction, LegacyTransaction } from '@ethereumjs/tx' +import { createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -18,9 +18,9 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain(pow, 'pow', { txLookupLimit: 1 }) const rpc = getRpcClient(server) // construct tx - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000' }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -39,18 +39,18 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), 'powLondon', - { txLookupLimit: 0 } + { txLookupLimit: 0 }, ) const rpc = getRpcClient(server) // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 2000000, maxFeePerGas: 975000000, maxPriorityFeePerGas: 10, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await 
runBlockWithTxs(chain, execution, [tx]) @@ -67,7 +67,7 @@ describe(method, () => { assert.equal( res.result.hash, bytesToHex(tx.hash()), - 'should return the correct tx when txLookupLimit=0' + 'should return the correct tx when txLookupLimit=0', ) }) diff --git a/packages/client/test/rpc/eth/getTransactionCount.spec.ts b/packages/client/test/rpc/eth/getTransactionCount.spec.ts index 71b8eb29f8..dd5b1f52a6 100644 --- a/packages/client/test/rpc/eth/getTransactionCount.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionCount.spec.ts @@ -1,9 +1,16 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' -import { LegacyTransaction, TransactionFactory } from '@ethereumjs/tx' -import { Account, Address, hexToBytes, randomBytes } from '@ethereumjs/util' +import { createLegacyTx, createTxFromTxData } from '@ethereumjs/tx' +import { + Account, + createAddressFromPrivateKey, + createAddressFromString, + hexToBytes, + randomBytes, +} from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { createClient, createManager, getRpcClient, startRPC } from '../helpers.js' @@ -13,7 +20,7 @@ import type { Block } from '@ethereumjs/block' const method = 'eth_getTransactionCount' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) describe(method, () => { it('call with valid arguments', async () => { @@ -33,22 +40,22 @@ describe(method, () => { // since synchronizer.run() is not executed in the mock setup, // manually run stateManager.generateCanonicalGenesis() - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) // a genesis address - const address = Address.fromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') + const address = createAddressFromString('0xccfd725760a68823ff1e062f4cc97e1360e8d997') // verify nonce is 0 let res = await rpc.request(method, [address.toString(), 'latest']) assert.equal(res.result, '0x0', 'should return the correct nonce (0)') // construct block with tx - const tx = LegacyTransaction.fromTxData({ gasLimit: 53000 }, { common, freeze: false }) + const tx = createLegacyTx({ gasLimit: 53000 }, { common, freeze: false }) tx.getSenderAddress = () => { return address } const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -56,13 +63,13 @@ describe(method, () => { gasLimit: 2000000, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) block.transactions[0] = tx let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock!) 
// verify nonce increments after a tx @@ -83,12 +90,12 @@ describe(method, () => { const rpc = getRpcClient(startRPC(manager.getMethods())) const pk = hexToBytes('0x266682876da8fd86410d001ec33c7c281515aeeb640d175693534062e2599238') - const address = Address.fromPrivateKey(pk) + const address = createAddressFromPrivateKey(pk) await service.execution.vm.stateManager.putAccount(address, new Account()) const account = await service.execution.vm.stateManager.getAccount(address) account!.balance = 0xffffffffffffffn await service.execution.vm.stateManager.putAccount(address, account!) - const tx = TransactionFactory.fromTxData({ + const tx = createTxFromTxData({ to: randomBytes(20), value: 1, maxFeePerGas: 0xffffff, diff --git a/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts b/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts index 8ce2c8cf2e..cd56eee9e3 100644 --- a/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts +++ b/packages/client/test/rpc/eth/getTransactionReceipt.spec.ts @@ -1,9 +1,5 @@ import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { - BlobEIP4844Transaction, - FeeMarketEIP1559Transaction, - LegacyTransaction, -} from '@ethereumjs/tx' +import { createBlob4844Tx, createFeeMarket1559Tx, createLegacyTx } from '@ethereumjs/tx' import { blobsToCommitments, bytesToHex, @@ -30,13 +26,13 @@ describe(method, () => { const { chain, common, execution, server } = await setupChain(pow, 'pow') const rpc = getRpcClient(server) // construct tx - const tx = LegacyTransaction.fromTxData( + const tx = createLegacyTx( { gasLimit: 2000000, gasPrice: 100, to: '0x0000000000000000000000000000000000000000', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -49,18 +45,18 @@ describe(method, () => { it('call with 1559 tx', async () => { const { chain, common, execution, server } = await setupChain( gethGenesisStartLondon(pow), - 'powLondon' + 'powLondon', ) const rpc = getRpcClient(server) // construct tx - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { gasLimit: 2000000, maxFeePerGas: 975000000, maxPriorityFeePerGas: 10, to: '0x1230000000000000000000000000000000000321', }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx]) @@ -108,7 +104,7 @@ describe(method, () => { const commitments = blobsToCommitments(kzg, blobs) const blobVersionedHashes = commitmentsToVersionedHashes(commitments) const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -121,7 +117,7 @@ describe(method, () => { to: randomBytes(20), nonce: 0n, }, - { common } + { common }, ).sign(dummy.privKey) await runBlockWithTxs(chain, execution, [tx], true) diff --git a/packages/client/test/rpc/eth/sendRawTransaction.spec.ts b/packages/client/test/rpc/eth/sendRawTransaction.spec.ts index 3fd39270a5..8d1748049d 100644 --- a/packages/client/test/rpc/eth/sendRawTransaction.spec.ts +++ b/packages/client/test/rpc/eth/sendRawTransaction.spec.ts @@ -1,11 +1,7 @@ import { BlockHeader } from '@ethereumjs/block' -import { Chain, Common, Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet, createCommonFromGethGenesis } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { - BlobEIP4844Transaction, - FeeMarketEIP1559Transaction, - 
LegacyTransaction, -} from '@ethereumjs/tx' +import { createBlob4844Tx, createFeeMarket1559TxFromRLP, createLegacyTx } from '@ethereumjs/tx' import { Account, blobsToCommitments, @@ -34,33 +30,33 @@ describe(method, () => { DefaultStateManager.prototype.shallowCopy = function () { return this } - const common = new Common({ chain: Chain.Mainnet }) + const common = new Common({ chain: Mainnet }) common .hardforks() .filter((hf) => hf.timestamp !== undefined) .map((hf) => { hf.timestamp = undefined }) + const syncTargetHeight = common.hardforkBlock(Hardfork.London) const { rpc, client } = await baseSetup({ syncTargetHeight, includeVM: true }) // Mainnet EIP-1559 tx const txData = '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const transaction = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData)) + const transaction = createFeeMarket1559TxFromRLP(hexToBytes(txData)) const address = transaction.getSenderAddress() const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm - await vm.stateManager.putAccount(address, new Account()) const account = await vm.stateManager.getAccount(address) account!.balance = BigInt('40100000') await vm.stateManager.putAccount(address, account!) - const res = await rpc.request(method, [txData]) + assert.equal( res.result, '0xd7217a7d3251880051783f305a3536e368c604aa1f1602e6cd107eb7b87129da', - 'should return the correct tx hash' + 'should return the correct tx hash', ) // Restore setStateRoot @@ -72,10 +68,10 @@ describe(method, () => { // Disable stateroot validation in TxPool since valid state root isn't available const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot DefaultStateManager.prototype.setStateRoot = (): any => {} - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const syncTargetHeight = new Common({ chain: Mainnet }).hardforkBlock(Hardfork.London) const { rpc } = await baseSetup({ syncTargetHeight, includeVM: true }) - const transaction = LegacyTransaction.fromTxData({ + const transaction = createLegacyTx({ gasLimit: 21000, gasPrice: 0, nonce: 0, @@ -88,7 +84,7 @@ describe(method, () => { assert.equal( res.result, '0xf6798d5ed936a464ef4f49dd5a3abe1ad6947364912bd47c5e56781125d44ac3', - 'local tx with lower gasprice than minimum gasprice added to pool' + 'local tx with lower gasprice than minimum gasprice added to pool', ) // Restore setStateRoot @@ -99,7 +95,7 @@ describe(method, () => { // Disable stateroot validation in TxPool since valid state root isn't available const originalSetStateRoot = DefaultStateManager.prototype.setStateRoot DefaultStateManager.prototype.setStateRoot = (): any => {} - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const syncTargetHeight = new Common({ chain: Mainnet }).hardforkBlock(Hardfork.London) const { rpc } = await baseSetup({ syncTargetHeight, includeVM: true }) // Mainnet EIP-1559 tx @@ -126,13 +122,13 @@ describe(method, () => { assert.equal(res.error.code, 
INTERNAL_ERROR) assert.ok( res.error.message.includes( - 'client is not aware of the current chain height yet (give sync some more time)' - ) + 'client is not aware of the current chain height yet (give sync some more time)', + ), ) }) it('call with invalid tx (wrong chain ID)', async () => { - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const syncTargetHeight = new Common({ chain: Mainnet }).hardforkBlock(Hardfork.London) const { rpc } = await baseSetup({ syncTargetHeight, includeVM: true }) // Baikal EIP-1559 tx @@ -145,14 +141,14 @@ describe(method, () => { }) it('call with unsigned tx', async () => { - const syncTargetHeight = new Common({ chain: Chain.Mainnet }).hardforkBlock(Hardfork.London) + const syncTargetHeight = new Common({ chain: Mainnet }).hardforkBlock(Hardfork.London) const { rpc } = await baseSetup({ syncTargetHeight }) // Mainnet EIP-1559 tx const txData = '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const tx = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData), { + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const tx = createFeeMarket1559TxFromRLP(hexToBytes(txData), { common, freeze: false, }) @@ -174,7 +170,7 @@ describe(method, () => { DefaultStateManager.prototype.shallowCopy = function () { return this } - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const syncTargetHeight = common.hardforkBlock(Hardfork.London) const { rpc, client } = await baseSetup({ @@ -187,7 +183,7 @@ describe(method, () => { // Mainnet EIP-1559 tx const txData = '0x02f90108018001018402625a0094cccccccccccccccccccccccccccccccccccccccc830186a0b8441a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85bf859940000000000000000000000000000000000000101f842a00000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000060a701a0afb6e247b1c490e284053c87ab5f6b59e219d51f743f7a4d83e400782bc7e4b9a0479a268e0e0acd4de3f1e28e4fac2a6b32a4195e8dfa9d19147abe8807aa6f64' - const transaction = FeeMarketEIP1559Transaction.fromSerializedTx(hexToBytes(txData)) + const transaction = createFeeMarket1559TxFromRLP(hexToBytes(txData)) const address = transaction.getSenderAddress() const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm @@ -237,7 +233,7 @@ describe(method, () => { const blobVersionedHashes = commitmentsToVersionedHashes(commitments) const proofs = blobs.map((blob, ctx) => kzg.computeBlobKzgProof(blob, commitments[ctx])) const pk = randomBytes(32) - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -249,10 +245,10 @@ describe(method, () => { maxPriorityFeePerGas: 1000000n, to: randomBytes(20), }, - { 
common } + { common }, ).sign(pk) - const replacementTx = BlobEIP4844Transaction.fromTxData( + const replacementTx = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -264,7 +260,7 @@ describe(method, () => { maxPriorityFeePerGas: 10000000n, to: randomBytes(20), }, - { common } + { common }, ).sign(pk) const vm = (client.services.find((s) => s.name === 'eth') as FullEthereumService).execution.vm await vm.stateManager.putAccount(tx.getSenderAddress(), new Account()) diff --git a/packages/client/test/rpc/eth/syncing.spec.ts b/packages/client/test/rpc/eth/syncing.spec.ts index 1945cf933e..89f55855fb 100644 --- a/packages/client/test/rpc/eth/syncing.spec.ts +++ b/packages/client/test/rpc/eth/syncing.spec.ts @@ -43,7 +43,7 @@ describe(method, () => { const rpcServer = startRPC(manager.getMethods()) const rpc = getRpcClient(rpcServer) const sync = client.services[0].synchronizer! - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return @@ -65,7 +65,7 @@ describe(method, () => { const rpcServer = startRPC(manager.getMethods()) const rpc = getRpcClient(rpcServer) const sync = client.services[0].synchronizer as FullSynchronizer - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return { diff --git a/packages/client/test/rpc/helpers.ts b/packages/client/test/rpc/helpers.ts index b3b6304b3c..0118bd5aa2 100644 --- a/packages/client/test/rpc/helpers.ts +++ b/packages/client/test/rpc/helpers.ts @@ -1,14 +1,22 @@ -import { BlockHeader } from '@ethereumjs/block' +import { createBlockHeader } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain as ChainEnum, Common, Hardfork, parseGethGenesis } from '@ethereumjs/common' +import { + Common, + Hardfork, + Mainnet, + createCommonFromGethGenesis, + parseGethGenesis, +} from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' import { Address, BIGINT_1, KECCAK256_RLP, + createAddressFromString, hexToBytes, parseGethGenesisState, } from '@ethereumjs/util' +import { buildBlock } from '@ethereumjs/vm' import { Client, Server as RPCServer } from 'jayson/promise' import { MemoryLevel } from 'memory-level' import { assert } from 'vitest' @@ -59,7 +67,7 @@ type createClientArgs = { export function startRPC( methods: any, opts: StartRPCOpts = { port: 0 }, - withEngineMiddleware?: WithEngineMiddleware + withEngineMiddleware?: WithEngineMiddleware, ) { const { port, wsServer } = opts const server = new RPCServer(methods) @@ -87,12 +95,12 @@ export function createManager(client: EthereumClient) { } export async function createClient(clientOpts: Partial = {}) { - const common: Common = clientOpts.commonChain ?? new Common({ chain: ChainEnum.Mainnet }) + const common: Common = clientOpts.commonChain ?? new Common({ chain: Mainnet }) const genesisState = clientOpts.genesisState ?? getGenesis(Number(common.chainId())) ?? {} const config = new Config({ minerCoinbase: clientOpts.minerCoinbase !== undefined - ? Address.fromString(clientOpts.minerCoinbase) + ? 
createAddressFromString(clientOpts.minerCoinbase) : undefined, common, saveReceipts: clientOpts.enableMetaDB, @@ -120,9 +128,9 @@ export async function createClient(clientOpts: Partial = {}) { chain.getTd = async (_hash: Uint8Array, _num: bigint) => BigInt(1000) if ((chain as any)._headers !== undefined) { - ;(chain as any)._headers.latest = BlockHeader.fromHeaderData( + ;(chain as any)._headers.latest = createBlockHeader( { withdrawalsRoot: common.isActivatedEIP(4895) ? KECCAK256_RLP : undefined }, - { common } + { common }, ) } @@ -203,8 +211,11 @@ export async function createClient(clientOpts: Partial = {}) { export async function baseSetup(clientOpts: any = {}) { const client = await createClient(clientOpts) const manager = createManager(client) - const engineMethods = clientOpts.engine === true ? manager.getMethods(true) : {} - const server = startRPC({ ...manager.getMethods(), ...engineMethods }) + const engineMethods = clientOpts.engine === true ? manager.getMethods(true, true) : {} + const server = startRPC({ + ...manager.getMethods(false, true), // Add debug trace since this is for tests + ...engineMethods, + }) const host = server.address() as AddressInfo const rpc = Client.http({ port: host.port }) server.once('close', () => { @@ -220,15 +231,12 @@ export async function setupChain(genesisFile: any, chainName = 'dev', clientOpts const genesisParams = parseGethGenesis(genesisFile, chainName) const genesisState = parseGethGenesisState(genesisFile) const genesisStateRoot = clientOpts.genesisStateRoot - - const common = new Common({ + const common = createCommonFromGethGenesis(genesisFile, { chain: chainName, - customChains: [genesisParams], customCrypto: clientOpts.customCrypto, }) common.setHardforkBy({ blockNumber: 0, - td: genesisParams.genesis.difficulty, timestamp: genesisParams.genesis.timestamp, }) @@ -279,13 +287,13 @@ export async function runBlockWithTxs( chain: Chain, execution: VMExecution, txs: TypedTransaction[], - fromEngine = false + fromEngine = false, ) { const { vm } = execution // build block with tx const parentBlock = await chain.getCanonicalHeadBlock() const vmCopy = await vm.shallowCopy() - const blockBuilder = await vmCopy.buildBlock({ + const blockBuilder = await buildBlock(vmCopy, { parentBlock, headerData: { timestamp: parentBlock.header.timestamp + BIGINT_1, diff --git a/packages/client/test/rpc/mockBlockchain.ts b/packages/client/test/rpc/mockBlockchain.ts index 79d12643b2..85a3dccb13 100644 --- a/packages/client/test/rpc/mockBlockchain.ts +++ b/packages/client/test/rpc/mockBlockchain.ts @@ -1,27 +1,27 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { LegacyTransaction } from '@ethereumjs/tx' +import { createBlock } from '@ethereumjs/block' +import { createLegacyTx } from '@ethereumjs/tx' import { equalsBytes, toBytes } from '@ethereumjs/util' import { dummy } from './helpers.js' +import type { LegacyTx } from '@ethereumjs/tx' + export function mockBlockchain(options: any = {}) { const number = options.number ?? '0x444444' const blockHash = options.hash ?? '0x910abca1728c53e8d6df870dd7af5352e974357dc58205dea1676be17ba6becf' - const transactions = options.transactions ?? [ - LegacyTransaction.fromTxData({}).sign(dummy.privKey), - ] + const transactions = options.transactions ?? 
[createLegacyTx({}).sign(dummy.privKey)] const block = { hash: () => toBytes(blockHash), - serialize: () => createBlockFromBlockData({ header: { number }, transactions }).serialize(), + serialize: () => createBlock({ header: { number }, transactions }).serialize(), header: { number: BigInt(number), hash: () => toBytes(blockHash), }, toJSON: () => ({ - ...createBlockFromBlockData({ header: { number } }).toJSON(), + ...createBlock({ header: { number } }).toJSON(), hash: options.hash ?? blockHash, - transactions: transactions.map((t: LegacyTransaction) => t.toJSON()), + transactions: transactions.map((t: LegacyTx) => t.toJSON()), }), transactions, uncleHeaders: [], @@ -35,7 +35,7 @@ export function mockBlockchain(options: any = {}) { return block }, getCanonicalHeadHeader: () => { - return createBlockFromBlockData().header + return createBlock().header }, getIteratorHead: () => { return block diff --git a/packages/client/test/rpc/net/version.spec.ts b/packages/client/test/rpc/net/version.spec.ts index c57f6d8069..a20c98431b 100644 --- a/packages/client/test/rpc/net/version.spec.ts +++ b/packages/client/test/rpc/net/version.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Goerli, Holesky } from '@ethereumjs/common' import { assert, describe, it, vi } from 'vitest' import { baseSetup, createClient, createManager, getRpcClient, startRPC } from '../helpers.js' @@ -12,7 +12,7 @@ function compareResult(result: any, chainId: any) { assert.equal( result, chainId, - `should be the correct chain ID (expected: ${chainId}, received: ${result})` + `should be the correct chain ID (expected: ${chainId}, received: ${result})`, ) } @@ -28,7 +28,7 @@ describe(method, () => { it('call on holesky', async () => { const manager = createManager( - await createClient({ opened: true, commonChain: new Common({ chain: Chain.Holesky }) }) + await createClient({ opened: true, commonChain: new Common({ chain: Holesky }) }), ) const rpc = getRpcClient(startRPC(manager.getMethods())) @@ -42,7 +42,7 @@ describe(method, () => { it('call on goerli', async () => { const manager = createManager( - await createClient({ opened: true, commonChain: new Common({ chain: Chain.Goerli }) }) + await createClient({ opened: true, commonChain: new Common({ chain: Goerli }) }), ) const rpc = getRpcClient(startRPC(manager.getMethods())) diff --git a/packages/client/test/rpc/txpool/content.spec.ts b/packages/client/test/rpc/txpool/content.spec.ts index 4c7910a367..2473a8fd02 100644 --- a/packages/client/test/rpc/txpool/content.spec.ts +++ b/packages/client/test/rpc/txpool/content.spec.ts @@ -1,9 +1,10 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock, createBlockHeader } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { getGenesis } from '@ethereumjs/genesis' -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromTxData } from '@ethereumjs/tx' import { randomBytes } from '@ethereumjs/util' +import { runBlock } from '@ethereumjs/vm' import { assert, describe, it } from 'vitest' import { createClient, createManager, getRpcClient, startRPC } from '../helpers.js' @@ -15,7 +16,7 @@ const method = 'txpool_content' describe(method, () => { it('call with valid arguments', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + 
const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) const blockchain = await createBlockchain({ common, validateBlocks: false, @@ -28,10 +29,10 @@ describe(method, () => { const { execution } = client.services.find((s) => s.name === 'eth') as FullEthereumService assert.notEqual(execution, undefined, 'should have valid execution') const { vm } = execution - await vm.stateManager.generateCanonicalGenesis(getGenesis(1)) + await vm.stateManager.generateCanonicalGenesis!(getGenesis(1)) const gasLimit = 2000000 const parent = await blockchain.getCanonicalHeadHeader() - const block = createBlockFromBlockData( + const block = createBlock( { header: { parentHash: parent.hash(), @@ -39,20 +40,20 @@ describe(method, () => { gasLimit, }, }, - { common, calcDifficultyFromHeader: parent } + { common, calcDifficultyFromHeader: parent }, ) let ranBlock: Block | undefined = undefined vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock!) const service = client.services[0] as FullEthereumService service.execution.vm.common.setHardfork('london') service.chain.config.chainCommon.setHardfork('london') const headBlock = await service.chain.getCanonicalHeadBlock() - const londonBlock = createBlockFromBlockData( + const londonBlock = createBlock( { - header: BlockHeader.fromHeaderData( + header: createBlockHeader( { baseFeePerGas: 1000000000n, number: 2n, @@ -62,23 +63,23 @@ describe(method, () => { common: service.chain.config.chainCommon, skipConsensusFormatValidation: true, calcDifficultyFromHeader: headBlock.header, - } + }, ), }, - { common: service.chain.config.chainCommon } + { common: service.chain.config.chainCommon }, ) vm.events.once('afterBlock', (result: any) => (ranBlock = result.block)) - await vm.runBlock({ block: londonBlock, generate: true, skipBlockValidation: true }) + await runBlock(vm, { block: londonBlock, generate: true, skipBlockValidation: true }) await vm.blockchain.putBlock(ranBlock!) 
;(service.txPool as any).validate = () => {} - await service.txPool.add(TransactionFactory.fromTxData({ type: 2 }, {}).sign(randomBytes(32))) + await service.txPool.add(createTxFromTxData({ type: 2 }, {}).sign(randomBytes(32))) const res = await rpc.request(method, []) assert.equal( Object.keys(res.result.pending).length, 1, - 'received one pending transaction back from response' + 'received one pending transaction back from response', ) }) }) diff --git a/packages/client/test/rpc/validation.spec.ts b/packages/client/test/rpc/validation.spec.ts index b252433a34..5b62da0cb7 100644 --- a/packages/client/test/rpc/validation.spec.ts +++ b/packages/client/test/rpc/validation.spec.ts @@ -52,15 +52,15 @@ describe(prefix, () => { // valid // zero address assert.ok( - validatorResult(validators.address(['0x0000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x0000000000000000000000000000000000000000'], 0)), ) // lowercase address assert.ok( - validatorResult(validators.address(['0xa7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270'], 0)) + validatorResult(validators.address(['0xa7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270'], 0)), ) // checksummed address assert.ok( - validatorResult(validators.address(['0xa7d8d9ef8D8Ce8992Df33D8b8CF4Aebabd5bD270'], 0)) + validatorResult(validators.address(['0xa7d8d9ef8D8Ce8992Df33D8b8CF4Aebabd5bD270'], 0)), ) // invalid @@ -70,23 +70,23 @@ describe(prefix, () => { assert.notOk(validatorResult(validators.address(['0x1'], 0))) // invalid length: 38 chars assert.notOk( - validatorResult(validators.address(['0x00000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x00000000000000000000000000000000000000'], 0)), ) // invalidlength: 39 chars assert.notOk( - validatorResult(validators.address(['0x000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x000000000000000000000000000000000000000'], 0)), ) // invalidlength: 41 chars assert.notOk( - validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)), ) // invalid length: 42 chars assert.notOk( - validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)) + validatorResult(validators.address(['0x00000000000000000000000000000000000000000'], 0)), ) // invalid character assert.notOk( - validatorResult(validators.address(['0x62223651d6a33d58be70eb9876c3caf7096169ez'], 0)) + validatorResult(validators.address(['0x62223651d6a33d58be70eb9876c3caf7096169ez'], 0)), ) assert.ok(validatorResult(validators.bytes8([bytesToHex(randomBytes(8))], 0))) assert.ok(validatorResult(validators.bytes8([bytes(8)], 0))) @@ -228,59 +228,59 @@ describe(prefix, () => { validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) - ) + 0, + ), + ), ) assert.ok( validatorResult( validators.blockHash( ['0xf79d019c58d58a4efcfdf100c9596dd38014dcec6cf6f52000d4fae4e139b703'], - 0 - ) - ) + 0, + ), + ), ) // invalid length assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2'], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24'], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a2499'], - 0 - ) - ) + 0, + ), + ), ) 
assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a24999'], - 0 - ) - ) + 0, + ), + ), ) // invalid character assert.notOk( validatorResult( validators.blockHash( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66z249'], - 0 - ) - ) + 0, + ), + ), ) }) @@ -293,17 +293,17 @@ describe(prefix, () => { validatorResult( validators.blockOption( ['0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) - ) + 0, + ), + ), ) assert.ok(validatorResult(validators.blockOption(['0x1'], 0))) assert.ok(validatorResult(validators.blockOption(['0x01'], 0))) // invalid - assert.notOk(validatorResult(validators.blockOption(['lates'], 0))) - assert.notOk(validatorResult(validators.blockOption(['arliest'], 0))) - assert.notOk(validatorResult(validators.blockOption(['pendin'], 0))) + assert.notOk(validatorResult(validators.blockOption(['lates'], 0))) // cspell:disable-line + assert.notOk(validatorResult(validators.blockOption(['arliest'], 0))) // cspell:disable-line + assert.notOk(validatorResult(validators.blockOption(['pendin'], 0))) // cspell:disable-line assert.notOk(validatorResult(validators.blockOption(['0'], 0))) assert.notOk(validatorResult(validators.blockOption(['00'], 0))) assert.notOk(validatorResult(validators.blockOption(['1'], 0))) @@ -312,9 +312,9 @@ describe(prefix, () => { validatorResult( validators.blockOption( ['573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249'], - 0 - ) - ) + 0, + ), + ), ) }) @@ -327,7 +327,7 @@ describe(prefix, () => { assert.notOk(validatorResult(validators.bool(['true'], 0))) assert.notOk(validatorResult(validators.bool(['false'], 0))) assert.notOk(validatorResult(validators.bool(['tru'], 0))) - assert.notOk(validatorResult(validators.bool(['fals'], 0))) + assert.notOk(validatorResult(validators.bool(['fals'], 0))) // cspell:disable-line }) it('hex', () => { @@ -463,14 +463,14 @@ describe(prefix, () => { gas: '0xcf08', }, ], - 0 - ) - ) + 0, + ), + ), ) assert.ok( validatorResult( - validators.transaction(['to'])([{ to: '0x0000000000000000000000000000000000000000' }], 0) - ) + validators.transaction(['to'])([{ to: '0x0000000000000000000000000000000000000000' }], 0), + ), ) // invalid @@ -489,17 +489,17 @@ describe(prefix, () => { from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249', }, ], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( validatorResult( validators.transaction(['to'])( [{ from: '0x573155e65afb5cc55035aa9113d29d4ca3625454b33d32b2dff7b6673c66a249' }], - 0 - ) - ) + 0, + ), + ), ) assert.notOk(validatorResult(validators.transaction([])([{ gas: '12' }], 0))) assert.notOk(validatorResult(validators.transaction([])([{ gasPrice: '12' }], 0))) @@ -525,22 +525,22 @@ describe(prefix, () => { hex: '0x1', }, ], - 0 - ) - ) + 0, + ), + ), ) // invalid assert.notOk( - validatorResult(validators.object({ address: validators.address })([{ address: '0x0' }], 0)) + validatorResult(validators.object({ address: validators.address })([{ address: '0x0' }], 0)), ) assert.notOk( validatorResult( - validators.object({ blockHash: validators.blockHash })([{ blockHash: '0x0' }], 0) - ) + validators.object({ blockHash: validators.blockHash })([{ blockHash: '0x0' }], 0), + ), ) assert.notOk( - validatorResult(validators.object({ bool: validators.bool })([{ bool: '0x0' }], 0)) + validatorResult(validators.object({ bool: validators.bool })([{ bool: '0x0' }], 0)), ) assert.notOk(validatorResult(validators.object({ hex: validators.hex })([{ hex: '1' }], 
0))) }) @@ -557,37 +557,37 @@ describe(prefix, () => { '0xda4a22ad0d0e9aff0846ca54225637ada5bf7a14', ], ], - 0 - ) - ) + 0, + ), + ), ) assert.ok( validatorResult( validators.array(validators.blockHash)( [['0xb6dbbc1c702583de187e1284a00a23f9d322bf96f70fd4968b6339d0ace066b3']], - 0 - ) - ) + 0, + ), + ), ) assert.ok(validatorResult(validators.array(validators.bool)([[true, false]], 0))) // invalid assert.notOk( - validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2', 'true']], 0)) + validatorResult(validators.array(validators.hex)([['0x0', '0x1', '0x2', 'true']], 0)), ) assert.notOk( validatorResult( validators.array(validators.address)( [['0xb7e390864a90b7b923c9f9310c6f98aafe43f707', '0x0']], - 0 - ) - ) + 0, + ), + ), ) assert.notOk( - validatorResult(validators.array(validators.blockHash)([['0xb6dbbc1cd0ace066b3']], 0)) + validatorResult(validators.array(validators.blockHash)([['0xb6dbbc1cd0ace066b3']], 0)), ) assert.notOk( - validatorResult(validators.array(validators.bool)([['0x123', '0x456', '0x789']], 0)) + validatorResult(validators.array(validators.bool)([['0x123', '0x456', '0x789']], 0)), ) assert.notOk(validatorResult(validators.array(validators.bool)([[true, 'true']], 0))) }) @@ -667,15 +667,15 @@ describe(prefix, () => { validatorResult( validators.optional(validators.blockHash)( ['0x0000000000000000000000000000000000000000000000000000000000000000'], - 0 - ) - ) + 0, + ), + ), ) assert.ok( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['INVALID'], 0)) + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['INVALID'], 0)), ) assert.ok( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([''], 0)) + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([''], 0)), ) assert.ok(validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))([], 0))) @@ -683,7 +683,7 @@ describe(prefix, () => { assert.notOk(validatorResult(validators.optional(validators.bool)(['hey'], 0))) assert.notOk(validatorResult(validators.optional(validators.blockHash)(['0x0'], 0))) assert.notOk( - validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['ANOTHER'], 0)) + validatorResult(validators.optional(validators.values(['VALID', 'INVALID']))(['ANOTHER'], 0)), ) }) @@ -696,35 +696,35 @@ describe(prefix, () => { validators.either( validators.bool, validators.hex, - validators.array(validators.hex) - )([['0xaaa']], 0) - ) + validators.array(validators.hex), + )([['0xaaa']], 0), + ), ) assert.ok( validatorResult( validators.either(validators.bool, validators.blockHash)( ['0x0000000000000000000000000000000000000000000000000000000000000000'], - 0 - ) - ) + 0, + ), + ), ) // invalid assert.notOk( - validatorResult(validators.either(validators.bool, validators.blockHash)(['0xabc'], 0)) + validatorResult(validators.either(validators.bool, validators.blockHash)(['0xabc'], 0)), ) assert.notOk(validatorResult(validators.either(validators.bool, validators.hex)(['abc'], 0))) assert.notOk( - validatorResult(validators.either(validators.hex, validators.blockHash)([true], 0)) + validatorResult(validators.either(validators.hex, validators.blockHash)([true], 0)), ) assert.notOk( validatorResult( validators.either( validators.hex, validators.blockHash, - validators.array(validators.hex) - )([[false]], 0) - ) + validators.array(validators.hex), + )([[false]], 0), + ), ) }) }) diff --git a/packages/client/test/rpc/web3/sha3.spec.ts 
b/packages/client/test/rpc/web3/sha3.spec.ts index 31706682fa..03896efd0c 100644 --- a/packages/client/test/rpc/web3/sha3.spec.ts +++ b/packages/client/test/rpc/web3/sha3.spec.ts @@ -8,7 +8,7 @@ function compareErrorCode(error: any, errorCode: any) { assert.equal( error.code, errorCode, - `should return the correct error code (expected: ${errorCode}, received: ${error.code})` + `should return the correct error code (expected: ${errorCode}, received: ${error.code})`, ) } @@ -26,7 +26,7 @@ describe(method, () => { assert.equal( result, '0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad', - 'should return the correct hash value' + 'should return the correct hash value', ) }) diff --git a/packages/client/test/service/fullethereumservice.spec.ts b/packages/client/test/service/fullethereumservice.spec.ts index 50041146ee..de3551196e 100644 --- a/packages/client/test/service/fullethereumservice.spec.ts +++ b/packages/client/test/service/fullethereumservice.spec.ts @@ -1,5 +1,5 @@ -import { Common, Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { TransactionFactory, TransactionType } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet, createCommonFromGethGenesis } from '@ethereumjs/common' +import { TransactionType, createTxFromTxData } from '@ethereumjs/tx' import { equalsBytes, hexToBytes, randomBytes } from '@ethereumjs/util' import { assert, describe, expect, it, vi } from 'vitest' @@ -9,7 +9,7 @@ import { RlpxServer } from '../../src/net/server/index.js' import { Event } from '../../src/types.js' import genesisJSON from '../testdata/geth-genesis/post-merge.json' -import type { BeaconSynchronizer } from '../../src/sync' +import type { BeaconSynchronizer } from '../../src/sync/index.js' import type { Log } from '@ethereumjs/evm' vi.mock('../../src/net/peerpool.js', () => { @@ -96,7 +96,7 @@ describe('should open', async () => { expect(service.synchronizer!.open).toBeCalled() expect(server.addProtocols).toBeCalled() service.config.events.on(Event.SYNC_SYNCHRONIZED, () => { - it('should syncronize', () => { + it('should synchronize', () => { assert.ok('synchronized') }) }) @@ -136,7 +136,7 @@ describe('should start/stop', async () => { describe('should correctly handle GetBlockHeaders', async () => { const config = new Config({ accountCache: 10000, storageCache: 1000 }) vi.unmock('../../src/blockchain') - await import('../../src/blockchain') + await import('../../src/blockchain/index.js') const chain = await Chain.create({ config }) chain.getHeaders = () => [{ number: 1n }] as any const service = new FullEthereumService({ config, chain }) @@ -152,12 +152,12 @@ describe('should correctly handle GetBlockHeaders', async () => { it('should send empty headers', () => { assert.ok( title === 'BlockHeaders' && msg.headers.length === 0, - 'sent empty headers when block height is too high' + 'sent empty headers when block height is too high', ) }) }, } as any, - } as any + } as any, ) ;(service.chain as any)._headers = { height: 5n, @@ -177,12 +177,12 @@ describe('should correctly handle GetBlockHeaders', async () => { it('should send 1 header', () => { assert.ok( title === 'BlockHeaders' && msg.headers.length === 1, - 'sent 1 header when requested' + 'sent 1 header when requested', ) }) }, } as any, - } as any + } as any, ) }) @@ -204,20 +204,20 @@ describe('should call handleNewBlock on NewBlock and handleNewBlockHashes on New await service.switchToBeaconSync() assert.ok( (service.synchronizer as BeaconSynchronizer).type === 'beacon', - 'switched to 
BeaconSynchronizer' + 'switched to BeaconSynchronizer', ) assert.ok(service.beaconSync, 'can access BeaconSynchronizer') }) }) describe('should ban peer for sending NewBlock/NewBlockHashes after merge', async () => { - const common = new Common({ chain: 'mainnet', hardfork: Hardfork.Paris }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Paris }) const config = new Config({ common, accountCache: 10000, storageCache: 1000 }) const chain = await Chain.create({ config }) chain.config.chainCommon.setHardfork(Hardfork.Paris) const service = new FullEthereumService({ config, chain }) service.pool.ban = () => { - it('should ban peeer', () => { + it('should ban peer', () => { assert.ok(true, 'banned peer when NewBlock/NewBlockHashes announced after Merge') }) } @@ -272,21 +272,17 @@ describe('should handle Transactions', async () => { const service = new FullEthereumService({ config, chain }) service.txPool.handleAnnouncedTxs = async (msg, _peer, _pool) => { it('should handle transaction message', () => { - assert.deepEqual( - msg[0], - TransactionFactory.fromTxData({ type: 2 }), - 'handled Transactions message' - ) + assert.deepEqual(msg[0], createTxFromTxData({ type: 2 }), 'handled Transactions message') }) } await service.handle( { name: 'Transactions', - data: [TransactionFactory.fromTxData({ type: 2 })], + data: [createTxFromTxData({ type: 2 })], }, 'eth', - undefined as any + undefined as any, ) }) @@ -296,7 +292,7 @@ describe('should handle NewPooledTransactionHashes', async () => { const service = new FullEthereumService({ config, chain }) service.txPool.handleAnnouncedTxHashes = async (msg, _peer, _pool) => { it('should handle NewPooledTransactionHashes', () => { - assert.deepEqual(msg[0], hexToBytes('0xabcd'), 'handled NewPooledTransactionhashes') + assert.deepEqual(msg[0], hexToBytes('0xabcd'), 'handled NewPooledTransactionHashes') }) } @@ -310,7 +306,7 @@ describe('should handle NewPooledTransactionHashes', async () => { eth: { versions: [66], }, - } as any + } as any, ) }) @@ -320,7 +316,7 @@ describe('should handle GetPooledTransactions', async () => { const service = new FullEthereumService({ config, chain }) ;(service.txPool as any).validate = () => {} - const tx = TransactionFactory.fromTxData({ type: 2 }).sign(randomBytes(32)) + const tx = createTxFromTxData({ type: 2 }).sign(randomBytes(32)) await service.txPool.add(tx) await service.handle( @@ -334,7 +330,7 @@ describe('should handle GetPooledTransactions', async () => { }) }, } as any, - } as any + } as any, ) }) @@ -348,7 +344,7 @@ describe('should handle decoding NewPooledTransactionHashes with eth/68 message ;(service.txPool as any).handleAnnouncedTxHashes = ( hashes: Uint8Array[], _peer: any, - _pool: any + _pool: any, ) => { it('should get correct tx hash from eth68 message', () => { assert.deepEqual(hashes[0], txHash) @@ -362,7 +358,7 @@ describe('should handle decoding NewPooledTransactionHashes with eth/68 message eth: { versions: [67, 68], }, - } as any + } as any, ) }) @@ -385,7 +381,7 @@ describe.skip('should handle structuring NewPooledTransactionHashes with eth/68 }, }, } as any, - ] + ], ) }) diff --git a/packages/client/test/service/lightethereumservice.spec.ts b/packages/client/test/service/lightethereumservice.spec.ts index cec1e520bc..8d7bfc0206 100644 --- a/packages/client/test/service/lightethereumservice.spec.ts +++ b/packages/client/test/service/lightethereumservice.spec.ts @@ -1,6 +1,6 @@ import { assert, describe, expect, it, vi } from 'vitest' -import { Chain } from 
'../../src/blockchain/chain' +import { Chain } from '../../src/blockchain/chain.js' import { Config } from '../../src/config.js' import { LesProtocol } from '../../src/net/protocol/index.js' import { RlpxServer } from '../../src/net/server/index.js' @@ -60,7 +60,7 @@ describe('should open', async () => { expect(server.addProtocols).toBeCalled() }) service.config.events.on(Event.SYNC_SYNCHRONIZED, () => { - it('should syncronize', () => { + it('should synchronize', () => { assert.ok(true, 'synchronized') }) }) diff --git a/packages/client/test/sim/4844-blobpost.md b/packages/client/test/sim/4844-blobpost.md index 2237e2d558..efc8030e87 100644 --- a/packages/client/test/sim/4844-blobpost.md +++ b/packages/client/test/sim/4844-blobpost.md @@ -24,4 +24,4 @@ currently it posts 2 txs, but that can be modified with another env variable `NU You can manipulate the fees for the txs using env variables in the following way (for e.g. to replace a low fee stuck tx): -`GAS_LIMIT=0xffffff MAX_FEE=1000000000 MAX_PRIORITY=100000000 MAX_DATAFEE=100000000 RIVATE_KEY=ae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e RPC_URL=https://rpc.lodestar-ethereumjs-1.srv.4844-devnet-5.ethpandaops.io npm run tape -- test/sim/4844devnet5.spec.ts` +`GAS_LIMIT=0xffffff MAX_FEE=1000000000 MAX_PRIORITY=100000000 MAX_DATAFEE=100000000 PRIVATE_KEY=ae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e RPC_URL=https://rpc.lodestar-ethereumjs-1.srv.4844-devnet-5.ethpandaops.io npm run tape -- test/sim/4844devnet5.spec.ts` diff --git a/packages/client/test/sim/4844-blobpost.spec.ts b/packages/client/test/sim/4844-blobpost.spec.ts index 237098b5aa..3282cfccff 100644 --- a/packages/client/test/sim/4844-blobpost.spec.ts +++ b/packages/client/test/sim/4844-blobpost.spec.ts @@ -17,7 +17,7 @@ import type { PrefixedHexString } from '@ethereumjs/util' const pkey = hexToBytes( (process.env.PRIVATE_KEY as PrefixedHexString) ?? - '0xae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e' + '0xae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e', ) const sender = bytesToHex(privateToAddress(pkey)) const rpcUrl = @@ -36,7 +36,7 @@ console.log({ sender, rpcUrl, chainId, numTxs }) const network = 'sharding' const shardingJson = require(`./configs/${network}.json`) -// safely change chainId without modifying undelying json +// safely change chainId without modifying underlying json const commonJson = { ...shardingJson } commonJson.config = { ...commonJson.config, chainId } const common = createCommonFromGethGenesis(commonJson, { chain: network }) @@ -68,7 +68,7 @@ describe(`running txes on ${rpcUrl}`, async () => { const nonceFetch = await client.request( 'eth_getTransactionCount', [sender.toString(), 'latest'], - 2.0 + 2.0, ) const nonce = Number(nonceFetch.result) assert.ok(true, `fetched ${sender}'s nonce=${nonce} for blob txs`) @@ -86,7 +86,7 @@ describe(`running txes on ${rpcUrl}`, async () => { gasLimit: BigInt(process.env.GAS_LIMIT ?? 0xffffffn), blobSize: Number(process.env.BLOB_SIZE ?? 
4096), }, - { common } + { common }, ) const txHashes = [] for (const txn of txns) { @@ -101,7 +101,7 @@ describe(`running txes on ${rpcUrl}`, async () => { } assert.ok(true, `posted txs=${txHashes.length}`) }, - 10 * 60_000 + 10 * 60_000, ) it('cleanup', async () => { diff --git a/packages/client/test/sim/4844-devnet.spec.ts b/packages/client/test/sim/4844-devnet.spec.ts index 4623c99c20..b482324020 100644 --- a/packages/client/test/sim/4844-devnet.spec.ts +++ b/packages/client/test/sim/4844-devnet.spec.ts @@ -1,5 +1,5 @@ import { createCommonFromGethGenesis } from '@ethereumjs/common' -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromTxData } from '@ethereumjs/tx' import { bytesToHex, hexToBytes, privateToAddress } from '@ethereumjs/util' import { Client } from 'jayson/promise' import { randomBytes } from 'node:crypto' @@ -63,7 +63,7 @@ describe('sharding/eip4844 hardfork tests', async () => { pkey, '0x3dA33B9A0894b908DdBb00d96399e506515A1009', undefined, - { common } + { common }, ) const eth2res = await (await fetch('http://127.0.0.1:9596/eth/v1/beacon/headers')).json() @@ -98,7 +98,7 @@ describe('sharding/eip4844 hardfork tests', async () => { assert.equal( eth2kzgs[0], bytesToHex(txResult.tx.kzgCommitments![0]), - 'found expected blob commitments on CL' + 'found expected blob commitments on CL', ) }, 60_000) @@ -119,7 +119,7 @@ describe('sharding/eip4844 hardfork tests', async () => { gasLimit: BigInt(1000000) as any, blobSize: 4096, }, - { common } + { common }, ) const txHashes = [] for (const txn of txns) { @@ -138,7 +138,7 @@ describe('sharding/eip4844 hardfork tests', async () => { const block1 = await client.request( 'eth_getBlockByHash', [txReceipt.result.blockHash, false], - 2.0 + 2.0, ) // next block will have the excessBlobGas done = false @@ -153,14 +153,14 @@ describe('sharding/eip4844 hardfork tests', async () => { } assert.ok(BigInt(block2.result.excessBlobGas) > 0n, 'block1 has excess blob gas > 0') }, - 10 * 60_000 + 10 * 60_000, ) it('point precompile contract test', async () => { const nonce = await client.request( 'eth_getTransactionCount', [sender.toString(), 'latest'], - 2.0 + 2.0, ) /* Data is contract deployment code for the below contract borrowed from the 4844-interop repo @@ -179,7 +179,7 @@ describe('sharding/eip4844 hardfork tests', async () => { const txData = { data: hexToBytes( - 
'0xf9031103830186a0830f42408080b902c0608060405234801561001057600080fd5b50604051610260380380610260833981810160405281019061003291906101ca565b60008060c0835160145afa61004657600080fd5b50610213565b6000604051905090565b600080fd5b600080fd5b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6100b38261006a565b810181811067ffffffffffffffff821117156100d2576100d161007b565b5b80604052505050565b60006100e561004c565b90506100f182826100aa565b919050565b600067ffffffffffffffff8211156101115761011061007b565b5b61011a8261006a565b9050602081019050919050565b60005b8381101561014557808201518184015260208101905061012a565b83811115610154576000848401525b50505050565b600061016d610168846100f6565b6100db565b90508281526020810184848401111561018957610188610065565b5b610194848285610127565b509392505050565b600082601f8301126101b1576101b0610060565b5b81516101c184826020860161015a565b91505092915050565b6000602082840312156101e0576101df610056565b5b600082015167ffffffffffffffff8111156101fe576101fd61005b565b5b61020a8482850161019c565b91505092915050565b603f806102216000396000f3fe6080604052600080fdfea2646970667358221220cbb964afe0f584a89b887bf992e18697c0ebd77a40a102c121f54213f23d4d9464736f6c634300080f00330000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000212340000000000000000000000000000000000000000000000000000000000001ba002e89a44a4e4da739fed1ed658079a75dbcb59eebbd8ea0cb11f88a41d611dfaa025fe1645a1d3c9828be471fac5cd3e4be59c90ea304c94d774ff88c84349d8db' + '0xf9031103830186a0830f42408080b902c0608060405234801561001057600080fd5b50604051610260380380610260833981810160405281019061003291906101ca565b60008060c0835160145afa61004657600080fd5b50610213565b6000604051905090565b600080fd5b600080fd5b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6100b38261006a565b810181811067ffffffffffffffff821117156100d2576100d161007b565b5b80604052505050565b60006100e561004c565b90506100f182826100aa565b919050565b600067ffffffffffffffff8211156101115761011061007b565b5b61011a8261006a565b9050602081019050919050565b60005b8381101561014557808201518184015260208101905061012a565b83811115610154576000848401525b50505050565b600061016d610168846100f6565b6100db565b90508281526020810184848401111561018957610188610065565b5b610194848285610127565b509392505050565b600082601f8301126101b1576101b0610060565b5b81516101c184826020860161015a565b91505092915050565b6000602082840312156101e0576101df610056565b5b600082015167ffffffffffffffff8111156101fe576101fd61005b565b5b61020a8482850161019c565b91505092915050565b603f806102216000396000f3fe6080604052600080fdfea2646970667358221220cbb964afe0f584a89b887bf992e18697c0ebd77a40a102c121f54213f23d4d9464736f6c634300080f00330000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000212340000000000000000000000000000000000000000000000000000000000001ba002e89a44a4e4da739fed1ed658079a75dbcb59eebbd8ea0cb11f88a41d611dfaa025fe1645a1d3c9828be471fac5cd3e4be59c90ea304c94d774ff88c84349d8db', ), nonce: BigInt(nonce.result), gasLimit: 0xffffff, @@ -187,12 +187,12 @@ describe('sharding/eip4844 hardfork tests', async () => { maxPriorityFeePerGas: 0xf, } - const tx = TransactionFactory.fromTxData({ type: 2, ...txData }, { common }).sign(pkey) + const tx = createTxFromTxData({ type: 2, ...txData }, { common }).sign(pkey) const txResult = await client.request( 
'eth_sendRawTransaction', [bytesToHex(tx.serialize())], - 2.0 + 2.0, ) let receipt = await client.request('eth_getTransactionReceipt', [txResult.result], 2.0) while (receipt.result === null) { @@ -201,7 +201,7 @@ describe('sharding/eip4844 hardfork tests', async () => { } assert.ok( receipt.result.contractAddress !== undefined, - 'successfully deployed contract that calls precompile' + 'successfully deployed contract that calls precompile', ) }, 60_000) /* diff --git a/packages/client/test/sim/beaconsync.spec.ts b/packages/client/test/sim/beaconsync.spec.ts index cbc46eb1e4..01c4b8d5f1 100644 --- a/packages/client/test/sim/beaconsync.spec.ts +++ b/packages/client/test/sim/beaconsync.spec.ts @@ -51,7 +51,7 @@ describe('simple mainnet test run', async () => { } // Better add it as a option in startnetwork - process.env.NETWORKID = `${common.networkId()}` + process.env.NETWORKID = `${common.chainId()}` const { teardownCallBack, result } = await startNetwork(network, client, { filterKeywords, filterOutWords, @@ -89,7 +89,7 @@ describe('simple mainnet test run', async () => { assert.equal( EOATransferToBalance, BigInt(balance.result), - `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}` + `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}`, ) balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) @@ -107,10 +107,10 @@ describe('simple mainnet test run', async () => { balance = await client.request('eth_getBalance', [sender, 'latest']) assert.ok( balance.result !== undefined, - 'remaining sender balance after transfers and gas fee' + 'remaining sender balance after transfers and gas fee', ) }, - 2 * 60_000 + 2 * 60_000, ) it.skipIf(process.env.BEACON_SYNC === undefined)( @@ -127,7 +127,7 @@ describe('simple mainnet test run', async () => { common, customGenesisState, [nodeInfo.enode], - peerBeaconUrl + peerBeaconUrl, ).catch((e) => { console.log(e) return null @@ -152,7 +152,7 @@ describe('simple mainnet test run', async () => { assert.fail('could not connect to geth peer in 10 seconds') } }, - 60_000 + 60_000, ) it.skipIf(process.env.BEACON_SYNC === undefined)( @@ -170,7 +170,7 @@ describe('simple mainnet test run', async () => { assert.equal( ['SYNCED', 'VALID'].includes(syncResponse.syncState), true, - 'beaconSyncRelayer should have synced client' + 'beaconSyncRelayer should have synced client', ) await ejsClient.stop() assert.ok(true, 'completed beacon sync') @@ -182,7 +182,7 @@ describe('simple mainnet test run', async () => { assert.fail('ethereumjs client not setup properly for beacon sync') } }, - 10 * 60_000 + 10 * 60_000, ) it('network cleanup', async () => { @@ -201,7 +201,7 @@ async function createBeaconSyncClient( customGenesisState?: any, bootnodes?: any, peerBeaconUrl?: any, - datadir?: any + datadir?: any, ) { // Turn on `debug` logs, defaults to all client logging debug.enable(process.env.DEBUG_SYNC ?? 
'') diff --git a/packages/client/test/sim/configs/4844-devnet.json b/packages/client/test/sim/configs/4844-devnet.json index 4287403d32..8018d91428 100644 --- a/packages/client/test/sim/configs/4844-devnet.json +++ b/packages/client/test/sim/configs/4844-devnet.json @@ -20,7 +20,7 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 1, + "terminalTotalDifficulty": 0, "terminalTotalDifficultyPassed": true }, "nonce": "0x42", diff --git a/packages/client/test/sim/configs/eof.json b/packages/client/test/sim/configs/eof.json index 874d6faa21..01bd405e7d 100644 --- a/packages/client/test/sim/configs/eof.json +++ b/packages/client/test/sim/configs/eof.json @@ -13,7 +13,8 @@ "londonBlock": 0, "mergeForkBlock": 0, "eofBlock": 0, - "terminalTotalDifficulty": 1 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "alloc": { "0x0000000000000000000000000000000000000000": { diff --git a/packages/client/test/sim/configs/mainnet.json b/packages/client/test/sim/configs/mainnet.json index 321a864e18..33825ee061 100644 --- a/packages/client/test/sim/configs/mainnet.json +++ b/packages/client/test/sim/configs/mainnet.json @@ -13,7 +13,7 @@ "londonBlock": 0, "mergeForkBlock": 0, "shanghaiTime": 0, - "terminalTotalDifficulty": 1, + "terminalTotalDifficulty": 0, "terminalTotalDifficultyPassed": true }, "alloc": { diff --git a/packages/client/test/sim/eof.spec.ts b/packages/client/test/sim/eof.spec.ts index 33401e9e2b..b44ad8700a 100644 --- a/packages/client/test/sim/eof.spec.ts +++ b/packages/client/test/sim/eof.spec.ts @@ -87,7 +87,7 @@ describe('EOF ephemeral hardfork tests', async () => { assert.equal( code.result, '0XEF00010100010200010000AA'.toLowerCase(), - 'deposited valid EOF1 code' + 'deposited valid EOF1 code', ) }) // ------------EIP 3860 tests------------------------------- @@ -105,7 +105,7 @@ describe('EOF ephemeral hardfork tests', async () => { const push0res = await runTx('0x5F') assert.ok( BigInt(push1res.gasUsed) > BigInt(push0res.gasUsed), - 'PUSH1 transaction costs higher gas than PUSH0' + 'PUSH1 transaction costs higher gas than PUSH0', ) }) // ------------EIP 3651 tests------------------------------- @@ -129,18 +129,18 @@ describe('EOF ephemeral hardfork tests', async () => { */ const contractAddress = ( await runTx( - '0x608060405234801561001057600080fd5b5061021d806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c80635caba0a41461003b578063e178495614610057575b600080fd5b6100556004803603810190610050919061011b565b610061565b005b61005f6100b4565b005b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f818273ffffffffffffffffffffffffffffffffffffffff16316040516100a9929190610166565b60405180910390a150565b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f414173ffffffffffffffffffffffffffffffffffffffff16316040516100fc929190610166565b60405180910390a1565b600081359050610115816101d0565b92915050565b600060208284031215610131576101306101cb565b5b600061013f84828501610106565b91505092915050565b6101518161018f565b82525050565b610160816101c1565b82525050565b600060408201905061017b6000830185610148565b6101886020830184610157565b9392505050565b600061019a826101a1565b9050919050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000819050919050565b600080fd5b6101d98161018f565b81146101e457600080fd5b5056fea2646970667358221220d00dedb6dcbb511fab3ae484199f836b4c36119fb6faec1baee5e29db1ead12864736f6c63430008070033' + 
'0x608060405234801561001057600080fd5b5061021d806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c80635caba0a41461003b578063e178495614610057575b600080fd5b6100556004803603810190610050919061011b565b610061565b005b61005f6100b4565b005b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f818273ffffffffffffffffffffffffffffffffffffffff16316040516100a9929190610166565b60405180910390a150565b7fe37f346e484eff2a55fc81911c0cd6f3f9403f2c3d4c34f3b705adaf5e15620f414173ffffffffffffffffffffffffffffffffffffffff16316040516100fc929190610166565b60405180910390a1565b600081359050610115816101d0565b92915050565b600060208284031215610131576101306101cb565b5b600061013f84828501610106565b91505092915050565b6101518161018f565b82525050565b610160816101c1565b82525050565b600060408201905061017b6000830185610148565b6101886020830184610157565b9392505050565b600061019a826101a1565b9050919050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b6000819050919050565b600080fd5b6101d98161018f565b81146101e457600080fd5b5056fea2646970667358221220d00dedb6dcbb511fab3ae484199f836b4c36119fb6faec1baee5e29db1ead12864736f6c63430008070033', ) ).contractAddress const readWarmCoinbase = await runTx('0xe1784956', contractAddress) const readCold = await runTx( '0x5caba0a40000000000000000000000004242424242424242424242424242424242424242', - contractAddress + contractAddress, ) assert.ok( BigInt(readCold.gasUsed) > BigInt(readWarmCoinbase.gasUsed), - 'read cold storage tx should have higher cumulative gas than than read coinbase tx' + 'read cold storage tx should have higher cumulative gas than than read coinbase tx', ) }) diff --git a/packages/client/test/sim/mainnet.spec.ts b/packages/client/test/sim/mainnet.spec.ts index 459a5c0614..f21262f476 100644 --- a/packages/client/test/sim/mainnet.spec.ts +++ b/packages/client/test/sim/mainnet.spec.ts @@ -75,7 +75,7 @@ describe('simple mainnet test run', async () => { const latestBlock = await client.request('eth_getBlockByNumber', ['latest', false]) blockHashes.push(latestBlock.result.hash) }, - 2 * 60_000 + 2 * 60_000, ) it('Validate execution hashes present in beacon headers', async () => { @@ -84,7 +84,7 @@ describe('simple mainnet test run', async () => { 'http://127.0.0.1:9596', 1, parseInt(eth2res.data[0].header.message.slot), - blockHashes + blockHashes, ) }, 60_000) diff --git a/packages/client/test/sim/simutils.ts b/packages/client/test/sim/simutils.ts index a65e4e3de4..89cf821779 100644 --- a/packages/client/test/sim/simutils.ts +++ b/packages/client/test/sim/simutils.ts @@ -1,14 +1,14 @@ import { executionPayloadFromBeaconPayload } from '@ethereumjs/block' import { createBlockchain } from '@ethereumjs/blockchain' -import { BlobEIP4844Transaction, FeeMarketEIP1559Transaction } from '@ethereumjs/tx' +import { createBlob4844Tx, createFeeMarket1559Tx } from '@ethereumjs/tx' import { - Address, BIGINT_1, blobsToCommitments, blobsToProofs, bytesToHex, bytesToUtf8, commitmentsToVersionedHashes, + createAddressFromPrivateKey, getBlobs, randomBytes, } from '@ethereumjs/util' @@ -19,11 +19,11 @@ import { execSync, spawn } from 'node:child_process' import * as net from 'node:net' import qs from 'qs' -import { EthereumClient } from '../../src/client' +import { EthereumClient } from '../../src/client.js' import { Config } from '../../src/config.js' -import { LevelDB } from '../../src/execution/level' -import { RPCManager } from '../../src/rpc' -import { Event } from '../../src/types' +import { LevelDB } from '../../src/execution/level.js' +import { RPCManager 
} from '../../src/rpc/index.js' +import { Event } from '../../src/types.js' import type { Common } from '@ethereumjs/common' import type { TransactionType, TxData, TxOptions } from '@ethereumjs/tx' @@ -117,7 +117,7 @@ export async function validateBlockHashesInclusionInBeacon( beaconUrl: string, from: number, to: number, - blockHashes: string[] + blockHashes: string[], ) { const executionHashes: string[] = [] for (let i = from; i <= to; i++) { @@ -147,7 +147,7 @@ type RunOpts = { export function runNetwork( network: string, client: Client, - { filterKeywords, filterOutWords, withPeer }: RunOpts + { filterKeywords, filterOutWords, withPeer }: RunOpts, ): () => Promise { const runProc = spawn('test/sim/single-run.sh', [], { env: { @@ -240,10 +240,10 @@ export function runNetwork( throw Error('network is killed before end of test') } console.log('Killing network process', runProc.pid) - execSync(`pkill -15 -P ${runProc.pid}`) + execSync(`pkill -15 -P ${runProc.pid}`) // cspell:disable-line pkill if (peerRunProc !== undefined) { console.log('Killing peer network process', peerRunProc.pid) - execSync(`pkill -15 -P ${peerRunProc.pid}`) + execSync(`pkill -15 -P ${peerRunProc.pid}`) // cspell:disable-line pkill } // Wait for the P2P to be offline await waitForELOffline() @@ -255,7 +255,7 @@ export function runNetwork( export async function startNetwork( network: string, client: Client, - opts: RunOpts + opts: RunOpts, ): Promise<{ teardownCallBack: () => Promise; result: string }> { let teardownCallBack if (opts.externalRun === undefined) { @@ -271,7 +271,7 @@ export async function runTxHelper( opts: { client: Client; common: Common; sender: string; pkey: Uint8Array }, data: PrefixedHexString | '', to?: PrefixedHexString, - value?: bigint + value?: bigint, ) { const { client, common, sender, pkey } = opts const nonce = BigInt((await client.request('eth_getTransactionCount', [sender, 'latest'])).result) @@ -279,7 +279,7 @@ export async function runTxHelper( const block = await client.request('eth_getBlockByNumber', ['latest', false]) const baseFeePerGas = BigInt(block.result.baseFeePerGas) * 100n const maxPriorityFeePerGas = 100000000n - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { data, gasLimit: 1000000, @@ -289,7 +289,7 @@ export async function runTxHelper( to, value, }, - { common } + { common }, ).sign(pkey) const res = await client.request('eth_sendRawTransaction', [bytesToHex(tx.serialize())], 2.0) @@ -316,14 +316,14 @@ export const runBlobTx = async ( pkey: Uint8Array, to?: PrefixedHexString, value?: bigint, - opts?: TxOptions + opts?: TxOptions, ) => { const blobs = getBlobs(bytesToHex(randomBytes(blobSize))) const commitments = blobsToCommitments(kzg, blobs) const proofs = blobsToProofs(kzg, blobs, commitments) const hashes = commitmentsToVersionedHashes(commitments) - const sender = Address.fromPrivateKey(pkey) + const sender = createAddressFromPrivateKey(pkey) const txData: TxData[TransactionType.BlobEIP4844] = { to, data: '0x', @@ -346,7 +346,7 @@ export const runBlobTx = async ( txData.gasLimit = BigInt(1000000) const nonce = await client.request('eth_getTransactionCount', [sender.toString(), 'latest'], 2.0) txData.nonce = BigInt(nonce.result) - const blobTx = BlobEIP4844Transaction.fromTxData(txData, opts).sign(pkey) + const blobTx = createBlob4844Tx(txData, opts).sign(pkey) const serializedWrapper = blobTx.serializeNetworkWrapper() @@ -383,7 +383,7 @@ export const createBlobTxs = async ( gasLimit: bigint blobSize: number }, - opts?: 
TxOptions + opts?: TxOptions, ) => { const txHashes: string[] = [] const blobSize = txMeta.blobSize ?? 2 ** 17 - 1 @@ -395,7 +395,7 @@ export const createBlobTxs = async ( const txns = [] for (let x = startNonce; x <= startNonce + numTxs; x++) { - const sender = Address.fromPrivateKey(pkey) + const sender = createAddressFromPrivateKey(pkey) const txData = { from: sender.toString(), ...txMeta, @@ -407,7 +407,7 @@ export const createBlobTxs = async ( gas: undefined, } - const blobTx = BlobEIP4844Transaction.fromTxData(txData, opts).sign(pkey) + const blobTx = createBlob4844Tx(txData, opts).sign(pkey) const serializedWrapper = blobTx.serializeNetworkWrapper() await fs.appendFile('./blobs.txt', bytesToHex(serializedWrapper) + '\n') @@ -432,17 +432,17 @@ export async function createInlineClient( config: any, common: any, customGenesisState: any, - datadir: any = Config.DATADIR_DEFAULT + datadir: any = Config.DATADIR_DEFAULT, ) { config.events.setMaxListeners(50) const chainDB = new Level( - `${datadir}/${common.chainName()}/chainDB` + `${datadir}/${common.chainName()}/chainDB`, ) const stateDB = new Level( - `${datadir}/${common.chainName()}/stateDB` + `${datadir}/${common.chainName()}/stateDB`, ) const metaDB = new Level( - `${datadir}/${common.chainName()}/metaDB` + `${datadir}/${common.chainName()}/metaDB`, ) const blockchain = await createBlockchain({ @@ -506,7 +506,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU !['SYNCING', 'VALID', 'ACCEPTED'].includes(newPayloadRes.status) ) { throw Error( - `newPayload error: status${newPayloadRes.status} validationError=${newPayloadRes.validationError} error=${newPayloadRes.error}` + `newPayload error: status${newPayloadRes.status} validationError=${newPayloadRes.validationError} error=${newPayloadRes.error}`, ) } @@ -545,7 +545,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU const beaconHead = await (await fetch(`${peerBeaconUrl}/eth/v2/beacon/blocks/head`)).json() const payload = executionPayloadFromBeaconPayload( - beaconHead.data.message.body.execution_payload + beaconHead.data.message.body.execution_payload, ) const finalizedBlockHash = beaconFinalized.data.finalized_header.execution.block_hash @@ -608,7 +608,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU } } -// To minimise noise on the spec run, selective filteration is applied to let the important events +// To minimize noise on the spec run, selective filtering is applied to let the important events // of the testnet log to show up in the spec log export const filterKeywords = [ 'warn', diff --git a/packages/client/test/sim/snapsync.spec.ts b/packages/client/test/sim/snapsync.spec.ts index 81ab062c2f..dfd5719af1 100644 --- a/packages/client/test/sim/snapsync.spec.ts +++ b/packages/client/test/sim/snapsync.spec.ts @@ -1,7 +1,7 @@ import { createCommonFromGethGenesis } from '@ethereumjs/common' import { - Address, bytesToHex, + createAddressFromString, hexToBytes, parseGethGenesisState, privateToAddress, @@ -22,9 +22,9 @@ import { setupEngineUpdateRelay, startNetwork, waitForELStart, -} from './simutils' +} from './simutils.js' -import type { EthereumClient } from '../../src/client' +import type { EthereumClient } from '../../src/client.js' import type { DefaultStateManager } from '@ethereumjs/statemanager' import type { PrefixedHexString } from '@ethereumjs/util' @@ -58,7 +58,7 @@ describe('simple mainnet test run', async () => { process.env.EXTRA_CL_PARAMS = 
'--params.CAPELLA_FORK_EPOCH 0' } // Better add it as a option in startnetwork - process.env.NETWORKID = `${common.networkId()}` + process.env.NETWORKID = `${common.chainId()}` const { teardownCallBack, result } = await startNetwork(network, client, { filterKeywords, filterOutWords, @@ -92,7 +92,7 @@ describe('simple mainnet test run', async () => { assert.equal( EOATransferToBalance, BigInt(balance.result), - `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}` + `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}`, ) balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) @@ -110,11 +110,11 @@ describe('simple mainnet test run', async () => { balance = await client.request('eth_getBalance', [sender, 'latest']) assert.ok( balance.result !== undefined, - 'remaining sender balance after transfers and gas fee' + 'remaining sender balance after transfers and gas fee', ) senderBalance = BigInt(balance.result) }, - 2 * 60_000 + 2 * 60_000, ) it.skipIf(process.env.SNAP_SYNC === undefined)( @@ -135,7 +135,7 @@ describe('simple mainnet test run', async () => { customGenesisState, [nodeInfo.enode], peerBeaconUrl, - '' + '', ).catch((e) => { console.log(e) return null @@ -162,7 +162,7 @@ describe('simple mainnet test run', async () => { assert.fail('could not connect to geth peer in 10 seconds') } }, - 60_000 + 60_000, ) it.skipIf(process.env.SNAP_SYNC === undefined)( @@ -201,7 +201,7 @@ describe('simple mainnet test run', async () => { assert.fail('ethereumjs client not setup properly for snap sync') } }, - 10 * 60_000 + 10 * 60_000, ) it.skipIf(stateManager !== undefined)('should match entire state', async () => { @@ -217,12 +217,12 @@ describe('simple mainnet test run', async () => { } for (const addressString of Object.keys(customGenesisState)) { - const address = Address.fromString(addressString) + const address = createAddressFromString(addressString) const account = await stateManager?.getAccount(address) assert.equal( account?.balance, BigInt(customGenesisState[addressString][0]), - `${addressString} balance should match` + `${addressString} balance should match`, ) } }) @@ -244,7 +244,7 @@ async function createSnapClient( customGenesisState: any, bootnodes: any, peerBeaconUrl: any, - datadir: any + datadir: any, ) { // Turn on `debug` logs, defaults to all client logging debug.enable(process.env.DEBUG_SNAP ?? 
'') @@ -272,7 +272,7 @@ async function createSnapClient( config.events.once( Event.SYNC_SNAPSYNC_COMPLETE, (stateRoot: Uint8Array, stateManager: DefaultStateManager) => - resolve([stateRoot, stateManager]) + resolve([stateRoot, stateManager]), ) }) diff --git a/packages/client/test/sim/txGenerator.ts b/packages/client/test/sim/txGenerator.ts index a3122f095c..f59a64b267 100644 --- a/packages/client/test/sim/txGenerator.ts +++ b/packages/client/test/sim/txGenerator.ts @@ -1,10 +1,10 @@ // Adapted from - https://github.com/Inphi/eip4844-interop/blob/master/blob_tx_generator/blob.js -import { BlobEIP4844Transaction } from '@ethereumjs/tx' +import { createBlob4844Tx } from '@ethereumjs/tx' import { - Address, blobsToCommitments, bytesToHex, commitmentsToVersionedHashes, + createAddressFromPrivateKey, hexToBytes, randomBytes, } from '@ethereumjs/util' @@ -24,17 +24,17 @@ const MAX_USEFUL_BYTES_PER_TX = USEFUL_BYTES_PER_BLOB * MAX_BLOBS_PER_TX - 1 const BLOB_SIZE = BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB const pkey = hexToBytes('0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8') -const sender = Address.fromPrivateKey(pkey) +const sender = createAddressFromPrivateKey(pkey) const kzg = await loadKZG() function get_padded(data: any, blobs_len: number) { - const pdata = new Uint8Array(blobs_len * USEFUL_BYTES_PER_BLOB) - const datalen = (data as Uint8Array).byteLength - pdata.fill(data, 0, datalen) + const pData = new Uint8Array(blobs_len * USEFUL_BYTES_PER_BLOB) + const dataLen = (data as Uint8Array).byteLength + pData.fill(data, 0, dataLen) // TODO: if data already fits in a pad, then ka-boom - pdata[datalen] = 0x80 - return pdata + pData[dataLen] = 0x80 + return pData } function get_blob(data: any) { @@ -61,11 +61,11 @@ function get_blobs(data: any) { const blobs_len = Math.ceil(len / USEFUL_BYTES_PER_BLOB) - const pdata = get_padded(data, blobs_len) + const pData = get_padded(data, blobs_len) const blobs: Uint8Array[] = [] for (let i = 0; i < blobs_len; i++) { - const chunk = pdata.subarray(i * USEFUL_BYTES_PER_BLOB, (i + 1) * USEFUL_BYTES_PER_BLOB) + const chunk = pData.subarray(i * USEFUL_BYTES_PER_BLOB, (i + 1) * USEFUL_BYTES_PER_BLOB) const blob = get_blob(chunk) blobs.push(blob) } @@ -100,7 +100,7 @@ async function run(data: any) { const commitments = blobsToCommitments(kzg, blobs) const hashes = commitmentsToVersionedHashes(commitments) - const account = Address.fromPrivateKey(randomBytes(32)) + const account = createAddressFromPrivateKey(randomBytes(32)) const txData: TxData[TransactionType.BlobEIP4844] = { to: account.toString(), data: '0x', @@ -121,7 +121,7 @@ async function run(data: any) { txData.gasLimit = BigInt(28000000) const nonce = await getNonce(client, sender.toString()) txData.nonce = BigInt(nonce) - const blobTx = BlobEIP4844Transaction.fromTxData(txData).sign(pkey) + const blobTx = createBlob4844Tx(txData).sign(pkey) const serializedWrapper = blobTx.serializeNetworkWrapper() diff --git a/packages/client/test/sync/beaconsync.spec.ts b/packages/client/test/sync/beaconsync.spec.ts index 81173704eb..550846418c 100644 --- a/packages/client/test/sync/beaconsync.spec.ts +++ b/packages/client/test/sync/beaconsync.spec.ts @@ -1,4 +1,4 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { MemoryLevel } from 'memory-level' import * as td from 'testdouble' import { assert, describe, it, vi } from 'vitest' @@ -30,7 +30,7 @@ describe('[BeaconSynchronizer]', async () => { 
ReverseBlockFetcher.prototype.destroy = td.func() vi.doMock('../../src/sync/fetcher/reverseblockfetcher.js', () => - td.constructor(ReverseBlockFetcher) + td.constructor(ReverseBlockFetcher), ) const { BeaconSynchronizer } = await import('../../src/sync/beaconsync.js') @@ -132,11 +132,11 @@ describe('[BeaconSynchronizer]', async () => { const pool = new PeerPool() as any const chain = await Chain.create({ config }) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - skeleton['getSyncStatus'] = td.func() + skeleton['getSyncStatus'] = td.func<(typeof skeleton)['getSyncStatus']>() await skeleton.open() const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return { @@ -186,10 +186,10 @@ describe('[BeaconSynchronizer]', async () => { const pool = new PeerPool() as any const chain = await Chain.create({ config }) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - skeleton['getSyncStatus'] = td.func() + skeleton['getSyncStatus'] = td.func<(typeof skeleton)['getSyncStatus']>() await skeleton.open() const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ latest: () => { return { @@ -218,7 +218,7 @@ describe('[BeaconSynchronizer]', async () => { const chain = await Chain.create({ config }) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) const sync = new BeaconSynchronizer({ config, pool, chain, execution, skeleton }) - const head = createBlockFromBlockData({ header: { number: BigInt(15) } }) + const head = createBlock({ header: { number: BigInt(15) } }) await skeleton['putBlock'](head) ;(skeleton as any).status.progress.subchains = [ { @@ -227,18 +227,18 @@ describe('[BeaconSynchronizer]', async () => { }, ] await sync.open() - const block = createBlockFromBlockData({ + const block = createBlock({ header: { number: BigInt(16), parentHash: head.hash() }, }) assert.ok(await sync.extendChain(block), 'should extend chain successfully') assert.ok(await sync.setHead(block), 'should set head successfully') assert.equal(skeleton.bounds().head, BigInt(16), 'head should be updated') - const gapBlock = createBlockFromBlockData({ header: { number: BigInt(18) } }) + const gapBlock = createBlock({ header: { number: BigInt(18) } }) assert.notOk(await sync.extendChain(gapBlock), 'should not extend chain with gapped block') assert.ok( await sync.setHead(gapBlock), - 'should be able to set and update head with gapped block' + 'should be able to set and update head with gapped block', ) assert.equal(skeleton.bounds().head, BigInt(18), 'head should update with gapped block') await sync.stop() @@ -256,7 +256,7 @@ describe('[BeaconSynchronizer]', async () => { assert.equal( await sync.syncWithPeer({} as any), false, - `syncWithPeer should return false as nothing to sync` + `syncWithPeer should return false as nothing to sync`, ) await sync.stop() await sync.close() diff --git a/packages/client/test/sync/fetcher/accountfetcher.spec.ts b/packages/client/test/sync/fetcher/accountfetcher.spec.ts index bff9ba88a5..86a9da8c1b 100644 --- a/packages/client/test/sync/fetcher/accountfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/accountfetcher.spec.ts @@ -1,5 +1,5 @@ import { RLP } from '@ethereumjs/rlp' -import { Trie } from '@ethereumjs/trie' +import { 
createTrieFromProof } from '@ethereumjs/trie' import { bytesToBigInt, hexToBytes } from '@ethereumjs/util' import * as td from 'testdouble' import { assert, describe, it, vi } from 'vitest' @@ -19,10 +19,10 @@ export const _accountRangeRLP = '0xf90b7c01f88aeda0000001907a67cf7ece54c42262997b2f19041a4d99466b94b8c12f225827e239cb80872386f26fc100008080eda00000107c642e29a6b613205c923ac3a4cf0cf1704ae9a8bef2784caba060f4b7cb07870e22e1219054118080eda000001d26422787b6d40c0c0c2df85757c5ad4a3e367831e932fa24f34da43d57cb80872386f26fc100008080f90aecb90214f90211a0b3f22b069c398ded55d4ce421b06f6b4d5e13cb53ad1c6220276b2b3a078937ba08a54e492e7b9ef911b4a299487a12390ccd81a087398af7106e00b81a791868da0a323a93f5791d4c39e1496e4856f9233e5e86070c722efde613219aca834bde3a0d8c11a8fc2eba0b47de9d5b207b702a8bd62609e9c2504aaa444fd2e98e31deaa0dbfc625e370fa89cb7b123550ef6fd637687b9e9a7c8556bd41bcd4226226095a094fe5f6ac37c805917beefa220d7c6b3bd50848322f6342e940cc047c9b6a8ffa074af7e57b9c59e06a2e478610d56ab39004cda3109cfd953dc8b1d168c453cbca0d58f31d0ecce773d610aa5d12f7cc2f4ca992db4ce2e154c13a12cb4bb567816a0b26a7d9776165bb52e793df6a77d4032164d788bf9954c9cac289ea0786da2fda043804bd146f583b183dc267b36bbe55f63daa36fd6cbdafce48ce451a444b4eca0fc724e8bb65724450eb3966d8672330c8e49a94c6ceaed06174a2322aafee105a02ccb0445b0a4028f167e425b57cb9462cc6caceda0c3cfb5363f08614314a77ca0c64db3edb50609b6de331f00ba1f455113d1388e9eb5f50f5420983012d62b7da0168c680c03ef3fbcc36a6c1ddd9bf7d46b5fd5ee34dd7048320223c8bbe412f9a05747d2eb930bffce317c253e3889a7db57c87dcc55f1f1f77b3d02fc82bc6bcfa0997073e1664f9cbbcfd968277856596c325a6b83887f4ad007c3b93e1133c65280b90214f90211a0b3e6ec5fa09062b280599994d38261cae87ab198ed1b3a7d7003a277ffc735dfa01bac91007228f4fa15ac9c2a4822b7d4103eafae61dd3db30eb830e31de9cddfa0809973bebc62f48fb834336800b1ce8e1b2128ee5824645464b6c09ddd381578a0f8d54e19e888fc01cd5069bfcddb7ee78a4afdec24aa03822d9fd5356a3c109fa08a61ea95c616906799398778b28f0e8a19f6569f885e4b4f1192f3e9f690cefea09aa53cd259b1df9650222dc285236399da685b7350312a3ac0a07a86bef64d5ea01596637937233489a70e114c23818e3512b3c2abf621d142c14a9b9a3afb09d1a0e8a8bcda78ae77bee956389dff38a10c8c1565bc1a85064da6cd8ba606b9aa35a04ae4b4bfbfb97f5b4e178f8c30a6d93ffd6614c8b4d0b44df31b653a3a1e4f0fa0a4e3413e6ee6c5886ed346827ee0cce05a8e4f799b005aacf002a17e6d93e5aaa09a3e6d344bbd2496bf8fa84abc96a3d5f363ba03103edff2164244bb020c52a2a0998f39835105197f860930b46adad4527f5a9ef31c4744476718b910ffc5e586a01cec4592958b5aefe25bea6a49a11089e798d96aebc2be7fce0f1772146d18aea0d7c178ed5bcf822d22f9ed3ca8c95e5144ee0a9fbae901b21da002e2c3c0415ea0a9d5c5c67326f4154449575827ab68ea47c7c8931490160a7a299f829a670476a074814ffe69da7e253de29fc7d5eb57291a67bd6f16cb52175106b7cbd3b19c8f80b90214f90211a0947eec1b645849d129fb8c65cd06bd52526fb2399d1660ee5108fc4698e809aaa02735f6cbb0e10514b1515826ae1c539850543dbe162badaf2efa51b1a353ca1ca0fde2642bcc8db8d6d6e42731eeae2045fc30b84c6efdc420ce8cee5d537b648fa071e7887ca31ae375838ceeed57165f5592a9e6cae9beb070e92a4f5d5aec5014a0f81f4b4d5e2c52373b8884b398838941df0b16177aa4ea8494b183176cf7d526a0dc6ecec073532c8f9581ece75cb4eea83a40ba0210cc10ef0fd8b27a102a028fa0426f18f1de1bc9b665e9efb45d6547e88e35a267d7ec9197ae97052d1be59ab9a0d6aad68bece934d578e18eb3acd147490bc6cc01e646f1d8618a747526eae4f5a04ffee6f8660794981b15fda1ceafef98db853bfc31c029db7cb515bb34bb5572a0da2497fed45626b94c1eb910c9eedc9c26a4ff5b56b709b96d5a567991ebe2aca021b3bfcd8aa97eb8d9a3ce258389603564f01d6f485899a9f6e0a00d85dc00dfa0339e45f0407ad527a899a2e06e17330c2cfe25b81689dcffd20c166ef256fbc6a0dafd25416aaf44a8bfa1a6bf2b0cc563f9be84b9b3b8bf307983252d7cd63c51a01915
04034adb55fe0926c7c4066654739af3e1c9c4173f4d90fa2e1df62a99cca0504e2144c1a889e48cd5a6baa17e39b6a176dbf41147dd171f2673c5c9d849dba04850f33ad929cb1a07136f162e33a5df0f65c48f359637774e7c8ebabe90eb7080b90214f90211a05d16e93a6e58a13a7c7dde40d0c543b9d63d029ec0da5efb4be34cd4ce672181a089cbb0e940fb7bb395091e3b665755be6b51292fba7a7bc39904568c63a907e1a050314b93f73fed553cd9dee63dc1fe9b789f9b9e111a659ff4e4c91c8167a63ca04444bd2a1bb78a83b66a36a09076b2b49eade4e2e8c8ef91538117525893841aa0abde6220817f3608bdfec46ebed292c464ee1d2c58d0b43286b8617bb4cb49d9a07257eff6aebb380db4c75752a84c6b2d0bb86bb190cef2a58829497997262b6aa0a0d4ab9d93be97287f29637a9b16fb8a6c8cd3bc29786b64343113b95a4153ffa0f0d479377ce4c0f31185c45319f915532cea13e97d5abfc939b75b642b5b47bba0eb96a911347f5321e03f1602a041ce82ec29bb4b322faa9f999cf02bb0c7a932a047b6c76ffeb29b4e3c3c09749289213395c8b0126dbd8acee45c6d32d2a0ab5fa0ca462e8ff237f9e56698ca416fac835ed37bc90683d363effe7ec9dacb4963fba0d385f828becce3665e070b645df25dec507a7c6c3813591e3436147be0becc75a0537a7451522228feca0ceb55374615e8396229e1c7a6b0ae16fb49cd8e6ed7a9a0b96561ab484f67b604d2dc46ac170750b321334aabcfb6b212a906e1cb5b3532a09f64f7c76e201d48b4bc1fb02f7e052a5a1bf05b2c59f3c969c8d2d6b373b3dca0398a988af30676952fcf1a968ac530b30dbe32922efe8c27acb9025adcaf1a5180b90134f90131a0b2151043be015f98b1b249180bfac505781022ede708f533f373b2d612837df7a0031e6ffe32d313f0cd57b4bebbf6fcacf83c366157846040108d198129d99a5aa0bfca4f79ac9eb24bcbdbd94fc49c0ca30a6399a2071e4ab3024e1aae0159a31180808080a0f1a2c911436f5bf1aa936e140b17399f7c092ad64a8ab839057a67fc6923a318a0e648ced926c977b0dcc17452361ac43e53f839b8e485a288e93fb667573ae088a0808107d197eb28741f8cec92b6fa76957fa6928b00f4b7301d464809519258098080a02c7ac441b072bbe33030110dccfdda0de6705c4bdb2c94594e10c2fb8687c41080a0162e8104a86bd043ca2fac0c5d56181127c7b24f6c10fefb90c27064b4edeff8a0376bcbdd3b7503a144b9016159b7e2cd074c9566b843cb834123057c61adbd2e80b870f86e9e31907a67cf7ece54c42262997b2f19041a4d99466b94b8c12f225827e239b84df84b80872386f26fc10000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470b873f871a0a75a6fa397f39292a3bb4fdb84463908c473bad9a0206bd00964adabd7a4b589808080808080808080808080a0ea5b9774dfc3fd50b359b86fa49a57fce0186593cf89d865e279413b63947bed80a0a0747bb1023533b4f9cdaa7c845609975d413348fc5f185a120037dccdf3584c80b870f86e9e2026422787b6d40c0c0c2df85757c5ad4a3e367831e932fa24f34da43d57b84df84b80872386f26fc10000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' export const _zeroElementProofRoot = hexToBytes( - '0xe794e45a596856bcd5412788f46752a559a4aa89fe556ab26a8c2cf0fc24cb5e' + '0xe794e45a596856bcd5412788f46752a559a4aa89fe556ab26a8c2cf0fc24cb5e', ) export const _zeroElementProofOrigin = bytesToBigInt( - hexToBytes('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa') + hexToBytes('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa'), ) export const _zeroElementProof = [ 
'0xf90211a07d363fdc4ad4413321005a1981d415a872aed14651c159bea575d713fb1d1fd8a0d51e3a39747ab080d602e8dff07ed7fdf18fd5dd480b85ec8d5ebd86475481fba0382fbb965c19798b116e1b32ad64d99bdf09f8f4ed4c83e1b388ffad0ee8bc62a02ff7448b0092b7926a01bbb4f72e6f38366fdf109f3e9f8ac0794af3dc0e3de4a05db544523b1c10f8aead4252bff05665b8c7d21f02a102b51ac79acb6b3d2854a0cb0c46c37d6b44be6ff2204c4f4cea393099fefeae88cf5aa88195da74cca13fa0b459b6b3672dab2bb058e561761a0838e349d1dd1292dda31245e8404ec844eaa082cbce67bd082cb430296662fb1f32aabe866dee947970877abaf4233eb0fb48a0828820316cc02bfefd899aba41340659fd06df1e0a0796287ec2a4110239f6d2a0be88e4724326382a8b56e2328eeef0ad51f18d5bae0e84296afe14c4028c4af9a0c14e9060c6b3784e35b9e6ae2ad2984142a75910ccc89eb89dc1e2f44b6c58c2a091467954490d127631d2a2f39a6edabd702153de817fe8da2ab9a30513e5c6dda01c00f6abbb9bcb3ae9b12c887bc3ea3b13dba33a5dbad455c24778fa7d3ab01ea0899f71abb18c6c956118bf567fac629b75f7e9526873e429d3d8abb6dbb58021a00fd717235298742623c0b3cafb3e4bd86c0b5ab1f71097b4dd19f3d6925d758da011e10e11fa54a847669b26adaf1b5cbe7736eafde6c552b9b6158fe12307e60680', @@ -38,7 +38,7 @@ describe('[AccountFetcher]', async () => { PeerPool.prototype.idle = td.func() PeerPool.prototype.ban = td.func() - const { AccountFetcher } = await import('../../../src/sync/fetcher/accountfetcher') + const { AccountFetcher } = await import('../../../src/sync/fetcher/accountfetcher.js') it('should start/stop', async () => { const config = new Config({ maxPerRequest: 5 }) @@ -89,7 +89,7 @@ describe('[AccountFetcher]', async () => { assert.deepEqual( fetcher.highestKnownHash, highestReceivedHash, - 'highest known hash correctly updated' + 'highest known hash correctly updated', ) }) @@ -204,7 +204,7 @@ describe('[AccountFetcher]', async () => { const result = (await fetcher.request(job as any)) as any assert.ok( JSON.stringify(result[0]) === JSON.stringify({ skipped: true }), - 'skipped fetching task with limit lower than highest known key hash' + 'skipped fetching task with limit lower than highest known key hash', ) }) @@ -280,7 +280,7 @@ describe('[AccountFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === true, - 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right' + 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right', ) }) @@ -289,7 +289,7 @@ describe('[AccountFetcher]', async () => { const pool = new PeerPool() as any // calculate new root with a key all the way to the right of the trie - const trie = await Trie.createFromProof(_zeroElementProof) + const trie = await createTrieFromProof(_zeroElementProof) await trie.put(hexToBytes(`0x${'F'.repeat(32)}`), hexToBytes('0x123'), true) const newRoot = trie.root() @@ -319,7 +319,7 @@ describe('[AccountFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === undefined, - 'proof verification should fail if elements still remain to the right of the proof' + 'proof verification should fail if elements still remain to the right of the proof', ) }) @@ -333,10 +333,10 @@ describe('[AccountFetcher]', async () => { pool, root: hexToBytes('0x39ed8daab7679c0b1b7cf3667c50108185d4d9d1431c24a1c35f696a58277f8f'), first: bytesToBigInt( - hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001') + hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000001'), ), count: bytesToBigInt( - 
hexToBytes('0x000010c6f7a0b5ed8d36b4c7f34938583621fafc8b0079a2834d26fa3fcc9ea9') + hexToBytes('0x000010c6f7a0b5ed8d36b4c7f34938583621fafc8b0079a2834d26fa3fcc9ea9'), ), }) assert.ok(fetcher.storageFetcher !== undefined, 'storageFetcher should be created') @@ -345,7 +345,7 @@ describe('[AccountFetcher]', async () => { const resData = RLP.decode(hexToBytes(_accountRangeRLP)) const { accounts, proof } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - resData + resData, ) const mockedGetAccountRange = vi.fn(() => { return { @@ -365,7 +365,7 @@ describe('[AccountFetcher]', async () => { assert.ok(results !== undefined, 'Proof verification is completed without errors') assert.ok( fetcher.process(job as any, results!) !== undefined, - 'Response should be processed properly' + 'Response should be processed properly', ) // mock storageFetches's enqueue so to not having a hanging storage fetcher diff --git a/packages/client/test/sync/fetcher/blockfetcher.spec.ts b/packages/client/test/sync/fetcher/blockfetcher.spec.ts index f7c7e0f0b9..15c04cd7f1 100644 --- a/packages/client/test/sync/fetcher/blockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/blockfetcher.spec.ts @@ -1,4 +1,4 @@ -import { BlockHeader } from '@ethereumjs/block' +import { createBlockHeader } from '@ethereumjs/block' import { Hardfork } from '@ethereumjs/common' import { KECCAK256_RLP } from '@ethereumjs/util' import { assert, describe, it, vi } from 'vitest' @@ -71,7 +71,7 @@ describe('[BlockFetcher]', async () => { assert.equal( fetcher.first + fetcher.count - BigInt(1) === BigInt(15), true, - 'height should now be 15' + 'height should now be 15', ) // Clear fetcher queue for next test of gap when following head @@ -83,7 +83,7 @@ describe('[BlockFetcher]', async () => { assert.equal( (fetcher as any).in.length, 11, - '10 new tasks to catch up to head (1-49, 5 per request), 1 new task for subsequent block numbers (50-51)' + '10 new tasks to catch up to head (1-49, 5 per request), 1 new task for subsequent block numbers (50-51)', ) fetcher.destroy() @@ -104,7 +104,7 @@ describe('[BlockFetcher]', async () => { assert.deepEqual(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') assert.notOk( fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), - 'bad results' + 'bad results', ) }) @@ -188,14 +188,10 @@ describe('[BlockFetcher]', async () => { it('should parse bodies correctly', async () => { const config = new Config({ accountCache: 10000, storageCache: 1000 }) config.chainCommon.getHardforkBy = vi.fn((input) => { - if ( - input['blockNumber'] !== undefined && - input['td'] !== undefined && - input['timestamp'] !== undefined - ) + if (input['blockNumber'] !== undefined && input['timestamp'] !== undefined) return Hardfork.Shanghai - if (input['blockNumber'] !== undefined && input['td'] !== undefined) return Hardfork.Shanghai + if (input['blockNumber'] !== undefined) return Hardfork.Shanghai if (input['blockNumber'] !== undefined && input['timestamp'] !== undefined) return Hardfork.Shanghai @@ -211,9 +207,9 @@ describe('[BlockFetcher]', async () => { count: BigInt(0), }) - const shanghaiHeader = BlockHeader.fromHeaderData( + const shanghaiHeader = createBlockHeader( { number: 1, withdrawalsRoot: KECCAK256_RLP }, - { common: config.chainCommon, setHardfork: true } + { common: config.chainCommon, setHardfork: true }, ) const task = { count: 1, first: BigInt(1) } @@ -267,7 +263,7 @@ describe('store()', async () => { 
config.events.on(Event.SYNC_FETCHED_BLOCKS, () => it('should emit fetched blocks event', () => { assert.ok(true, 'store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') - }) + }), ) await fetcher.store([]) }) diff --git a/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts b/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts index ba7e443e72..45cbba8132 100644 --- a/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/bytecodefetcher.spec.ts @@ -73,7 +73,7 @@ describe('[ByteCodeFetcher]', async () => { assert.deepEqual( (fetcher.process(job, ByteCodeResponse) as any)[0], fullResult[0], - 'got results' + 'got results', ) assert.notOk(fetcher.process({} as any, { ByteCodeResponse: [] } as any), 'bad results') }) diff --git a/packages/client/test/sync/fetcher/fetcher.spec.ts b/packages/client/test/sync/fetcher/fetcher.spec.ts index 8d22c2b351..868a669e4d 100644 --- a/packages/client/test/sync/fetcher/fetcher.spec.ts +++ b/packages/client/test/sync/fetcher/fetcher.spec.ts @@ -44,7 +44,7 @@ it('should handle failure', () => { ;(fetcher as any).running = true fetcher.next = td.func() config.events.on(Event.SYNC_FETCHER_ERROR, (err) => - assert.equal(err.message, 'err0', 'got error') + assert.equal(err.message, 'err0', 'got error'), ) ;(fetcher as any).failure(job as Job, new Error('err0')) assert.equal((fetcher as any).in.length, 1, 'enqueued job') @@ -131,7 +131,7 @@ describe('should re-enqueue on a non-fatal error', () => { ;(fetcher as any).running = true fetcher.store = td.func() td.when(fetcher.store(td.matchers.anything())).thenReject( - new Error('could not find parent header') + new Error('could not find parent header'), ) td.when(fetcher.processStoreError(td.matchers.anything(), td.matchers.anything())).thenReturn({ destroyFetcher: false, @@ -142,7 +142,7 @@ describe('should re-enqueue on a non-fatal error', () => { it('should step back', () => { assert.ok( (fetcher as any).in.peek().task.first === BigInt(1), - 'should step back for safeReorgDistance' + 'should step back for safeReorgDistance', ) }) }) diff --git a/packages/client/test/sync/fetcher/headerfetcher.spec.ts b/packages/client/test/sync/fetcher/headerfetcher.spec.ts index ed70d029a3..83eb52baca 100644 --- a/packages/client/test/sync/fetcher/headerfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/headerfetcher.spec.ts @@ -25,14 +25,14 @@ describe('[HeaderFetcher]', async () => { assert.deepEqual( fetcher.process( { task: { count: 2 }, peer: 'peer0' } as any, - { headers, bv: BigInt(1) } as any + { headers, bv: BigInt(1) } as any, ), headers as any, - 'got results' + 'got results', ) assert.notOk( fetcher.process({ task: { count: 2 } } as any, { headers: [], bv: BigInt(1) } as any), - 'bad results' + 'bad results', ) expect((fetcher as any).flow.handleReply).toHaveBeenCalledWith('peer0', 1) }) @@ -126,7 +126,7 @@ describe('store()', async () => { config.events.on(Event.SYNC_FETCHED_HEADERS, () => it('should emit event on put headers', () => { assert.ok(true, 'store() emitted SYNC_FETCHED_HEADERS event on putting headers') - }) + }), ) await fetcher.store([1 as any]) }) diff --git a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts index ca63d3c87a..867cf84a47 100644 --- a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts @@ -1,4 +1,4 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import 
{ createBlock } from '@ethereumjs/block' import { MemoryLevel } from 'memory-level' import { assert, describe, it, vi } from 'vitest' @@ -89,7 +89,7 @@ describe('[ReverseBlockFetcher]', async () => { assert.deepEqual(fetcher.process({ task: { count: 2 } } as any, blocks), blocks, 'got results') assert.notOk( fetcher.process({ task: { count: 2 } } as any, { blocks: [] } as any), - 'bad results' + 'bad results', ) }) @@ -195,33 +195,33 @@ describe('[ReverseBlockFetcher]', async () => { count: BigInt(5), timeout: 5, }) - const block47 = createBlockFromBlockData( + const block47 = createBlock( { header: { number: BigInt(47), difficulty: BigInt(1) } }, - { setHardfork: true } + { setHardfork: true }, ) - const block48 = createBlockFromBlockData( + const block48 = createBlock( { header: { number: BigInt(48), parentHash: block47.hash(), difficulty: BigInt(1) }, }, - { setHardfork: true } + { setHardfork: true }, ) - const block49 = createBlockFromBlockData( + const block49 = createBlock( { header: { number: BigInt(49), parentHash: block48.hash(), difficulty: BigInt(1) }, }, - { setHardfork: true } + { setHardfork: true }, ) - const block4 = createBlockFromBlockData( + const block4 = createBlock( { header: { number: BigInt(4), difficulty: BigInt(1) }, }, - { setHardfork: true } + { setHardfork: true }, ) - const block5 = createBlockFromBlockData( + const block5 = createBlock( { header: { number: BigInt(5), difficulty: BigInt(1), parentHash: block4.hash() }, }, - { setHardfork: true } + { setHardfork: true }, ) ;(skeleton as any).status.progress.subchains = [ { head: BigInt(100), tail: BigInt(50), next: block49.hash() }, @@ -232,12 +232,12 @@ describe('[ReverseBlockFetcher]', async () => { await fetcher.store([block49, block48]) assert.ok( (skeleton as any).status.progress.subchains.length === 1, - 'subchains should be merged' + 'subchains should be merged', ) assert.equal( (skeleton as any).status.progress.subchains[0].tail, BigInt(5), - 'subchain tail should be next segment' + 'subchain tail should be next segment', ) assert.notOk((fetcher as any).running, 'fetcher should stop') assert.equal((fetcher as any).in.length, 0, 'fetcher in should be cleared') @@ -269,7 +269,7 @@ describe('store()', async () => { assert.equal( err.message, `Blocks don't extend canonical subchain`, - 'store() threw on invalid block' + 'store() threw on invalid block', ) const { destroyFetcher, banPeer } = fetcher.processStoreError(err, { first: BigInt(10), @@ -283,7 +283,7 @@ describe('store()', async () => { config.events.on(Event.SYNC_FETCHED_BLOCKS, () => it('should emit event on put blocks', async () => { assert.ok(true, 'store() emitted SYNC_FETCHED_BLOCKS event on putting blocks') - }) + }), ) await fetcher.store([]) }) diff --git a/packages/client/test/sync/fetcher/storagefetcher.spec.ts b/packages/client/test/sync/fetcher/storagefetcher.spec.ts index 53437d50e8..a1cb73eac4 100644 --- a/packages/client/test/sync/fetcher/storagefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/storagefetcher.spec.ts @@ -1,5 +1,5 @@ import { RLP } from '@ethereumjs/rlp' -import { Trie } from '@ethereumjs/trie' +import { createTrieFromProof } from '@ethereumjs/trie' import { hexToBytes } from '@ethereumjs/util' import { utf8ToBytes } from 'ethereum-cryptography/utils' import { assert, describe, it, vi } from 'vitest' @@ -45,10 +45,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0x352a47fc6863b89a6b51890ef3c1550d560886c027141d2058ba1e2d4c66d99a' + 
'0x352a47fc6863b89a6b51890ef3c1550d560886c027141d2058ba1e2d4c66d99a', ), storageRoot: hexToBytes( - '0x556a482068355939c95a3412bdb21213a301483edb1b64402fb66ac9f3583599' + '0x556a482068355939c95a3412bdb21213a301483edb1b64402fb66ac9f3583599', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -62,10 +62,10 @@ describe('[StorageFetcher]', async () => { fetcher.enqueueByStorageRequestList([ { accountHash: hexToBytes( - '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1' + '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1', ), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -111,10 +111,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1' + '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1', ), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -127,7 +127,7 @@ describe('[StorageFetcher]', async () => { assert.deepEqual( (fetcher.process(job, StorageDataResponse) as any)[0], fullResult[0], - 'got results' + 'got results', ) assert.throws(() => fetcher.process({} as any, { StorageDataResponse: [] } as any)) }) @@ -161,7 +161,7 @@ describe('[StorageFetcher]', async () => { { accountHash: hexToBytes(accountHashString), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(10), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -177,14 +177,14 @@ describe('[StorageFetcher]', async () => { assert.equal( JSON.stringify(fetcher.accountToHighestKnownHash.get(accountHashString)), JSON.stringify(utf8ToBytes(highestReceivedhash)), - 'should set new highest known hash' + 'should set new highest known hash', ) ;(job.task.storageRequests[0] as any).first = BigInt(3) ;(job.task.storageRequests[0] as any).count = BigInt(4) const result = (await fetcher.request(job as any)) as any assert.ok( JSON.stringify(result[0]) === JSON.stringify({ skipped: true }), - 'should skip fetching task with limit lower than highest known key hash' + 'should skip fetching task with limit lower than highest known key hash', ) StorageDataResponse.completed = true @@ -192,7 +192,7 @@ describe('[StorageFetcher]', async () => { assert.equal( fetcher.accountToHighestKnownHash.get(accountHashString), undefined, - 'should delete highest known hash for completed job' + 'should delete highest known hash for completed job', ) }) @@ -215,10 +215,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1' + '0xe9a5016cb1a53dbc750d06e725514ac164231d71853cafdcbff42f5adb6ca6f1', ), storageRoot: hexToBytes( - '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92' + '0x69522138e4770e642ec8d7bd5e2b71a23fb732bb447cd4faf838b45cfe3b2a92', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -265,10 +265,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - 
'0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276' + '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276', ), storageRoot: hexToBytes( - '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121' + '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -278,7 +278,7 @@ describe('[StorageFetcher]', async () => { const resData = RLP.decode(hexToBytes(_storageRangesRLP)) as unknown const res = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - resData + resData, ) const { reqId, slots, proof } = res const mockedGetStorageRanges = vi.fn((input) => { @@ -353,7 +353,7 @@ describe('[StorageFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === true, - 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right' + 'should handle peer that is signaling that an empty range has been requested with no elements remaining to the right', ) }) @@ -362,7 +362,7 @@ describe('[StorageFetcher]', async () => { const pool = new PeerPool() as any // calculate new root with a key all the way to the right of the trie - const trie = await Trie.createFromProof(_zeroElementProof) + const trie = await createTrieFromProof(_zeroElementProof) await trie.put(hexToBytes(`0x${'F'.repeat(32)}`), hexToBytes('0x123'), true) const newRoot = trie.root() @@ -397,7 +397,7 @@ describe('[StorageFetcher]', async () => { const ret = await fetcher.request(job as any) assert.ok( ret?.completed === undefined, - 'proof verification should fail if elements still remain to the right of the proof' + 'proof verification should fail if elements still remain to the right of the proof', ) }) @@ -422,10 +422,10 @@ describe('[StorageFetcher]', async () => { storageRequests: [ { accountHash: hexToBytes( - '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276' + '0x00009e5969eba9656d7e4dad5b0596241deb87c29bbab71c23b602c2b88a7276', ), storageRoot: hexToBytes( - '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121' + '0x4431bd7d69241190bb930b74485c1e31ff75552f67d758d0b6612e7bd9226121', ), first: BigInt(0), count: BigInt(2) ** BigInt(256) - BigInt(1), @@ -435,7 +435,7 @@ describe('[StorageFetcher]', async () => { const resData = RLP.decode(hexToBytes(_storageRangesRLP)) as unknown const res = p.decode( p.messages.filter((message) => message.name === 'StorageRanges')[0], - resData + resData, ) const { reqId, slots, proof } = res const mockedGetStorageRanges = vi.fn().mockReturnValueOnce({ @@ -473,10 +473,10 @@ describe('[StorageFetcher]', async () => { const accResData = RLP.decode(hexToBytes(_accountRangeRLP)) as unknown const { proof: proofInvalid } = p.decode( p.messages.filter((message) => message.name === 'AccountRange')[0], - accResData + accResData, ) const dummyStorageRoot = hexToBytes( - '0x39ed8daab7679c0b1b7cf3667c50108185d4d9d1431c24a1c35f696a58277f8f' + '0x39ed8daab7679c0b1b7cf3667c50108185d4d9d1431c24a1c35f696a58277f8f', ) const dummyOrigin = new Uint8Array(32) try { @@ -488,7 +488,7 @@ describe('[StorageFetcher]', async () => { } catch (e) { assert.ok( true, - `verifyRangeProof correctly failed on invalid proof, Error: ${(e as Error).message}` + `verifyRangeProof correctly failed on invalid proof, Error: ${(e as Error).message}`, ) } @@ -497,7 +497,7 @@ describe('[StorageFetcher]', async () => { await fetcher.store([Object.create(null)] as any) 
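The storage and account fetcher hunks above replace the static Trie.createFromProof constructor with the standalone createTrieFromProof helper from @ethereumjs/trie. A minimal sketch of the new call shape, mirroring the test usage and assuming the witness (an array of serialized proof nodes such as _zeroElementProof) is supplied by the caller:

import { createTrieFromProof } from '@ethereumjs/trie'
import { hexToBytes } from '@ethereumjs/util'

// proofNodes: the witness, e.g. the _zeroElementProof nodes used in these tests (assumed Uint8Array[])
const rootAfterRightmostPut = async (proofNodes: Uint8Array[]) => {
  const trie = await createTrieFromProof(proofNodes)
  // put a key at the far right of the trie, as the zero-element-proof tests do
  await trie.put(hexToBytes(`0x${'F'.repeat(32)}`), hexToBytes('0x123'), true)
  return trie.root()
}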
assert.ok( fetcher['destroyWhenDone'] === false, - 'should still be open to enqueue and process new requests' + 'should still be open to enqueue and process new requests', ) fetcher.setDestroyWhenDone() assert.ok(fetcher['destroyWhenDone'] === true, 'should mark to close on finished') diff --git a/packages/client/test/sync/fetcher/trienodefetcher.spec.ts b/packages/client/test/sync/fetcher/trienodefetcher.spec.ts index d3000a9715..64303e1d69 100644 --- a/packages/client/test/sync/fetcher/trienodefetcher.spec.ts +++ b/packages/client/test/sync/fetcher/trienodefetcher.spec.ts @@ -1,7 +1,7 @@ import { RLP } from '@ethereumjs/rlp' import { decodeNode } from '@ethereumjs/trie' import { bytesToHex, hexToBytes } from '@ethereumjs/util' -import { OrderedMap } from 'js-sdsl' +import { OrderedMap } from '@js-sdsl/ordered-map' import { assert, describe, it, vi } from 'vitest' import { Chain } from '../../../src/blockchain/index.js' @@ -44,7 +44,7 @@ describe('[TrieNodeFetcher]', async () => { assert.equal( (fetcher as any).pathToNodeRequestData.length, 1, - 'one node request has been added' + 'one node request has been added', ) void fetcher.fetch() @@ -77,7 +77,7 @@ describe('[TrieNodeFetcher]', async () => { assert.deepEqual( (fetcher.process(job, NodeDataResponse) as any)[0], fullResult[0], - 'got results' + 'got results', ) assert.notOk(fetcher.process({} as any, { NodeDataResponse: [] } as any), 'bad results') }) @@ -135,7 +135,7 @@ describe('[TrieNodeFetcher]', async () => { fetcher.requestedNodeToPath = new Map() fetcher.requestedNodeToPath.set( '9100b295173da75cf0f160214e47b480abc2c9d2fe11330fe8befa69aac69656', - '' + '', ) const resData = RLP.decode(hexToBytes(_trieNodesRLP)) as unknown @@ -160,7 +160,7 @@ describe('[TrieNodeFetcher]', async () => { assert.equal( requestResult[0][0], res.nodes[0], - 'Request phase should cross-validate received nodes with requested nodes' + 'Request phase should cross-validate received nodes with requested nodes', ) await fetcher.store(requestResult) @@ -170,7 +170,7 @@ describe('[TrieNodeFetcher]', async () => { assert.equal( children.length, fetcher.pathToNodeRequestData.length, - 'Should generate requests for all child nodes' + 'Should generate requests for all child nodes', ) }) it('should not throw if undefined', async () => { diff --git a/packages/client/test/sync/fullsync.spec.ts b/packages/client/test/sync/fullsync.spec.ts index 5dd4c104cf..0df617ac9e 100644 --- a/packages/client/test/sync/fullsync.spec.ts +++ b/packages/client/test/sync/fullsync.spec.ts @@ -1,4 +1,4 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import * as td from 'testdouble' import { assert, describe, it, vi } from 'vitest' @@ -135,7 +135,7 @@ describe('[FullSynchronizer]', async () => { txPool, execution, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -235,10 +235,10 @@ describe('[FullSynchronizer]', async () => { ] ;(sync as any).pool = { peers } - const chainTip = createBlockFromBlockData({ + const chainTip = createBlock({ header: {}, }) - const newBlock = createBlockFromBlockData({ + const newBlock = createBlock({ header: { parentHash: chainTip.hash(), }, @@ -247,7 +247,7 @@ describe('[FullSynchronizer]', async () => { chain.putBlocks = vi.fn((input) => { assert.ok( JSON.stringify(input) === JSON.stringify([newBlock]), - 'putBlocks is called as expected' + 'putBlocks is called as expected', 
) }) as any // NewBlock message from Peer 3 @@ -277,10 +277,10 @@ describe('[FullSynchronizer]', async () => { execution, }) - const chainTip = createBlockFromBlockData({ + const chainTip = createBlock({ header: {}, }) - const newBlock = createBlockFromBlockData({ + const newBlock = createBlock({ header: { parentHash: chainTip.hash(), }, diff --git a/packages/client/test/sync/lightsync.spec.ts b/packages/client/test/sync/lightsync.spec.ts index 67cfc6c4de..e8d718e1fb 100644 --- a/packages/client/test/sync/lightsync.spec.ts +++ b/packages/client/test/sync/lightsync.spec.ts @@ -1,4 +1,4 @@ -import { BlockHeader } from '@ethereumjs/block' +import { createBlockHeader } from '@ethereumjs/block' import * as td from 'testdouble' import { assert, describe, it, vi } from 'vitest' @@ -8,6 +8,8 @@ import { Peer } from '../../src/net/peer/peer.js' import { HeaderFetcher } from '../../src/sync/fetcher/headerfetcher.js' import { Event } from '../../src/types.js' +import type { BlockHeader } from '@ethereumjs/block' + class PeerPool { open() {} close() {} @@ -75,7 +77,7 @@ describe('[LightSynchronizer]', async () => { pool, chain, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -118,7 +120,7 @@ describe('sync errors', async () => { pool, chain, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -130,7 +132,7 @@ describe('sync errors', async () => { } as any) td.when(HeaderFetcher.prototype.fetch()).thenResolve(true) td.when(HeaderFetcher.prototype.fetch()).thenDo(() => - config.events.emit(Event.SYNC_FETCHED_HEADERS, [] as BlockHeader[]) + config.events.emit(Event.SYNC_FETCHED_HEADERS, [] as BlockHeader[]), ) config.logger.on('data', async (data) => { if ((data.message as string).includes('No headers fetched are applicable for import')) { @@ -168,7 +170,7 @@ describe('import headers', () => { pool, chain, }) - sync.best = td.func() + sync.best = td.func<(typeof sync)['best']>() td.when(sync.best()).thenResolve({ les: { status: { headNum: BigInt(2) } }, latest: () => { @@ -180,7 +182,7 @@ describe('import headers', () => { } as any) td.when(HeaderFetcher.prototype.fetch()).thenResolve(true) td.when(HeaderFetcher.prototype.fetch()).thenDo(() => - config.events.emit(Event.SYNC_FETCHED_HEADERS, [BlockHeader.fromHeaderData({})]) + config.events.emit(Event.SYNC_FETCHED_HEADERS, [createBlockHeader({})]), ) config.logger.on('data', async (data) => { if ((data.message as string).includes('Imported headers count=1')) { diff --git a/packages/client/test/sync/skeleton.spec.ts b/packages/client/test/sync/skeleton.spec.ts index 8220a98316..a26853cfa8 100644 --- a/packages/client/test/sync/skeleton.spec.ts +++ b/packages/client/test/sync/skeleton.spec.ts @@ -1,8 +1,13 @@ -import { BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' -import { Common, createCommonFromGethGenesis } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { + Common, + Mainnet, + createCommonFromGethGenesis, + createCustomCommon, +} from '@ethereumjs/common' import { equalsBytes, utf8ToBytes } from '@ethereumjs/util' import { MemoryLevel } from 'memory-level' -import { assert, describe, it, vi } from 'vitest' +import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain/index.js' import { Config } from '../../src/config.js' @@ -10,6 +15,7 
@@ import { getLogger } from '../../src/logging.js' import { Skeleton, errReorgDenied, errSyncMerged } from '../../src/sync/index.js' import { short } from '../../src/util/index.js' import { wait } from '../integration/util.js' +import mergeGenesisParams from '../testdata/common/mergeTestnet.json' import genesisJSON from '../testdata/geth-genesis/post-merge.json' import type { Block } from '@ethereumjs/block' @@ -18,24 +24,15 @@ type Subchain = { tail: bigint } -const common = new Common({ chain: 1 }) -const block49 = createBlockFromBlockData({ header: { number: 49 } }, { common }) -const block49B = createBlockFromBlockData( - { header: { number: 49, extraData: utf8ToBytes('B') } }, - { common } -) -const block50 = createBlockFromBlockData( - { header: { number: 50, parentHash: block49.hash() } }, - { common } -) -const block50B = createBlockFromBlockData( +const common = new Common({ chain: Mainnet }) +const block49 = createBlock({ header: { number: 49 } }, { common }) +const block49B = createBlock({ header: { number: 49, extraData: utf8ToBytes('B') } }, { common }) +const block50 = createBlock({ header: { number: 50, parentHash: block49.hash() } }, { common }) +const block50B = createBlock( { header: { number: 50, parentHash: block49.hash(), gasLimit: 999 } }, - { common } -) -const block51 = createBlockFromBlockData( - { header: { number: 51, parentHash: block50.hash() } }, - { common } + { common }, ) +const block51 = createBlock({ header: { number: 51, parentHash: block50.hash() } }, { common }) describe('[Skeleton]/ startup scenarios ', () => { it('starts the chain when starting the skeleton', async () => { @@ -252,17 +249,17 @@ describe('[Skeleton] / initSync', async () => { const { progress } = skeleton['status'] if (progress.subchains.length !== testCase.newState.length) { assert.fail( - `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}` + `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}`, ) } for (const [i, subchain] of progress.subchains.entries()) { if (subchain.head !== testCase.newState[i].head) { assert.fail( - `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}` + `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}`, ) } else if (subchain.tail !== testCase.newState[i].tail) { assert.fail( - `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}` + `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}`, ) } else { assert.ok(true, `test ${testCaseIndex}: subchain[${i}] matched`) @@ -376,7 +373,7 @@ describe('[Skeleton] / setHead', async () => { assert.ok(true, `test ${testCaseIndex}: passed with correct error`) } else { assert.fail( - `test ${testCaseIndex}: received wrong error expected=${testCase.err?.message} actual=${error.message}` + `test ${testCaseIndex}: received wrong error expected=${testCase.err?.message} actual=${error.message}`, ) } } @@ -384,17 +381,17 @@ describe('[Skeleton] / setHead', async () => { const { progress } = skeleton['status'] if (progress.subchains.length !== testCase.newState.length) { assert.fail( - `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want ${testCase.newState.length}` + `test ${testCaseIndex}: subchain count mismatch: have ${progress.subchains.length}, want 
${testCase.newState.length}`, ) } for (const [i, subchain] of progress.subchains.entries()) { if (subchain.head !== testCase.newState[i].head) { assert.fail( - `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}` + `test ${testCaseIndex}: subchain head mismatch: have ${subchain.head}, want ${testCase.newState[i].head}`, ) } else if (subchain.tail !== testCase.newState[i].tail) { assert.fail( - `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}` + `test ${testCaseIndex}: subchain tail mismatch: have ${subchain.tail}, want ${testCase.newState[i].tail}`, ) } else { assert.ok(true, `test ${testCaseIndex}: subchain[${i}] matched`) @@ -435,17 +432,17 @@ describe('[Skeleton] / setHead', async () => { await chain.open() const genesis = await chain.getBlock(BigInt(0)) - const block1 = createBlockFromBlockData( + const block1 = createBlock( { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block3 = createBlockFromBlockData( + const block3 = createBlock( { header: { number: 3, difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.open() @@ -463,12 +460,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains.length, 1, - 'trivial subchain0 should have been created' + 'trivial subchain0 should have been created', ) assert.equal( skeleton['status'].progress.subchains[0]!.head, BigInt(0), - 'trivial subchain0 should have been created' + 'trivial subchain0 should have been created', ) try { @@ -484,12 +481,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains.length, 1, - 'trivial subchain should have been created' + 'trivial subchain should have been created', ) assert.equal( skeleton['status'].progress.subchains[0]!.head, BigInt(0), - 'trivial subchain0 should have been created' + 'trivial subchain0 should have been created', ) reorg = await skeleton.setHead(block1, true) @@ -497,12 +494,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains.length, 1, - 'subchain should have been created' + 'subchain should have been created', ) assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(1), - 'head should be set to first block' + 'head should be set to first block', ) assert.equal(skeleton.isLinked(), true, 'subchain status should be linked') @@ -512,7 +509,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(2), - 'head should be set to first block' + 'head should be set to first block', ) assert.equal(skeleton.isLinked(), true, 'subchain status should stay linked') @@ -523,7 +520,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(2), - 'head should be set to second block' + 'head should be set to second block', ) assert.equal(skeleton.isLinked(), true, 'subchain status should stay linked') @@ -534,7 +531,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0].head, BigInt(3), - 'head should be set to third 
block' + 'head should be set to third block', ) assert.equal(skeleton.isLinked(), false, 'subchain status should not be linked anymore') }) @@ -547,25 +544,25 @@ describe('[Skeleton] / setHead', async () => { await chain.open() const genesis = await chain.getBlock(BigInt(0)) - const block1 = createBlockFromBlockData( + const block1 = createBlock( { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block3 = createBlockFromBlockData( + const block3 = createBlock( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block4 = createBlockFromBlockData( + const block4 = createBlock( { header: { number: 4, parentHash: block3.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block5 = createBlockFromBlockData( + const block5 = createBlock( { header: { number: 5, parentHash: block4.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.open() @@ -578,14 +575,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should update after being linked' + 'canonical height should update after being linked', ) await skeleton.setHead(block5, false) await wait(200) assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should not change when setHead is set with force=false' + 'canonical height should not change when setHead is set with force=false', ) await skeleton.setHead(block5, true) await skeleton.blockingFillWithCutoff(10) @@ -594,7 +591,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(5), - 'canonical height should change when setHead is set with force=true' + 'canonical height should change when setHead is set with force=true', ) // unlink the skeleton for the below check to check all blocks cleared @@ -603,14 +600,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( (await skeleton.getBlock(block.header.number, true))?.hash(), undefined, - `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` + `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain`, ) assert.equal( (await skeleton.getBlockByHash(block.hash(), true))?.hash(), undefined, `skeleton block hash=${short( - block.hash() - )} should be cleaned up after filling canonical chain` + block.hash(), + )} should be cleaned up after filling canonical chain`, ) } }) @@ -626,25 +623,25 @@ describe('[Skeleton] / setHead', async () => { const genesis = await chain.getBlock(BigInt(0)) - const block1 = createBlockFromBlockData( + const block1 = createBlock( { header: { number: 1, parentHash: genesis.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block3 = createBlockFromBlockData( + const block3 = createBlock( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, 
- { common, setHardfork: true } + { common, setHardfork: true }, ) - const block4 = createBlockFromBlockData( + const block4 = createBlock( { header: { number: 4, parentHash: block3.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block5 = createBlockFromBlockData( + const block5 = createBlock( { header: { number: 5, parentHash: block4.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await chain.putBlocks([block1, block2]) @@ -655,14 +652,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should update after being linked' + 'canonical height should update after being linked', ) await skeleton.setHead(block5, false) await wait(200) assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should not change when setHead with force=false' + 'canonical height should not change when setHead with force=false', ) // test sethead and blockingFillWithCutoff true via forkchoice update @@ -672,7 +669,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(5), - 'canonical height should change when setHead with force=true' + 'canonical height should change when setHead with force=true', ) // unlink the skeleton for the below check to check all blocks cleared @@ -682,30 +679,30 @@ describe('[Skeleton] / setHead', async () => { assert.equal( (await skeleton.getBlock(block.header.number, true))?.hash(), undefined, - `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain` + `skeleton block number=${block.header.number} should be cleaned up after filling canonical chain`, ) assert.equal( (await skeleton.getBlockByHash(block.hash(), true))?.hash(), undefined, `skeleton block hash=${short( - block.hash() - )} should be cleaned up after filling canonical chain` + block.hash(), + )} should be cleaned up after filling canonical chain`, ) } // restore linkedStatus skeleton['status'].linked = prevLinked - const block41 = createBlockFromBlockData( + const block41 = createBlock( { header: { number: 4, parentHash: block3.hash(), difficulty: 101 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block51 = createBlockFromBlockData( + const block51 = createBlock( { header: { number: 5, parentHash: block41.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block61 = createBlockFromBlockData( + const block61 = createBlock( { header: { number: 6, parentHash: block51.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.setHead(block41, false) @@ -716,41 +713,41 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(6), - 'head should be correct' + 'head should be correct', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(4), - 'tail should be backfilled' + 'tail should be backfilled', ) assert.equal(skeleton['status'].linked, true, 'should be linked') assert.equal(chain.blocks.height, BigInt(6), 'all blocks should be in chain') - const block71 = createBlockFromBlockData( + const block71 = createBlock( { header: { number: 7, parentHash: block61.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block81 = createBlockFromBlockData( + const block81 = createBlock( { header: { number: 8, parentHash: 
block71.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block91 = createBlockFromBlockData( + const block91 = createBlock( { header: { number: 9, parentHash: block81.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - // lets jump ahead and add the block 81 and 71 with annoucements and trigger tryTailBackfill + // lets jump ahead and add the block 81 and 71 with announcements and trigger tryTailBackfill await skeleton.forkchoiceUpdate(block91) assert.equal(skeleton['status'].progress.subchains.length, 1, '1 subchain with older dropped') assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(9), - 'head should be correct' + 'head should be correct', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(9), - 'new subchain should be created' + 'new subchain should be created', ) await skeleton.setHead(block81, false) await skeleton.setHead(block71, false) @@ -759,12 +756,12 @@ describe('[Skeleton] / setHead', async () => { assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(9), - 'head should be correct' + 'head should be correct', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(7), - 'tail should be backfilled' + 'tail should be backfilled', ) assert.equal(skeleton['status'].linked, true, 'should be linked') // async wait needed here so the async fillCanonicalChain can fill the chain @@ -773,34 +770,34 @@ describe('[Skeleton] / setHead', async () => { assert.equal( equalsBytes(chain.blocks.latest!.hash(), block91.hash()), true, - 'correct head hash' + 'correct head hash', ) // do a very common reorg that happens in a network: reorged head block - const block92 = createBlockFromBlockData( + const block92 = createBlock( { header: { number: 9, parentHash: block81.hash(), difficulty: 101 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) - const block102 = createBlockFromBlockData( + const block102 = createBlock( { header: { number: 10, parentHash: block92.hash(), difficulty: 100 } }, - { common, setHardfork: true } + { common, setHardfork: true }, ) await skeleton.forkchoiceUpdate(block92) assert.equal( skeleton['status'].progress.subchains[0]?.head, BigInt(9), - 'head number should be same' + 'head number should be same', ) assert.equal( skeleton['status'].progress.subchains[0]?.tail, BigInt(9), - 'tail should be truncated to head' + 'tail should be truncated to head', ) assert.equal( equalsBytes(chain.blocks.latest!.hash(), block92.hash()), true, - 'correct reorged head hash' + 'correct reorged head hash', ) // should be able to build on top of the next block @@ -808,24 +805,14 @@ describe('[Skeleton] / setHead', async () => { assert.equal( equalsBytes(chain.blocks.latest!.hash(), block102.hash()), true, - 'continue reorged chain' + 'continue reorged chain', ) }) it('should abort filling the canonical chain if the terminal block is invalid', async () => { - const genesis = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 200, - clique: undefined, - ethash: {}, - }, - extraData: '0x00000000000000000', - difficulty: '0x1', - } - const common = createCommonFromGethGenesis(genesis, { chain: 'post-merge' }) - common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) + // @ts-ignore PrefixedHexString type is too strict + const common = createCustomCommon(mergeGenesisParams, Mainnet, { name: 'post-merge' }) + common.setHardforkBy({ blockNumber: BigInt(0) }) const 
config = new Config({ common, accountCache: 10000, @@ -836,33 +823,33 @@ describe('[Skeleton] / setHead', async () => { await chain.open() const genesisBlock = await chain.getBlock(BigInt(0)) - const block1 = createBlockFromBlockData( + const block1 = createBlock( { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } + { common }, ) - const block2 = createBlockFromBlockData( + const block2 = createBlock( { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } + { common }, ) - const block3PoW = createBlockFromBlockData( + const block3PoW = createBlock( { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common } + { common }, ) - const block3PoS = createBlockFromBlockData( + const block3PoS = createBlock( { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: true }, ) - const block4InvalidPoS = createBlockFromBlockData( + const block4InvalidPoS = createBlock( { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: true }, ) - const block4PoS = createBlockFromBlockData( + const block4PoS = createBlock( { header: { number: 4, parentHash: block3PoS.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: true }, ) - const block5 = createBlockFromBlockData( + const block5 = createBlock( { header: { number: 5, parentHash: block4PoS.hash(), difficulty: 0 } }, - { common, setHardfork: BigInt(200) } + { common, setHardfork: true }, ) const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) @@ -876,7 +863,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(2), - 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' + 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)', ) try { await skeleton.setHead(block5, false) @@ -889,7 +876,7 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(2), - 'canonical height should not change when setHead is set with force=false' + 'canonical height should not change when setHead is set with force=false', ) // Put correct chain await skeleton.initSync(block4PoS) @@ -904,168 +891,15 @@ describe('[Skeleton] / setHead', async () => { assert.equal( chain.blocks.height, BigInt(4), - 'canonical height should now be at head with correct chain' + 'canonical height should now be at head with correct chain', ) const latestHash = chain.headers.latest?.hash() assert.ok( latestHash !== undefined && equalsBytes(latestHash, block4PoS.hash()), - 'canonical height should now be at head with correct chain' + 'canonical height should now be at head with correct chain', ) await skeleton.setHead(block5, true) await wait(200) assert.equal(skeleton.bounds().head, BigInt(5), 'should update to new height') }) - - it('should abort filling the canonical chain and backstep if the terminal block is invalid', async () => { - const genesis = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 200, - clique: undefined, - ethash: {}, - }, - extraData: '0x00000000000000000', - difficulty: '0x1', - } - const common = createCommonFromGethGenesis(genesis, { chain: 'post-merge' }) - common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) - const config = new Config({ - common, - accountCache: 
10000, - storageCache: 1000, - }) - const chain = await Chain.create({ config }) - ;(chain.blockchain as any)._validateBlocks = false - ;(chain.blockchain as any)._validateConsensus = false - await chain.open() - const genesisBlock = await chain.getBlock(BigInt(0)) - - const block1 = createBlockFromBlockData( - { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } - ) - const block2 = createBlockFromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } - ) - const block3PoW = createBlockFromBlockData( - { header: { number: 3, parentHash: block2.hash(), difficulty: 100 } }, - { common } - ) - const block4InvalidPoS = createBlockFromBlockData( - { header: { number: 4, parentHash: block3PoW.hash(), difficulty: 0 } }, - { common, setHardfork: 200 } - ) - - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - await skeleton.open() - - await skeleton.initSync(block4InvalidPoS) - await skeleton.putBlocks([block3PoW, block2]) - assert.equal(chain.blocks.height, BigInt(0), 'canonical height should be at genesis') - await skeleton.putBlocks([block1]) - await wait(200) - assert.equal( - chain.blocks.height, - BigInt(2), - 'canonical height should stop at block 2 (valid terminal block), since block 3 is invalid (past ttd)' - ) - assert.equal( - skeleton['status'].progress.subchains[0].tail, - BigInt(1), - `Subchain should have been backstepped to 1` - ) - }) - - it('should abort filling the canonical chain if a PoS block comes too early without hitting ttd', async () => { - const genesis = { - ...genesisJSON, - config: { - ...genesisJSON.config, - terminalTotalDifficulty: 200, - skeletonFillCanonicalBackStep: 0, - }, - difficulty: '0x1', - } - const common = createCommonFromGethGenesis(genesis, { chain: 'post-merge' }) - common.setHardforkBy({ blockNumber: BigInt(0), td: BigInt(0) }) - const config = new Config({ - common, - logger: getLogger({ logLevel: 'debug' }), - accountCache: 10000, - storageCache: 1000, - }) - - const chain = await Chain.create({ config }) - ;(chain.blockchain as any)._validateConsensus = false - // Only add td validations to the validateBlock - chain.blockchain.validateBlock = async (block: Block) => { - if (!(block.header.common.consensusType() === 'pos') && block.header.difficulty === 0n) { - throw Error( - `Invalid header difficulty=${ - block.header.difficulty - } for consensus=${block.header.common.consensusType()}` - ) - } - } - - const originalValidate = BlockHeader.prototype['_consensusFormatValidation'] - BlockHeader.prototype['_consensusFormatValidation'] = vi.fn() - vi.doMock('@ethereumjs/block', () => BlockHeader) - await chain.open() - const genesisBlock = await chain.getBlock(BigInt(0)) - - const block1 = createBlockFromBlockData( - { header: { number: 1, parentHash: genesisBlock.hash(), difficulty: 100 } }, - { common } - ) - const block2 = createBlockFromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 100 } }, - { common } - ) - const block2PoS = createBlockFromBlockData( - { header: { number: 2, parentHash: block1.hash(), difficulty: 0 } }, - { common } - ) - const block3 = createBlockFromBlockData( - { header: { number: 3, parentHash: block2.hash(), difficulty: 0 } }, - { common } - ) - - const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) - await skeleton.open() - - await skeleton.initSync(block2PoS) - await skeleton.putBlocks([block1]) - - await wait(200) - assert.equal( - chain.blocks.height, - 
BigInt(1), - 'canonical height should stop at block 1 (valid PoW block), since block 2 is invalid (invalid PoS, not past ttd)' - ) - // Put correct chain - await skeleton.initSync(block3) - try { - await skeleton.putBlocks([block2]) - } catch (error: any) { - if (error !== errSyncMerged) { - assert.fail(error) - } - } - await wait(200) - assert.equal( - chain.blocks.height, - BigInt(3), - 'canonical height should now be at head with correct chain' - ) - const latestHash = chain.headers.latest?.hash() - assert.ok( - latestHash !== undefined && equalsBytes(latestHash, block3.hash()), - 'canonical height should now be at head with correct chain' - ) - - BlockHeader.prototype['_consensusFormatValidation'] = originalValidate - }) }) diff --git a/packages/client/test/sync/snapsync.spec.ts b/packages/client/test/sync/snapsync.spec.ts index 9a311080c4..1f5683ca31 100644 --- a/packages/client/test/sync/snapsync.spec.ts +++ b/packages/client/test/sync/snapsync.spec.ts @@ -1,4 +1,4 @@ -import { BlockHeader } from '@ethereumjs/block' +import { createBlockHeader } from '@ethereumjs/block' import { assert, describe, it, vi } from 'vitest' import { Chain } from '../../src/blockchain/index.js' @@ -74,10 +74,10 @@ describe('[SnapSynchronizer]', async () => { ;(sync as any).chain = { blocks: { height: 1 } } const getBlockHeaders1 = vi .fn() - .mockReturnValue([BigInt(1), [BlockHeader.fromHeaderData({ number: 1 })]]) + .mockReturnValue([BigInt(1), [createBlockHeader({ number: 1 })]]) const getBlockHeaders2 = vi .fn() - .mockReturnValue([BigInt(2), [BlockHeader.fromHeaderData({ number: 2 })]]) + .mockReturnValue([BigInt(2), [createBlockHeader({ number: 2 })]]) const peers = [ { snap: {}, diff --git a/packages/client/test/sync/txpool.spec.ts b/packages/client/test/sync/txpool.spec.ts index f2c31222cf..b93d5e2f25 100644 --- a/packages/client/test/sync/txpool.spec.ts +++ b/packages/client/test/sync/txpool.spec.ts @@ -1,7 +1,7 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { AccessListEIP2930Transaction, FeeMarketEIP1559Transaction } from '@ethereumjs/tx' +import { createAccessList2930Tx, createFeeMarket1559Tx } from '@ethereumjs/tx' import { Account, bytesToHex, @@ -100,14 +100,14 @@ const setup = () => { return { pool, metricsServer } } -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const config = new Config({ accountCache: 10000, storageCache: 1000 }) const handleTxs = async ( txs: any[], failMessage: string, stateManager?: DefaultStateManager, - pool?: TxPool + pool?: TxPool, ) => { if (pool === undefined) { pool = setup().pool @@ -134,7 +134,7 @@ const handleTxs = async ( await pool.handleAnnouncedTxHashes( validTxs.map((e) => e.hash()), peer, - peerPool + peerPool, ) await pool.add(txs[txs.length - 1]) @@ -176,7 +176,7 @@ describe('[TxPool]', async () => { } txData.maxFeePerGas += (txData.maxFeePerGas * feeBump) / 100 txData.maxPriorityFeePerGas += (txData.maxPriorityFeePerGas * feeBump) / 100 - const tx = FeeMarketEIP1559Transaction.fromTxData(txData, { common }) + const tx = createFeeMarket1559Tx(txData, { common }) const signedTx = tx.sign(from.privateKey) return signedTx } @@ -262,13 +262,13 @@ describe('[TxPool]', async () => { 
assert.equal( (pool as any).knownByPeer.size, 2, - 'known tx hashes size 2 (entries for both peers)' + 'known tx hashes size 2 (entries for both peers)', ) assert.equal((pool as any).knownByPeer.get(peer.id).length, 1, 'one tx added for peer 1') assert.equal( (pool as any).knownByPeer.get(peer.id)[0].hash, bytesToUnprefixedHex(txA01.hash()), - 'new known tx hashes entry for announcing peer' + 'new known tx hashes entry for announcing peer', ) const txs = pool.getByHash([txA01.hash()]) @@ -276,7 +276,7 @@ describe('[TxPool]', async () => { assert.equal( bytesToHex(txs[0].serialize()), bytesToHex(txA01.serialize()), - 'should get correct tx by hash' + 'should get correct tx by hash', ) // check if transaction added in metrics @@ -294,7 +294,7 @@ describe('[TxPool]', async () => { assert.equal( feeMarketEip1559TransactionCountInPool, pool.pool.size, - 'pool should contain single eip 1559 transaction' + 'pool should contain single eip 1559 transaction', ) pool.pool.clear() @@ -303,12 +303,12 @@ describe('[TxPool]', async () => { assert.equal( (pool as any).knownByPeer.get(peer.id).length, 1, - 'should add tx only once to known tx hashes' + 'should add tx only once to known tx hashes', ) assert.equal( (pool as any).knownByPeer.size, 2, - 'known tx hashes size 2 (entries for both peers)' + 'known tx hashes size 2 (entries for both peers)', ) pool.stop() @@ -329,7 +329,7 @@ describe('[TxPool]', async () => { assert.equal( res['hashes'].length, TX_RETRIEVAL_LIMIT, - 'should limit to TX_RETRIEVAL_LIMIT' + 'should limit to TX_RETRIEVAL_LIMIT', ) return [null, []] }, @@ -431,7 +431,7 @@ describe('[TxPool]', async () => { } catch (e: any) { assert.ok( e.message.includes('replacement gas too low'), - 'successfully failed adding underpriced txn' + 'successfully failed adding underpriced txn', ) const poolObject = pool['handled'].get(bytesToUnprefixedHex(txA02_Underpriced.hash())) assert.equal(poolObject?.error, e, 'should have an errored poolObject') @@ -444,13 +444,13 @@ describe('[TxPool]', async () => { assert.equal( (pool as any).knownByPeer.get(peer2.id)[0]?.error?.message, 'NewPooledTransactionHashes', - 'should have errored sendObject for NewPooledTransactionHashes broadcast' + 'should have errored sendObject for NewPooledTransactionHashes broadcast', ) const address = bytesToUnprefixedHex(A.address) const poolContent = pool.pool.get(address)! 
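The txpool hunks in this file switch from static transaction constructors to factory functions (FeeMarketEIP1559Transaction.fromTxData becomes createFeeMarket1559Tx, AccessListEIP2930Transaction.fromTxData becomes createAccessList2930Tx) and build Common with the Mainnet chain config object rather than the Chain.Mainnet enum. A minimal sketch of the new shape, using only call signatures that appear in the hunks; the private key below is a placeholder for illustration:

import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import { createFeeMarket1559Tx } from '@ethereumjs/tx'
import { hexToBytes } from '@ethereumjs/util'

const common = new Common({ chain: Mainnet, hardfork: Hardfork.London })
// placeholder signing key, for illustration only
const privateKey = hexToBytes(`0x${'aa'.repeat(32)}`)
const signedTx = createFeeMarket1559Tx(
  { maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0 },
  { common },
).sign(privateKey)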
assert.equal(poolContent.length, 1, 'only one tx') assert.deepEqual(poolContent[0].tx.hash(), txA01.hash(), 'only later-added tx') - // Another attempt to add tx which should not be broadcased to peer2 + // Another attempt to add tx which should not be broadcasted to peer2 await pool.handleAnnouncedTxHashes([txA01.hash()], peer, peerPool) assert.equal(sentToPeer2, 1, 'no new broadcast attempt to the peer') // Just to enhance logging coverage, assign peerPool for stats collection @@ -492,7 +492,7 @@ describe('[TxPool]', async () => { for (let account = 0; account < 51; account++) { const pkey = concatBytes( hexToBytes(`0x${'aa'.repeat(31)}`), - hexToBytes(`0x${account.toString(16).padStart(2, '0')}`) + hexToBytes(`0x${account.toString(16).padStart(2, '0')}`), ) const from = { address: privateToAddress(pkey), @@ -523,7 +523,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'already have max amount of txs for this account'), - 'successfully rejected too many txs from same account' + 'successfully rejected too many txs from same account', ) }) @@ -531,15 +531,15 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, - }) + }), ) assert.notOk( await handleTxs(txs, 'Cannot call hash method if transaction is not signed'), - 'successfully rejected unsigned tx' + 'successfully rejected unsigned tx', ) }) @@ -547,42 +547,42 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'tx nonce too low', { getAccount: () => new Account(BigInt(1), BigInt('50000000000000000000')), } as any), - 'successfully rejected tx with invalid nonce' + 'successfully rejected tx with invalid nonce', ) }) it('announcedTxHashes() -> reject txs with too much data', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Paris }) const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData( + createFeeMarket1559Tx( { maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0, data: `0x${'00'.repeat(128 * 1024 + 1)}`, }, - { common } - ).sign(A.privateKey) + { common }, + ).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'exceeds the max data size', { getAccount: () => new Account(BigInt(0), BigInt('50000000000000000000000')), } as any), - 'successfully rejected tx with too much data' + 'successfully rejected tx with too much data', ) }) @@ -590,19 +590,19 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, gasLimit: 21000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'insufficient balance', { getAccount: () => new Account(BigInt(0), BigInt('0')), } as any), - 'successfully rejected account with too low balance' + 'successfully rejected account with too low balance', ) }) @@ -610,11 +610,11 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0, - }).sign(A.privateKey) + 
}).sign(A.privateKey), ) const { pool } = setup() @@ -625,7 +625,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'not within 50% range of current basefee', undefined, pool), - 'successfully rejected tx with too low gas price' + 'successfully rejected tx with too low gas price', ) }) @@ -633,12 +633,12 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, nonce: 0, gasLimit: 21000, - }).sign(A.privateKey) + }).sign(A.privateKey), ) const { pool } = setup() @@ -649,7 +649,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'exceeds last block gas limit', undefined, pool), - 'successfully rejected tx which has gas limit higher than block gas limit' + 'successfully rejected tx which has gas limit higher than block gas limit', ) }) @@ -657,10 +657,10 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 1000000000, maxPriorityFeePerGas: 1000000000, - }).sign(A.privateKey) + }).sign(A.privateKey), ) txs.push(txs[0]) @@ -669,7 +669,7 @@ describe('[TxPool]', async () => { assert.notOk( await handleTxs(txs, 'this transaction is already in the TxPool', undefined, pool), - 'successfully rejected tx which is already in pool' + 'successfully rejected tx which is already in pool', ) }) @@ -677,16 +677,16 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - FeeMarketEIP1559Transaction.fromTxData({ + createFeeMarket1559Tx({ maxFeePerGas: 10000000, maxPriorityFeePerGas: 10000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'does not pay the minimum gas price of'), - 'successfully rejected tx with too low gas price' + 'successfully rejected tx with too low gas price', ) }) @@ -694,29 +694,29 @@ describe('[TxPool]', async () => { const txs = [] txs.push( - AccessListEIP2930Transaction.fromTxData({ + createAccessList2930Tx({ gasPrice: 10000000, nonce: 0, - }).sign(A.privateKey) + }).sign(A.privateKey), ) assert.notOk( await handleTxs(txs, 'does not pay the minimum gas price of'), - 'successfully rejected tx with too low gas price' + 'successfully rejected tx with too low gas price', ) }) it('announcedTxHashes() -> reject txs with too low gas price (invalid tx type)', async () => { const txs = [] - const tx = AccessListEIP2930Transaction.fromTxData( + const tx = createAccessList2930Tx( { gasPrice: 1000000000 - 1, nonce: 0, }, { freeze: false, - } + }, ).sign(A.privateKey) Object.defineProperty(tx, 'type', { get: () => 5 }) @@ -766,12 +766,12 @@ describe('[TxPool]', async () => { assert.equal(pool.pool.size, 1, 'pool size 1') // Craft block with tx not in pool - let block = createBlockFromBlockData({ transactions: [txA02] }, { common }) + let block = createBlock({ transactions: [txA02] }, { common }) pool.removeNewBlockTxs([block]) assert.equal(pool.pool.size, 1, 'pool size 1') // Craft block with tx in pool - block = createBlockFromBlockData({ transactions: [txA01] }, { common }) + block = createBlock({ transactions: [txA01] }, { common }) pool.removeNewBlockTxs([block]) assert.equal(pool.pool.size, 0, 'pool should be empty') @@ -789,20 +789,20 @@ describe('[TxPool]', async () => { assert.equal(poolContent.length, 2, 'two txs') // Craft block with tx not in pool - block = createBlockFromBlockData({ transactions: [txA02] }, { common }) + block = createBlock({ transactions: 
[txA02] }, { common }) pool.removeNewBlockTxs([block]) assert.equal(pool.pool.size, 1, 'pool size 1') poolContent = pool.pool.get(address)! assert.equal(poolContent.length, 2, 'two txs') // Craft block with tx in pool - block = createBlockFromBlockData({ transactions: [txB01] }, { common }) + block = createBlock({ transactions: [txB01] }, { common }) pool.removeNewBlockTxs([block]) poolContent = pool.pool.get(address)! assert.equal(poolContent.length, 1, 'only one tx') // Craft block with tx in pool - block = createBlockFromBlockData({ transactions: [txB02] }, { common }) + block = createBlock({ transactions: [txB02] }, { common }) pool.removeNewBlockTxs([block]) assert.equal(pool.pool.size, 0, 'pool size 0') @@ -836,17 +836,17 @@ describe('[TxPool]', async () => { assert.equal( pool.pool.size, 2, - 'should not remove txs from pool (POOLED_STORAGE_TIME_LIMIT within range)' + 'should not remove txs from pool (POOLED_STORAGE_TIME_LIMIT within range)', ) assert.equal( (pool as any).knownByPeer.size, 1, - 'should not remove txs from known by peer map (POOLED_STORAGE_TIME_LIMIT within range)' + 'should not remove txs from known by peer map (POOLED_STORAGE_TIME_LIMIT within range)', ) assert.equal( (pool as any).handled.size, 2, - 'should not remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT within range)' + 'should not remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT within range)', ) const address = txB01.getSenderAddress().toString().slice(2) @@ -868,17 +868,17 @@ describe('[TxPool]', async () => { assert.equal( pool.pool.size, 1, - 'should remove txs from pool (POOLED_STORAGE_TIME_LIMIT before range)' + 'should remove txs from pool (POOLED_STORAGE_TIME_LIMIT before range)', ) assert.equal( (pool as any).knownByPeer.get(peer.id).length, 1, - 'should remove one tx from known by peer map (POOLED_STORAGE_TIME_LIMIT before range)' + 'should remove one tx from known by peer map (POOLED_STORAGE_TIME_LIMIT before range)', ) assert.equal( (pool as any).handled.size, 1, - 'should remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT before range)' + 'should remove txs from handled (HANDLED_CLEANUP_TIME_LIMIT before range)', ) pool.stop() diff --git a/packages/common/test/data/merge/testnetMerge.json b/packages/client/test/testdata/common/mergeTestnet.json similarity index 75% rename from packages/common/test/data/merge/testnetMerge.json rename to packages/client/test/testdata/common/mergeTestnet.json index 995d7b1d2a..bdf12be8db 100644 --- a/packages/common/test/data/merge/testnetMerge.json +++ b/packages/client/test/testdata/common/mergeTestnet.json @@ -1,15 +1,10 @@ { - "name": "testnetMerge", - "chainId": 55555, - "networkId": 55555, - "defaultHardfork": "istanbul", + "name": "testnet", + "chainId": 12345, + "defaultHardfork": "byzantium", "consensus": { - "type": "poa", - "algorithm": "clique", - "clique": { - "period": 15, - "epoch": 30000 - } + "type": "pow", + "algorithm": "ethash" }, "comment": "Private test network", "url": "[TESTNET_URL]", @@ -26,40 +21,39 @@ }, { "name": "homestead", - "block": 1 + "block": 0 }, { "name": "tangerineWhistle", - "block": 2 + "block": 0 }, { "name": "spuriousDragon", - "block": 3 + "block": 0 }, { - "name": "istanbul", - "block": 8 + "name": "byzantium", + "block": 0 }, { - "name": "muirGlacier", - "block": 10 + "name": "constantinople", + "block": 0 }, { "name": "berlin", - "block": 12 + "block": 0 }, { "name": "london", - "block": 14 + "block": 0 }, { "name": "paris", - "block": null, - "ttd": "5000" + "block": 3 }, { - "name": "shanghai", - "block": null 
+ "name": "mergeForkIdTransition", + "block": 3 } ], "bootstrapNodes": [ diff --git a/packages/client/test/testdata/common/testnet.json b/packages/client/test/testdata/common/testnet.json index 0c7531e072..88e4a72ab5 100644 --- a/packages/client/test/testdata/common/testnet.json +++ b/packages/client/test/testdata/common/testnet.json @@ -1,7 +1,6 @@ { "name": "testnet", "chainId": 12345, - "networkId": 12345, "defaultHardfork": "byzantium", "consensus": { "type": "pow", diff --git a/packages/client/test/testdata/geth-genesis/debug.json b/packages/client/test/testdata/geth-genesis/debug.json index 8f6da427f0..94660dd079 100644 --- a/packages/client/test/testdata/geth-genesis/debug.json +++ b/packages/client/test/testdata/geth-genesis/debug.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/client/test/testdata/geth-genesis/eip4844.json b/packages/client/test/testdata/geth-genesis/eip4844.json index cd5726b939..78cae3c9f0 100644 --- a/packages/client/test/testdata/geth-genesis/eip4844.json +++ b/packages/client/test/testdata/geth-genesis/eip4844.json @@ -18,7 +18,8 @@ "blockperiodseconds": 5, "epochlength": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/client/test/testdata/geth-genesis/no-extra-data.json b/packages/client/test/testdata/geth-genesis/no-extra-data.json index b9d2f14595..73fc19d02b 100644 --- a/packages/client/test/testdata/geth-genesis/no-extra-data.json +++ b/packages/client/test/testdata/geth-genesis/no-extra-data.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "16", diff --git a/packages/client/test/testdata/geth-genesis/post-merge.json b/packages/client/test/testdata/geth-genesis/post-merge.json index ffbb465e69..32f5f093d3 100644 --- a/packages/client/test/testdata/geth-genesis/post-merge.json +++ b/packages/client/test/testdata/geth-genesis/post-merge.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/client/test/testdata/geth-genesis/withdrawals.json b/packages/client/test/testdata/geth-genesis/withdrawals.json index 10060534e3..7941658039 100644 --- a/packages/client/test/testdata/geth-genesis/withdrawals.json +++ b/packages/client/test/testdata/geth-genesis/withdrawals.json @@ -17,7 +17,8 @@ "blockperiodseconds": 5, "epochlength": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/client/test/util/parse.spec.ts b/packages/client/test/util/parse.spec.ts index 717b60a01b..a0ac847f96 100644 --- a/packages/client/test/util/parse.spec.ts +++ b/packages/client/test/util/parse.spec.ts @@ -9,37 +9,37 @@ describe('[Util/Parse]', () => { assert.deepEqual( parseMultiaddrs('10.0.0.1:1234'), [multiaddr('/ip4/10.0.0.1/tcp/1234')], - 'parse ip:port' + 'parse ip:port', ) assert.deepEqual( parseMultiaddrs('enode://abc@10.0.0.1:1234'), [multiaddr('/ip4/10.0.0.1/tcp/1234')], - 'parse url' + 'parse url', ) assert.deepEqual( parseMultiaddrs('/ip4/1.1.1.1/tcp/50507/ws'), 
[multiaddr('/ip4/1.1.1.1/tcp/50507/ws')], - 'parse multiaddr' + 'parse multiaddr', ) assert.deepEqual( parseMultiaddrs( - '/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa' + '/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa', // cspell:disable-line ), - [multiaddr('/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa')], - 'parse multiaddr with peer id' + [multiaddr('/ip4/1.1.1.2/tcp/50508/ws/p2p/QmYAuYxw6QX1x5aafs6g3bUrPbMDifP5pDun3N9zbVLpEa')], // cspell:disable-line + 'parse multiaddr with peer id', ) assert.deepEqual( parseMultiaddrs( - '10.0.0.1:1234,enode://343149e4feefa15d882d9fe4ac7d88f885bd05ebb735e547f12e12080a9fa07c8014ca6fd7f373123488102fe5e34111f8509cf0b7de3f5b44339c9f25e87cb8@127.0.0.1:2345' + '10.0.0.1:1234,enode://343149e4feefa15d882d9fe4ac7d88f885bd05ebb735e547f12e12080a9fa07c8014ca6fd7f373123488102fe5e34111f8509cf0b7de3f5b44339c9f25e87cb8@127.0.0.1:2345', ), [multiaddr('/ip4/10.0.0.1/tcp/1234'), multiaddr('/ip4/127.0.0.1/tcp/2345')], - 'parse multiple' + 'parse multiple', ) assert.throws(() => parseMultiaddrs(10 as any), /not a function/, 'throws error') assert.deepEqual( parseMultiaddrs('[2607:f8b0:4003:c00::6a]:5678'), [multiaddr('/ip6/2607:f8b0:4003:c00::6a/tcp/5678')], - 'parse ipv6 multiaddr' + 'parse ipv6 multiaddr', ) }) }) diff --git a/packages/client/test/util/rpc.spec.ts b/packages/client/test/util/rpc.spec.ts index ac36366d7c..6dc89c6f5a 100644 --- a/packages/client/test/util/rpc.spec.ts +++ b/packages/client/test/util/rpc.spec.ts @@ -54,7 +54,7 @@ describe('[Util/RPC]', () => { assert.ok( httpServer !== undefined && wsServer !== undefined, - 'should return http and ws servers' + 'should return http and ws servers', ) } } @@ -81,7 +81,7 @@ describe('[Util/RPC]', () => { }) assert.ok( httpServer !== undefined && wsServer !== undefined, - 'should return http and ws servers' + 'should return http and ws servers', ) }) }) diff --git a/packages/client/test/util/wasmCrypto.spec.ts b/packages/client/test/util/wasmCrypto.spec.ts index 4fad467ef3..59e7bdfc29 100644 --- a/packages/client/test/util/wasmCrypto.spec.ts +++ b/packages/client/test/util/wasmCrypto.spec.ts @@ -1,5 +1,5 @@ -import { Common } from '@ethereumjs/common' -import { LegacyTransaction } from '@ethereumjs/tx' +import { Common, Mainnet } from '@ethereumjs/common' +import { createLegacyTx } from '@ethereumjs/tx' import { BIGINT_2, bytesToHex, @@ -29,29 +29,29 @@ describe('WASM crypto tests', () => { v: bigint, r: Uint8Array, s: Uint8Array, - chainID?: bigint + chainID?: bigint, ) => secp256k1Expand( secp256k1Recover( msgHash, concatBytes(setLengthLeft(r, 32), setLengthLeft(s, 32)), - Number(calculateSigRecovery(v, chainID)) - ) + Number(calculateSigRecovery(v, chainID)), + ), ).slice(1) await waitReady() const commonWithCustomCrypto = new Common({ - chain: 'mainnet', + chain: Mainnet, customCrypto: { ecrecover: wasmecrecover, keccak256, }, }) - const common = new Common({ chain: 'mainnet' }) + const common = new Common({ chain: Mainnet }) const pk = randomBytes(32) - const tx = LegacyTransaction.fromTxData({}, { common }).sign(pk) - const tx2 = LegacyTransaction.fromTxData({}, { common: commonWithCustomCrypto }).sign(pk) + const tx = createLegacyTx({}, { common }).sign(pk) + const tx2 = createLegacyTx({}, { common: commonWithCustomCrypto }).sign(pk) assert.deepEqual(tx.getSenderPublicKey(), tx2.getSenderPublicKey()) assert.deepEqual(tx.hash(), tx2.hash()) @@ -90,7 +90,7 @@ describe('WASM crypto tests', () => { 
assert.deepEqual(wasmSig, jsSig, 'wasm signatures produce same result as js signatures') assert.throws( () => wasmSign(randomBytes(31), randomBytes(32)), - 'message length must be 32 bytes or greater' + 'message length must be 32 bytes or greater', ) }) it('should have the same signature and verification', async () => { diff --git a/packages/client/tsconfig.eslint.json b/packages/client/tsconfig.eslint.json deleted file mode 100644 index e2b5df7d39..0000000000 --- a/packages/client/tsconfig.eslint.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "include": ["webpack.config.js"] -} diff --git a/packages/client/tsconfig.lint.json b/packages/client/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/client/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/common/.eslintrc.cjs b/packages/common/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/common/.eslintrc.cjs +++ b/packages/common/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/common/CHANGELOG.md b/packages/common/CHANGELOG.md index 617fc94dea..27cef87e72 100644 --- a/packages/common/CHANGELOG.md +++ b/packages/common/CHANGELOG.md @@ -6,7 +6,39 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 4.3.0 - 2024-03-05 +## 4.4.0 - 2024-08-15 + +### EIP-7685 Requests: EIP-6110 (Deposits) / EIP-7002 (Withdrawals) / EIP-7251 (Consolidations) + +This library now supports `EIP-6110` deposit requests, see PR [#3390](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3390), `EIP-7002` withdrawal requests, see PR [#3385](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3385) and `EIP-7251` consolidation requests, see PR [#3477](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3477) as well as the underlying generic execution layer request logic introduced with `EIP-7685` (PR [#3372](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3372)). + +These new request types will be activated with the `Prague` hardfork, see [@ethereumjs/block](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/block) README for detailed documentation. 
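A minimal sketch of how this could be exercised from `Common` (the `Mainnet`/`Hardfork.Prague` names and the `isActivatedEIP` check mirror the API used elsewhere in this PR; the exact EIP wiring for Prague is an assumption):

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'

// Selecting the Prague hardfork is assumed to pull in the generic EIP-7685 request
// logic together with EIP-6110, EIP-7002 and EIP-7251.
const common = new Common({ chain: Mainnet, hardfork: Hardfork.Prague })

console.log(common.isActivatedEIP(7685)) // assumption: true once Prague is selected
console.log(common.isActivatedEIP(6110), common.isActivatedEIP(7002), common.isActivatedEIP(7251))
```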
+ +### Verkle Updates + +- Fixes for Kaustinen4 support, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Kaustinen5 related fixes, PR [#3343](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3343) +- Kaustinen6 adjustments, `verkle-cryptography-wasm` migration, PRs [#3355](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3355) and [#3356](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3356) +- Verkle decoupling, PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) + +### Other Features + +- Adds support for [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) EOA code transactions (outdated) (see tx library for full documentation), see PR [#3470](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3470) +- Adds support for [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) Serve Historical Block Hashes from State (Prague) (see EVM for full docs) as well as the related [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709), PR [#3475](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3475) +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Other Changes + +- Removes support for [EIP-2315](https://eips.ethereum.org/EIPS/eip-2315) simple subroutines for EVM (deprecated with an alternative version integrated into EOF), PR [#3342](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3342) +- Clean up access to deposit address, PR [#3411](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3411) +- Add spec test for 2935 contract code and update history storage address, PR [#3373](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3373) +- Parse deposit contract address from geth genesis for chain config, PR [#3422](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3422) + +### Bugfixes + +- BLS gas prices fixes, PR [#3400](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3400) + +## 4.3.0 - 2024-03-18 ### Full 4844 Browser Readiness @@ -188,7 +220,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. 
Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -458,7 +490,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -466,7 +498,7 @@ Now every import is a named import and we think the long term benefits will very #### Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -589,9 +621,9 @@ Following methods have been removed accordingly: #### Type Changes -There are a few new types e.g. for configuration files (e.g. `CliqueConfig`) to get rid of some last `any` types in the package, see PR [#1906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1906). Eventually related problems should be seen early on in your TypeScript setup though and it should also be possile to easily attribute and fix. +There are a few new types e.g. for configuration files (e.g. `CliqueConfig`) to get rid of some last `any` types in the package, see PR [#1906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1906). Eventually related problems should be seen early on in your TypeScript setup though and it should also be possible to easily attribute and fix. -#### New File Structue +#### New File Structure The file structure of the package has been aligned with other libraries and there is now a dedicated `common.ts` file for the main source code with `index.ts` re-exporting functionality and types, see PR [#1915](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1915). Some misplaced types have been moved to `types.ts` and enums (like `Chain` or `Hardfork`) have been (internally) moved to an `enum.ts` file. You should generally use the root import from `index.ts`, if you are not doing and some imports broke this should be easily fixable though. 
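For illustration, a small sketch of the root-level import style recommended above (enum and constructor usage mirror the examples shown elsewhere in this diff for that release line):

```ts
// Everything is re-exported from index.ts, so the package root is the only import path needed
import { Chain, Common, Hardfork } from '@ethereumjs/common'

const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London })
console.log(common.hardfork()) // 'london'
```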
@@ -601,7 +633,7 @@ We have completely refactored all our genesis (block) handling and moved the cod The most imminent benefit from this is a **dramatically reduced bundle size for the library, going down from a packed ~9 MB to something about 50 KB (!)**. -**Breaking:** See if you use `Common` genesis state functionality, e.g. by accessing pre-defined state with the `genesisState()` function (now removed) or by adding custom state with the `customChain` constructor (genesis-extended data format removed) and see description for `Block` and `Blockchain` breaking releases for context and how to replace the functionality. There are now also no `stateRoot` and `hash` configuration paramters in the `JSON` chain files any more, inclusion was a blocker for a clean refactor and this also wasn't compatible with the Geth genesis file format (these values can be calculated on an upper-library level). So you should remove these from your (custom) chain config files as well. +**Breaking:** See if you use `Common` genesis state functionality, e.g. by accessing pre-defined state with the `genesisState()` function (now removed) or by adding custom state with the `customChain` constructor (genesis-extended data format removed) and see description for `Block` and `Blockchain` breaking releases for context and how to replace the functionality. There are now also no `stateRoot` and `hash` configuration parameters in the `JSON` chain files any more, inclusion was a blocker for a clean refactor and this also wasn't compatible with the Geth genesis file format (these values can be calculated on an upper-library level). So you should remove these from your (custom) chain config files as well. ### Other Changes @@ -737,7 +769,7 @@ To allow a HF switch by total difficulty (TD) - which is planned for the Merge - There is a new `hardforkTD(hardfork?: string | Hardfork): BN | null` function to get the TD value for a HF switch (so primarily: for the `merge` HF) if a total difficulty HF switch is configured. -### Improved Typing for Hardfork, Chain and Genesis releated API Calls +### Improved Typing for Hardfork, Chain and Genesis related API Calls In the Common library all functionality returning hardfork, chain or genesis parameters has previously been under-typed respectively just returned `any` in most cases. This has been improved along PR [#1480](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1480) and is now finding its way into a release. @@ -777,7 +809,7 @@ If you have got left over type 1. `dao` HF inclusions in your custom chain files This release integrates the `london` HF blocks for all networks including `mainnet` and is therefore the first release with finalized London HF support. -### Reworked Custom Chain Instantation / Supported Custom Chains (Polygon / Arbitrum / xDaiChain) +### Reworked Custom Chain Instantiation / Supported Custom Chains (Polygon / Arbitrum / xDaiChain) This release introduces a new `Common.custom()` static constructor which replaces the now deprecated `Common.forCustomChain()` constructor and allows for an easier instantiation of a Common instance with somewhat adopted chain parameters, with the main use case to adopt on instantiating with a deviating chain ID. 
Instantiating a custom common instance with its own chain ID and inheriting all other parameters from `mainnet` can now be as easily done as: @@ -840,7 +872,7 @@ Common now supports settings for the following additional EIPs: - [EIP-3541](https://eips.ethereum.org/EIPS/eip-3541): Reject new contracts starting with the 0xEF byte, PR [#1240](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1240) - [EIP-3554](https://eips.ethereum.org/EIPS/eip-3554): Difficulty Bomb Delay to December 2021 (only PoW networks), PR [#1245](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1245) -All new EIPs have their dedicated EIP configuration file and can also be activated spearately with the `eips` parameter (and the so-created `common` instance can then e.g. be used within the VM): +All new EIPs have their dedicated EIP configuration file and can also be activated separately with the `eips` parameter (and the so-created `common` instance can then e.g. be used within the VM): ```ts import Common from '@ethereumjs/common' @@ -1033,7 +1065,7 @@ Gas fees for all hardforks up to `MuirGlacier` are now completely present within There is a new `Common.forkHash()` method returning pre-calculated Forkhash values or alternatively use the internal `Common._calcForkHash()` implementation to calculate a forkhash on the fly. -Forkhashes are used to uniquely identify a set of hardforks passed to be able to better differentiate between different dedicated chains. This is used for the `Eth/64` devp2p protocol update and specificed in [EIP-2124](https://eips.ethereum.org/EIPS/eip-2124) to help improve the devp2p networking stack. +Forkhashes are used to uniquely identify a set of hardforks passed to be able to better differentiate between different dedicated chains. This is used for the `Eth/64` devp2p protocol update and specified in [EIP-2124](https://eips.ethereum.org/EIPS/eip-2124) to help improve the devp2p networking stack. ### New Block/Hardfork related Utility Functions @@ -1054,7 +1086,7 @@ Current default hardfork is set to `istanbul`, PR [#906](https://github.com/ethe We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1139,7 +1171,7 @@ Gas fees for all hardforks up to `MuirGlacier` are now completely present within There is a new `Common.forkHash()` method returning pre-calculated Forkhash values or alternatively use the internal `Common._calcForkHash()` implementation to calculate a forkhash on the fly. 
-Forkhashes are used to uniquely identify a set of hardforks passed to be able to better differentiate between different dedicated chains. This is used for the `Eth/64` devp2p protocol update and specificed in [EIP-2124](https://eips.ethereum.org/EIPS/eip-2124) to help improve the devp2p networking stack. +Forkhashes are used to uniquely identify a set of hardforks passed to be able to better differentiate between different dedicated chains. This is used for the `Eth/64` devp2p protocol update and specified in [EIP-2124](https://eips.ethereum.org/EIPS/eip-2124) to help improve the devp2p networking stack. ### New Block/Hardfork related Utility Functions @@ -1165,7 +1197,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes diff --git a/packages/common/README.md b/packages/common/README.md index 1124468ef7..79abe06cb6 100644 --- a/packages/common/README.md +++ b/packages/common/README.md @@ -80,13 +80,13 @@ console.log(`EIP 4844 is active -- ${c.isActivatedEIP(4844)}`) ### Custom Cryptography Primitives (WASM) All EthereumJS packages use cryptographic primitives from the audited `ethereum-cryptography` library by default. -These primitves, including `keccak256`, `sha256`, and elliptic curve signature methods, are all written in native +These primitives, including `keccak256`, `sha256`, and elliptic curve signature methods, are all written in native Javascript and therefore have the potential downside of being less performant than alternative cryptography modules written in other languages and then compiled to WASM. If cryptography performance is a bottleneck in your usage of the EthereumJS libraries, you can provide your own primitives to the `Common` constructor and they will be used in place of the defaults. Depending on how your preferred primitives are implemented, you may need to write wrapper methods around them so they conform to the interface exposed by the [`common.customCrypto` property](./src/types.ts). -See the implementation of this in the [`@etheruemjs/client`](../client/bin/cli.ts#L810) using `@polkadot/wasm-crypto` +See the implementation of this in the [`@ethereumjs/client`](../client/bin/cli.ts#L810) using `@polkadot/wasm-crypto` for an example of how this is done for each available cryptographic primitive. Note: replacing native JS crypto primitives with WASM based libraries comes with new security assumptions (additional external dependencies, unauditability of WASM code). It is therefore recommended to evaluate your usage context before applying! @@ -121,7 +121,7 @@ main() The KZG library used for EIP-4844 Blob Transactions is initialized by `common` under the `common.customCrypto` property and is then used throughout the `Ethereumjs` stack wherever KZG cryptography is required. Below is an example of how -to initalize (assuming you are using the `c-kzg` package as your KZG cryptography library). 
+to initialize (assuming you are using the `c-kzg` package as your KZG cryptography library). ```ts // ./examples/initKzg.ts @@ -362,7 +362,7 @@ library supported: - `byzantium` (`Hardfork.Byzantium`) - `constantinople` (`Hardfork.Constantinople`) - `petersburg` (`Hardfork.Petersburg`) (aka `constantinopleFix`, apply together with `constantinople`) -- `istanbul` (`Hardfork.Instanbul`) +- `istanbul` (`Hardfork.Istanbul`) - `muirGlacier` (`Hardfork.MuirGlacier`) - `berlin` (`Hardfork.Berlin`) (since `v2.2.0`) - `london` (`Hardfork.London`) (since `v2.4.0`) @@ -405,11 +405,10 @@ The following EIPs are currently supported: - [EIP-1153](https://eips.ethereum.org/EIPS/eip-1153) - Transient storage opcodes (Cancun) - [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) - Fee market change for ETH 1.0 chain -- [EIP-2315](https://eips.ethereum.org/EIPS/eip-2315) - Simple subroutines for the EVM (`outdated`) - [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537) - BLS precompiles (removed in v4.0.0, see latest v3 release) - [EIP-2565](https://eips.ethereum.org/EIPS/eip-2565) - ModExp gas cost - [EIP-2718](https://eips.ethereum.org/EIPS/eip-2718) - Transaction Types -- [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Save historical block hashes in state (`experimental`) +- [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Serve historical block hashes from state (Prague) - [EIP-2929](https://eips.ethereum.org/EIPS/eip-2929) - gas cost increases for state access opcodes - [EIP-2930](https://eips.ethereum.org/EIPS/eip-2930) - Optional access list tx type - [EIP-3074](https://eips.ethereum.org/EIPS/eip-3074) - AUTH and AUTHCALL opcodes @@ -431,8 +430,14 @@ The following EIPs are currently supported: - [EIP-4895](https://eips.ethereum.org/EIPS/eip-4895) - Beacon chain push withdrawals as operations (Shanghai) - [EIP-5133](https://eips.ethereum.org/EIPS/eip-5133) - Delaying Difficulty Bomb to mid-September 2022 (Gray Glacier) - [EIP-5656](https://eips.ethereum.org/EIPS/eip-5656) - MCOPY - Memory copying instruction (Cancun) +- [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) - Supply validator deposits on chain (Prague) - [EIP-6780](https://eips.ethereum.org/EIPS/eip-6780) - SELFDESTRUCT only in same transaction (Cancun) +- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) - Execution layer triggerable withdrawals (Prague) +- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) - Execution layer triggerable validator consolidations (Prague) +- [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) - EOA code transactions (Prague) (`outdated`) +- [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709) - Read BLOCKHASH from storage and update cost (Osaka) - [EIP-7516](https://eips.ethereum.org/EIPS/eip-7516) - BLOBBASEFEE opcode (Cancun) +- [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) - General purpose execution layer requests (Prague) ### Bootstrap Nodes diff --git a/packages/common/examples/common.ts b/packages/common/examples/common.ts index b7dcacab5c..cb28e1c681 100644 --- a/packages/common/examples/common.ts +++ b/packages/common/examples/common.ts @@ -1,27 +1,19 @@ -import { Chain, Common, createCustomCommon, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' // With enums: -const commonWithEnums = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - -// (also possible with directly passing in strings:) -const commonWithStrings = new Common({ chain: 'mainnet', hardfork: 'london' }) +const commonWithEnums = 
new Common({ chain: Mainnet, hardfork: Hardfork.London }) // Instantiate with the chain (and the default hardfork) -let c = new Common({ chain: Chain.Mainnet }) -console.log(`The gas price for ecAdd is ${c.param('gasPrices', 'ecAddGas')}`) // 500 - -// Chain and hardfork provided -c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) -console.log(`The miner reward under PoW on Byzantium us ${c.param('pow', 'minerReward')}`) // 3000000000000000000 +let c = new Common({ chain: Mainnet }) // Get bootstrap nodes for chain/network console.log('Below are the known bootstrap nodes') console.log(c.bootstrapNodes()) // Array with current nodes // Instantiate with an EIP activated -c = new Common({ chain: Chain.Mainnet, eips: [4844] }) +c = new Common({ chain: Mainnet, eips: [4844] }) console.log(`EIP 4844 is active -- ${c.isActivatedEIP(4844)}`) // Instantiate common with custom chainID -const commonWithCustomChainId = createCustomCommon({ chainId: 1234 }) +const commonWithCustomChainId = createCustomCommon({ chainId: 1234 }, Mainnet) console.log(`The current chain ID is ${commonWithCustomChainId.chainId}`) diff --git a/packages/common/examples/customChain.ts b/packages/common/examples/customChain.ts index 44a89a547b..b473517c1b 100644 --- a/packages/common/examples/customChain.ts +++ b/packages/common/examples/customChain.ts @@ -1,6 +1,7 @@ -import { Common } from '@ethereumjs/common' +import { Common, Mainnet, createCustomCommon } from '@ethereumjs/common' + import myCustomChain1 from './genesisData/testnet.json' // Add custom chain config -const common1 = new Common({ chain: myCustomChain1 }) +const common1 = createCustomCommon(myCustomChain1, Mainnet) console.log(`Common is instantiated with custom chain parameters - ${common1.chainName()}`) diff --git a/packages/common/examples/customChains.ts b/packages/common/examples/customChains.ts deleted file mode 100644 index f051611dd8..0000000000 --- a/packages/common/examples/customChains.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Common } from '@ethereumjs/common' -import myCustomChain1 from './genesisData/testnet.json' -import myCustomChain2 from './genesisData/testnet2.json' -// Add two custom chains, initial mainnet activation -const common1 = new Common({ chain: 'mainnet', customChains: [myCustomChain1, myCustomChain2] }) -console.log(`Common is instantiated with mainnet parameters - ${common1.chainName()}`) -common1.setChain('testnet1') -console.log(`Common is set to use testnet parameters - ${common1.chainName()}`) -// Add two custom chains, activate customChain1 -const common2 = new Common({ - chain: 'testnet2', - customChains: [myCustomChain1, myCustomChain2], -}) - -console.log(`Common is instantiated with testnet2 parameters - ${common1.chainName()}`) diff --git a/packages/common/examples/customCrypto.ts b/packages/common/examples/customCrypto.ts index 60668a71d2..0a5600c959 100644 --- a/packages/common/examples/customCrypto.ts +++ b/packages/common/examples/customCrypto.ts @@ -1,17 +1,17 @@ +import { createBlock } from '@ethereumjs/block' +import { Common, Mainnet } from '@ethereumjs/common' import { keccak256, waitReady } from '@polkadot/wasm-crypto' -import { Chain, Common } from '@ethereumjs/common' -import { Block, createBlockFromBlockData } from '@ethereumjs/block' const main = async () => { // @polkadot/wasm-crypto specific initialization await waitReady() - const common = new Common({ chain: Chain.Mainnet, customCrypto: { keccak256 } }) - const block = createBlockFromBlockData({}, { common }) + const common = new 
Common({ chain: Mainnet, customCrypto: { keccak256 } }) + const block = createBlock({}, { common }) // Method invocations within EthereumJS library instantiations where the common // instance above is passed will now use the custom keccak256 implementation console.log(block.hash()) } -main() +void main() diff --git a/packages/common/examples/fromGeth.ts b/packages/common/examples/fromGeth.ts index 2164390404..af3828e4e4 100644 --- a/packages/common/examples/fromGeth.ts +++ b/packages/common/examples/fromGeth.ts @@ -1,4 +1,4 @@ -import { Common, createCommonFromGethGenesis } from '@ethereumjs/common' +import { createCommonFromGethGenesis } from '@ethereumjs/common' import { hexToBytes } from '@ethereumjs/util' import genesisJson from './genesisData/post-merge.json' diff --git a/packages/common/examples/genesisData/post-merge.json b/packages/common/examples/genesisData/post-merge.json index ffbb465e69..32f5f093d3 100644 --- a/packages/common/examples/genesisData/post-merge.json +++ b/packages/common/examples/genesisData/post-merge.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/common/examples/genesisData/testnet.json b/packages/common/examples/genesisData/testnet.json index 1b208d2140..bfb6fe4974 100644 --- a/packages/common/examples/genesisData/testnet.json +++ b/packages/common/examples/genesisData/testnet.json @@ -1,7 +1,6 @@ { "name": "testnet1", "chainId": 22222, - "networkId": 22222, "defaultHardfork": "istanbul", "consensus": { "type": "poa", diff --git a/packages/common/examples/genesisData/testnet2.json b/packages/common/examples/genesisData/testnet2.json index 2215fde86c..5eec2f59ce 100644 --- a/packages/common/examples/genesisData/testnet2.json +++ b/packages/common/examples/genesisData/testnet2.json @@ -1,7 +1,6 @@ { "name": "testnet2", "chainId": 33333, - "networkId": 33333, "defaultHardfork": "istanbul", "consensus": { "type": "poa", diff --git a/packages/common/examples/initKzg.ts b/packages/common/examples/initKzg.ts index 4d6ecdcdf7..c7c496e677 100644 --- a/packages/common/examples/initKzg.ts +++ b/packages/common/examples/initKzg.ts @@ -1,14 +1,14 @@ +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { loadKZG } from 'kzg-wasm' -import { Common, Chain, Hardfork } from '@ethereumjs/common' const main = async () => { const kzg = await loadKZG() const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Cancun, customCrypto: { kzg }, }) console.log(common.customCrypto.kzg) // Should print the initialized KZG interface } -main() +void main() diff --git a/packages/common/package.json b/packages/common/package.json index 7e817c1f8d..bc9fc72af1 100644 --- a/packages/common/package.json +++ b/packages/common/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/common", - "version": "4.3.0", + "version": "4.4.0", "description": "Resources common to all Ethereum implementations", "keywords": [ "ethereum", @@ -57,7 +57,7 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { diff --git a/packages/common/src/chains.ts b/packages/common/src/chains.ts index 946ed17c08..b463c81d1b 100644 --- a/packages/common/src/chains.ts +++ b/packages/common/src/chains.ts @@ -1,660 +1,648 @@ import type { ChainConfig } from './types.js' -type ChainsDict = { 
- [key: string]: ChainConfig +export const Mainnet: ChainConfig = { + name: 'mainnet', + chainId: 1, + defaultHardfork: 'cancun', + consensus: { + type: 'pow', + algorithm: 'ethash', + ethash: {}, + }, + comment: 'The Ethereum main chain', + url: 'https://ethstats.net/', + genesis: { + gasLimit: 5000, + difficulty: 17179869184, + nonce: '0x0000000000000042', + extraData: '0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa', + }, + depositContractAddress: '0x00000000219ab540356cBB839Cbe05303d7705Fa', + hardforks: [ + { + name: 'chainstart', + block: 0, + forkHash: '0xfc64ec04', + }, + { + name: 'homestead', + block: 1150000, + forkHash: '0x97c2c34c', + }, + { + name: 'dao', + block: 1920000, + forkHash: '0x91d1f948', + }, + { + name: 'tangerineWhistle', + block: 2463000, + forkHash: '0x7a64da13', + }, + { + name: 'spuriousDragon', + block: 2675000, + forkHash: '0x3edd5b10', + }, + { + name: 'byzantium', + block: 4370000, + forkHash: '0xa00bc324', + }, + { + name: 'constantinople', + block: 7280000, + forkHash: '0x668db0af', + }, + { + name: 'petersburg', + block: 7280000, + forkHash: '0x668db0af', + }, + { + name: 'istanbul', + block: 9069000, + forkHash: '0x879d6e30', + }, + { + name: 'muirGlacier', + block: 9200000, + forkHash: '0xe029e991', + }, + { + name: 'berlin', + block: 12244000, + forkHash: '0x0eb440f6', + }, + { + name: 'london', + block: 12965000, + forkHash: '0xb715077d', + }, + { + name: 'arrowGlacier', + block: 13773000, + forkHash: '0x20c327fc', + }, + { + name: 'grayGlacier', + block: 15050000, + forkHash: '0xf0afd0e3', + }, + { + // The forkHash will remain same as mergeForkIdTransition is post merge + // terminal block: https://etherscan.io/block/15537393 + name: 'paris', + block: 15537394, + forkHash: '0xf0afd0e3', + }, + { + name: 'mergeForkIdTransition', + block: null, + forkHash: null, + }, + { + name: 'shanghai', + block: null, + timestamp: '1681338455', + forkHash: '0xdce96c2d', + }, + { + name: 'cancun', + block: null, + timestamp: '1710338135', + forkHash: '0x9f3d2254', + }, + { + name: 'prague', + block: null, + }, + ], + bootstrapNodes: [ + { + ip: '18.138.108.67', + port: 30303, + id: 'd860a01f9722d78051619d1e2351aba3f43f943f6f00718d1b9baa4101932a1f5011f16bb2b1bb35db20d6fe28fa0bf09636d26a87d31de9ec6203eeedb1f666', + location: 'ap-southeast-1-001', + comment: 'bootnode-aws-ap-southeast-1-001', + }, + { + ip: '3.209.45.79', + port: 30303, + id: '22a8232c3abc76a16ae9d6c3b164f98775fe226f0917b0ca871128a74a8e9630b458460865bab457221f1d448dd9791d24c4e5d88786180ac185df813a68d4de', + location: 'us-east-1-001', + comment: 'bootnode-aws-us-east-1-001', + }, + { + ip: '65.108.70.101', + port: 30303, + id: '2b252ab6a1d0f971d9722cb839a42cb81db019ba44c08754628ab4a823487071b5695317c8ccd085219c3a03af063495b2f1da8d18218da2d6a82981b45e6ffc', + location: 'eu-west-1-001', + comment: 'bootnode-hetzner-hel', + }, + { + ip: '157.90.35.166', + port: 30303, + id: '4aeb4ab6c14b23e2c4cfdce879c04b0748a20d8e9b59e25ded2a08143e265c6c25936e74cbc8e641e3312ca288673d91f2f93f8e277de3cfa444ecdaaf982052', + location: 'eu-central-1-001', + comment: 'bootnode-hetzner-fsn', + }, + ], + dnsNetworks: [ + 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.mainnet.ethdisco.net', + ], } -export const chains: ChainsDict = { - mainnet: { - name: 'mainnet', - chainId: 1, - networkId: 1, - defaultHardfork: 'shanghai', - consensus: { - type: 'pow', - algorithm: 'ethash', - ethash: {}, - }, - comment: 'The Ethereum main chain', - url: 'https://ethstats.net/', - genesis: { - 
gasLimit: 5000, - difficulty: 17179869184, - nonce: '0x0000000000000042', - extraData: '0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa', - }, - depositContractAddress: '0x00000000219ab540356cBB839Cbe05303d7705Fa', - hardforks: [ - { - name: 'chainstart', - block: 0, - forkHash: '0xfc64ec04', - }, - { - name: 'homestead', - block: 1150000, - forkHash: '0x97c2c34c', - }, - { - name: 'dao', - block: 1920000, - forkHash: '0x91d1f948', - }, - { - name: 'tangerineWhistle', - block: 2463000, - forkHash: '0x7a64da13', - }, - { - name: 'spuriousDragon', - block: 2675000, - forkHash: '0x3edd5b10', - }, - { - name: 'byzantium', - block: 4370000, - forkHash: '0xa00bc324', - }, - { - name: 'constantinople', - block: 7280000, - forkHash: '0x668db0af', - }, - { - name: 'petersburg', - block: 7280000, - forkHash: '0x668db0af', - }, - { - name: 'istanbul', - block: 9069000, - forkHash: '0x879d6e30', - }, - { - name: 'muirGlacier', - block: 9200000, - forkHash: '0xe029e991', - }, - { - name: 'berlin', - block: 12244000, - forkHash: '0x0eb440f6', - }, - { - name: 'london', - block: 12965000, - forkHash: '0xb715077d', - }, - { - name: 'arrowGlacier', - block: 13773000, - forkHash: '0x20c327fc', - }, - { - name: 'grayGlacier', - block: 15050000, - forkHash: '0xf0afd0e3', - }, - { - // The forkHash will remain same as mergeForkIdTransition is post merge - // terminal block: https://etherscan.io/block/15537393 - name: 'paris', - ttd: '58750000000000000000000', - block: 15537394, - forkHash: '0xf0afd0e3', - }, - { - name: 'mergeForkIdTransition', - block: null, - forkHash: null, - }, - { - name: 'shanghai', - block: null, - timestamp: '1681338455', - forkHash: '0xdce96c2d', - }, - { - name: 'cancun', - block: null, - timestamp: '1710338135', - forkHash: '0x9f3d2254', - }, - { - name: 'prague', - block: null, - }, - ], - bootstrapNodes: [ - { - ip: '18.138.108.67', - port: 30303, - id: 'd860a01f9722d78051619d1e2351aba3f43f943f6f00718d1b9baa4101932a1f5011f16bb2b1bb35db20d6fe28fa0bf09636d26a87d31de9ec6203eeedb1f666', - location: 'ap-southeast-1-001', - comment: 'bootnode-aws-ap-southeast-1-001', - }, - { - ip: '3.209.45.79', - port: 30303, - id: '22a8232c3abc76a16ae9d6c3b164f98775fe226f0917b0ca871128a74a8e9630b458460865bab457221f1d448dd9791d24c4e5d88786180ac185df813a68d4de', - location: 'us-east-1-001', - comment: 'bootnode-aws-us-east-1-001', - }, - { - ip: '65.108.70.101', - port: 30303, - id: '2b252ab6a1d0f971d9722cb839a42cb81db019ba44c08754628ab4a823487071b5695317c8ccd085219c3a03af063495b2f1da8d18218da2d6a82981b45e6ffc', - location: 'eu-west-1-001', - comment: 'bootnode-hetzner-hel', - }, - { - ip: '157.90.35.166', - port: 30303, - id: '4aeb4ab6c14b23e2c4cfdce879c04b0748a20d8e9b59e25ded2a08143e265c6c25936e74cbc8e641e3312ca288673d91f2f93f8e277de3cfa444ecdaaf982052', - location: 'eu-central-1-001', - comment: 'bootnode-hetzner-fsn', - }, - ], - dnsNetworks: [ - 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.mainnet.ethdisco.net', - ], +export const Goerli: ChainConfig = { + name: 'goerli', + chainId: 5, + defaultHardfork: 'cancun', + consensus: { + type: 'poa', + algorithm: 'clique', + clique: { + period: 15, + epoch: 30000, + }, }, - goerli: { - name: 'goerli', - chainId: 5, - networkId: 5, - defaultHardfork: 'shanghai', - consensus: { - type: 'poa', - algorithm: 'clique', - clique: { - period: 15, - epoch: 30000, - }, - }, - comment: 'Cross-client PoA test network', - url: 'https://github.com/goerli/testnet', - genesis: { - timestamp: '0x5c51a607', - gasLimit: 10485760, 
- difficulty: 1, - nonce: '0x0000000000000000', - extraData: - '0x22466c6578692069732061207468696e6722202d204166726900000000000000e0a2bd4258d2768837baa26a28fe71dc079f84c70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', - }, - hardforks: [ - { - name: 'chainstart', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'homestead', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'tangerineWhistle', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'spuriousDragon', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'byzantium', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'constantinople', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'petersburg', - block: 0, - forkHash: '0xa3f5ab08', - }, - { - name: 'istanbul', - block: 1561651, - forkHash: '0xc25efa5c', - }, - { - name: 'berlin', - block: 4460644, - forkHash: '0x757a1c47', - }, - { - name: 'london', - block: 5062605, - forkHash: '0xb8c6299d', - }, - { - // The forkHash will remain same as mergeForkIdTransition is post merge, - // terminal block: https://goerli.etherscan.io/block/7382818 - name: 'paris', - ttd: '10790000', - block: 7382819, - forkHash: '0xb8c6299d', - }, - { - name: 'mergeForkIdTransition', - block: null, - forkHash: null, - }, - { - name: 'shanghai', - block: null, - timestamp: '1678832736', - forkHash: '0xf9843abf', - }, - { - name: 'cancun', - block: null, - timestamp: '1705473120', - forkHash: '0x70cc14e2', - }, - ], - bootstrapNodes: [ - { - ip: '51.141.78.53', - port: 30303, - id: '011f758e6552d105183b1761c5e2dea0111bc20fd5f6422bc7f91e0fabbec9a6595caf6239b37feb773dddd3f87240d99d859431891e4a642cf2a0a9e6cbb98a', - location: '', - comment: 'Upstream bootnode 1', - }, - { - ip: '13.93.54.137', - port: 30303, - id: '176b9417f511d05b6b2cf3e34b756cf0a7096b3094572a8f6ef4cdcb9d1f9d00683bf0f83347eebdf3b81c3521c2332086d9592802230bf528eaf606a1d9677b', - location: '', - comment: 'Upstream bootnode 2', - }, - { - ip: '94.237.54.114', - port: 30313, - id: '46add44b9f13965f7b9875ac6b85f016f341012d84f975377573800a863526f4da19ae2c620ec73d11591fa9510e992ecc03ad0751f53cc02f7c7ed6d55c7291', - location: '', - comment: 'Upstream bootnode 3', - }, - { - ip: '18.218.250.66', - port: 30313, - id: 'b5948a2d3e9d486c4d75bf32713221c2bd6cf86463302339299bd227dc2e276cd5a1c7ca4f43a0e9122fe9af884efed563bd2a1fd28661f3b5f5ad7bf1de5949', - location: '', - comment: 'Upstream bootnode 4', - }, - { - ip: '3.11.147.67', - port: 30303, - id: 'a61215641fb8714a373c80edbfa0ea8878243193f57c96eeb44d0bc019ef295abd4e044fd619bfc4c59731a73fb79afe84e9ab6da0c743ceb479cbb6d263fa91', - location: '', - comment: 'Ethereum Foundation bootnode', - }, - { - ip: '51.15.116.226', - port: 30303, - id: 'a869b02cec167211fb4815a82941db2e7ed2936fd90e78619c53eb17753fcf0207463e3419c264e2a1dd8786de0df7e68cf99571ab8aeb7c4e51367ef186b1dd', - location: '', - comment: 'Goerli Initiative bootnode', - }, - { - ip: '51.15.119.157', - port: 30303, - id: '807b37ee4816ecf407e9112224494b74dd5933625f655962d892f2f0f02d7fbbb3e2a94cf87a96609526f30c998fd71e93e2f53015c558ffc8b03eceaf30ee33', - location: '', - comment: 'Goerli Initiative bootnode', - }, - { - ip: '51.15.119.157', - port: 40303, - id: 'a59e33ccd2b3e52d578f1fbd70c6f9babda2650f0760d6ff3b37742fdcdfdb3defba5d56d315b40c46b70198c7621e63ffa3f987389c7118634b0fefbbdfa7fd', - location: '', - comment: 'Goerli Initiative bootnode', - }, - ], - dnsNetworks: [ - 
'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.goerli.ethdisco.net', - ], + comment: 'Cross-client PoA test network', + url: 'https://github.com/goerli/testnet', + genesis: { + timestamp: '0x5c51a607', + gasLimit: 10485760, + difficulty: 1, + nonce: '0x0000000000000000', + extraData: + '0x22466c6578692069732061207468696e6722202d204166726900000000000000e0a2bd4258d2768837baa26a28fe71dc079f84c70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', }, - sepolia: { - name: 'sepolia', - chainId: 11155111, - networkId: 11155111, - defaultHardfork: 'shanghai', - consensus: { - type: 'pow', - algorithm: 'ethash', - ethash: {}, - }, - comment: 'PoW test network to replace Ropsten', - url: 'https://github.com/ethereum/go-ethereum/pull/23730', - genesis: { - timestamp: '0x6159af19', - gasLimit: 30000000, - difficulty: 131072, - nonce: '0x0000000000000000', - extraData: '0x5365706f6c69612c20417468656e732c204174746963612c2047726565636521', - }, - hardforks: [ - { - name: 'chainstart', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'homestead', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'tangerineWhistle', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'spuriousDragon', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'byzantium', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'constantinople', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'petersburg', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'istanbul', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'muirGlacier', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'berlin', - block: 0, - forkHash: '0xfe3366e7', - }, - { - name: 'london', - block: 0, - forkHash: '0xfe3366e7', - }, - { - // The forkHash will remain same as mergeForkIdTransition is post merge, - // terminal block: https://sepolia.etherscan.io/block/1450408 - name: 'paris', - ttd: '17000000000000000', - block: 1450409, - forkHash: '0xfe3366e7', - }, - { - name: 'mergeForkIdTransition', - block: 1735371, - forkHash: '0xb96cbd13', - }, - { - name: 'shanghai', - block: null, - timestamp: '1677557088', - forkHash: '0xf7f9bc08', - }, - { - name: 'cancun', - block: null, - timestamp: '1706655072', - forkHash: '0x88cf81d9', - }, - ], - bootstrapNodes: [ - { - ip: '18.168.182.86', - port: 30303, - id: '9246d00bc8fd1742e5ad2428b80fc4dc45d786283e05ef6edbd9002cbc335d40998444732fbe921cb88e1d2c73d1b1de53bae6a2237996e9bfe14f871baf7066', - location: '', - comment: 'geth', - }, - { - ip: '52.14.151.177', - port: 30303, - id: 'ec66ddcf1a974950bd4c782789a7e04f8aa7110a72569b6e65fcd51e937e74eed303b1ea734e4d19cfaec9fbff9b6ee65bf31dcb50ba79acce9dd63a6aca61c7', - location: '', - comment: 'besu', - }, - { - ip: '165.22.196.173', - port: 30303, - id: 'ce970ad2e9daa9e14593de84a8b49da3d54ccfdf83cbc4fe519cb8b36b5918ed4eab087dedd4a62479b8d50756b492d5f762367c8d20329a7854ec01547568a6', - location: '', - comment: 'EF', - }, - { - ip: '65.108.95.67', - port: 30303, - id: '075503b13ed736244896efcde2a992ec0b451357d46cb7a8132c0384721742597fc8f0d91bbb40bb52e7d6e66728d36a1fda09176294e4a30cfac55dcce26bc6', - location: '', - comment: 'lodestar', - }, - ], - dnsNetworks: [ - 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.sepolia.ethdisco.net', - ], + hardforks: [ + { + name: 'chainstart', + block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'homestead', + block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'tangerineWhistle', + 
block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'spuriousDragon', + block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'byzantium', + block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'constantinople', + block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'petersburg', + block: 0, + forkHash: '0xa3f5ab08', + }, + { + name: 'istanbul', + block: 1561651, + forkHash: '0xc25efa5c', + }, + { + name: 'berlin', + block: 4460644, + forkHash: '0x757a1c47', + }, + { + name: 'london', + block: 5062605, + forkHash: '0xb8c6299d', + }, + { + // The forkHash will remain same as mergeForkIdTransition is post merge, + // terminal block: https://goerli.etherscan.io/block/7382818 + name: 'paris', + block: 7382819, + forkHash: '0xb8c6299d', + }, + { + name: 'mergeForkIdTransition', + block: null, + forkHash: null, + }, + { + name: 'shanghai', + block: null, + timestamp: '1678832736', + forkHash: '0xf9843abf', + }, + { + name: 'cancun', + block: null, + timestamp: '1705473120', + forkHash: '0x70cc14e2', + }, + ], + bootstrapNodes: [ + { + ip: '51.141.78.53', + port: 30303, + id: '011f758e6552d105183b1761c5e2dea0111bc20fd5f6422bc7f91e0fabbec9a6595caf6239b37feb773dddd3f87240d99d859431891e4a642cf2a0a9e6cbb98a', + location: '', + comment: 'Upstream bootnode 1', + }, + { + ip: '13.93.54.137', + port: 30303, + id: '176b9417f511d05b6b2cf3e34b756cf0a7096b3094572a8f6ef4cdcb9d1f9d00683bf0f83347eebdf3b81c3521c2332086d9592802230bf528eaf606a1d9677b', + location: '', + comment: 'Upstream bootnode 2', + }, + { + ip: '94.237.54.114', + port: 30313, + id: '46add44b9f13965f7b9875ac6b85f016f341012d84f975377573800a863526f4da19ae2c620ec73d11591fa9510e992ecc03ad0751f53cc02f7c7ed6d55c7291', + location: '', + comment: 'Upstream bootnode 3', + }, + { + ip: '18.218.250.66', + port: 30313, + id: 'b5948a2d3e9d486c4d75bf32713221c2bd6cf86463302339299bd227dc2e276cd5a1c7ca4f43a0e9122fe9af884efed563bd2a1fd28661f3b5f5ad7bf1de5949', + location: '', + comment: 'Upstream bootnode 4', + }, + { + ip: '3.11.147.67', + port: 30303, + id: 'a61215641fb8714a373c80edbfa0ea8878243193f57c96eeb44d0bc019ef295abd4e044fd619bfc4c59731a73fb79afe84e9ab6da0c743ceb479cbb6d263fa91', + location: '', + comment: 'Ethereum Foundation bootnode', + }, + { + ip: '51.15.116.226', + port: 30303, + id: 'a869b02cec167211fb4815a82941db2e7ed2936fd90e78619c53eb17753fcf0207463e3419c264e2a1dd8786de0df7e68cf99571ab8aeb7c4e51367ef186b1dd', + location: '', + comment: 'Goerli Initiative bootnode', + }, + { + ip: '51.15.119.157', + port: 30303, + id: '807b37ee4816ecf407e9112224494b74dd5933625f655962d892f2f0f02d7fbbb3e2a94cf87a96609526f30c998fd71e93e2f53015c558ffc8b03eceaf30ee33', + location: '', + comment: 'Goerli Initiative bootnode', + }, + { + ip: '51.15.119.157', + port: 40303, + id: 'a59e33ccd2b3e52d578f1fbd70c6f9babda2650f0760d6ff3b37742fdcdfdb3defba5d56d315b40c46b70198c7621e63ffa3f987389c7118634b0fefbbdfa7fd', + location: '', + comment: 'Goerli Initiative bootnode', + }, + ], + dnsNetworks: [ + 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.goerli.ethdisco.net', + ], +} + +export const Sepolia: ChainConfig = { + name: 'sepolia', + chainId: 11155111, + defaultHardfork: 'cancun', + consensus: { + type: 'pow', + algorithm: 'ethash', + ethash: {}, }, - holesky: { - name: 'holesky', - chainId: 17000, - networkId: 17000, - defaultHardfork: 'paris', - consensus: { - type: 'pos', - algorithm: 'casper', - }, - comment: 'PoS test network to replace Goerli', - url: 'https://github.com/eth-clients/holesky/', - genesis: { - baseFeePerGas: '0x3B9ACA00', - 
difficulty: '0x01', - extraData: '0x', - gasLimit: '0x17D7840', - nonce: '0x0000000000001234', - timestamp: '0x65156994', - }, - hardforks: [ - { - name: 'chainstart', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'homestead', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'tangerineWhistle', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'spuriousDragon', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'byzantium', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'constantinople', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'petersburg', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'istanbul', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'muirGlacier', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'berlin', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'london', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'paris', - ttd: '0', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'mergeForkIdTransition', - block: 0, - forkHash: '0xc61a6098', - }, - { - name: 'shanghai', - block: null, - timestamp: '1696000704', - forkHash: '0xfd4f016b', - }, - { - name: 'cancun', - block: null, - timestamp: '1707305664', - forkHash: '0x9b192ad0', - }, - ], - bootstrapNodes: [ - { - ip: '146.190.13.128', - port: 30303, - id: 'ac906289e4b7f12df423d654c5a962b6ebe5b3a74cc9e06292a85221f9a64a6f1cfdd6b714ed6dacef51578f92b34c60ee91e9ede9c7f8fadc4d347326d95e2b', - location: '', - comment: 'bootnode 1', - }, - { - ip: '178.128.136.233', - port: 30303, - id: 'a3435a0155a3e837c02f5e7f5662a2f1fbc25b48e4dc232016e1c51b544cb5b4510ef633ea3278c0e970fa8ad8141e2d4d0f9f95456c537ff05fdf9b31c15072', - location: '', - comment: 'bootnode 2', - }, - ], - dnsNetworks: [ - 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.holesky.ethdisco.net', - ], + comment: 'PoW test network to replace Ropsten', + url: 'https://github.com/ethereum/go-ethereum/pull/23730', + genesis: { + timestamp: '0x6159af19', + gasLimit: 30000000, + difficulty: 131072, + nonce: '0x0000000000000000', + extraData: '0x5365706f6c69612c20417468656e732c204174746963612c2047726565636521', }, - kaustinen6: { - name: 'kaustinen6', - chainId: 69420, - networkId: 69420, - defaultHardfork: 'osaka', - consensus: { - type: 'pos', - algorithm: 'casper', - }, - comment: 'Verkle kaustinen testnet 6 (likely temporary, do not hard-wire into production code)', - url: 'https://github.com/eth-clients/kaustinen/', - genesis: { - difficulty: '0x01', - extraData: '0x', - gasLimit: '0x17D7840', - nonce: '0x0000000000001234', - timestamp: '0x66190fbc', - }, - hardforks: [ - { - name: 'chainstart', - block: 0, - }, - { - name: 'homestead', - block: 0, - }, - { - name: 'tangerineWhistle', - block: 0, - }, - { - name: 'spuriousDragon', - block: 0, - }, - { - name: 'byzantium', - block: 0, - }, - { - name: 'constantinople', - block: 0, - }, - { - name: 'petersburg', - block: 0, - }, - { - name: 'istanbul', - block: 0, - }, - { - name: 'berlin', - block: 0, - }, - { - name: 'london', - block: 0, - }, - { - name: 'paris', - ttd: '0', - block: 0, - }, - { - name: 'mergeForkIdTransition', - block: 0, - }, - { - name: 'shanghai', - block: null, - timestamp: '0', - }, - { - name: 'osaka', - block: null, - timestamp: '1712848500', - }, - ], - bootstrapNodes: [], - dnsNetworks: [], + hardforks: [ + { + name: 'chainstart', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'homestead', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'tangerineWhistle', + block: 0, + forkHash: 
'0xfe3366e7', + }, + { + name: 'spuriousDragon', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'byzantium', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'constantinople', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'petersburg', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'istanbul', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'muirGlacier', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'berlin', + block: 0, + forkHash: '0xfe3366e7', + }, + { + name: 'london', + block: 0, + forkHash: '0xfe3366e7', + }, + { + // The forkHash will remain same as mergeForkIdTransition is post merge, + // terminal block: https://sepolia.etherscan.io/block/1450408 + name: 'paris', + block: 1450409, + forkHash: '0xfe3366e7', + }, + { + name: 'mergeForkIdTransition', + block: 1735371, + forkHash: '0xb96cbd13', + }, + { + name: 'shanghai', + block: null, + timestamp: '1677557088', + forkHash: '0xf7f9bc08', + }, + { + name: 'cancun', + block: null, + timestamp: '1706655072', + forkHash: '0x88cf81d9', + }, + ], + bootstrapNodes: [ + { + ip: '18.168.182.86', + port: 30303, + id: '9246d00bc8fd1742e5ad2428b80fc4dc45d786283e05ef6edbd9002cbc335d40998444732fbe921cb88e1d2c73d1b1de53bae6a2237996e9bfe14f871baf7066', + location: '', + comment: 'geth', + }, + { + ip: '52.14.151.177', + port: 30303, + id: 'ec66ddcf1a974950bd4c782789a7e04f8aa7110a72569b6e65fcd51e937e74eed303b1ea734e4d19cfaec9fbff9b6ee65bf31dcb50ba79acce9dd63a6aca61c7', + location: '', + comment: 'besu', + }, + { + ip: '165.22.196.173', + port: 30303, + id: 'ce970ad2e9daa9e14593de84a8b49da3d54ccfdf83cbc4fe519cb8b36b5918ed4eab087dedd4a62479b8d50756b492d5f762367c8d20329a7854ec01547568a6', + location: '', + comment: 'EF', + }, + { + ip: '65.108.95.67', + port: 30303, + id: '075503b13ed736244896efcde2a992ec0b451357d46cb7a8132c0384721742597fc8f0d91bbb40bb52e7d6e66728d36a1fda09176294e4a30cfac55dcce26bc6', + location: '', + comment: 'lodestar', + }, + ], + dnsNetworks: [ + 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.sepolia.ethdisco.net', + ], +} + +export const Holesky: ChainConfig = { + name: 'holesky', + chainId: 17000, + defaultHardfork: 'paris', + consensus: { + type: 'pos', + algorithm: 'casper', + }, + comment: 'PoS test network to replace Goerli', + url: 'https://github.com/eth-clients/holesky/', + genesis: { + baseFeePerGas: '0x3B9ACA00', + difficulty: '0x01', + extraData: '0x', + gasLimit: '0x17D7840', + nonce: '0x0000000000001234', + timestamp: '0x65156994', }, + hardforks: [ + { + name: 'chainstart', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'homestead', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'tangerineWhistle', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'spuriousDragon', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'byzantium', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'constantinople', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'petersburg', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'istanbul', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'muirGlacier', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'berlin', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'london', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'paris', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'mergeForkIdTransition', + block: 0, + forkHash: '0xc61a6098', + }, + { + name: 'shanghai', + block: null, + timestamp: '1696000704', + forkHash: '0xfd4f016b', + }, + { + name: 
'cancun', + block: null, + timestamp: '1707305664', + forkHash: '0x9b192ad0', + }, + ], + bootstrapNodes: [ + { + ip: '146.190.13.128', + port: 30303, + id: 'ac906289e4b7f12df423d654c5a962b6ebe5b3a74cc9e06292a85221f9a64a6f1cfdd6b714ed6dacef51578f92b34c60ee91e9ede9c7f8fadc4d347326d95e2b', + location: '', + comment: 'bootnode 1', + }, + { + ip: '178.128.136.233', + port: 30303, + id: 'a3435a0155a3e837c02f5e7f5662a2f1fbc25b48e4dc232016e1c51b544cb5b4510ef633ea3278c0e970fa8ad8141e2d4d0f9f95456c537ff05fdf9b31c15072', + location: '', + comment: 'bootnode 2', + }, + ], + dnsNetworks: [ + 'enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.holesky.ethdisco.net', + ], +} + +export const Kaustinen6: ChainConfig = { + name: 'kaustinen6', + chainId: 69420, + defaultHardfork: 'osaka', + consensus: { + type: 'pos', + algorithm: 'casper', + }, + comment: 'Verkle kaustinen testnet 6 (likely temporary, do not hard-wire into production code)', + url: 'https://github.com/eth-clients/kaustinen/', + genesis: { + difficulty: '0x01', + extraData: '0x', + gasLimit: '0x17D7840', + nonce: '0x0000000000001234', + timestamp: '0x66190fbc', + }, + hardforks: [ + { + name: 'chainstart', + block: 0, + }, + { + name: 'homestead', + block: 0, + }, + { + name: 'tangerineWhistle', + block: 0, + }, + { + name: 'spuriousDragon', + block: 0, + }, + { + name: 'byzantium', + block: 0, + }, + { + name: 'constantinople', + block: 0, + }, + { + name: 'petersburg', + block: 0, + }, + { + name: 'istanbul', + block: 0, + }, + { + name: 'berlin', + block: 0, + }, + { + name: 'london', + block: 0, + }, + { + name: 'paris', + block: 0, + }, + { + name: 'mergeForkIdTransition', + block: 0, + }, + { + name: 'shanghai', + block: null, + timestamp: '0', + }, + { + name: 'osaka', + block: null, + timestamp: '1712848500', + }, + ], + bootstrapNodes: [], + dnsNetworks: [], } diff --git a/packages/common/src/common.ts b/packages/common/src/common.ts index ddba7ecb36..7b598ae1ba 100644 --- a/packages/common/src/common.ts +++ b/packages/common/src/common.ts @@ -10,13 +10,11 @@ import { import { EventEmitter } from 'events' import { crc32 } from './crc.js' -import { EIPs } from './eips.js' +import { eipsDict } from './eips.js' import { Hardfork } from './enums.js' -import { hardforks as HARDFORK_SPECS } from './hardforks.js' +import { hardforksDict } from './hardforks.js' -import { _getChainParams } from './index.js' - -import type { Chain, ConsensusAlgorithm, ConsensusType } from './enums.js' +import type { ConsensusAlgorithm, ConsensusType } from './enums.js' import type { BootstrapNodeConfig, CasperConfig, @@ -24,28 +22,23 @@ import type { CliqueConfig, CommonOpts, CustomCrypto, - EIPConfig, - EIPOrHFConfig, EthashConfig, GenesisBlockConfig, HardforkByOpts, HardforkConfig, HardforkTransitionConfig, + ParamsConfig, + ParamsDict, } from './types.js' import type { BigIntLike, PrefixedHexString } from '@ethereumjs/util' -type HardforkSpecKeys = string // keyof typeof HARDFORK_SPECS -type HardforkSpecValues = typeof HARDFORK_SPECS[HardforkSpecKeys] - -type ParamsCacheConfig = Omit - /** * Common class to access chain and hardfork parameters and to provide * a unified and shared view on the network and hardfork state. * * Use the {@link Common.custom} static constructor for creating simple * custom chain {@link Common} objects (more complete custom chain setups - * can be created via the main constructor and the {@link CommonOpts.customChains} parameter). + * can be created via the main constructor). 
*/ export class Common { readonly DEFAULT_HARDFORK: string | Hardfork @@ -53,30 +46,31 @@ export class Common { protected _chainParams: ChainConfig protected _hardfork: string | Hardfork protected _eips: number[] = [] - protected _customChains: ChainConfig[] + protected _params: ParamsDict public readonly customCrypto: CustomCrypto - protected _paramsCache: ParamsCacheConfig = {} + protected _paramsCache: ParamsConfig = {} protected _activatedEIPsCache: number[] = [] - protected HARDFORK_CHANGES: [HardforkSpecKeys, HardforkSpecValues][] + protected HARDFORK_CHANGES: [string, HardforkConfig][] public events: EventEmitter constructor(opts: CommonOpts) { this.events = new EventEmitter() - this._customChains = opts.customChains ?? [] - this._chainParams = this.setChain(opts.chain) - this.DEFAULT_HARDFORK = this._chainParams.defaultHardfork ?? Hardfork.Shanghai + this._chainParams = opts.chain + this.DEFAULT_HARDFORK = this._chainParams.defaultHardfork ?? Hardfork.Cancun // Assign hardfork changes in the sequence of the applied hardforks this.HARDFORK_CHANGES = this.hardforks().map((hf) => [ - hf.name as HardforkSpecKeys, - HARDFORK_SPECS[hf.name] ?? + hf.name, + hardforksDict[hf.name] ?? (this._chainParams.customHardforks && this._chainParams.customHardforks[hf.name]), ]) this._hardfork = this.DEFAULT_HARDFORK + this._params = { ...(opts.params ?? {}) } // copy + if (opts.hardfork !== undefined) { this.setHardfork(opts.hardfork) } @@ -92,36 +86,52 @@ export class Common { } /** - * Sets the chain - * @param chain String ('mainnet') or Number (1) chain representation. - * Or, a Dictionary of chain parameters for a private network. - * @returns The dictionary with parameters set as chain + * Update the internal Common EIP params set. Existing values + * will get preserved unless there is a new value for a parameter + * provided with params. + * + * Example Format: + * + * ```ts + * { + * 1559: { + * initialBaseFee: 1000000000, + * } + * } + * ``` + * + * @param params */ - setChain(chain: string | number | Chain | bigint | object): ChainConfig { - if (typeof chain === 'number' || typeof chain === 'bigint' || typeof chain === 'string') { - this._chainParams = _getChainParams(chain, this._customChains) - } else if (typeof chain === 'object') { - if (this._customChains.length > 0) { - throw new Error( - 'Chain must be a string, number, or bigint when initialized with customChains passed in' - ) - } - const required = ['networkId', 'genesis', 'hardforks', 'bootstrapNodes'] - for (const param of required) { - if (!(param in chain)) { - throw new Error(`Missing required chain parameter: ${param}`) - } - } - this._chainParams = chain as ChainConfig - } else { - throw new Error('Wrong input format') - } - for (const hf of this.hardforks()) { - if (hf.block === undefined) { - throw new Error(`Hardfork cannot have undefined block number`) + updateParams(params: ParamsDict) { + for (const [eip, paramsConfig] of Object.entries(params)) { + if (!(eip in this._params)) { + this._params[eip] = { ...paramsConfig } // copy + } else { + this._params[eip] = { ...this._params[eip], ...params[eip] } } } - return this._chainParams + + this._buildParamsCache() + } + + /** + * Fully resets the internal Common EIP params set with the values provided. 
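Since hardfork and EIP definitions no longer carry parameter values themselves, a `ParamsDict` keyed by EIP number can be handed to the constructor and adjusted later via `updateParams()` (merging) or `resetParams()` (replacing). A minimal sketch following the example format from the doc comment above; the `Mainnet` export and the `params` constructor option are taken from elsewhere in this diff:

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'

const common = new Common({
  chain: Mainnet,
  hardfork: Hardfork.London,
  params: { 1559: { initialBaseFee: 1000000000 } }, // EIP-keyed ParamsDict
})

// Merging update: existing values are preserved unless overridden
common.updateParams({ 1559: { initialBaseFee: 2000000000 } })

// Full replacement of the internal params set
common.resetParams({ 1559: { initialBaseFee: 1000000000 } })
```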
+ * + * Example Format: + * + * ```ts + * { + * 1559: { + * initialBaseFee: 1000000000, + * } + * } + * ``` + * + * @param params + */ + resetParams(params: ParamsDict) { + this._params = { ...params } // copy + this._buildParamsCache() } /** @@ -147,42 +157,26 @@ export class Common { } /** - * Returns the hardfork either based on block numer (older HFs) or + * Returns the hardfork either based on block number (older HFs) or * timestamp (Shanghai upwards). * - * An optional TD takes precedence in case the corresponding HF block - * is set to `null` or otherwise needs to match (if not an error - * will be thrown). - * - * @param Opts Block number, timestamp or TD (all optional) + * @param Opts Block number or timestamp * @returns The name of the HF */ getHardforkBy(opts: HardforkByOpts): string { const blockNumber: bigint | undefined = toType(opts.blockNumber, TypeOutput.BigInt) - const td: bigint | undefined = toType(opts.td, TypeOutput.BigInt) const timestamp: bigint | undefined = toType(opts.timestamp, TypeOutput.BigInt) - // Filter out hardforks with no block number, no ttd or no timestamp (i.e. unapplied hardforks) - const hfs = this.hardforks().filter( - (hf) => - hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined) || hf.timestamp !== undefined - ) - const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined) - const doubleTTDHF = hfs - .slice(mergeIndex + 1) - .findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined) - if (doubleTTDHF >= 0) { - throw Error(`More than one merge hardforks found with ttd specified`) - } + // Filter out hardforks with no block number, no timestamp (i.e. unapplied hardforks) + const hfs = this.hardforks().filter((hf) => hf.block !== null || hf.timestamp !== undefined) // Find the first hardfork that has a block number greater than `blockNumber` - // (skips the merge hardfork since it cannot have a block number specified). // If timestamp is not provided, it also skips timestamps hardforks to continue // discovering/checking number hardforks. let hfIndex = hfs.findIndex( (hf) => (blockNumber !== undefined && hf.block !== null && BigInt(hf.block) > blockNumber) || - (timestamp !== undefined && hf.timestamp !== undefined && BigInt(hf.timestamp) > timestamp) + (timestamp !== undefined && hf.timestamp !== undefined && BigInt(hf.timestamp) > timestamp), ) if (hfIndex === -1) { @@ -194,35 +188,17 @@ export class Common { throw Error('Must have at least one hardfork at block 0') } - // If timestamp is not provided, we need to rollback to the last hf with block or ttd + // If timestamp is not provided, we need to rollback to the last hf with block if (timestamp === undefined) { const stepBack = hfs .slice(0, hfIndex) .reverse() - .findIndex((hf) => hf.block !== null || hf.ttd !== undefined) + .findIndex((hf) => hf.block !== null) hfIndex = hfIndex - stepBack } // Move hfIndex one back to arrive at candidate hardfork hfIndex = hfIndex - 1 - // If the timestamp was not provided, we could have skipped timestamp hardforks to look for number - // hardforks. so it will now be needed to rollback - if (hfs[hfIndex].block === null && hfs[hfIndex].timestamp === undefined) { - // We're on the merge hardfork. Let's check the TTD - if (td === undefined || td === null || BigInt(hfs[hfIndex].ttd!) > td) { - // Merge ttd greater than current td so we're on hardfork before merge - hfIndex -= 1 - } - } else { - if (mergeIndex >= 0 && td !== undefined && td !== null) { - if (hfIndex >= mergeIndex && BigInt(hfs[mergeIndex].ttd!) 
> td) { - throw Error('Maximum HF determined by total difficulty is lower than the block number HF') - } else if (hfIndex < mergeIndex && BigInt(hfs[mergeIndex].ttd!) < td) { - throw Error('HF determined by block number is lower than the minimum total difficulty HF') - } - } - } - const hfStartIndex = hfIndex // Move the hfIndex to the end of the hardforks that might be scheduled on the same block/timestamp // This won't anyway be the case with Merge hfs @@ -241,10 +217,10 @@ export class Common { .slice(0, hfStartIndex) .reduce( (acc: number, hf: HardforkTransitionConfig) => Math.max(Number(hf.timestamp ?? '0'), acc), - 0 + 0, ) if (minTimeStamp > timestamp) { - throw Error(`Maximum HF determined by timestamp is lower than the block number/ttd HF`) + throw Error(`Maximum HF determined by timestamp is lower than the block number HF`) } const maxTimeStamp = hfs @@ -252,10 +228,11 @@ export class Common { .reduce( (acc: number, hf: HardforkTransitionConfig) => Math.min(Number(hf.timestamp ?? timestamp), acc), - Number(timestamp) + Number(timestamp), ) + if (maxTimeStamp < timestamp) { - throw Error(`Maximum HF determined by block number/ttd is lower than timestamp HF`) + throw Error(`Maximum HF determined by block number is lower than timestamp HF`) } } const hardfork = hfs[hfIndex] @@ -263,14 +240,10 @@ export class Common { } /** - * Sets a new hardfork either based on block numer (older HFs) or + * Sets a new hardfork either based on block number (older HFs) or * timestamp (Shanghai upwards). * - * An optional TD takes precedence in case the corresponding HF block - * is set to `null` or otherwise needs to match (if not an error - * will be thrown). - * - * @param Opts Block number, timestamp or TD (all optional) + * @param Opts Block number or timestamp * @returns The name of the HF set */ setHardforkBy(opts: HardforkByOpts): string { @@ -298,13 +271,13 @@ export class Common { */ setEIPs(eips: number[] = []) { for (const eip of eips) { - if (!(eip in EIPs)) { + if (!(eip in eipsDict)) { throw new Error(`${eip} not supported`) } - const minHF = this.gteHardfork((EIPs as any)[eip]['minimumHardfork']) + const minHF = this.gteHardfork(eipsDict[eip]['minimumHardfork']) if (!minHF) { throw new Error( - `${eip} cannot be activated on hardfork ${this.hardfork()}, minimumHardfork: ${minHF}` + `${eip} cannot be activated on hardfork ${this.hardfork()}, minimumHardfork: ${minHF}`, ) } } @@ -313,8 +286,8 @@ export class Common { this._buildActivatedEIPsCache() for (const eip of eips) { - if ((EIPs as any)[eip].requiredEIPs !== undefined) { - for (const elem of (EIPs as any)[eip].requiredEIPs) { + if (eipsDict[eip].requiredEIPs !== undefined) { + for (const elem of eipsDict[eip].requiredEIPs!) 
{ if (!(eips.includes(elem) || this.isActivatedEIP(elem))) { throw new Error(`${eip} requires EIP ${elem}, but is not included in the EIP list`) } @@ -326,26 +299,10 @@ export class Common { /** * Internal helper for _buildParamsCache() */ - protected _mergeWithParamsCache(params: HardforkConfig | EIPConfig) { - this._paramsCache['gasConfig'] = { - ...this._paramsCache['gasConfig'], - ...params['gasConfig'], - } - this._paramsCache['gasPrices'] = { - ...this._paramsCache['gasPrices'], - ...params['gasPrices'], - } - this._paramsCache['pow'] = { - ...this._paramsCache['pow'], - ...params['pow'], - } - this._paramsCache['sharding'] = { - ...this._paramsCache['sharding'], - ...params['sharding'], - } - this._paramsCache['vm'] = { - ...this._paramsCache['vm'], - ...params['vm'], + protected _mergeWithParamsCache(params: ParamsConfig) { + this._paramsCache = { + ...this._paramsCache, + ...params, } } @@ -361,43 +318,24 @@ export class Common { if ('eips' in hfChanges[1]) { const hfEIPs = hfChanges[1]['eips'] for (const eip of hfEIPs!) { - if (!(eip in EIPs)) { - throw new Error(`${eip} not supported`) - } - - this._mergeWithParamsCache(EIPs[eip]) + this._mergeWithParamsCache(this._params[eip] ?? {}) } - // Parameter-inlining HF config (e.g. for istanbul) - } else { - this._mergeWithParamsCache(hfChanges[1]) - } - if ( - hfChanges[1].vm || - hfChanges[1].gasConfig || - hfChanges[1].gasPrices || - hfChanges[1].pow || - hfChanges[1].sharding - ) { - this._mergeWithParamsCache(hfChanges[1]) } + // Parameter-inlining HF config (e.g. for istanbul) + this._mergeWithParamsCache(hfChanges[1].params ?? {}) if (hfChanges[0] === hardfork) break } // Iterate through all additionally activated EIPs for (const eip of this._eips) { - if (!(eip in EIPs)) { - throw new Error(`${eip} not supported`) - } - - this._mergeWithParamsCache(EIPs[eip]) + this._mergeWithParamsCache(this._params[eip] ?? {}) } } protected _buildActivatedEIPsCache() { this._activatedEIPsCache = [] - for (const hfChanges of this.HARDFORK_CHANGES) { - const hf = hfChanges[1] - if (this.gteHardfork(hf['name']) && 'eips' in hf) { + for (const [name, hf] of this.HARDFORK_CHANGES) { + if (this.gteHardfork(name) && 'eips' in hf) { this._activatedEIPsCache = this._activatedEIPsCache.concat(hf['eips'] as number[]) } } @@ -411,95 +349,82 @@ export class Common { * Otherwise the parameter is taken from the latest applied HF with * a change on the respective parameter. * - * @param topic Parameter topic ('gasConfig', 'gasPrices', 'vm', 'pow') - * @param name Parameter name (e.g. 'minGasLimit' for 'gasConfig' topic) - * @returns The value requested or `BigInt(0)` if not found + * @param name Parameter name (e.g. 'minGasLimit') + * @returns The value requested (throws if not found) */ - param(topic: string, name: string): bigint { + param(name: string): bigint { // TODO: consider the case that different active EIPs // can change the same parameter - let value = null - if ( - (this._paramsCache as any)[topic] !== undefined && - (this._paramsCache as any)[topic][name] !== undefined - ) { - value = (this._paramsCache as any)[topic][name] + if (!(name in this._paramsCache)) { + throw new Error(`Missing parameter value for ${name}`) } + const value = this._paramsCache[name] return BigInt(value ?? 0) } /** * Returns the parameter corresponding to a hardfork - * @param topic Parameter topic ('gasConfig', 'gasPrices', 'vm', 'pow') - * @param name Parameter name (e.g. 'minGasLimit' for 'gasConfig' topic) + * @param name Parameter name (e.g. 
'minGasLimit') * @param hardfork Hardfork name - * @returns The value requested or `BigInt(0)` if not found + * @returns The value requested (throws if not found) */ - paramByHardfork(topic: string, name: string, hardfork: string | Hardfork): bigint { - let value: bigint | null = null + paramByHardfork(name: string, hardfork: string | Hardfork): bigint { + let value for (const hfChanges of this.HARDFORK_CHANGES) { // EIP-referencing HF config (e.g. for berlin) if ('eips' in hfChanges[1]) { const hfEIPs = hfChanges[1]['eips'] for (const eip of hfEIPs!) { - const valueEIP = this.paramByEIP(topic, name, eip) - value = typeof valueEIP === 'bigint' ? valueEIP : value + const eipParams = this._params[eip] + const eipValue = eipParams?.[name] + if (eipValue !== undefined) { + value = eipValue + } } // Parameter-inlining HF config (e.g. for istanbul) } else { - if ( - (hfChanges[1] as any)[topic] !== undefined && - (hfChanges[1] as any)[topic][name] !== undefined - ) { - value = (hfChanges[1] as any)[topic][name] + const hfValue = hfChanges[1].params?.[name] + if (hfValue !== undefined) { + value = hfValue } } if (hfChanges[0] === hardfork) break } + if (value === undefined) { + throw new Error(`Missing parameter value for ${name}`) + } return BigInt(value ?? 0) } /** * Returns a parameter corresponding to an EIP - * @param topic Parameter topic ('gasConfig', 'gasPrices', 'vm', 'pow') * @param name Parameter name (e.g. 'minGasLimit' for 'gasConfig' topic) * @param eip Number of the EIP - * @returns The value requested or `undefined` if not found + * @returns The value requested (throws if not found) */ - paramByEIP(topic: string, name: string, eip: number): bigint | undefined { - if (!(eip in EIPs)) { + paramByEIP(name: string, eip: number): bigint | undefined { + if (!(eip in eipsDict)) { throw new Error(`${eip} not supported`) } - const eipParams = (EIPs as any)[eip] - if (!(topic in eipParams)) { - return undefined + const eipParams = this._params[eip] + if (eipParams?.[name] === undefined) { + throw new Error(`Missing parameter value for ${name}`) } - if (eipParams[topic][name] === undefined) { - return undefined - } - const value = eipParams[topic][name] - return BigInt(value) + const value = eipParams![name] + return BigInt(value ?? 0) } /** * Returns a parameter for the hardfork active on block number or * optional provided total difficulty (Merge HF) - * @param topic Parameter topic * @param name Parameter name * @param blockNumber Block number - * @param td Total difficulty * * @returns The value requested or `BigInt(0)` if not found */ - paramByBlock( - topic: string, - name: string, - blockNumber: BigIntLike, - td?: BigIntLike, - timestamp?: BigIntLike - ): bigint { - const hardfork = this.getHardforkBy({ blockNumber, td, timestamp }) - return this.paramByHardfork(topic, name, hardfork) + paramByBlock(name: string, blockNumber: BigIntLike, timestamp?: BigIntLike): bigint { + const hardfork = this.getHardforkBy({ blockNumber, timestamp }) + return this.paramByHardfork(name, hardfork) } /** @@ -618,7 +543,7 @@ export class Common { /** * Returns the scheduled timestamp of the EIP (if scheduled and scheduled by timestamp) * @param eip EIP number - * @returns Scheduled timestamp. If this EIP is unscheduled, or the EIP is scheduled by block number or ttd, then it returns `null`. + * @returns Scheduled timestamp. If this EIP is unscheduled, or the EIP is scheduled by block number, then it returns `null`. 
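The parameter getters lose their `topic` argument entirely: values are looked up by name in a flat cache, and a missing name now throws instead of silently returning `BigInt(0)`. A rough sketch of the new call shapes, reusing the London `common` instance with the EIP-1559 params from the earlier sketch:

```ts
// Previously: common.param('gasConfig', 'initialBaseFee')
const active = common.param('initialBaseFee') // bigint, throws if the name is unknown
const atLondon = common.paramByHardfork('initialBaseFee', Hardfork.London)
const viaEIP = common.paramByEIP('initialBaseFee', 1559)
const atBlock = common.paramByBlock('initialBaseFee', 12965000n) // mainnet London block; no `td` argument anymore
```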
*/ eipTimestamp(eip: number): bigint | null { for (const hfChanges of this.HARDFORK_CHANGES) { @@ -633,20 +558,6 @@ export class Common { return null } - /** - * Returns the hardfork change total difficulty (Merge HF) for hardfork provided or set - * @param hardfork Hardfork name, optional if HF set - * @returns Total difficulty or null if no set - */ - hardforkTTD(hardfork?: string | Hardfork): bigint | null { - hardfork = hardfork ?? this._hardfork - const ttd = this._getHardfork(hardfork)?.['ttd'] - if (ttd === undefined || ttd === null) { - return null - } - return BigInt(ttd) - } - /** * Returns the change block for the next hardfork after the hardfork provided or set * @param hardfork Hardfork name, optional if HF set @@ -739,15 +650,12 @@ export class Common { /** * Returns an eth/64 compliant fork hash (EIP-2124) * @param hardfork Hardfork name, optional if HF set - * @param genesisHash Genesis block hash of the chain, optional if already defined and not needed to be calculated + * @param genesisHash Genesis block hash of the network, optional if already defined and not needed to be calculated */ forkHash(hardfork?: string | Hardfork, genesisHash?: Uint8Array): PrefixedHexString { hardfork = hardfork ?? this._hardfork const data = this._getHardfork(hardfork) - if ( - data === null || - (data?.block === null && data?.timestamp === undefined && data?.ttd === undefined) - ) { + if (data === null || (data?.block === null && data?.timestamp === undefined)) { const msg = 'No fork hash calculation possible for future hardfork' throw new Error(msg) } @@ -780,7 +688,8 @@ export class Common { const blockOrTime = hf.timestamp ?? hf.block if ( (hf.forkHash === null || hf.forkHash === undefined) && - ((blockOrTime !== null && blockOrTime !== undefined) || typeof hf.ttd !== 'undefined') + blockOrTime !== null && + blockOrTime !== undefined ) { hf.forkHash = this.forkHash(hf.name, genesisHash) } @@ -847,14 +756,6 @@ export class Common { return this._chainParams.name } - /** - * Returns the Id of current network - * @returns network Id - */ - networkId(): bigint { - return BigInt(this._chainParams.networkId) - } - /** * Returns the additionally activated EIPs * (by using the `eips` constructor option) diff --git a/packages/common/src/constructors.ts b/packages/common/src/constructors.ts index 4fbb87208d..cf6e51e70c 100644 --- a/packages/common/src/constructors.ts +++ b/packages/common/src/constructors.ts @@ -1,6 +1,6 @@ -import { Common, CustomChain, Hardfork, _getChainParams, parseGethGenesis } from './index.js' +import { Common, parseGethGenesis } from './index.js' -import type { ChainConfig, CustomCommonOpts, GethConfigOpts } from './index.js' +import type { BaseOpts, ChainConfig, GethConfigOpts } from './index.js' /** * Creates a {@link Common} object for a custom chain, based on a standard one. @@ -9,122 +9,44 @@ import type { ChainConfig, CustomCommonOpts, GethConfigOpts } from './index.js' * in a provided {@link chainParamsOrName} dictionary. 
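The hunk below reworks `createCustomCommon()`: the base chain becomes an explicit `ChainConfig` argument, the `CustomChain` presets are removed, and the remaining options move to `BaseOpts`. A sketch of the new call shape, using the chain ID of one of the removed Polygon presets as the custom value (passing `hardfork` through `BaseOpts` is an assumption):

```ts
import { Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common'

// Previously: createCustomCommon(CustomChain.PolygonMainnet) or a name/opts based call
const polygonLike = createCustomCommon({ chainId: 137 }, Mainnet, { hardfork: Hardfork.London })
console.log(polygonLike.chainId()) // 137n
```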
Some usage example: * * ```javascript - * createCustomCommon({chainId: 123}) - * ``` + * import { createCustomCommon, Mainnet } from '@ethereumjs/common' * - * There are also selected supported custom chains which can be initialized by using one of the - * {@link CustomChains} for {@link chainParamsOrName}, e.g.: + * createCustomCommon({chainId: 123}, Mainnet) + * `` * - * ```javascript - * createCustomCommon(CustomChains.MaticMumbai) - * ``` - * - * Note that these supported custom chains only provide some base parameters (usually the chain and - * network ID and a name) and can only be used for selected use cases (e.g. sending a tx with - * the `@ethereumjs/tx` library to a Layer-2 chain). - * - * @param chainParamsOrName Custom parameter dict (`name` will default to `custom-chain`) or string with name of a supported custom chain - * @param opts Custom chain options to set the {@link CustomCommonOpts.baseChain}, selected {@link CustomCommonOpts.hardfork} and others + * @param partialConfig Custom parameter dict + * @param baseChain `ChainConfig` chain configuration taken as a base chain, e.g. `Mainnet` (exported at root level) + * @param opts Custom chain options to set various {@link BaseOpts} */ export function createCustomCommon( - chainParamsOrName: Partial | CustomChain, - opts: CustomCommonOpts = {} + partialConfig: Partial, + baseChain: ChainConfig, + opts: BaseOpts = {}, ): Common { - const baseChain = opts.baseChain ?? 'mainnet' - const standardChainParams = { ..._getChainParams(baseChain) } - standardChainParams['name'] = 'custom-chain' - - if (typeof chainParamsOrName !== 'string') { - return new Common({ - chain: { - ...standardChainParams, - ...chainParamsOrName, - }, - ...opts, - }) - } else { - if (chainParamsOrName === CustomChain.PolygonMainnet) { - return createCustomCommon( - { - name: CustomChain.PolygonMainnet, - chainId: 137, - networkId: 137, - }, - opts - ) - } - if (chainParamsOrName === CustomChain.PolygonMumbai) { - return createCustomCommon( - { - name: CustomChain.PolygonMumbai, - chainId: 80001, - networkId: 80001, - }, - opts - ) - } - if (chainParamsOrName === CustomChain.ArbitrumOne) { - return createCustomCommon( - { - name: CustomChain.ArbitrumOne, - chainId: 42161, - networkId: 42161, - }, - opts - ) - } - if (chainParamsOrName === CustomChain.xDaiChain) { - return createCustomCommon( - { - name: CustomChain.xDaiChain, - chainId: 100, - networkId: 100, - }, - opts - ) - } - - if (chainParamsOrName === CustomChain.OptimisticKovan) { - return createCustomCommon( - { - name: CustomChain.OptimisticKovan, - chainId: 69, - networkId: 69, - }, - opts - ) - } - - if (chainParamsOrName === CustomChain.OptimisticEthereum) { - return createCustomCommon( - { - name: CustomChain.OptimisticEthereum, - chainId: 10, - networkId: 10, - }, - // Optimism has not implemented the London hardfork yet (targeting Q1.22) - { hardfork: Hardfork.Berlin, ...opts } - ) - } - throw new Error(`Custom chain ${chainParamsOrName} not supported`) - } + return new Common({ + chain: { + ...baseChain, + ...partialConfig, + }, + ...opts, + }) } /** * Static method to load and set common from a geth genesis json * @param genesisJson json of geth configuration - * @param { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge } to further configure the common instance + * @param opts additional {@link GethConfigOpts} for configuring common * @returns Common */ export function createCommonFromGethGenesis( genesisJson: any, - { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge, 
customCrypto }: GethConfigOpts + { chain, eips, genesisHash, hardfork, params, customCrypto }: GethConfigOpts, ): Common { - const genesisParams = parseGethGenesis(genesisJson, chain, mergeForkIdPostMerge) + const genesisParams = parseGethGenesis(genesisJson, chain) const common = new Common({ - chain: genesisParams.name ?? 'custom', - customChains: [genesisParams], + chain: genesisParams, eips, + params, hardfork: hardfork ?? genesisParams.hardfork, customCrypto, }) diff --git a/packages/common/src/eips.ts b/packages/common/src/eips.ts index b6016ba84b..091fae9ce4 100644 --- a/packages/common/src/eips.ts +++ b/packages/common/src/eips.ts @@ -1,421 +1,454 @@ import { Hardfork } from './enums.js' -import type { EIPConfig } from './types.js' +import type { EIPsDict } from './types.js' -type EIPsDict = { - [key: string]: EIPConfig -} - -enum Status { - Stagnant = 'stagnant', - Draft = 'draft', - Review = 'review', - Final = 'final', -} - -export const EIPs: EIPsDict = { +export const eipsDict: EIPsDict = { + /** + * Frontier/Chainstart + * (there is no Meta-EIP currently for Frontier, so 1 was chosen) + */ + 1: { + minimumHardfork: Hardfork.Chainstart, + }, + /** + * Homestead HF Meta EIP + */ + 606: { + minimumHardfork: Hardfork.Chainstart, + }, + /** + * TangerineWhistle HF Meta EIP + */ + 608: { + minimumHardfork: Hardfork.Homestead, + }, + /** + * Spurious Dragon HF Meta EIP + */ + 607: { + minimumHardfork: Hardfork.TangerineWhistle, + }, + /** + * Byzantium HF Meta EIP + */ + 609: { + minimumHardfork: Hardfork.SpuriousDragon, + }, + /** + * Constantinople HF Meta EIP + */ + 1013: { + minimumHardfork: Hardfork.Constantinople, + }, + /** + * Petersburg HF Meta EIP + */ + 1716: { + minimumHardfork: Hardfork.Constantinople, + }, + /** + * Istanbul HF Meta EIP + */ + 1679: { + minimumHardfork: Hardfork.Constantinople, + }, + /** + * MuirGlacier HF Meta EIP + */ + 2384: { + minimumHardfork: Hardfork.Istanbul, + }, + /** + * Description : SWAPN, DUPN and EXCHANGE instructions + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-663.md + * Status : Review + */ + 663: { + minimumHardfork: Hardfork.Chainstart, + requiredEIPs: [3540, 5450], + }, + /** + * Description : Transient storage opcodes + * URL : https://eips.ethereum.org/EIPS/eip-1153 + * Status : Final + */ 1153: { - comment: 'Transient storage opcodes', - url: 'https://eips.ethereum.org/EIPS/eip-1153', - status: Status.Review, minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], - gasPrices: { - tstore: 100, // Base fee of the TSTORE opcode - tload: 100, // Base fee of the TLOAD opcode - }, }, + /** + * Description : Fee market change for ETH 1.0 chain + * URL : https://eips.ethereum.org/EIPS/eip-1559 + * Status : Final + */ 1559: { - comment: 'Fee market change for ETH 1.0 chain', - url: 'https://eips.ethereum.org/EIPS/eip-1559', - status: Status.Final, minimumHardfork: Hardfork.Berlin, requiredEIPs: [2930], - gasConfig: { - baseFeeMaxChangeDenominator: 8, // Maximum base fee change denominator - elasticityMultiplier: 2, // Maximum block gas target elasticity - initialBaseFee: 1000000000, // Initial base fee on first EIP1559 block - }, }, + /** + * Description : ModExp gas cost + * URL : https://eips.ethereum.org/EIPS/eip-2565 + * Status : Final + */ 2565: { - comment: 'ModExp gas cost', - url: 'https://eips.ethereum.org/EIPS/eip-2565', - status: Status.Final, minimumHardfork: Hardfork.Byzantium, - requiredEIPs: [], - gasPrices: { - modexpGquaddivisor: 3, // Gquaddivisor from modexp 
precompile for gas calculation - }, }, + /** + * Description : BLS12-381 precompiles + * URL : https://eips.ethereum.org/EIPS/eip-2537 + * Status : Review + */ 2537: { - comment: 'BLS12-381 precompiles', - url: 'https://eips.ethereum.org/EIPS/eip-2537', - status: 'Draft', minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], - gasConfig: {}, - gasPrices: { - Bls12381G1AddGas: 500, // Gas cost of a single BLS12-381 G1 addition precompile-call - Bls12381G1MulGas: 12000, // Gas cost of a single BLS12-381 G1 multiplication precompile-call - Bls12381G2AddGas: 800, // Gas cost of a single BLS12-381 G2 addition precompile-call - Bls12381G2MulGas: 45000, // Gas cost of a single BLS12-381 G2 multiplication precompile-call - Bls12381PairingBaseGas: 65000, // Base gas cost of BLS12-381 pairing check - Bls12381PairingPerPairGas: 43000, // Per-pair gas cost of BLS12-381 pairing check - Bls12381MapG1Gas: 5500, // Gas cost of BLS12-381 map field element to G1 - Bls12381MapG2Gas: 75000, // Gas cost of BLS12-381 map field element to G2 - }, - vm: {}, - pow: {}, }, + /** + * Description : Typed Transaction Envelope + * URL : https://eips.ethereum.org/EIPS/eip-2718 + * Status : Final + */ 2718: { - comment: 'Typed Transaction Envelope', - url: 'https://eips.ethereum.org/EIPS/eip-2718', - status: Status.Final, minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], }, + /** + * Description : Gas cost increases for state access opcodes + * URL : https://eips.ethereum.org/EIPS/eip-2929 + * Status : Final + */ 2929: { - comment: 'Gas cost increases for state access opcodes', - url: 'https://eips.ethereum.org/EIPS/eip-2929', - status: Status.Final, minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], - gasPrices: { - coldsload: 2100, // Gas cost of the first read of storage from a given location (per transaction) - coldaccountaccess: 2600, // Gas cost of the first read of a given address (per transaction) - warmstorageread: 100, // Gas cost of reading storage locations which have already loaded 'cold' - sstoreCleanGasEIP2200: 2900, // Once per SSTORE operation from clean non-zero to something else - sstoreNoopGasEIP2200: 100, // Once per SSTORE operation if the value doesn't change - sstoreDirtyGasEIP2200: 100, // Once per SSTORE operation if a dirty value is changed - sstoreInitRefundEIP2200: 19900, // Once per SSTORE operation for resetting to the original zero value - sstoreCleanRefundEIP2200: 4900, // Once per SSTORE operation for resetting to the original non-zero value - call: 0, // Base fee of the CALL opcode - callcode: 0, // Base fee of the CALLCODE opcode - delegatecall: 0, // Base fee of the DELEGATECALL opcode - staticcall: 0, // Base fee of the STATICCALL opcode - balance: 0, // Base fee of the BALANCE opcode - extcodesize: 0, // Base fee of the EXTCODESIZE opcode - extcodecopy: 0, // Base fee of the EXTCODECOPY opcode - extcodehash: 0, // Base fee of the EXTCODEHASH opcode - sload: 0, // Base fee of the SLOAD opcode - sstore: 0, // Base fee of the SSTORE opcode - }, }, + /** + * Description : Optional access lists + * URL : https://eips.ethereum.org/EIPS/eip-2930 + * Status : Final + */ 2930: { - comment: 'Optional access lists', - url: 'https://eips.ethereum.org/EIPS/eip-2930', - status: Status.Final, minimumHardfork: Hardfork.Istanbul, requiredEIPs: [2718, 2929], - gasPrices: { - accessListStorageKeyCost: 1900, // Gas cost per storage key in an Access List transaction - accessListAddressCost: 2400, // Gas cost per storage key in an Access List transaction - }, }, + /** + * Description : 
Save historical block hashes in state (Verkle related usage, UNSTABLE) + * URL : https://github.com/gballet/EIPs/pull/3/commits/2e9ac09a142b0d9fb4db0b8d4609f92e5d9990c5 + * Status : Draft + */ 2935: { - comment: 'Save historical block hashes in state (Verkle related usage, UNSTABLE)', - url: 'https://github.com/gballet/EIPs/pull/3/commits/2e9ac09a142b0d9fb4db0b8d4609f92e5d9990c5', - status: Status.Draft, minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], - vm: { - historyStorageAddress: BigInt('0x0aae40965e6800cd9b1f4b05ff21581047e3f91e'), // The address where the historical blockhashes are stored - historyServeWindow: BigInt(8192), // The amount of blocks to be served by the historical blockhash contract - }, - }, - 3074: { - comment: 'AUTH and AUTHCALL opcodes', - url: 'https://github.com/ethereum/EIPs/commit/eca4416ff3c025fcb6ec8cd4eac481e74e108481', - status: Status.Review, - minimumHardfork: Hardfork.London, - requiredEIPs: [], - gasPrices: { - auth: 3100, // Gas cost of the AUTH opcode - authcall: 0, // Gas cost of the AUTHCALL opcode - authcallValueTransfer: 6700, // Paid for CALL when the value transfer is non-zero - }, }, + /** + * Description : BASEFEE opcode + * URL : https://eips.ethereum.org/EIPS/eip-3198 + * Status : Final + */ 3198: { - comment: 'BASEFEE opcode', - url: 'https://eips.ethereum.org/EIPS/eip-3198', - status: Status.Final, minimumHardfork: Hardfork.London, - requiredEIPs: [], - gasPrices: { - basefee: 2, // Gas cost of the BASEFEE opcode - }, }, + /** + * Description : Reduction in refunds + * URL : https://eips.ethereum.org/EIPS/eip-3529 + * Status : Final + */ 3529: { - comment: 'Reduction in refunds', - url: 'https://eips.ethereum.org/EIPS/eip-3529', - status: Status.Final, minimumHardfork: Hardfork.Berlin, requiredEIPs: [2929], - gasConfig: { - maxRefundQuotient: 5, // Maximum refund quotient; max tx refund is min(tx.gasUsed/maxRefundQuotient, tx.gasRefund) - }, - gasPrices: { - selfdestructRefund: 0, // Refunded following a selfdestruct operation - sstoreClearRefundEIP2200: 4800, // Once per SSTORE operation for clearing an originally existing storage slot - }, }, + /** + * Description : EVM Object Format (EOF) v1 + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-3540.md + * Status : Review + */ 3540: { - comment: 'EVM Object Format (EOF) v1', - url: 'https://eips.ethereum.org/EIPS/eip-3540', - status: Status.Review, minimumHardfork: Hardfork.London, - requiredEIPs: [3541], + requiredEIPs: [3541, 3860], }, + /** + * Description : Reject new contracts starting with the 0xEF byte + * URL : https://eips.ethereum.org/EIPS/eip-3541 + * Status : Final + */ 3541: { - comment: 'Reject new contracts starting with the 0xEF byte', - url: 'https://eips.ethereum.org/EIPS/eip-3541', - status: Status.Final, minimumHardfork: Hardfork.Berlin, - requiredEIPs: [], }, + /** + * Description : Difficulty Bomb Delay to December 1st 2021 + * URL : https://eips.ethereum.org/EIPS/eip-3554 + * Status : Final + */ 3554: { - comment: 'Difficulty Bomb Delay to December 1st 2021', - url: 'https://eips.ethereum.org/EIPS/eip-3554', - status: Status.Final, minimumHardfork: Hardfork.MuirGlacier, - requiredEIPs: [], - pow: { - difficultyBombDelay: 9500000, // the amount of blocks to delay the difficulty bomb with - }, }, + /** + * Description : Reject transactions from senders with deployed code + * URL : https://eips.ethereum.org/EIPS/eip-3607 + * Status : Final + */ 3607: { - comment: 'Reject transactions from senders with deployed 
code', - url: 'https://eips.ethereum.org/EIPS/eip-3607', - status: Status.Final, minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], }, + /** + * Description : Warm COINBASE + * URL : https://eips.ethereum.org/EIPS/eip-3651 + * Status : Final + */ 3651: { - comment: 'Warm COINBASE', - url: 'https://eips.ethereum.org/EIPS/eip-3651', - status: Status.Review, minimumHardfork: Hardfork.London, requiredEIPs: [2929], }, + /** + * Description : EOF - Code Validation + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-3670.md + * Status : Review + */ 3670: { - comment: 'EOF - Code Validation', - url: 'https://eips.ethereum.org/EIPS/eip-3670', - status: 'Review', minimumHardfork: Hardfork.London, requiredEIPs: [3540], - gasConfig: {}, - gasPrices: {}, - vm: {}, - pow: {}, }, + /** + * Description : Upgrade consensus to Proof-of-Stake + * URL : https://eips.ethereum.org/EIPS/eip-3675 + * Status : Final + */ 3675: { - comment: 'Upgrade consensus to Proof-of-Stake', - url: 'https://eips.ethereum.org/EIPS/eip-3675', - status: Status.Final, minimumHardfork: Hardfork.London, - requiredEIPs: [], }, + /** + * Description : PUSH0 instruction + * URL : https://eips.ethereum.org/EIPS/eip-3855 + * Status : Final + */ 3855: { - comment: 'PUSH0 instruction', - url: 'https://eips.ethereum.org/EIPS/eip-3855', - status: Status.Review, minimumHardfork: Hardfork.Chainstart, - requiredEIPs: [], - gasPrices: { - push0: 2, // Base fee of the PUSH0 opcode - }, }, + /** + * Description : Limit and meter initcode + * URL : https://eips.ethereum.org/EIPS/eip-3860 + * Status : Final + */ 3860: { - comment: 'Limit and meter initcode', - url: 'https://eips.ethereum.org/EIPS/eip-3860', - status: Status.Review, minimumHardfork: Hardfork.SpuriousDragon, - requiredEIPs: [], - gasPrices: { - initCodeWordCost: 2, // Gas to pay for each word (32 bytes) of initcode when creating a contract - }, - vm: { - maxInitCodeSize: 49152, // Maximum length of initialization code when creating a contract - }, }, + /** + * Description : EOF - Static relative jumps + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-4200.md + * Status : Review + */ + 4200: { + minimumHardfork: Hardfork.London, + requiredEIPs: [3540, 3670], + }, + /** + * Description : Difficulty Bomb Delay to June 2022 + * URL : https://eips.ethereum.org/EIPS/eip-4345 + * Status : Final + */ 4345: { - comment: 'Difficulty Bomb Delay to June 2022', - url: 'https://eips.ethereum.org/EIPS/eip-4345', - status: Status.Final, minimumHardfork: Hardfork.London, - requiredEIPs: [], - pow: { - difficultyBombDelay: 10700000, // the amount of blocks to delay the difficulty bomb with - }, }, + /** + * Description : Supplant DIFFICULTY opcode with PREVRANDAO + * URL : https://eips.ethereum.org/EIPS/eip-4399 + * Status : Final + */ 4399: { - comment: 'Supplant DIFFICULTY opcode with PREVRANDAO', - url: 'https://eips.ethereum.org/EIPS/eip-4399', - status: Status.Review, minimumHardfork: Hardfork.London, - requiredEIPs: [], - gasPrices: { - prevrandao: 2, // Base fee of the PREVRANDAO opcode (previously DIFFICULTY) - }, }, + /** + * Description : EOF - Functions + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-4750.md + * Status : Review + */ + 4750: { + minimumHardfork: Hardfork.London, + requiredEIPs: [3540, 3670, 5450], + }, + /** + * Description : Beacon block root in the EVM + * URL : https://eips.ethereum.org/EIPS/eip-4788 + * Status : Final + */ 
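With comments, URLs, status fields and all gas/vm values stripped out, `eipsDict` now only encodes activation constraints (`minimumHardfork` and, where applicable, `requiredEIPs`), which `setEIPs()` enforces as shown earlier in this diff. A small illustrative sketch (the chain and hardfork choices are assumptions, not part of this change):

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'

// EIP-2537 only declares a minimumHardfork, so it can be activated on its own
const withBls = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai, eips: [2537] })
console.log(withBls.isActivatedEIP(2537)) // true

// EIP-663 lists requiredEIPs [3540, 5450] above, so activating it alone throws
// new Common({ chain: Mainnet, hardfork: Hardfork.Cancun, eips: [663] }) // -> Error
```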
4788: { - comment: 'Beacon block root in the EVM', - url: 'https://eips.ethereum.org/EIPS/eip-4788', - status: Status.Draft, minimumHardfork: Hardfork.Cancun, - requiredEIPs: [], - gasPrices: {}, - vm: { - historicalRootsLength: 8191, // The modulo parameter of the beaconroot ring buffer in the beaconroot statefull precompile - }, }, + /** + * Description : Shard Blob Transactions + * URL : https://eips.ethereum.org/EIPS/eip-4844 + * Status : Final + */ 4844: { - comment: 'Shard Blob Transactions', - url: 'https://eips.ethereum.org/EIPS/eip-4844', - status: Status.Draft, minimumHardfork: Hardfork.Paris, requiredEIPs: [1559, 2718, 2930, 4895], - gasConfig: { - blobGasPerBlob: 131072, // The base fee for blob gas per blob - targetBlobGasPerBlock: 393216, // The target blob gas consumed per block - maxblobGasPerBlock: 786432, // The max blob gas allowable per block - blobGasPriceUpdateFraction: 3338477, // The denominator used in the exponential when calculating a blob gas price - }, - gasPrices: { - simpleGasPerBlob: 12000, // The basic gas fee for each blob - minBlobGasPrice: 1, // The minimum fee per blob gas - kzgPointEvaluationGasPrecompilePrice: 50000, // The fee associated with the point evaluation precompile - blobhash: 3, // Base fee of the BLOBHASH opcode - }, - sharding: { - blobCommitmentVersionKzg: 1, // The number indicated a versioned hash is a KZG commitment - fieldElementsPerBlob: 4096, // The number of field elements allowed per blob - }, }, + /** + * Description : Beacon chain push withdrawals as operations + * URL : https://eips.ethereum.org/EIPS/eip-4895 + * Status : Final + */ 4895: { - comment: 'Beacon chain push withdrawals as operations', - url: 'https://eips.ethereum.org/EIPS/eip-4895', - status: Status.Review, minimumHardfork: Hardfork.Paris, - requiredEIPs: [], }, + /** + * Description : Delaying Difficulty Bomb to mid-September 2022 + * URL : https://eips.ethereum.org/EIPS/eip-5133 + * Status : Final + */ 5133: { - comment: 'Delaying Difficulty Bomb to mid-September 2022', - url: 'https://eips.ethereum.org/EIPS/eip-5133', - status: Status.Draft, minimumHardfork: Hardfork.GrayGlacier, - requiredEIPs: [], - pow: { - difficultyBombDelay: 11400000, // the amount of blocks to delay the difficulty bomb with - }, }, + /** + * Description : EOF - Stack Validation + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-5450.md + * Status : Review + */ + 5450: { + minimumHardfork: Hardfork.London, + requiredEIPs: [3540, 3670, 4200, 4750], + }, + /** + * Description : MCOPY - Memory copying instruction + * URL : https://eips.ethereum.org/EIPS/eip-5656 + * Status : Final + */ 5656: { - comment: 'MCOPY - Memory copying instruction', - url: 'https://eips.ethereum.org/EIPS/eip-5656', - status: Status.Draft, minimumHardfork: Hardfork.Shanghai, - requiredEIPs: [], - gasPrices: { - mcopy: 3, // Base fee of the MCOPY opcode - }, }, + /** + * Description : Supply validator deposits on chain + * URL : https://eips.ethereum.org/EIPS/eip-6110 + * Status : Review + */ 6110: { - comment: 'Supply validator deposits on chain', - url: 'https://eips.ethereum.org/EIPS/eip-6110', - status: Status.Draft, minimumHardfork: Hardfork.Cancun, requiredEIPs: [7685], }, + /** + * Description : EOF - JUMPF and non-returning functions + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-6206.md + * Status : Review + */ + 6206: { + minimumHardfork: Hardfork.London, + requiredEIPs: [4750, 5450], + }, + /** + * Description 
: SELFDESTRUCT only in same transaction + * URL : https://eips.ethereum.org/EIPS/eip-6780 + * Status : Final + */ 6780: { - comment: 'SELFDESTRUCT only in same transaction', - url: 'https://eips.ethereum.org/EIPS/eip-6780', - status: Status.Draft, minimumHardfork: Hardfork.London, - requiredEIPs: [], }, + /** + * Description : Ethereum state using a unified verkle tree (experimental) + * URL : https://github.com/ethereum/EIPs/pull/6800 + * Status : Draft + */ 6800: { - comment: 'Ethereum state using a unified verkle tree (experimental)', - url: 'https://github.com/ethereum/EIPs/pull/6800', - status: Status.Draft, minimumHardfork: Hardfork.London, - requiredEIPs: [], - gasPrices: { - create: 1000, // Base fee of the CREATE opcode - coldsload: 0, // Gas cost of the first read of storage from a given location (per transaction) - }, - vm: { - // kaustinen 6 current uses this address, however this will be updated to correct address - // in next iteration - historyStorageAddress: BigInt('0xfffffffffffffffffffffffffffffffffffffffe'), // The address where the historical blockhashes are stored - }, }, + /** + * Description : Execution layer triggerable withdrawals (experimental) + * URL : https://github.com/ethereum/EIPs/blob/3b5fcad6b35782f8aaeba7d4ac26004e8fbd720f/EIPS/eip-7002.md + * Status : Review + */ 7002: { - comment: 'Execution layer triggerable withdrawals (experimental)', - url: 'https://github.com/ethereum/EIPs/blob/3b5fcad6b35782f8aaeba7d4ac26004e8fbd720f/EIPS/eip-7002.md', - status: Status.Draft, minimumHardfork: Hardfork.Paris, requiredEIPs: [7685], - vm: { - withdrawalRequestType: BigInt(0x01), // The withdrawal request type for EIP-7685 - excessWithdrawalsRequestStorageSlot: BigInt(0), // The storage slot of the excess withdrawals - withdrawalsRequestCountStorage: BigInt(1), // The storage slot of the withdrawal request count - withdrawalsRequestQueueHeadStorageSlot: BigInt(2), // The storage slot of the withdrawal request head of the queue - withdrawalsRequestTailHeadStorageSlot: BigInt(3), // The storage slot of the withdrawal request tail of the queue - withdrawalsRequestQueueStorageOffset: BigInt(4), // The storage slot of the withdrawal request queue offset - maxWithdrawalRequestsPerBlock: BigInt(16), // The max withdrawal requests per block - targetWithdrawalRequestsPerBlock: BigInt(2), // The target withdrawal requests per block - minWithdrawalRequestFee: BigInt(1), // The minimum withdrawal request fee (in wei) - withdrawalRequestFeeUpdateFraction: BigInt(17), // The withdrawal request fee update fraction (used in the fake exponential) - systemAddress: BigInt('0xfffffffffffffffffffffffffffffffffffffffe'), // The system address to perform operations on the withdrawal requests predeploy address - withdrawalRequestPredeployAddress: BigInt('0x00A3ca265EBcb825B45F985A16CEFB49958cE017'), // Address of the validator excess address - }, }, + /** + * Description : Revamped CALL instructions + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-7069.md + * Status : Review + */ + 7069: { + minimumHardfork: Hardfork.Berlin, + /* Note: per EIP these are the additionally required EIPs: + EIP 150 - This is the entire Tangerine Whistle hardfork + EIP 211 - (RETURNDATASIZE / RETURNDATACOPY) - Included in Byzantium + EIP 214 - (STATICCALL) - Included in Byzantium + */ + requiredEIPs: [2929], + }, + /** + * Description : Increase the MAX_EFFECTIVE_BALANCE -> Execution layer triggered consolidations (experimental) + * URL : 
https://eips.ethereum.org/EIPS/eip-7251 + * Status : Draft + */ 7251: { - comment: 'Execution layer triggered consolidations (experimental)', - url: 'https://eips.ethereum.org/EIPS/eip-7251', - status: Status.Draft, minimumHardfork: Hardfork.Paris, requiredEIPs: [7685], - vm: { - consolidationRequestType: BigInt(0x02), // The withdrawal request type for EIP-7685 - systemAddress: BigInt('0xfffffffffffffffffffffffffffffffffffffffe'), // The system address to perform operations on the consolidation requests predeploy address - consolidationRequestPredeployAddress: BigInt('0x00b42dbF2194e931E80326D950320f7d9Dbeac02'), // Address of the consolidations contract - }, }, + /** + * Description : EOF - Data section access instructions + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-7480.md + * Status : Review + */ + 7480: { + minimumHardfork: Hardfork.London, + requiredEIPs: [3540, 3670], + }, + /** + * Description : BLOBBASEFEE opcode + * URL : https://eips.ethereum.org/EIPS/eip-7516 + * Status : Final + */ 7516: { - comment: 'BLOBBASEFEE opcode', - url: 'https://eips.ethereum.org/EIPS/eip-7516', - status: Status.Draft, minimumHardfork: Hardfork.Paris, requiredEIPs: [4844], - gasPrices: { - blobbasefee: 2, // Gas cost of the BLOBBASEFEE opcode - }, }, + /** + * Description : EOF Contract Creation + * URL : https://github.com/ethereum/EIPs/blob/dd32a34cfe4473bce143641bfffe4fd67e1987ab/EIPS/eip-7620.md + * Status : Review + */ + 7620: { + minimumHardfork: Hardfork.London, + /* Note: per EIP these are the additionally required EIPs: + EIP 170 - (Max contract size) - Included in Spurious Dragon + */ + requiredEIPs: [3540, 3541, 3670], + }, + /** + * Description : General purpose execution layer requests + * URL : https://eips.ethereum.org/EIPS/eip-7685 + * Status : Review + */ 7685: { - comment: 'General purpose execution layer requests', - url: 'https://eips.ethereum.org/EIPS/eip-7685', - status: Status.Draft, // TODO: Set correct minimum hardfork minimumHardfork: Hardfork.Cancun, requiredEIPs: [3675], - gasPrices: {}, }, + /** + * Description : EVM Object Format (EOFv1) Meta + * URL : https://github.com/ethereum/EIPs/blob/4153e95befd0264082de3c4c2fe3a85cc74d3152/EIPS/eip-7692.md + * Status : Draft + */ + 7692: { + minimumHardfork: Hardfork.Cancun, + requiredEIPs: [663, 3540, 3670, 4200, 4750, 5450, 6206, 7069, 7480, 7620, 7698], + }, + /** + * Description : EOF - Creation transaction + * URL : https://github.com/ethereum/EIPs/blob/bd421962b4e241aa2b00a85d9cf4e57770bdb954/EIPS/eip-7698.md + * Status : Draft + */ + 7698: { + minimumHardfork: Hardfork.London, + requiredEIPs: [3540, 7620], + }, + /** + * Description : Set EOA account code for one transaction + * URL : https://github.com/ethereum/EIPs/blob/62419ca3f45375db00b04a368ea37c0bfb05386a/EIPS/eip-7702.md + * Status : Review + */ 7702: { - comment: 'Set EOA account code for one transaction', - url: 'https://github.com/ethereum/EIPs/blob/62419ca3f45375db00b04a368ea37c0bfb05386a/EIPS/eip-7702.md', - status: Status.Review, // TODO: Set correct minimum hardfork minimumHardfork: Hardfork.Cancun, requiredEIPs: [2718, 2929, 2930], - gasPrices: { - perAuthBaseCost: 2500, // Gas cost of each authority item - }, }, + /** + * Description : Use historical block hashes saved in state for BLOCKHASH + * URL : https://eips.ethereum.org/EIPS/eip-7709 + * Status : Final + */ 7709: { - comment: 'Use historical block hashes saved in state for BLOCKHASH', - url: 'https://eips.ethereum.org/EIPS/eip-7709', - status: 
Status.Draft, minimumHardfork: Hardfork.Chainstart, requiredEIPs: [2935], }, diff --git a/packages/common/src/enums.ts b/packages/common/src/enums.ts index 2c2f9553b2..d4fbf08b0a 100644 --- a/packages/common/src/enums.ts +++ b/packages/common/src/enums.ts @@ -85,47 +85,3 @@ export enum ConsensusAlgorithm { Clique = 'clique', Casper = 'casper', } - -export enum CustomChain { - /** - * Polygon (Matic) Mainnet - * - * - [Documentation](https://docs.matic.network/docs/develop/network-details/network) - */ - PolygonMainnet = 'polygon-mainnet', - - /** - * Polygon (Matic) Mumbai Testnet - * - * - [Documentation](https://docs.matic.network/docs/develop/network-details/network) - */ - PolygonMumbai = 'polygon-mumbai', - - /** - * Arbitrum One - mainnet for Arbitrum roll-up - * - * - [Documentation](https://developer.offchainlabs.com/public-chains) - */ - ArbitrumOne = 'arbitrum-one', - - /** - * xDai EVM sidechain with a native stable token - * - * - [Documentation](https://www.xdaichain.com/) - */ - xDaiChain = 'x-dai-chain', - - /** - * Optimistic Kovan - testnet for Optimism roll-up - * - * - [Documentation](https://community.optimism.io/docs/developers/tutorials.html) - */ - OptimisticKovan = 'optimistic-kovan', - - /** - * Optimistic Ethereum - mainnet for Optimism roll-up - * - * - [Documentation](https://community.optimism.io/docs/developers/tutorials.html) - */ - OptimisticEthereum = 'optimistic-ethereum', -} diff --git a/packages/common/src/hardforks.ts b/packages/common/src/hardforks.ts index 43d509f122..527ae642a1 100644 --- a/packages/common/src/hardforks.ts +++ b/packages/common/src/hardforks.ts @@ -1,306 +1,124 @@ import type { HardforksDict } from './types.js' -export enum Status { - Draft = 'draft', - Review = 'review', - Final = 'final', -} - -export const hardforks: HardforksDict = { +export const hardforksDict: HardforksDict = { + /** + * Description: Start of the Ethereum main chain + * URL : - + * Status : Final + */ chainstart: { - name: 'chainstart', - comment: 'Start of the Ethereum main chain', - url: '', - status: Status.Final, - gasConfig: { - minGasLimit: 5000, // Minimum the gas limit may ever be - gasLimitBoundDivisor: 1024, // The bound divisor of the gas limit, used in update calculations - maxRefundQuotient: 2, // Maximum refund quotient; max tx refund is min(tx.gasUsed/maxRefundQuotient, tx.gasRefund) - }, - gasPrices: { - base: 2, // Gas base cost, used e.g. for ChainID opcode (Istanbul) - exp: 10, // Base fee of the EXP opcode - expByte: 10, // Times ceil(log256(exponent)) for the EXP instruction - keccak256: 30, // Base fee of the SHA3 opcode - keccak256Word: 6, // Once per word of the SHA3 operation's data - sload: 50, // Base fee of the SLOAD opcode - sstoreSet: 20000, // Once per SSTORE operation if the zeroness changes from zero - sstoreReset: 5000, // Once per SSTORE operation if the zeroness does not change from zero - sstoreRefund: 15000, // Once per SSTORE operation if the zeroness changes to zero - jumpdest: 1, // Base fee of the JUMPDEST opcode - log: 375, // Base fee of the LOG opcode - logData: 8, // Per byte in a LOG* operation's data - logTopic: 375, // Multiplied by the * of the LOG*, per LOG transaction. e.g. 
LOG0 incurs 0 * c_txLogTopicGas, LOG4 incurs 4 * c_txLogTopicGas - create: 32000, // Base fee of the CREATE opcode - call: 40, // Base fee of the CALL opcode - callStipend: 2300, // Free gas given at beginning of call - callValueTransfer: 9000, // Paid for CALL when the value transfor is non-zero - callNewAccount: 25000, // Paid for CALL when the destination address didn't exist prior - selfdestructRefund: 24000, // Refunded following a selfdestruct operation - memory: 3, // Times the address of the (highest referenced byte in memory + 1). NOTE: referencing happens on read, write and in instructions such as RETURN and CALL - quadCoeffDiv: 512, // Divisor for the quadratic particle of the memory cost equation - createData: 200, // - tx: 21000, // Per transaction. NOTE: Not payable on data of calls between transactions - txCreation: 32000, // The cost of creating a contract via tx - txDataZero: 4, // Per byte of data attached to a transaction that equals zero. NOTE: Not payable on data of calls between transactions - txDataNonZero: 68, // Per byte of data attached to a transaction that is not equal to zero. NOTE: Not payable on data of calls between transactions - copy: 3, // Multiplied by the number of 32-byte words that are copied (round up) for any *COPY operation and added - ecRecover: 3000, - sha256: 60, - sha256Word: 12, - ripemd160: 600, - ripemd160Word: 120, - identity: 15, - identityWord: 3, - stop: 0, // Base fee of the STOP opcode - add: 3, // Base fee of the ADD opcode - mul: 5, // Base fee of the MUL opcode - sub: 3, // Base fee of the SUB opcode - div: 5, // Base fee of the DIV opcode - sdiv: 5, // Base fee of the SDIV opcode - mod: 5, // Base fee of the MOD opcode - smod: 5, // Base fee of the SMOD opcode - addmod: 8, // Base fee of the ADDMOD opcode - mulmod: 8, // Base fee of the MULMOD opcode - signextend: 5, // Base fee of the SIGNEXTEND opcode - lt: 3, // Base fee of the LT opcode - gt: 3, // Base fee of the GT opcode - slt: 3, // Base fee of the SLT opcode - sgt: 3, // Base fee of the SGT opcode - eq: 3, // Base fee of the EQ opcode - iszero: 3, // Base fee of the ISZERO opcode - and: 3, // Base fee of the AND opcode - or: 3, // Base fee of the OR opcode - xor: 3, // Base fee of the XOR opcode - not: 3, // Base fee of the NOT opcode - byte: 3, // Base fee of the BYTE opcode - address: 2, // Base fee of the ADDRESS opcode - balance: 20, // Base fee of the BALANCE opcode - origin: 2, // Base fee of the ORIGIN opcode - caller: 2, // Base fee of the CALLER opcode - callvalue: 2, // Base fee of the CALLVALUE opcode - calldataload: 3, // Base fee of the CALLDATALOAD opcode - calldatasize: 2, // Base fee of the CALLDATASIZE opcode - calldatacopy: 3, // Base fee of the CALLDATACOPY opcode - codesize: 2, // Base fee of the CODESIZE opcode - codecopy: 3, // Base fee of the CODECOPY opcode - gasprice: 2, // Base fee of the GASPRICE opcode - extcodesize: 20, // Base fee of the EXTCODESIZE opcode - extcodecopy: 20, // Base fee of the EXTCODECOPY opcode - blockhash: 20, // Base fee of the BLOCKHASH opcode - coinbase: 2, // Base fee of the COINBASE opcode - timestamp: 2, // Base fee of the TIMESTAMP opcode - number: 2, // Base fee of the NUMBER opcode - difficulty: 2, // Base fee of the DIFFICULTY opcode - gaslimit: 2, // Base fee of the GASLIMIT opcode - pop: 2, // Base fee of the POP opcode - mload: 3, // Base fee of the MLOAD opcode - mstore: 3, // Base fee of the MSTORE opcode - mstore8: 3, // Base fee of the MSTORE8 opcode - sstore: 0, // Base fee of the SSTORE opcode - jump: 8, 
// Base fee of the JUMP opcode - jumpi: 10, // Base fee of the JUMPI opcode - pc: 2, // Base fee of the PC opcode - msize: 2, // Base fee of the MSIZE opcode - gas: 2, // Base fee of the GAS opcode - push: 3, // Base fee of the PUSH opcode - dup: 3, // Base fee of the DUP opcode - swap: 3, // Base fee of the SWAP opcode - callcode: 40, // Base fee of the CALLCODE opcode - return: 0, // Base fee of the RETURN opcode - invalid: 0, // Base fee of the INVALID opcode - selfdestruct: 0, // Base fee of the SELFDESTRUCT opcode - }, - vm: { - stackLimit: 1024, // Maximum size of VM stack allowed - callCreateDepth: 1024, // Maximum depth of call/create stack - maxExtraDataSize: 32, // Maximum size extra data may be after Genesis - }, - pow: { - minimumDifficulty: 131072, // The minimum that the difficulty may ever be - difficultyBoundDivisor: 2048, // The bound divisor of the difficulty, used in the update calculations - durationLimit: 13, // The decision boundary on the blocktime duration used to determine whether difficulty should go up or not - epochDuration: 30000, // Duration between proof-of-work epochs - timebombPeriod: 100000, // Exponential difficulty timebomb period - minerReward: BigInt('5000000000000000000'), // the amount a miner get rewarded for mining a block - difficultyBombDelay: 0, // the amount of blocks to delay the difficulty bomb with - }, + eips: [1], }, + /** + * Description: Homestead hardfork with protocol and network changes + * URL : https://eips.ethereum.org/EIPS/eip-606 + * Status : Final + */ homestead: { - name: 'homestead', - comment: 'Homestead hardfork with protocol and network changes', - url: 'https://eips.ethereum.org/EIPS/eip-606', - status: Status.Final, - gasPrices: { - delegatecall: 40, // Base fee of the DELEGATECALL opcode - }, + eips: [606], }, + /** + * Description: DAO rescue hardfork + * URL : https://eips.ethereum.org/EIPS/eip-779 + * Status : Final + */ dao: { - name: 'dao', - comment: 'DAO rescue hardfork', - url: 'https://eips.ethereum.org/EIPS/eip-779', - status: Status.Final, + eips: [], }, + /** + * Description: Hardfork with gas cost changes for IO-heavy operations + * URL : https://eips.ethereum.org/EIPS/eip-608 + * Status : Final + */ tangerineWhistle: { - name: 'tangerineWhistle', - comment: 'Hardfork with gas cost changes for IO-heavy operations', - url: 'https://eips.ethereum.org/EIPS/eip-608', - status: Status.Final, - gasPrices: { - sload: 200, // Once per SLOAD operation - call: 700, // Once per CALL operation & message call transaction - extcodesize: 700, // Base fee of the EXTCODESIZE opcode - extcodecopy: 700, // Base fee of the EXTCODECOPY opcode - balance: 400, // Base fee of the BALANCE opcode - delegatecall: 700, // Base fee of the DELEGATECALL opcode - callcode: 700, // Base fee of the CALLCODE opcode - selfdestruct: 5000, // Base fee of the SELFDESTRUCT opcode - }, + eips: [608], }, + /** + * Description: HF with EIPs for simple replay attack protection, EXP cost increase, state trie clearing, contract code size limit + * URL : https://eips.ethereum.org/EIPS/eip-607 + * Status : Final + */ spuriousDragon: { - name: 'spuriousDragon', - comment: - 'HF with EIPs for simple replay attack protection, EXP cost increase, state trie clearing, contract code size limit', - url: 'https://eips.ethereum.org/EIPS/eip-607', - status: Status.Final, - gasPrices: { - expByte: 50, // Times ceil(log256(exponent)) for the EXP instruction - }, - vm: { - maxCodeSize: 24576, // Maximum length of contract code - }, + eips: [607], }, + /** + * 
Description: Hardfork with new precompiles, instructions and other protocol changes + * URL : https://eips.ethereum.org/EIPS/eip-609 + * Status : Final + */ byzantium: { - name: 'byzantium', - comment: 'Hardfork with new precompiles, instructions and other protocol changes', - url: 'https://eips.ethereum.org/EIPS/eip-609', - status: Status.Final, - gasPrices: { - modexpGquaddivisor: 20, // Gquaddivisor from modexp precompile for gas calculation - ecAdd: 500, // Gas costs for curve addition precompile - ecMul: 40000, // Gas costs for curve multiplication precompile - ecPairing: 100000, // Base gas costs for curve pairing precompile - ecPairingWord: 80000, // Gas costs regarding curve pairing precompile input length - revert: 0, // Base fee of the REVERT opcode - staticcall: 700, // Base fee of the STATICCALL opcode - returndatasize: 2, // Base fee of the RETURNDATASIZE opcode - returndatacopy: 3, // Base fee of the RETURNDATACOPY opcode - }, - pow: { - minerReward: BigInt('3000000000000000000'), // the amount a miner get rewarded for mining a block - difficultyBombDelay: 3000000, // the amount of blocks to delay the difficulty bomb with - }, + eips: [609], }, + /** + * Description: Postponed hardfork including EIP-1283 (SSTORE gas metering changes) + * URL : https://eips.ethereum.org/EIPS/eip-1013 + * Status : Final + */ constantinople: { - name: 'constantinople', - comment: 'Postponed hardfork including EIP-1283 (SSTORE gas metering changes)', - url: 'https://eips.ethereum.org/EIPS/eip-1013', - status: Status.Final, - gasPrices: { - netSstoreNoopGas: 200, // Once per SSTORE operation if the value doesn't change - netSstoreInitGas: 20000, // Once per SSTORE operation from clean zero - netSstoreCleanGas: 5000, // Once per SSTORE operation from clean non-zero - netSstoreDirtyGas: 200, // Once per SSTORE operation from dirty - netSstoreClearRefund: 15000, // Once per SSTORE operation for clearing an originally existing storage slot - netSstoreResetRefund: 4800, // Once per SSTORE operation for resetting to the original non-zero value - netSstoreResetClearRefund: 19800, // Once per SSTORE operation for resetting to the original zero value - shl: 3, // Base fee of the SHL opcode - shr: 3, // Base fee of the SHR opcode - sar: 3, // Base fee of the SAR opcode - extcodehash: 400, // Base fee of the EXTCODEHASH opcode - create2: 32000, // Base fee of the CREATE2 opcode - }, - pow: { - minerReward: BigInt('2000000000000000000'), // The amount a miner gets rewarded for mining a block - difficultyBombDelay: 5000000, // the amount of blocks to delay the difficulty bomb with - }, + eips: [1013], }, + /** + * Description: Aka constantinopleFix, removes EIP-1283, activate together with or after constantinople + * URL : https://eips.ethereum.org/EIPS/eip-1716 + * Status : Final + */ petersburg: { - name: 'petersburg', - comment: - 'Aka constantinopleFix, removes EIP-1283, activate together with or after constantinople', - url: 'https://eips.ethereum.org/EIPS/eip-1716', - status: Status.Final, - gasPrices: { - netSstoreNoopGas: null, // Removed along EIP-1283 - netSstoreInitGas: null, // Removed along EIP-1283 - netSstoreCleanGas: null, // Removed along EIP-1283 - netSstoreDirtyGas: null, // Removed along EIP-1283 - netSstoreClearRefund: null, // Removed along EIP-1283 - netSstoreResetRefund: null, // Removed along EIP-1283 - netSstoreResetClearRefund: null, // Removed along EIP-1283 - }, + eips: [1716], }, + /** + * Description: HF targeted for December 2019 following the Constantinople/Petersburg HF + * 
URL : https://eips.ethereum.org/EIPS/eip-1679 + * Status : Final + */ istanbul: { - name: 'istanbul', - comment: 'HF targeted for December 2019 following the Constantinople/Petersburg HF', - url: 'https://eips.ethereum.org/EIPS/eip-1679', - status: Status.Final, - gasConfig: {}, - gasPrices: { - blake2Round: 1, // Gas cost per round for the Blake2 F precompile - ecAdd: 150, // Gas costs for curve addition precompile - ecMul: 6000, // Gas costs for curve multiplication precompile - ecPairing: 45000, // Base gas costs for curve pairing precompile - ecPairingWord: 34000, // Gas costs regarding curve pairing precompile input length - txDataNonZero: 16, // Per byte of data attached to a transaction that is not equal to zero. NOTE: Not payable on data of calls between transactions - sstoreSentryGasEIP2200: 2300, // Minimum gas required to be present for an SSTORE call, not consumed - sstoreNoopGasEIP2200: 800, // Once per SSTORE operation if the value doesn't change - sstoreDirtyGasEIP2200: 800, // Once per SSTORE operation if a dirty value is changed - sstoreInitGasEIP2200: 20000, // Once per SSTORE operation from clean zero to non-zero - sstoreInitRefundEIP2200: 19200, // Once per SSTORE operation for resetting to the original zero value - sstoreCleanGasEIP2200: 5000, // Once per SSTORE operation from clean non-zero to something else - sstoreCleanRefundEIP2200: 4200, // Once per SSTORE operation for resetting to the original non-zero value - sstoreClearRefundEIP2200: 15000, // Once per SSTORE operation for clearing an originally existing storage slot - balance: 700, // Base fee of the BALANCE opcode - extcodehash: 700, // Base fee of the EXTCODEHASH opcode - chainid: 2, // Base fee of the CHAINID opcode - selfbalance: 5, // Base fee of the SELFBALANCE opcode - sload: 800, // Base fee of the SLOAD opcode - }, + eips: [1679], }, + /** + * Description: HF to delay the difficulty bomb + * URL : https://eips.ethereum.org/EIPS/eip-2384 + * Status : Final + */ muirGlacier: { - name: 'muirGlacier', - comment: 'HF to delay the difficulty bomb', - url: 'https://eips.ethereum.org/EIPS/eip-2384', - status: Status.Final, - pow: { - difficultyBombDelay: 9000000, // the amount of blocks to delay the difficulty bomb with - }, + eips: [2384], }, + /** + * Description: HF targeted for July 2020 following the Muir Glacier HF + * URL : https://eips.ethereum.org/EIPS/eip-2070 + * Status : Final + */ berlin: { - name: 'berlin', - comment: 'HF targeted for July 2020 following the Muir Glacier HF', - url: 'https://eips.ethereum.org/EIPS/eip-2070', - status: Status.Final, eips: [2565, 2929, 2718, 2930], }, + /** + * Description: HF targeted for July 2021 following the Berlin fork + * URL : https://github.com/ethereum/eth1.0-specs/blob/master/network-upgrades/mainnet-upgrades/london.md + * Status : Final + */ london: { - name: 'london', - comment: 'HF targeted for July 2021 following the Berlin fork', - url: 'https://github.com/ethereum/eth1.0-specs/blob/master/network-upgrades/mainnet-upgrades/london.md', - status: Status.Final, eips: [1559, 3198, 3529, 3541], }, + /** + * Description: HF to delay the difficulty bomb + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/arrow-glacier.md + * Status : Final + */ arrowGlacier: { - name: 'arrowGlacier', - comment: 'HF to delay the difficulty bomb', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/arrow-glacier.md', - status: Status.Final, eips: [4345], }, + /** + * Description: 
Delaying the difficulty bomb to Mid September 2022 + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/gray-glacier.md + * Status : Final + */ grayGlacier: { - name: 'grayGlacier', - comment: 'Delaying the difficulty bomb to Mid September 2022', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/gray-glacier.md', - status: Status.Final, eips: [5133], }, + /** + * Description: Hardfork to upgrade the consensus mechanism to Proof-of-Stake + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/merge.md + * Status : Final + */ paris: { - name: 'paris', - comment: 'Hardfork to upgrade the consensus mechanism to Proof-of-Stake', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/merge.md', - status: Status.Final, consensus: { type: 'pos', algorithm: 'casper', @@ -308,43 +126,48 @@ export const hardforks: HardforksDict = { }, eips: [3675, 4399], }, + /** + * Description: Pre-merge hardfork to fork off non-upgraded clients + * URL : https://eips.ethereum.org/EIPS/eip-3675 + * Status : Final + */ mergeForkIdTransition: { - name: 'mergeForkIdTransition', - comment: 'Pre-merge hardfork to fork off non-upgraded clients', - url: 'https://eips.ethereum.org/EIPS/eip-3675', - status: Status.Final, eips: [], }, + /** + * Description: Next feature hardfork after the merge hardfork having withdrawals, warm coinbase, push0, limit/meter initcode + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/shanghai.md + * Status : Final + */ shanghai: { - name: 'shanghai', - comment: - 'Next feature hardfork after the merge hardfork having withdrawals, warm coinbase, push0, limit/meter initcode', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/shanghai.md', - status: Status.Final, eips: [3651, 3855, 3860, 4895], }, + /** + * Description: Next feature hardfork after shanghai, includes proto-danksharding EIP 4844 blobs + * (still WIP hence not for production use), transient storage opcodes, parent beacon block root + * availability in EVM, selfdestruct only in same transaction, and blob base fee opcode + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/cancun.md + * Status : Final + */ cancun: { - name: 'cancun', - comment: - 'Next feature hardfork after shanghai, includes proto-danksharding EIP 4844 blobs (still WIP hence not for production use), transient storage opcodes, parent beacon block root availability in EVM, selfdestruct only in same transaction, and blob base fee opcode', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/cancun.md', - status: Status.Final, eips: [1153, 4844, 4788, 5656, 6780, 7516], }, + /** + * Description: Next feature hardfork after cancun, internally used for pectra testing/implementation (incomplete/experimental) + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/prague.md + * Status : Final + */ prague: { - name: 'prague', - comment: - 'Next feature hardfork after cancun, internally used for pectra testing/implementation (incomplete/experimental)', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/prague.md', - status: Status.Draft, - eips: [2537, 2935, 6110, 7002, 7251, 7685, 7702], - }, + // TODO update this 
accordingly to the right devnet setup + //eips: [663, 3540, 3670, 4200, 4750, 5450, 6206, 7069, 7480, 7620, 7692, 7698], // This is EOF-only + eips: [2537, 2935, 6110, 7002, 7251, 7685, 7702], // This is current prague without EOF + }, + /** + * Description: Next feature hardfork after prague, internally used for verkle testing/implementation (incomplete/experimental) + * URL : https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/osaka.md + * Status : Final + */ osaka: { - name: 'osaka', - comment: - 'Next feature hardfork after prague, internally used for verkle testing/implementation (incomplete/experimental)', - url: 'https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/osaka.md', - status: Status.Draft, eips: [2935, 6800], }, } diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts index db416328bc..b9a0bae15e 100644 --- a/packages/common/src/index.ts +++ b/packages/common/src/index.ts @@ -1,3 +1,4 @@ +export * from './chains.js' export * from './common.js' export * from './constructors.js' export * from './enums.js' diff --git a/packages/common/src/interfaces.ts b/packages/common/src/interfaces.ts index 9721b31945..03f189b57e 100644 --- a/packages/common/src/interfaces.ts +++ b/packages/common/src/interfaces.ts @@ -49,46 +49,6 @@ export type Proof = { storageProof: StorageProof[] } -/* - * Access List types - */ - -export type AccessListItem = { - address: PrefixedHexString - storageKeys: PrefixedHexString[] -} - -/* - * An Access List as a tuple of [address: Uint8Array, storageKeys: Uint8Array[]] - */ -export type AccessListBytesItem = [Uint8Array, Uint8Array[]] -export type AccessListBytes = AccessListBytesItem[] -export type AccessList = AccessListItem[] - -/** - * Authorization list types - */ -export type AuthorizationListItem = { - chainId: PrefixedHexString - address: PrefixedHexString - nonce: PrefixedHexString[] - yParity: PrefixedHexString - r: PrefixedHexString - s: PrefixedHexString -} - -// Tuple of [chain_id, address, [nonce], y_parity, r, s] -export type AuthorizationListBytesItem = [ - Uint8Array, - Uint8Array, - Uint8Array[], - Uint8Array, - Uint8Array, - Uint8Array -] -export type AuthorizationListBytes = AuthorizationListBytesItem[] -export type AuthorizationList = AuthorizationListItem[] - /** * Verkle related * @@ -120,24 +80,24 @@ export interface AccessWitnessInterface { touchAddressOnWriteAndComputeGas( address: Address, treeIndex: number | bigint, - subIndex: number | Uint8Array + subIndex: number | Uint8Array, ): bigint touchAddressOnReadAndComputeGas( address: Address, treeIndex: number | bigint, - subIndex: number | Uint8Array + subIndex: number | Uint8Array, ): bigint touchAddressAndChargeGas( address: Address, treeIndex: number | bigint, subIndex: number | Uint8Array, - { isWrite }: { isWrite?: boolean } + { isWrite }: { isWrite?: boolean }, ): bigint touchAddress( address: Address, treeIndex: number | bigint, subIndex: number | Uint8Array, - { isWrite }: { isWrite?: boolean } + { isWrite }: { isWrite?: boolean }, ): AccessEventFlags shallowCopy(): AccessWitnessInterface merge(accessWitness: AccessWitnessInterface): void @@ -148,44 +108,65 @@ export interface AccessWitnessInterface { * */ export interface StateManagerInterface { + /* + * Core Access Functionality + */ + // Account methods getAccount(address: Address): Promise putAccount(address: Address, account?: Account): Promise deleteAccount(address: Address): Promise modifyAccountFields(address: Address, 
accountFields: AccountFields): Promise - putContractCode(address: Address, value: Uint8Array): Promise - getContractCode(address: Address): Promise - getContractCodeSize?(address: Address): Promise - getContractStorage(address: Address, key: Uint8Array): Promise - putContractStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise - clearContractStorage(address: Address): Promise + + // Code methods + putCode(address: Address, value: Uint8Array): Promise + getCode(address: Address): Promise + getCodeSize(address: Address): Promise + + // Storage methods + getStorage(address: Address, key: Uint8Array): Promise + putStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise + clearStorage(address: Address): Promise + + /* + * Checkpointing Functionality + */ checkpoint(): Promise commit(): Promise revert(): Promise + + /* + * State Root Functionality + */ getStateRoot(): Promise setStateRoot(stateRoot: Uint8Array, clearCache?: boolean): Promise - getProof?(address: Address, storageSlots: Uint8Array[]): Promise hasStateRoot(root: Uint8Array): Promise // only used in client - shallowCopy(downlevelCaches?: boolean): StateManagerInterface - getAppliedKey?(address: Uint8Array): Uint8Array /* - * The following optional methods are Verkle related + * Extra Functionality * - * Experimental (do not implement) + * Optional non-essential methods, these methods should always be guarded + * on usage (check for existence) */ - checkChunkWitnessPresent?(contract: Address, programCounter: number): Promise -} + // Client RPC + getProof?(address: Address, storageSlots: Uint8Array[]): Promise + dumpStorage?(address: Address): Promise + dumpStorageRange?(address: Address, startKey: bigint, limit: number): Promise -export interface EVMStateManagerInterface extends StateManagerInterface { + /* + * EVM/VM Specific Functionality + */ originalStorageCache: { get(address: Address, key: Uint8Array): Promise clear(): void } + generateCanonicalGenesis?(initState: any): Promise // TODO make input more typesafe + // only Verkle/EIP-6800 (experimental) + checkChunkWitnessPresent?(contract: Address, programCounter: number): Promise + getAppliedKey?(address: Uint8Array): Uint8Array // only for preimages - dumpStorage(address: Address): Promise // only used in client - dumpStorageRange(address: Address, startKey: bigint, limit: number): Promise // only used in client - generateCanonicalGenesis(initState: any): Promise // TODO make input more typesafe - getProof(address: Address, storageSlots?: Uint8Array[]): Promise - - shallowCopy(downlevelCaches?: boolean): EVMStateManagerInterface + /* + * Utility + */ + clearCaches(): void + shallowCopy(downlevelCaches?: boolean): StateManagerInterface } diff --git a/packages/common/src/types.ts b/packages/common/src/types.ts index 6507efbadd..c76a264689 100644 --- a/packages/common/src/types.ts +++ b/packages/common/src/types.ts @@ -1,4 +1,4 @@ -import type { Chain, ConsensusAlgorithm, ConsensusType, Hardfork } from './enums.js' +import type { ConsensusAlgorithm, ConsensusType, Hardfork } from './enums.js' import type { BigIntLike, ECDSASignature, Kzg, PrefixedHexString } from '@ethereumjs/util' export interface ChainName { @@ -27,8 +27,7 @@ type ConsensusConfig = { export interface ChainConfig { name: string - chainId: number | bigint - networkId: number | bigint + chainId: number | string defaultHardfork?: string comment?: string url?: string @@ -41,21 +40,19 @@ export interface ChainConfig { depositContractAddress?: PrefixedHexString } -// TODO: Remove the 
string type and only keep PrefixedHexString export interface GenesisBlockConfig { - timestamp?: PrefixedHexString | string - gasLimit: number | PrefixedHexString | string - difficulty: number | PrefixedHexString | string - nonce: PrefixedHexString | string - extraData: PrefixedHexString | string - baseFeePerGas?: PrefixedHexString | string - excessBlobGas?: PrefixedHexString | string + timestamp?: PrefixedHexString + gasLimit: number | PrefixedHexString + difficulty: number | PrefixedHexString + nonce: PrefixedHexString + extraData: PrefixedHexString + baseFeePerGas?: PrefixedHexString + excessBlobGas?: PrefixedHexString } export interface HardforkTransitionConfig { name: Hardfork | string block: number | null // null is used for hardforks that should not be applied -- since `undefined` isn't a valid value in JSON - ttd?: bigint | string timestamp?: number | string forkHash?: PrefixedHexString | null } @@ -80,7 +77,7 @@ export interface CustomCrypto { v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ) => Uint8Array sha256?: (msg: Uint8Array) => Uint8Array ecsign?: (msg: Uint8Array, pk: Uint8Array, chainId?: bigint) => ECDSASignature @@ -89,7 +86,7 @@ export interface CustomCrypto { kzg?: Kzg } -interface BaseOpts { +export interface BaseOpts { /** * String identifier ('byzantium') for hardfork or {@link Hardfork} enum. * * @@ -105,6 +102,24 @@ interface BaseOpts { * - [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537) - BLS12-381 precompiles */ eips?: number[] + /** + * Optionally pass in an EIP params dictionary, see one of the + * EthereumJS library `params.ts` files for an example (e.g. tx, evm). + * By default parameters are set by the respective library, so this + * is only relevant if you want to use EthereumJS libraries with a + * custom parameter set. + * + * Example Format: + * + * ```ts + * { + * 1559: { + * initialBaseFee: 1000000000, + * } + * } + * ``` + */ + params?: ParamsDict /** * This option can be used to replace the most common crypto primitives * (keccak256 hashing e.g.) within the EthereumJS ecosystem libraries @@ -123,34 +138,10 @@ interface BaseOpts { */ export interface CommonOpts extends BaseOpts { /** - * Chain name ('mainnet'), id (1), or {@link Chain} enum, - * either from a chain directly supported or a custom chain - * passed in via {@link CommonOpts.customChains}. - */ - chain: string | number | Chain | bigint | object - /** - * Initialize (in addition to the supported chains) with the selected - * custom chains. Custom genesis state should be passed to the Blockchain class if used. - * - * Usage (directly with the respective chain initialization via the {@link CommonOpts.chain} option): - * - * ```javascript - * import myCustomChain1 from '[PATH_TO_MY_CHAINS]/myCustomChain1.json' - * const common = new Common({ chain: 'myCustomChain1', customChains: [ myCustomChain1 ]}) - * ``` - */ - customChains?: ChainConfig[] -} - -/** - * Options to be used with the {@link Common.custom} static constructor. - */ -export interface CustomCommonOpts extends BaseOpts { - /** - * The name (`mainnet`), id (`1`), or {@link Chain} enum of - * a standard chain used to base the custom chain params on. + * The chain configuration to be used. There are available configuration objects for mainnet + * (`Mainnet`) and the currently active testnets which can be directly used.
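+ * + * Usage example (a minimal sketch, assuming the package is consumed as `@ethereumjs/common`): + * + * ```ts + * import { Common, Mainnet } from '@ethereumjs/common' + * + * const common = new Common({ chain: Mainnet }) + * ```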
*/ - baseChain?: string | number | Chain | bigint + chain: ChainConfig } export interface GethConfigOpts extends BaseOpts { @@ -159,44 +150,33 @@ export interface GethConfigOpts extends BaseOpts { mergeForkIdPostMerge?: boolean } -// TODO: Deprecate the string type and only keep BigIntLike export interface HardforkByOpts { - blockNumber?: BigIntLike | string - timestamp?: BigIntLike | string - td?: BigIntLike | string -} - -export type EIPOrHFConfig = { - comment: string - url: string - status: string - gasConfig?: { - [key: string]: number | bigint | null - } - gasPrices?: { - [key: string]: number | bigint | null - } - pow?: { - [key: string]: number | bigint | null - } - sharding?: { - [key: string]: number | bigint | null - } - vm?: { - [key: string]: number | bigint | null - } + blockNumber?: BigIntLike + timestamp?: BigIntLike } export type EIPConfig = { minimumHardfork: Hardfork - requiredEIPs: number[] -} & EIPOrHFConfig + requiredEIPs?: number[] +} + +export type ParamsConfig = { + [key: string]: number | string | null +} export type HardforkConfig = { - name: string eips?: number[] consensus?: ConsensusConfig -} & EIPOrHFConfig + params?: ParamsConfig +} + +export type EIPsDict = { + [key: string]: EIPConfig +} + +export type ParamsDict = { + [key: string]: ParamsConfig +} export type HardforksDict = { [key: string]: HardforkConfig diff --git a/packages/common/src/utils.ts b/packages/common/src/utils.ts index 91b6db3931..5061ecd520 100644 --- a/packages/common/src/utils.ts +++ b/packages/common/src/utils.ts @@ -1,9 +1,8 @@ import { intToHex, isHexString, stripHexPrefix } from '@ethereumjs/util' -import { chains as CHAIN_SPECS } from './chains.js' -import { Chain, Hardfork } from './enums.js' +import { Goerli, Holesky, Kaustinen6, Mainnet, Sepolia } from './chains.js' +import { Hardfork } from './enums.js' -import type { ChainConfig, ChainName, ChainsConfig } from './index.js' import type { PrefixedHexString } from '@ethereumjs/util' type ConfigHardfork = @@ -27,12 +26,9 @@ function formatNonce(nonce: string): PrefixedHexString { /** * Converts Geth genesis parameters to an EthereumJS compatible `CommonOpts` object * @param json object representing the Geth genesis file - * @param optional mergeForkIdPostMerge which clarifies the placement of MergeForkIdTransition - * hardfork, which by default is post merge as with the merged eth networks but could also come - * before merge like in kiln genesis * @returns genesis parameters in a `CommonOpts` compliant object */ -function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { +function parseGethParams(json: any) { const { name, config, @@ -81,14 +77,23 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { // but have different configuration parameters in geth genesis parameters if (config.eip155Block !== config.eip158Block) { throw new Error( - 'EIP155 block number must equal EIP 158 block number since both are part of SpuriousDragon hardfork and the client only supports activating the full hardfork' + 'EIP155 block number must equal EIP 158 block number since both are part of SpuriousDragon hardfork and the client only supports activating the full hardfork', ) } + // Terminal total difficulty logic is not supported any more as the merge has been completed + // so the Merge/Paris hardfork block must be 0 + if ( + config.terminalTotalDifficulty !== undefined && + (BigInt(difficulty) < BigInt(config.terminalTotalDifficulty) || + config.terminalTotalDifficultyPassed === false) + ) { + throw new 
Error('nonzero terminal total difficulty is not supported') + } + const params = { name, chainId, - networkId: chainId, depositContractAddress, genesis: { timestamp, @@ -110,10 +115,10 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { type: 'poa', algorithm: 'clique', clique: { - // The recent geth genesis seems to be using blockperiodseconds + // The recent geth genesis seems to be using blockperiodseconds // cspell:disable-line // and epochlength for clique specification // see: https://hackmd.io/PqZgMpnkSWCWv5joJoFymQ - period: config.clique.period ?? config.clique.blockperiodseconds, + period: config.clique.period ?? config.clique.blockperiodseconds, // cspell:disable-line epoch: config.clique.epoch ?? config.clique.epochlength, }, } @@ -136,7 +141,7 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { [Hardfork.MuirGlacier]: { name: 'muirGlacierBlock' }, [Hardfork.Berlin]: { name: 'berlinBlock' }, [Hardfork.London]: { name: 'londonBlock' }, - [Hardfork.MergeForkIdTransition]: { name: 'mergeForkBlock', postMerge: mergeForkIdPostMerge }, + [Hardfork.MergeForkIdTransition]: { name: 'mergeForkBlock', postMerge: true }, [Hardfork.Shanghai]: { name: 'shanghaiTime', postMerge: true, isTimestamp: true }, [Hardfork.Cancun]: { name: 'cancunTime', postMerge: true, isTimestamp: true }, [Hardfork.Prague]: { name: 'pragueTime', postMerge: true, isTimestamp: true }, @@ -144,12 +149,15 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { } // forkMapRev is the map from config field name to Hardfork - const forkMapRev = Object.keys(forkMap).reduce((acc, elem) => { - acc[forkMap[elem].name] = elem - return acc - }, {} as { [key: string]: string }) + const forkMapRev = Object.keys(forkMap).reduce( + (acc, elem) => { + acc[forkMap[elem].name] = elem + return acc + }, + {} as { [key: string]: string }, + ) const configHardforkNames = Object.keys(config).filter( - (key) => forkMapRev[key] !== undefined && config[key] !== undefined && config[key] !== null + (key) => forkMapRev[key] !== undefined && config[key] !== undefined && config[key] !== null, ) params.hardforks = configHardforkNames @@ -175,7 +183,7 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { return (a.timestamp ?? 0) - (b.timestamp ?? 0) }) - // only set the genesis timestamp forks to zero post the above sort has happended + // only set the genesis timestamp forks to zero post the above sort has happened // to get the correct sorting for (const hf of params.hardforks) { if (hf.timestamp === genesisTimestamp) { @@ -184,20 +192,16 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { } if (config.terminalTotalDifficulty !== undefined) { - // Following points need to be considered for placement of merge hf - // - Merge hardfork can't be placed at genesis - // - Place merge hf before any hardforks that require CL participation for e.g. withdrawals - // - Merge hardfork has to be placed just after genesis if any of the genesis hardforks make CL - // necessary for e.g. 
withdrawals + // Merge fork must be placed at 0 since ttd logic is no longer supported const mergeConfig = { name: Hardfork.Paris, - ttd: config.terminalTotalDifficulty, - block: null, + block: 0, + timestamp: undefined, } // Merge hardfork has to be placed before first hardfork that is dependent on merge const postMergeIndex = params.hardforks.findIndex( - (hf: any) => forkMap[hf.name]?.postMerge === true + (hf: any) => forkMap[hf.name]?.postMerge === true, ) if (postMergeIndex !== -1) { params.hardforks.splice(postMergeIndex, 0, mergeConfig as unknown as ConfigHardfork) @@ -219,7 +223,7 @@ function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) { * @param name optional chain name * @returns parsed params */ -export function parseGethGenesis(json: any, name?: string, mergeForkIdPostMerge?: boolean) { +export function parseGethGenesis(json: any, name?: string) { try { const required = ['config', 'difficulty', 'gasLimit', 'nonce', 'alloc'] if (required.some((field) => !(field in json))) { @@ -233,58 +237,34 @@ function parseGethGenesis(json: any, name?: string, mergeForkIdPostMerge? if (name !== undefined) { finalJson.name = name } - return parseGethParams(finalJson, mergeForkIdPostMerge) + return parseGethParams(finalJson) } catch (e: any) { throw new Error(`Error parsing parameters file: ${e.message}`) } } -export function getInitializedChains(customChains?: ChainConfig[]): ChainsConfig { - const names: ChainName = {} - for (const [name, id] of Object.entries(Chain)) { - names[id] = name.toLowerCase() - } - const chains = { ...CHAIN_SPECS } as ChainsConfig - if (customChains) { - for (const chain of customChains) { - const { name } = chain - names[chain.chainId.toString()] = name - chains[name] = chain - } - } - chains.names = names - return chains -} - /** - * Determine if a {@link chainId} is supported as a standard chain - * @param chainId bigint id (`1`) of a standard chain - * @returns boolean + * Return the preset chain config for one of the predefined chain configurations + * @param chain a string representing a network name (e.g.
'mainnet') or number representing the chain ID + * @returns a {@link ChainConfig} */ -export function isSupportedChainId(chainId: bigint): boolean { - const initializedChains = getInitializedChains() - return Boolean((initializedChains['names'] as ChainName)[chainId.toString()]) -} - -export function _getChainParams( - chain: string | number | Chain | bigint, - customChains?: ChainConfig[] -): ChainConfig { - const initializedChains = getInitializedChains(customChains) - if (typeof chain === 'number' || typeof chain === 'bigint') { - chain = chain.toString() - - if ((initializedChains['names'] as ChainName)[chain]) { - const name: string = (initializedChains['names'] as ChainName)[chain] - return initializedChains[name] as ChainConfig - } - - throw new Error(`Chain with ID ${chain} not supported`) - } - - if (initializedChains[chain] !== undefined) { - return initializedChains[chain] as ChainConfig +export const getPresetChainConfig = (chain: string | number) => { + switch (chain) { + case 'goerli': + case 5: + return Goerli + case 'holesky': + case 17000: + return Holesky + case 'kaustinen6': + case 69420: + return Kaustinen6 + case 'sepolia': + case 11155111: + return Sepolia + case 'mainnet': + case 1: + default: + return Mainnet } - - throw new Error(`Chain with name ${chain} not supported`) } diff --git a/packages/common/test/chains.spec.ts b/packages/common/test/chains.spec.ts index 3a2da8caea..d7ad6e6b98 100644 --- a/packages/common/test/chains.spec.ts +++ b/packages/common/test/chains.spec.ts @@ -1,106 +1,96 @@ import { assert, describe, it } from 'vitest' import { - Chain, Common, ConsensusAlgorithm, ConsensusType, + Goerli, Hardfork, - isSupportedChainId, + Mainnet, + getPresetChainConfig, } from '../src/index.js' describe('[Common/Chains]: Initialization / Chain params', () => { it('Should initialize with chain provided', () => { - let c = new Common({ chain: 'mainnet' }) + const c = new Common({ chain: Mainnet }) assert.equal(c.chainName(), 'mainnet', 'should initialize with chain name') assert.equal(c.chainId(), BigInt(1), 'should return correct chain Id') - assert.equal(c.networkId(), BigInt(1), 'should return correct network Id') - assert.equal(c.hardfork(), Hardfork.Shanghai, 'should set hardfork to current default hardfork') + assert.equal(c.hardfork(), Hardfork.Cancun, 'should set hardfork to current default hardfork') assert.equal( c.hardfork(), c.DEFAULT_HARDFORK, - 'should set hardfork to hardfork set as DEFAULT_HARDFORK' + 'should set hardfork to hardfork set as DEFAULT_HARDFORK', ) - - c = new Common({ chain: 1 }) - assert.equal(c.chainName(), 'mainnet', 'should initialize with chain Id') }) - it('Should initialize with chain provided by Chain enum', () => { - const c = new Common({ chain: Chain.Mainnet }) - assert.equal(c.chainName(), 'mainnet', 'should initialize with chain name') - assert.equal(c.chainId(), BigInt(1), 'should return correct chain Id') - assert.equal(c.networkId(), BigInt(1), 'should return correct network Id') - assert.equal(c.hardfork(), Hardfork.Shanghai, 'should set hardfork to current default hardfork') - assert.equal( - c.hardfork(), - c.DEFAULT_HARDFORK, - 'should set hardfork to hardfork set as DEFAULT_HARDFORK' - ) + it('Should initialize with chain provided by chain name or network Id', () => { + let chain = getPresetChainConfig('mainnet') + let c = new Common({ chain }) + assert.equal(c.chainName(), 'mainnet') + chain = getPresetChainConfig(5) + c = new Common({ chain }) + assert.equal(c.chainName(), 'goerli') + chain = 
getPresetChainConfig(123) + c = new Common({ chain }) + assert.equal(c.chainName(), 'mainnet') }) it('Should initialize with chain and hardfork provided', () => { - const c = new Common({ chain: 'mainnet', hardfork: 'byzantium' }) + const c = new Common({ chain: Mainnet, hardfork: 'byzantium' }) assert.equal(c.hardfork(), 'byzantium', 'should return correct hardfork name') }) it('Should initialize with chain and hardfork provided by Chain and Hardfork enums', () => { - const c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + const c = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) assert.equal(c.hardfork(), 'byzantium', 'should return correct hardfork name') }) it('Should handle initialization errors', () => { - let f = function () { - new Common({ chain: 'chainnotexisting' }) + const f = function () { + new Common({ chain: Mainnet, hardfork: 'hardforkNotExisting' }) } - let msg = 'should throw an exception on non-existing chain' - assert.throws(f, /not supported$/, undefined, msg) // eslint-disable-line no-new - - f = function () { - new Common({ chain: 'mainnet', hardfork: 'hardforknotexisting' }) - } - msg = 'should throw an exception on non-existing hardfork' + const msg = 'should throw an exception on non-existing hardfork' assert.throws(f, /not supported$/, undefined, msg) // eslint-disable-line no-new }) it('Should provide correct access to chain parameters', () => { - let c = new Common({ chain: 'mainnet', hardfork: 'tangerineWhistle' }) + let c = new Common({ chain: Mainnet, hardfork: 'tangerineWhistle' }) assert.equal(c.hardforks()[3]['block'], 2463000, 'should return correct hardfork data') assert.equal(typeof c.bootstrapNodes()[0].port, 'number', 'should return a port as number') assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - 'should return correct consensus type' + 'should return correct consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Ethash, - 'should return correct consensus algorithm' + 'should return correct consensus algorithm', ) assert.deepEqual(c.consensusConfig(), {}, 'should return empty dictionary for consensus config') - c = new Common({ chain: 'goerli', hardfork: 'spuriousDragon' }) + c = new Common({ chain: Goerli, hardfork: 'spuriousDragon' }) assert.equal(c.hardforks()[3]['block'], 0, 'should return correct hardfork data') assert.equal(typeof c.bootstrapNodes()[0].port, 'number', 'should return a port as number') assert.equal( c.consensusType(), ConsensusType.ProofOfAuthority, - 'should return correct consensus type' + 'should return correct consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Clique, - 'should return correct consensus algorithm' + 'should return correct consensus algorithm', ) assert.equal( c.consensusConfig().epoch, 30000, - 'should return correct consensus config parameters' + 'should return correct consensus config parameters', ) }) it('Should provide the bootnode information in a uniform way', () => { - const configs = ['mainnet', 'goerli'] + const configs = [Mainnet, Goerli] for (const network of configs) { const c = new Common({ chain: network }) const bootnode = c.bootstrapNodes()[0] @@ -110,18 +100,18 @@ describe('[Common/Chains]: Initialization / Chain params', () => { assert.equal( typeof bootnode.location, 'string', - 'returns the location as string (empty string if unavailable)' + 'returns the location as string (empty string if unavailable)', ) assert.equal( typeof bootnode.comment, 'string', - 'returns a comment as string (empty 
string if unavailable)' + 'returns a comment as string (empty string if unavailable)', ) } }) it('Should provide DNS network information in a uniform way', () => { - const configs = ['mainnet', 'goerli'] + const configs = [Mainnet, Goerli] for (const network of configs) { const c = new Common({ chain: network }) const dnsNetworks = c.dnsNetworks() @@ -131,19 +121,9 @@ describe('[Common/Chains]: Initialization / Chain params', () => { }) }) -describe('[Common]: isSupportedChainId static method', () => { - it('Should return true for supported chainId', () => { - assert.equal(isSupportedChainId(BigInt(1)), true, 'returns true') - }) - - it('Should return false for unsupported chainId', () => { - assert.equal(isSupportedChainId(BigInt(0)), false, 'returns false') - }) -}) - describe('[Common]: copy() listener tests', () => { it('Should work', () => { - const common = new Common({ chain: 'mainnet' }) + const common = new Common({ chain: Mainnet }) // Add two listeners common.events.on('hardforkChanged', () => {}) common.events.on('hardforkChanged', () => {}) @@ -151,12 +131,12 @@ describe('[Common]: copy() listener tests', () => { assert.equal( common.events.listenerCount('hardforkChanged'), 2, - 'original common instance should have two listeners' + 'original common instance should have two listeners', ) assert.equal( commonCopy.events.listenerCount('hardforkChanged'), 0, - 'copied common instance should have zero listeners' + 'copied common instance should have zero listeners', ) }) }) diff --git a/packages/common/test/customChains.spec.ts b/packages/common/test/customChains.spec.ts index 55b04a35c0..02c3cacd0f 100644 --- a/packages/common/test/customChains.spec.ts +++ b/packages/common/test/customChains.spec.ts @@ -1,218 +1,95 @@ -import { BIGINT_0 } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Status } from '../src/hardforks.js' -import { - Chain, - Common, - ConsensusType, - CustomChain, - Hardfork, - createCustomCommon, -} from '../src/index.js' +import { Common, Hardfork, Mainnet, createCustomCommon } from '../src/index.js' import * as testnet from './data/testnet.json' -import * as testnet2 from './data/testnet2.json' -import * as testnet3 from './data/testnet3.json' - -import type { ChainConfig, HardforkTransitionConfig } from '../src/index.js' describe('[Common]: Custom chains', () => { it('chain -> object: should provide correct access to private network chain parameters', () => { const c = new Common({ chain: testnet, hardfork: Hardfork.Byzantium }) assert.equal(c.chainName(), 'testnet', 'should initialize with chain name') assert.equal(c.chainId(), BigInt(12345), 'should return correct chain Id') - assert.equal(c.networkId(), BigInt(12345), 'should return correct network Id') assert.equal(c.hardforks()[3]['block'], 3, 'should return correct hardfork data') assert.equal(c.bootstrapNodes()[1].ip, '10.0.0.2', 'should return a bootstrap node array') }) - it('chain -> object: should handle custom chain parameters with missing field', () => { - const chainParams = Object.assign({}, testnet) - delete (chainParams as any)['hardforks'] - assert.throws( - function () { - new Common({ chain: chainParams }) - }, - /Missing required/, - undefined, - 'should throw an exception on missing parameter' - ) - }) - it('custom() -> base functionality', () => { - const mainnetCommon = new Common({ chain: Chain.Mainnet }) + const mainnetCommon = new Common({ chain: Mainnet }) - const customChainParams = { name: 'custom', chainId: 123, networkId: 678 } - const 
customChainCommon = createCustomCommon(customChainParams, { + const customChainParams = { name: 'custom', chainId: 123 } + const customChainCommon = createCustomCommon(customChainParams, Mainnet, { hardfork: Hardfork.Byzantium, }) // From custom chain params assert.equal(customChainCommon.chainName(), customChainParams.name) assert.equal(customChainCommon.chainId(), BigInt(customChainParams.chainId)) - assert.equal(customChainCommon.networkId(), BigInt(customChainParams.networkId)) // Fallback params from mainnet - assert.equal(customChainCommon.genesis(), mainnetCommon.genesis()) - assert.equal(customChainCommon.bootstrapNodes(), mainnetCommon.bootstrapNodes()) - assert.equal(customChainCommon.hardforks(), mainnetCommon.hardforks()) + assert.deepEqual(customChainCommon.genesis(), mainnetCommon.genesis()) + assert.deepEqual(customChainCommon.bootstrapNodes(), mainnetCommon.bootstrapNodes()) + assert.deepEqual(customChainCommon.hardforks(), mainnetCommon.hardforks()) // Set only to this Common assert.equal(customChainCommon.hardfork(), 'byzantium') }) it('custom() -> behavior', () => { - let common = createCustomCommon({ chainId: 123 }) - assert.deepEqual(common.networkId(), BigInt(1), 'should default to mainnet base chain') - assert.equal(common.chainName(), 'custom-chain', 'should set default custom chain name') - - common = createCustomCommon(CustomChain.PolygonMumbai) - assert.deepEqual( - common.networkId(), - BigInt(80001), - 'supported chain -> should initialize with correct chain ID' - ) - for (const customChain of Object.values(CustomChain)) { - common = createCustomCommon(customChain) - assert.equal( - common.chainName(), - customChain, - `supported chain -> should initialize with enum name (${customChain})` - ) - } + let common = createCustomCommon({ chainId: 123 }, Mainnet) + assert.equal(common.consensusAlgorithm(), 'casper', 'should default to mainnet base chain') + assert.equal(common.chainName(), 'mainnet', 'should set default custom chain name') - common = createCustomCommon(CustomChain.PolygonMumbai) + common = createCustomCommon({ chainId: 123 }, Mainnet) assert.equal( common.hardfork(), common.DEFAULT_HARDFORK, - 'uses default hardfork when no options are present' - ) - - common = createCustomCommon(CustomChain.OptimisticEthereum, { hardfork: Hardfork.Byzantium }) - assert.equal( - common.hardfork(), - Hardfork.Byzantium, - 'should correctly set an option (default options present)' - ) - - try { - //@ts-ignore TypeScript complains, nevertheless do the test for JS behavior - createCustomCommon('this-chain-is-not-supported') - assert.fail('test should fail') - } catch (e: any) { - assert.ok( - e.message.includes('not supported'), - 'supported chain -> should throw if chain name is not supported' - ) - } - }) - - it('customChains parameter: initialization exception', () => { - try { - new Common({ chain: testnet, customChains: [testnet] as ChainConfig[] }) - assert.fail('should throw') - } catch (e: any) { - assert.ok( - e.message.includes( - 'Chain must be a string, number, or bigint when initialized with customChains passed in' - ), - 'should throw an exception on wrong initialization' - ) - } - }) - - it('customChains parameter: initialization', () => { - let c = new Common({ - chain: Chain.Mainnet, - hardfork: Hardfork.Byzantium, - customChains: [testnet] as ChainConfig[], - }) - assert.equal(c.chainName(), 'mainnet', 'customChains, chain set to supported chain') - assert.equal(c.hardforkBlock()!, BigInt(4370000), 'customChains, chain set to supported chain') - - 
c.setChain('testnet') - assert.equal(c.chainName(), 'testnet', 'customChains, chain switched to custom chain') - assert.equal(c.hardforkBlock()!, BigInt(4), 'customChains, chain switched to custom chain') - - c = new Common({ - chain: 'testnet', - hardfork: Hardfork.Byzantium, - customChains: [testnet] as ChainConfig[], - }) - assert.equal(c.chainName(), 'testnet', 'customChains, chain initialized with custom chain') - assert.equal(c.hardforkBlock()!, BigInt(4), 'customChains, chain initialized with custom chain') - - const customChains = [testnet, testnet2, testnet3] as ChainConfig[] - c = new Common({ - chain: 'testnet2', - hardfork: Hardfork.Istanbul, - customChains, - }) - assert.equal(c.chainName(), 'testnet2', 'customChains, chain initialized with custom chain') - assert.equal( - c.hardforkBlock()!, - BigInt(10), - 'customChains, chain initialized with custom chain' + 'uses default hardfork when no options are present', ) - const customChainParams: Partial = { - name: 'custom', - chainId: 123, - networkId: 678, - depositContractAddress: '0x4242424242424242424242424242424242424242', - } - const customChainCommon = createCustomCommon(customChainParams, { + common = createCustomCommon({ chainId: 123 }, Mainnet, { hardfork: Hardfork.Byzantium, }) - - assert.equal( - customChainCommon['_chainParams'].depositContractAddress, - customChainParams.depositContractAddress - ) - c.setChain('testnet') - assert.equal(c.chainName(), 'testnet', 'customChains, should allow to switch custom chain') assert.equal( - c.consensusType(), - ConsensusType.ProofOfWork, - 'customChains, should allow to switch custom chain' + common.hardfork(), + Hardfork.Byzantium, + 'should correctly set an option (default options present)', ) }) it('customHardforks parameter: initialization and transition tests', () => { - const c = createCustomCommon({ - customHardforks: { - testEIP2935Hardfork: { - name: 'testEIP2935Hardfork', - comment: 'Hardfork to test EIP 2935', - url: '', - status: Status.Final, - eips: [2935], + const c = createCustomCommon( + { + customHardforks: { + // Hardfork to test EIP 2935 + testEIP2935Hardfork: { + eips: [2935], + }, }, + hardforks: [ + { + name: 'chainstart', + block: 0, + }, + { + name: 'berlin', + block: null, + timestamp: 999, + }, + { + // Note: this custom hardfork name MUST be in customHardforks as field + // If this is not the case, Common will throw with a random error + // Should we throw early with a descriptive error? TODO + name: 'testEIP2935Hardfork', + block: null, + timestamp: 1000, + }, + ], }, - hardforks: [ - { - name: 'chainstart', - block: 0, - }, - { - name: 'berlin', - block: null, - timestamp: 999, - }, - { - // Note: this custom hardfork name MUST be in customHardforks as field - // If this is not the case, Common will throw with a random error - // Should we throw early with a descriptive error? 
TODO - name: 'testEIP2935Hardfork', - block: null, - timestamp: 1000, - }, - ], - }) - // Note: default HF of Common is currently Shanghai + Mainnet, + ) + // Note: default HF of Common is currently Cancun // Did not pass any "hardfork" param - assert.equal(c.hardfork(), Hardfork.Shanghai) + assert.equal(c.hardfork(), Hardfork.Cancun) c.setHardforkBy({ blockNumber: 0, }) @@ -232,61 +109,46 @@ describe('[Common]: Custom chains', () => { }) it('customHardforks: override params', () => { - const c = createCustomCommon({ - customHardforks: { - stop10Gas: { - name: 'stop10Gas', - comment: 'Hardfork which changes the gas of STOP from 0 to 10', - url: '', - status: Status.Final, - eips: [2935], - vm: { - stop: BigInt(10), + const c = createCustomCommon( + { + customHardforks: { + // Hardfork which changes the gas of STOP from 0 to 10 + stop10Gas: { + eips: [2935], + params: { + stop: 10, + }, }, }, + hardforks: [ + { + name: 'chainstart', + block: 0, + }, + { + name: 'stop10Gas', + block: null, + timestamp: 1000, + }, + ], }, - hardforks: [ - { - name: 'chainstart', - block: 0, - }, - { - name: 'stop10Gas', - block: null, - timestamp: 1000, - }, - ], - }) + Mainnet, + ) c.setHardfork(Hardfork.Chainstart) - assert.equal(c.param('vm', 'stop'), BIGINT_0) + assert.throws(() => { + c.param('stop') + }) c.setHardforkBy({ blockNumber: 1, timestamp: 1000, }) assert.equal(c.hardfork(), 'stop10Gas') - assert.equal(c.param('vm', 'stop'), BigInt(10)) + assert.equal(c.param('stop'), BigInt(10)) }) }) describe('custom chain setup with hardforks with undefined/null block numbers', () => { it('Should work', () => { - const undefinedHardforks = [ - { - name: 'chainstart', - block: 0, - }, - { name: 'homestead' }, - { name: 'byzantium', block: null }, - { name: 'tangerineWhistle', block: 10 }, - ] - - assert.throws( - () => createCustomCommon({ hardforks: undefinedHardforks as HardforkTransitionConfig[] }), - undefined, - undefined, - 'throws when a hardfork with an undefined block number is passed' - ) - const nullHardforks = [ { name: 'chainstart', @@ -296,7 +158,7 @@ describe('custom chain setup with hardforks with undefined/null block numbers', { name: 'tangerineWhistle', block: 10 }, ] - const common = createCustomCommon({ hardforks: nullHardforks }) + const common = createCustomCommon({ hardforks: nullHardforks }, Mainnet) common.setHardforkBy({ blockNumber: 10n }) assert.equal('tangerineWhistle', common.hardfork(), 'set correct hardfork') common.setHardforkBy({ blockNumber: 3n }) diff --git a/packages/common/test/customCrypto.spec.ts b/packages/common/test/customCrypto.spec.ts index 0783026c84..65830bd6a4 100644 --- a/packages/common/test/customCrypto.spec.ts +++ b/packages/common/test/customCrypto.spec.ts @@ -1,7 +1,7 @@ import { concatBytes, randomBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Chain, Common, createCustomCommon } from '../src/index.js' +import { Common, Mainnet, createCustomCommon } from '../src/index.js' import type { ECDSASignature } from '@ethereumjs/util' @@ -14,7 +14,7 @@ describe('[Common]: Custom Crypto', () => { v: bigint, r: Uint8Array, s: Uint8Array, - _chainID?: bigint + _chainID?: bigint, ) => { return concatBytes(msgHash, Uint8Array.from([Number(v)]), r, s) } @@ -34,15 +34,15 @@ describe('[Common]: Custom Crypto', () => { } const value = new Uint8Array([2]) - let c = new Common({ chain: Chain.Mainnet, customCrypto }) + let c = new Common({ chain: Mainnet, customCrypto }) let msg = 'Should initialize with custom keccak256 function and 
use properly (main constructor)' assert.deepEqual(c.customCrypto.keccak256!(value), new Uint8Array([2, 1]), msg) msg = 'Should still work on a copied instance' assert.deepEqual(c.copy().customCrypto.keccak256!(value), new Uint8Array([2, 1]), msg) - const customChainParams = { name: 'custom', chainId: 123, networkId: 678 } - c = createCustomCommon(customChainParams, { customCrypto }) + const customChainParams = { name: 'custom', chainId: 123 } + c = createCustomCommon(customChainParams, Mainnet, { customCrypto }) msg = 'Should initialize with custom keccak256 function and use properly (custom() constructor)' assert.deepEqual(c.customCrypto.keccak256!(value), new Uint8Array([2, 1]), msg) }) @@ -51,15 +51,15 @@ describe('[Common]: Custom Crypto', () => { const customCrypto = { ecrecover: customEcrecover, } - const c = new Common({ chain: Chain.Mainnet, customCrypto }) + const c = new Common({ chain: Mainnet, customCrypto }) assert.deepEqual( Uint8Array.from([1, 2, 3, 4]), c.customCrypto.ecrecover!( Uint8Array.from([1]), BigInt(2), Uint8Array.from([3]), - Uint8Array.from([4]) - ) + Uint8Array.from([4]), + ), ) }) @@ -68,7 +68,7 @@ describe('[Common]: Custom Crypto', () => { sha256: customSha256, } const msg = Uint8Array.from([0, 1, 2, 3]) - const c = new Common({ chain: Chain.Mainnet, customCrypto }) + const c = new Common({ chain: Mainnet, customCrypto }) assert.equal(c.customCrypto.sha256!(msg)[0], 0xff, 'used custom sha256 function') }) @@ -76,7 +76,7 @@ describe('[Common]: Custom Crypto', () => { const customCrypto = { ecsign: customEcSign, } - const c = new Common({ chain: Chain.Mainnet, customCrypto }) + const c = new Common({ chain: Mainnet, customCrypto }) assert.equal(c.customCrypto.ecsign!(randomBytes(32), randomBytes(32), 0n).v, 0n) assert.equal(c.customCrypto.ecsign!(randomBytes(32), randomBytes(32)).v, 27n) }) diff --git a/packages/common/test/data/geth-genesis/debug.json b/packages/common/test/data/geth-genesis/debug.json index 8f6da427f0..94660dd079 100644 --- a/packages/common/test/data/geth-genesis/debug.json +++ b/packages/common/test/data/geth-genesis/debug.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/common/test/data/geth-genesis/eip4844.json b/packages/common/test/data/geth-genesis/eip4844.json index cd5726b939..78cae3c9f0 100644 --- a/packages/common/test/data/geth-genesis/eip4844.json +++ b/packages/common/test/data/geth-genesis/eip4844.json @@ -18,7 +18,8 @@ "blockperiodseconds": 5, "epochlength": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/common/test/data/geth-genesis/geth-genesis-kiln.json b/packages/common/test/data/geth-genesis/geth-genesis-kiln.json deleted file mode 100644 index 6d99cf2355..0000000000 --- a/packages/common/test/data/geth-genesis/geth-genesis-kiln.json +++ /dev/null @@ -1,865 +0,0 @@ -{ - "config": { - "chainId": 1337802, - "homesteadBlock": 0, - "eip150Block": 0, - "eip155Block": 0, - "eip158Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "berlinBlock": 0, - "londonBlock": 0, - "mergeForkBlock": 1000, - "terminalTotalDifficulty": 20000000000000 - }, - "alloc": { - "0x0000000000000000000000000000000000000000": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000001": { - 
"balance": "1" - }, - "0x0000000000000000000000000000000000000002": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000003": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000004": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000005": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000006": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000007": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000008": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000009": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000000f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000010": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000011": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000012": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000013": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000014": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000015": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000016": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000017": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000018": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000019": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000001f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000020": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000021": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000022": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000023": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000024": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000025": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000026": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000027": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000028": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000029": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000002f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000030": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000031": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000032": { - "balance": "1" - }, - 
"0x0000000000000000000000000000000000000033": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000034": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000035": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000036": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000037": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000038": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000039": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000003f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000040": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000041": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000042": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000043": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000044": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000045": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000046": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000047": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000048": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000049": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000004f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000050": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000051": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000052": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000053": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000054": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000055": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000056": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000057": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000058": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000059": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000005f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000060": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000061": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000062": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000063": { - "balance": "1" - }, - 
"0x0000000000000000000000000000000000000064": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000065": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000066": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000067": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000068": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000069": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000006f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000070": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000071": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000072": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000073": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000074": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000075": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000076": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000077": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000078": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000079": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000007f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000080": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000081": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000082": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000083": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000084": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000085": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000086": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000087": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000088": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000089": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000008f": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000090": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000091": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000092": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000093": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000094": { - "balance": "1" - }, - 
"0x0000000000000000000000000000000000000095": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000096": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000097": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000098": { - "balance": "1" - }, - "0x0000000000000000000000000000000000000099": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009a": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009b": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009c": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009d": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009e": { - "balance": "1" - }, - "0x000000000000000000000000000000000000009f": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000a9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000aa": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ab": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ac": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ad": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ae": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000af": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000b9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ba": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000be": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000bf": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c5": { - "balance": "1" - }, - 
"0x00000000000000000000000000000000000000c6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000c9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ca": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ce": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000cf": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000d9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000da": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000db": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000dc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000dd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000de": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000df": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e6": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000e9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ea": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000eb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ec": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ed": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ee": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ef": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f0": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f1": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f2": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f3": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f4": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f5": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f6": { - "balance": "1" - }, - 
"0x00000000000000000000000000000000000000f7": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f8": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000f9": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fa": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fb": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fc": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fd": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000fe": { - "balance": "1" - }, - "0x00000000000000000000000000000000000000ff": { - "balance": "1" - }, - "0x4242424242424242424242424242424242424242": { - "balance": "0", - "code": "0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a080825281018990529081
9060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca95780518
2527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b60408051600880825281830190925260609160208201818036833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a26469706673582212201dd26f37a621703009abf16e77e69c93dc50c79db7f6cc37543e3e0e3decdc9764736f6c634300060b0033", - "storage": { - "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", - "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", - "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", - "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", - "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", - 
"0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", - "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", - "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", - "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", - "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", - "0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", - "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", - "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", - "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", - "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", - "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", - "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", - "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", - "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", - "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", - "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", - "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", - "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", - "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", - "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", - "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", - "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", - "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", - "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", - "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", - 
"0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" - } - }, - "0xf97e180c050e5Ab072211Ad2C213Eb5AEE4DF134": { - "balance": "10000000000000000000000000" - }, - "0x2cA5F489CC1Fd1CEC24747B64E8dE0F4A6A850E1": { - "balance": "10000000000000000000000000" - }, - "0x7203bd333a874D9d329050ecE393820fCD501eaA": { - "balance": "10000000000000000000000000" - }, - "0xA51918aA40D78Ff8be939bf0E8404252875c6aDF": { - "balance": "10000000000000000000000000" - }, - "0xAA81078e6b2121dd7A846690DFdD6b10d7658d8B": { - "balance": "10000000000000000000000000" - }, - "0xFA2d31D8f21c1D1633E9BEB641dF77D21D63ccDd": { - "balance": "10000000000000000000000000" - }, - "0xf751C9c6d60614226fE57D2cAD6e10C856a2ddA3": { - "balance": "10000000000000000000000000" - }, - "0x9cD16887f6A808AEaa65D3c840f059EeA4ca1319": { - "balance": "10000000000000000000000000" - }, - "0x2E07043584F11BFF0AC39c927665DF6c6ebaffFB": { - "balance": "10000000000000000000000000" - }, - "0x60e771E5eCA8E26690920de669520Da210D64A9B": { - "balance": "10000000000000000000000000" - }, - "0xFC4db92C2Cf77CE02fBfd7Da0346d2CbFA66aD59": { - "balance": "10000000000000000000000000" - } - }, - "coinbase": "0x0000000000000000000000000000000000000000", - "difficulty": "0x01", - "extraData": "", - "gasLimit": "0x400000", - "nonce": "0x1234", - "mixhash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "timestamp": "0" -} diff --git a/packages/common/test/data/geth-genesis/no-extra-data.json b/packages/common/test/data/geth-genesis/no-extra-data.json index b9d2f14595..73fc19d02b 100644 --- a/packages/common/test/data/geth-genesis/no-extra-data.json +++ b/packages/common/test/data/geth-genesis/no-extra-data.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "16", diff --git a/packages/common/test/data/geth-genesis/post-merge.json b/packages/common/test/data/geth-genesis/post-merge.json index ffbb465e69..32f5f093d3 100644 --- a/packages/common/test/data/geth-genesis/post-merge.json +++ b/packages/common/test/data/geth-genesis/post-merge.json @@ -16,7 +16,8 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/common/test/data/geth-genesis/withdrawals.json b/packages/common/test/data/geth-genesis/withdrawals.json index 10060534e3..7941658039 100644 --- a/packages/common/test/data/geth-genesis/withdrawals.json +++ b/packages/common/test/data/geth-genesis/withdrawals.json @@ -17,7 +17,8 @@ "blockperiodseconds": 5, "epochlength": 30000 }, - "terminalTotalDifficulty": 0 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "nonce": "0x42", "timestamp": "0x0", diff --git a/packages/common/test/data/merge/testnetPOS.json b/packages/common/test/data/merge/testnetPOS.json deleted file mode 100644 index 549193be0b..0000000000 --- a/packages/common/test/data/merge/testnetPOS.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "testnetPOS", - "chainId": 66666, - "networkId": 66666, - "defaultHardfork": "chainstart", - "consensus": { - "type": "pos", - "algorithm": "casper", - "casper": {} - }, - "comment": "Private test network (TODO: genesis block not constructed according to POS block rules 
yet)", - "url": "[TESTNET_URL]", - "genesis": { - "gasLimit": 1000000, - "difficulty": 1, - "nonce": "0xbb00000000000000", - "extraData": "0xcc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - }, - "hardforks": [ - { - "name": "chainstart", - "block": 0, - "ttd": "0" - }, - { - "name": "shanghai", - "block": 5 - } - ], - "bootstrapNodes": [ - { - "ip": "10.0.0.1", - "port": 30303, - "id": "11000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "location": "", - "comment": "" - }, - { - "ip": "10.0.0.2", - "port": 30303, - "id": "22000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "location": "", - "comment": "" - } - ] -} diff --git a/packages/common/test/data/paramsTest.ts b/packages/common/test/data/paramsTest.ts new file mode 100644 index 0000000000..367c2e3c78 --- /dev/null +++ b/packages/common/test/data/paramsTest.ts @@ -0,0 +1,50 @@ +import type { ParamsDict } from '@ethereumjs/common' + +export const paramsTest: ParamsDict = { + /** + * Frontier/Chainstart + */ + 1: { + // pow + minerReward: '5000000000000000000', // the amount a miner get rewarded for mining a block + }, + /** + * Byzantium HF Meta EIP + */ + 609: { + // gasPrices + ecAddGas: 500, // Gas costs for curve addition precompile + // pow + minerReward: '3000000000000000000', // the amount a miner get rewarded for mining a block + }, + /** +. * Constantinople HF Meta EIP +. */ + 1013: { + // gasPrices + netSstoreNoopGas: 200, // Once per SSTORE operation if the value doesn't change + // pow + minerReward: '2000000000000000000', // The amount a miner gets rewarded for mining a block + }, + /** +. * Petersburg HF Meta EIP +. 
*/ + 1716: { + // gasPrices + netSstoreNoopGas: null, // Removed along EIP-1283 + }, + /** + * Istanbul HF Meta EIP + */ + 1679: { + // gasPrices + ecAddGas: 150, // Gas costs for curve addition precompile + }, + /** + * BLS12-381 precompiles + */ + 2537: { + // gasPrices + Bls12381G1AddGas: 500, // Gas cost of a single BLS12-381 G1 addition precompile-call + }, +} diff --git a/packages/common/test/data/post-merge-hardfork.json b/packages/common/test/data/post-merge-hardfork.json index 9f22e06d1e..71582b9b7d 100644 --- a/packages/common/test/data/post-merge-hardfork.json +++ b/packages/common/test/data/post-merge-hardfork.json @@ -18,7 +18,7 @@ "period": 5, "epoch": 30000 }, - "terminalTotalDifficulty": 2, + "terminalTotalDifficulty": 0, "terminalTotalDifficultyPassed": true }, "nonce": "0x42", diff --git a/packages/common/test/data/shanghai-time.json b/packages/common/test/data/shanghai-time.json index e9ef0adc05..796ede98f5 100644 --- a/packages/common/test/data/shanghai-time.json +++ b/packages/common/test/data/shanghai-time.json @@ -15,7 +15,8 @@ "arrowGlacierBlock": 0, "grayGlacierBlock": 0, "shanghaiTime": 1668699476, - "terminalTotalDifficulty": 9 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "alloc": { "0x0000000000000000000000000000000000000000": { diff --git a/packages/common/test/data/testnet.json b/packages/common/test/data/testnet.json index 0c7531e072..88e4a72ab5 100644 --- a/packages/common/test/data/testnet.json +++ b/packages/common/test/data/testnet.json @@ -1,7 +1,6 @@ { "name": "testnet", "chainId": 12345, - "networkId": 12345, "defaultHardfork": "byzantium", "consensus": { "type": "pow", diff --git a/packages/common/test/data/testnet2.json b/packages/common/test/data/testnet2.json index ddbb57107a..a76bc09f7c 100644 --- a/packages/common/test/data/testnet2.json +++ b/packages/common/test/data/testnet2.json @@ -1,7 +1,6 @@ { "name": "testnet2", "chainId": 22222, - "networkId": 22222, "defaultHardfork": "istanbul", "consensus": { "type": "poa", diff --git a/packages/common/test/data/testnet3.json b/packages/common/test/data/testnet3.json index c8b59ac9f2..7a10962950 100644 --- a/packages/common/test/data/testnet3.json +++ b/packages/common/test/data/testnet3.json @@ -1,7 +1,6 @@ { "name": "testnet3", "chainId": 33333, - "networkId": 33333, "defaultHardfork": "istanbul", "consensus": { "type": "poa", diff --git a/packages/common/test/data/withdrawals-devnet.json b/packages/common/test/data/withdrawals-devnet.json index e9ef0adc05..796ede98f5 100644 --- a/packages/common/test/data/withdrawals-devnet.json +++ b/packages/common/test/data/withdrawals-devnet.json @@ -15,7 +15,8 @@ "arrowGlacierBlock": 0, "grayGlacierBlock": 0, "shanghaiTime": 1668699476, - "terminalTotalDifficulty": 9 + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true }, "alloc": { "0x0000000000000000000000000000000000000000": { diff --git a/packages/common/test/eips.spec.ts b/packages/common/test/eips.spec.ts index c40e50f620..161a1051d7 100644 --- a/packages/common/test/eips.spec.ts +++ b/packages/common/test/eips.spec.ts @@ -1,16 +1,16 @@ import { assert, describe, it } from 'vitest' -import { Chain, Common, Hardfork } from '../src/index.js' +import { Common, Hardfork, Mainnet } from '../src/index.js' describe('[Common/EIPs]: Initialization / Chain params', () => { it('Correct initialization', () => { let eips = [2537, 2929] - const c = new Common({ chain: Chain.Mainnet, eips }) + const c = new Common({ chain: Mainnet, eips }) assert.equal(c.eips(), eips, 
'should initialize with supported EIP') eips = [2718, 2929, 2930] let f = () => { - new Common({ chain: Chain.Mainnet, eips, hardfork: Hardfork.Istanbul }) + new Common({ chain: Mainnet, eips, hardfork: Hardfork.Istanbul }) } assert.doesNotThrow(f, 'Should not throw when initializing with a consistent EIP list') @@ -18,7 +18,7 @@ describe('[Common/EIPs]: Initialization / Chain params', () => { const msg = 'should throw when initializing with an EIP with required EIPs not being activated along' f = () => { - new Common({ chain: Chain.Mainnet, eips, hardfork: Hardfork.Istanbul }) + new Common({ chain: Mainnet, eips, hardfork: Hardfork.Istanbul }) } assert.throws(f, undefined, undefined, msg) }) @@ -28,7 +28,7 @@ describe('[Common/EIPs]: Initialization / Chain params', () => { const eips = [UNSUPPORTED_EIP] const msg = 'should throw on an unsupported EIP' const f = () => { - new Common({ chain: Chain.Mainnet, eips }) + new Common({ chain: Mainnet, eips }) } assert.throws(f, /not supported$/, undefined, msg) @@ -39,14 +39,14 @@ describe('[Common/EIPs]: Initialization / Chain params', () => { eips = [ 2537, ] msg = 'should throw on not meeting minimum hardfork requirements' f = () => { - new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium, eips }) + new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium, eips }) } assert.throws(f, /minimumHardfork/, undefined, msg) */ }) it('eipBlock', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet }) let msg = 'should return correct value' assert.ok(c.eipBlock(1559)! === 12965000n, msg) @@ -56,7 +56,7 @@ describe('[Common/EIPs]: Initialization / Chain params', () => { }) it('eipTimestamp', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet }) let msg = 'should return null for unscheduled eip by timestamp' assert.ok(c.eipTimestamp(1559) === null, msg) diff --git a/packages/common/test/hardforks.spec.ts b/packages/common/test/hardforks.spec.ts index 9375d3a2b2..3e37ec5e3c 100644 --- a/packages/common/test/hardforks.spec.ts +++ b/packages/common/test/hardforks.spec.ts @@ -2,16 +2,19 @@ import { hexToBytes, zeros } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { - Chain, Common, ConsensusAlgorithm, ConsensusType, + Goerli, Hardfork, + Holesky, + Mainnet, + Sepolia, createCommonFromGethGenesis, createCustomCommon, } from '../src/index.js' -import * as gethGenesisKilnJSON from './data/geth-genesis/geth-genesis-kiln.json' +import type { ChainConfig } from '../src/index.js' describe('[Common]: Hardfork logic', () => { it('Hardfork access', () => { @@ -35,13 +38,13 @@ describe('[Common]: Hardfork logic', () => { let c for (const hardfork of supportedHardforks) { - c = new Common({ chain: Chain.Mainnet, hardfork }) + c = new Common({ chain: Mainnet, hardfork }) assert.equal(c.hardfork(), hardfork, hardfork) } }) it('getHardforkBy() / setHardforkBy()', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet }) let msg = 'should get HF correctly' assert.equal(c.getHardforkBy({ blockNumber: 0n }), Hardfork.Chainstart, msg) @@ -85,7 +88,7 @@ describe('[Common]: Hardfork logic', () => { }, ] - const c = createCustomCommon({ hardforks }, { baseChain: Chain.Sepolia }) + const c = createCustomCommon({ hardforks }, Sepolia) const f = () => { c.getHardforkBy({ blockNumber: 0n }) } @@ -96,7 +99,7 @@ describe('[Common]: Hardfork logic', () => { }) it('setHardfork(): hardforkChanged event', () => { - 
const c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const c = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) c.events.on('hardforkChanged', (hardfork: string) => { assert.equal(hardfork, Hardfork.Byzantium, 'should send correct hardforkChanged event') }) @@ -104,24 +107,24 @@ describe('[Common]: Hardfork logic', () => { }) it('hardforkBlock()', () => { - let c = new Common({ chain: Chain.Mainnet }) + let c = new Common({ chain: Mainnet }) let msg = 'should return the correct HF change block for byzantium (provided)' assert.equal(c.hardforkBlock(Hardfork.Byzantium)!, BigInt(4370000), msg) msg = 'should return null if HF does not exist on chain' assert.equal(c.hardforkBlock('thisHardforkDoesNotExist'), null, msg) - c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + c = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) msg = 'should return the correct HF change block for byzantium (set)' assert.equal(c.hardforkBlock()!, BigInt(4370000), msg) - c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + c = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) msg = 'should return the correct HF change block for istanbul (set)' assert.equal(c.hardforkBlock()!, BigInt(9069000), msg) }) it('nextHardforkBlockOrTimestamp()', () => { - const c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const c = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) let msg = 'should work with HF set / return correct next HF block for chainstart (mainnet: chainstart -> homestead)' assert.equal(c.nextHardforkBlockOrTimestamp()!, BigInt(1150000), msg) @@ -132,7 +135,7 @@ describe('[Common]: Hardfork logic', () => { msg = 'should return null if next HF is not available (mainnet: cancun -> prague)' assert.equal(c.nextHardforkBlockOrTimestamp(Hardfork.Cancun), null, msg) - const c2 = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Chainstart }) + const c2 = new Common({ chain: Goerli, hardfork: Hardfork.Chainstart }) msg = 'should return null if next HF is not available (goerli: cancun -> prague)' assert.equal(c2.nextHardforkBlockOrTimestamp(Hardfork.Cancun), null, msg) @@ -143,7 +146,7 @@ describe('[Common]: Hardfork logic', () => { }) it('hardforkIsActiveOnBlock() / activeOnBlock()', () => { - let c = new Common({ chain: Chain.Mainnet }) + let c = new Common({ chain: Mainnet }) let msg = 'Mainnet, byzantium (provided), 4370000 -> true' assert.equal(c.hardforkIsActiveOnBlock(Hardfork.Byzantium, 4370000), true, msg) @@ -153,7 +156,7 @@ describe('[Common]: Hardfork logic', () => { msg = 'Mainnet, byzantium (provided), 4369999 -> false' assert.equal(c.hardforkIsActiveOnBlock(Hardfork.Byzantium, 4369999), false, msg) - c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + c = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) msg = 'Mainnet, byzantium (set), 4370000 -> true' assert.equal(c.hardforkIsActiveOnBlock(null, 4370000), true, msg) @@ -168,7 +171,7 @@ describe('[Common]: Hardfork logic', () => { }) it('hardforkBlock()', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet }) let msg = 'should return correct value' assert.equal(c.hardforkBlock(Hardfork.Berlin)!, BigInt(12244000), msg) @@ -183,7 +186,7 @@ describe('[Common]: Hardfork logic', () => { }) it('hardforkGteHardfork()', () => { - let c = new Common({ chain: Chain.Mainnet }) + let c = new Common({ chain: Mainnet }) let msg = 'Mainnet, constantinople >= 
byzantium (provided) -> true' assert.equal(c.hardforkGteHardfork(Hardfork.Constantinople, Hardfork.Byzantium), true, msg) @@ -196,7 +199,7 @@ describe('[Common]: Hardfork logic', () => { msg = 'Mainnet, spuriousDragon >= byzantium (provided) -> false' assert.equal(c.hardforkGteHardfork(Hardfork.SpuriousDragon, Hardfork.Byzantium), false, msg) - c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + c = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) msg = 'Mainnet, byzantium (set) >= spuriousDragon -> true' assert.equal(c.hardforkGteHardfork(null, Hardfork.SpuriousDragon), true, msg) @@ -211,26 +214,14 @@ describe('[Common]: Hardfork logic', () => { }) it('_calcForkHash()', () => { - const chains: [Chain, Uint8Array][] = [ - [ - Chain.Mainnet, - hexToBytes('0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3'), - ], - [ - Chain.Goerli, - hexToBytes('0xbf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a'), - ], - [ - Chain.Sepolia, - hexToBytes('0x25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9'), - ], - [ - Chain.Holesky, - hexToBytes('0xb5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4'), - ], + const chains: [ChainConfig, Uint8Array][] = [ + [Mainnet, hexToBytes('0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3')], + [Goerli, hexToBytes('0xbf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a')], + [Sepolia, hexToBytes('0x25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9')], + [Holesky, hexToBytes('0xb5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4')], ] - let c = new Common({ chain: Chain.Mainnet }) + let c = new Common({ chain: Mainnet }) const mainnetGenesisHash = chains[0][1] let msg = 'should calc correctly for chainstart (only genesis)' assert.equal(c['_calcForkHash'](Hardfork.Chainstart, mainnetGenesisHash), '0xfc64ec04', msg) @@ -241,30 +232,30 @@ describe('[Common]: Hardfork logic', () => { msg = 'should calc correctly for in-between applied HF' assert.equal(c['_calcForkHash'](Hardfork.Byzantium, mainnetGenesisHash), '0xa00bc324', msg) - for (const [chain, genesisHash] of chains) { - c = new Common({ chain }) + for (const chain of chains) { + c = new Common({ chain: chain[0] }) for (const hf of c.hardforks()) { if (typeof hf.forkHash === 'string') { - const msg = `Verify forkHash calculation for: ${Chain[chain]} -> ${hf.name}` - assert.equal(c['_calcForkHash'](hf.name, genesisHash), hf.forkHash, msg) + const msg = `Verify forkHash calculation for: ${chain[0].name} -> ${hf.name}` + assert.equal(c['_calcForkHash'](hf.name, chain[1]), hf.forkHash, msg) } } } }) it('forkHash()', () => { - let c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + let c = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) let msg = 'should provide correct forkHash for HF set' assert.equal(c.forkHash(), '0xa00bc324', msg) msg = 'should provide correct forkHash for HF provided' assert.equal(c.forkHash(Hardfork.SpuriousDragon), '0x3edd5b10', msg) const genesisHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) assert.equal(c.forkHash(Hardfork.SpuriousDragon, genesisHash), '0x3edd5b10', msg) - c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) + c = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) // unschedule shanghai on it to test c.hardforks() .filter((hf) => hf.name === 
Hardfork.Shanghai) @@ -302,8 +293,8 @@ describe('[Common]: Hardfork logic', () => { istanbulBlock: 0, muirGlacierBlock: 0, berlinBlock: 0, - yolov2Block: 0, - yolov3Block: 0, + yolov2Block: 0, // cspell:disable-line + yolov3Block: 0, // cspell:disable-line londonBlock: 0, mergeForkBlock: 0, terminalTotalDifficulty: 0, @@ -327,7 +318,7 @@ describe('[Common]: Hardfork logic', () => { const zeroCommonShanghaiFork = zeroCommon.forkHash(Hardfork.Shanghai, genesisHash) const zeroCommonCancunFork = zeroCommon.forkHash(Hardfork.Shanghai, genesisHash) - // Ensure that Shangai fork + Cancun fork have equal forkhash + // Ensure that Shanghai fork + Cancun fork have equal forkhash assert.equal(zeroCommonShanghaiFork, zeroCommonCancunFork) // Set the cancun time to the genesis block time (this should not change the forkHash) @@ -342,7 +333,7 @@ describe('[Common]: Hardfork logic', () => { }) it('hardforkForForkHash()', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet }) let msg = 'should return the correct HF array for a matching forkHash' const res = c.hardforkForForkHash('0x3edd5b10')! @@ -353,110 +344,72 @@ describe('[Common]: Hardfork logic', () => { }) it('HF consensus updates', () => { - let c = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Byzantium }) + let c = new Common({ chain: Goerli, hardfork: Hardfork.Byzantium }) assert.equal( c.consensusType(), ConsensusType.ProofOfAuthority, - 'should provide the correct initial chain consensus type' + 'should provide the correct initial chain consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Clique, - 'should provide the correct initial chain consensus algorithm' + 'should provide the correct initial chain consensus algorithm', ) assert.equal( c.consensusConfig()['period'], 15, - 'should provide the correct initial chain consensus configuration' + 'should provide the correct initial chain consensus configuration', ) - c = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Paris }) + c = new Common({ chain: Goerli, hardfork: Hardfork.Paris }) assert.equal( c.consensusType(), ConsensusType.ProofOfStake, - 'should provide the correct updated chain consensus type' + 'should provide the correct updated chain consensus type', ) assert.equal( c.consensusAlgorithm(), ConsensusAlgorithm.Casper, - 'should provide the correct updated chain consensus algorithm' + 'should provide the correct updated chain consensus algorithm', ) assert.deepEqual( c.consensusConfig(), {}, - 'should provide the correct updated chain consensus configuration' + 'should provide the correct updated chain consensus configuration', ) }) it('Should correctly apply hardfork changes', () => { // For sepolia MergeForkIdTransition happens AFTER merge - let c = new Common({ chain: Chain.Sepolia, hardfork: Hardfork.London }) + const c = new Common({ chain: Sepolia, hardfork: Hardfork.London }) assert.equal( c['HARDFORK_CHANGES'][11][0], Hardfork.Paris, - 'should correctly apply hardfork changes' + 'should correctly apply hardfork changes', ) assert.equal( c['HARDFORK_CHANGES'][12][0], Hardfork.MergeForkIdTransition, - 'should correctly apply hardfork changes' + 'should correctly apply hardfork changes', ) // Should give correct ConsensusType pre and post merge assert.equal( c.consensusType(), ConsensusType.ProofOfWork, - 'should provide the correct initial chain consensus type' + 'should provide the correct initial chain consensus type', ) c.setHardfork(Hardfork.Paris) assert.equal( c.consensusType(), 
ConsensusType.ProofOfStake, - `should switch to ProofOfStake consensus on merge` + `should switch to ProofOfStake consensus on merge`, ) c.setHardfork(Hardfork.MergeForkIdTransition) assert.equal( c.consensusType(), ConsensusType.ProofOfStake, - `should stay on ProofOfStake consensus post merge` - ) - - // For kiln MergeForkIdTransition happens BEFORE Merge - c = createCommonFromGethGenesis(gethGenesisKilnJSON, { - chain: 'kiln', - mergeForkIdPostMerge: false, - }) - - // MergeForkIdTransition change should be before Merge - assert.equal( - c['HARDFORK_CHANGES'][10][0], - Hardfork.MergeForkIdTransition, - 'should correctly apply hardfork changes' - ) - assert.equal( - c['HARDFORK_CHANGES'][11][0], - Hardfork.Paris, - 'should correctly apply hardfork changes' - ) - - // Should give correct ConsensusType pre and post merge - c.setHardfork(Hardfork.London) - assert.equal( - c.consensusType(), - ConsensusType.ProofOfWork, - 'should provide the correct initial chain consensus type' - ) - c.setHardfork(Hardfork.Paris) - assert.equal( - c.consensusType(), - ConsensusType.ProofOfStake, - `should switch to ProofOfStake consensus on merge` - ) - c.setHardfork(Hardfork.MergeForkIdTransition) - assert.equal( - c.consensusType(), - ConsensusType.ProofOfWork, - `should give pow consensus as MergeForkIdTransition is pre-merge` + `should stay on ProofOfStake consensus post merge`, ) }) }) diff --git a/packages/common/test/mergePOS.spec.ts b/packages/common/test/mergePOS.spec.ts deleted file mode 100644 index f5dbe3a826..0000000000 --- a/packages/common/test/mergePOS.spec.ts +++ /dev/null @@ -1,317 +0,0 @@ -import { assert, describe, it } from 'vitest' - -import { Chain, Common, Hardfork, createCommonFromGethGenesis } from '../src/index.js' - -import * as postMergeJSON from './data/geth-genesis/post-merge.json' -import * as testnetMerge from './data/merge/testnetMerge.json' -import * as testnetPOS from './data/merge/testnetPOS.json' - -import type { ChainConfig } from '../src/index.js' - -describe('[Common]: Merge/POS specific logic', () => { - it('hardforkTTD()', () => { - const customChains = [testnetMerge] as ChainConfig[] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - assert.equal(c.hardforkTTD(Hardfork.Paris), BigInt(5000), 'should get the HF total difficulty') - assert.equal( - c.hardforkTTD('thisHardforkDoesNotExist'), - null, - 'should return null if HF does not exist on chain' - ) - }) - - it('getHardforkBy(), merge block null, with total difficulty', () => { - const customChains = [testnetMerge] as ChainConfig[] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - - let msg = 'block number < last HF block number set, without TD set' - assert.equal(c.getHardforkBy({ blockNumber: 0n }), 'chainstart', msg) - msg = 'block number > last HF block number set, without TD set' - assert.equal(c.getHardforkBy({ blockNumber: 14n }), 'london', msg) - msg = 'block number > last HF block number set, TD set and equal' - assert.equal(c.getHardforkBy({ blockNumber: 15n, td: 5000n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and higher' - assert.equal(c.getHardforkBy({ blockNumber: 15n, td: 5001n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and smaller' - assert.equal(c.getHardforkBy({ blockNumber: 15n, td: 4999n }), 'london', msg) - msg = 'block number < last HF block number set, TD set and smaller' - assert.equal(c.getHardforkBy({ blockNumber: 12n, td: 4999n }), 
'berlin', msg) - }) - - it('getHardforkBy(), merge block set, with total difficulty', () => { - const testnetMergeWithBlockNumber = JSON.parse(JSON.stringify(testnetMerge)) - // Set Merge block to 15 - testnetMergeWithBlockNumber['hardforks'][8]['block'] = 16 - const customChains = [testnetMergeWithBlockNumber] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - - let msg = 'block number < last HF block number set, without TD set' - assert.equal(c.getHardforkBy({ blockNumber: 0n }), 'chainstart', msg) - msg = 'block number > last HF block number set, without TD set' - assert.equal(c.getHardforkBy({ blockNumber: 16n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and equal' - assert.equal(c.getHardforkBy({ blockNumber: 16n, td: 5000n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and higher' - assert.equal(c.getHardforkBy({ blockNumber: 16n, td: 5001n }), 'paris', msg) - msg = 'block number < last HF block number set, TD set and smaller' - assert.equal(c.getHardforkBy({ blockNumber: 12n, td: 4999n }), 'berlin', msg) - - try { - c.getHardforkBy({ blockNumber: 16n, td: 4999n }) - } catch (e: any) { - msg = 'block number > last HF block number set, TD set and smaller (should throw)' - const eMsg = 'Maximum HF determined by total difficulty is lower than the block number HF' - assert.ok(e.message.includes(eMsg), msg) - } - try { - c.getHardforkBy({ blockNumber: 14n, td: 5000n }) - } catch (e: any) { - msg = 'block number < last HF block number set, TD set and higher (should throw)' - const eMsg = 'HF determined by block number is lower than the minimum total difficulty HF' - assert.ok(e.message.includes(eMsg), msg) - } - }) - - it('getHardforkBy(), merge block set + subsequent HF, with total difficulty', () => { - const testnetMergeWithBlockNumber = JSON.parse(JSON.stringify(testnetMerge)) - // Set Merge block to 15 - testnetMergeWithBlockNumber['hardforks'][8]['block'] = 16 - // Set Shanghai block to 18 - testnetMergeWithBlockNumber['hardforks'][9]['block'] = 18 - const customChains = [testnetMergeWithBlockNumber] as ChainConfig[] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - - const msg = 'block number > last HF block number set, TD set and higher' - assert.equal(c.getHardforkBy({ blockNumber: 18n, td: 5001n }), 'shanghai', msg) - }) - - it('setHardforkBy(), merge block null, with total difficulty', () => { - const customChains = [testnetMerge] as ChainConfig[] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - - let msg = 'block number < last HF block number set, without TD set' - assert.equal(c.setHardforkBy({ blockNumber: 0n }), 'chainstart', msg) - msg = 'block number > last HF block number set, without TD set' - assert.equal(c.setHardforkBy({ blockNumber: 14n }), 'london', msg) - msg = 'block number > last HF block number set, TD set and equal' - assert.equal(c.setHardforkBy({ blockNumber: 15n, td: 5000n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and higher' - assert.equal(c.setHardforkBy({ blockNumber: 15n, td: 5001n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and smaller' - assert.equal(c.setHardforkBy({ blockNumber: 15n, td: 4999n }), 'london', msg) - msg = 'block number < last HF block number set, TD set and smaller' - assert.equal(c.setHardforkBy({ blockNumber: 12n, td: 4999n }), 'berlin', msg) - }) - - it('setHardforkBy(), merge 
block set, with total difficulty', () => { - const testnetMergeWithBlockNumber = JSON.parse(JSON.stringify(testnetMerge)) - // Set Merge block to 15 - testnetMergeWithBlockNumber['hardforks'][8]['block'] = 16 - const customChains = [testnetMergeWithBlockNumber] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - - let msg = 'block number < last HF block number set, without TD set' - assert.equal(c.setHardforkBy({ blockNumber: 0n }), 'chainstart', msg) - msg = 'block number > last HF block number set, without TD set' - assert.equal(c.setHardforkBy({ blockNumber: 16n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and equal' - assert.equal(c.setHardforkBy({ blockNumber: 16n, td: 5000n }), 'paris', msg) - msg = 'block number > last HF block number set, TD set and higher' - assert.equal(c.setHardforkBy({ blockNumber: 16n, td: 5001n }), 'paris', msg) - msg = 'block number < last HF block number set, TD set and smaller' - assert.equal(c.setHardforkBy({ blockNumber: 12n, td: 4999n }), 'berlin', msg) - - try { - c.setHardforkBy({ blockNumber: 16n, td: 4999n }) - assert.fail(`should have thrown td < ttd validation error`) - } catch (e: any) { - msg = 'block number > last HF block number set, TD set and smaller (should throw)' - const eMsg = 'Maximum HF determined by total difficulty is lower than the block number HF' - assert.ok(e.message.includes(eMsg), msg) - } - try { - c.setHardforkBy({ blockNumber: 14n, td: 5001n }) - assert.fail(`should have thrown td > ttd validation error`) - } catch (e: any) { - msg = 'block number < last HF block number set, TD set and higher (should throw)' - const eMsg = 'HF determined by block number is lower than the minimum total difficulty HF' - assert.ok(e.message.includes(eMsg), msg) - } - }) - - it('setHardforkBy(), merge block set + subsequent HF, with total difficulty', () => { - const testnetMergeWithBlockNumber = JSON.parse(JSON.stringify(testnetMerge)) - // Set Merge block to 15 - testnetMergeWithBlockNumber['hardforks'][8]['block'] = 16 - // Set Shanghai block to 18 - testnetMergeWithBlockNumber['hardforks'][9]['block'] = 18 - const customChains = [testnetMergeWithBlockNumber] - const c = new Common({ chain: 'testnetMerge', hardfork: Hardfork.Istanbul, customChains }) - - const msg = 'block number > last HF block number set, TD set and higher' - assert.equal(c.setHardforkBy({ blockNumber: 18n, td: 5001n }), 'shanghai', msg) - }) - - it('Pure POS testnet', () => { - const customChains = [testnetPOS] as ChainConfig[] - const c = new Common({ chain: 'testnetPOS', hardfork: Hardfork.Chainstart, customChains }) - - assert.equal( - c.hardforkTTD(Hardfork.Chainstart), - BigInt(0), - 'should get the HF total difficulty' - ) - - const msg = 'block number > last HF block number set, TD set (0) and equal' - assert.equal(c.getHardforkBy({ blockNumber: 5n, td: 0n }), 'shanghai', msg) - }) - - it('Should fail setting invalid hardfork', () => { - const customChains = [testnetPOS] as ChainConfig[] - const f = () => { - new Common({ chain: 'testnetPOS', hardfork: Hardfork.Istanbul, customChains }) - } - assert.throws(f, undefined, undefined, `failed setting absent hardfork instanbul`) - }) - - it('should get the correct merge hardfork at genesis', async () => { - const c = createCommonFromGethGenesis(postMergeJSON, { chain: 'post-merge' }) - const msg = 'should get HF correctly' - assert.equal(c.getHardforkBy({ blockNumber: 0n }), Hardfork.London, msg) - assert.equal(c.getHardforkBy({ blockNumber: 0n, td: 
0n }), Hardfork.Paris, msg) - }) - - it('test post merge hardforks using Sepolia with block null', () => { - const c = new Common({ chain: Chain.Sepolia }) - let msg = 'should get HF correctly' - - assert.equal(c.getHardforkBy({ blockNumber: 0n }), Hardfork.London, msg) - // Make it null manually as config could be updated later - const mergeHf = c.hardforks().filter((hf) => hf.ttd !== undefined && hf.ttd !== null)[0] - const prevMergeBlockVal = mergeHf.block - mergeHf.block = null - - // should get Hardfork.London even though happened with 1450408 as terminal as config doesn't have that info - assert.equal(c.getHardforkBy({ blockNumber: 1450409n }), Hardfork.London, msg) - // however with correct td in input it should select merge - assert.equal( - c.getHardforkBy({ blockNumber: 1450409n, td: 17000000000000000n }), - Hardfork.Paris, - msg - ) - // should select MergeForkIdTransition even without td specified as the block is set for this hardfork - assert.equal(c.getHardforkBy({ blockNumber: 1735371n }), Hardfork.MergeForkIdTransition, msg) - // also with td specified - assert.equal( - c.getHardforkBy({ blockNumber: 1735371n, td: 17000000000000000n }), - Hardfork.MergeForkIdTransition, - msg - ) - - // Check nextHardforkBlockOrTimestamp should be MergeForkIdTransition's block on london and merge both - assert.equal( - c.nextHardforkBlockOrTimestamp(Hardfork.Berlin), - 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` - ) - assert.equal( - c.nextHardforkBlockOrTimestamp(Hardfork.London), - 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` - ) - assert.equal( - c.nextHardforkBlockOrTimestamp(Hardfork.Paris), - 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` - ) - - let f = () => { - c.getHardforkBy({ blockNumber: 1735371n, td: 15000000000000000n }) - } - assert.throws( - f, - undefined, - undefined, - 'throws error as specified td < merge ttd for a post merge hardfork' - ) - - msg = 'should set HF correctly' - - assert.equal(c.setHardforkBy({ blockNumber: 0n }), Hardfork.London, msg) - assert.equal(c.setHardforkBy({ blockNumber: 1450409n }), Hardfork.London, msg) - assert.equal( - c.setHardforkBy({ blockNumber: 1450409n, td: 17000000000000000n }), - Hardfork.Paris, - msg - ) - assert.equal(c.setHardforkBy({ blockNumber: 1735371n }), Hardfork.MergeForkIdTransition, msg) - assert.equal( - c.setHardforkBy({ blockNumber: 1735371n, td: 17000000000000000n }), - Hardfork.MergeForkIdTransition, - msg - ) - f = () => { - c.setHardforkBy({ blockNumber: 1735371n, td: 15000000000000000n }) - } - assert.throws( - f, - undefined, - undefined, - 'throws error as specified td < merge ttd for a post merge hardfork' - ) - - // restore value - mergeHf.block = prevMergeBlockVal - }) - - it('should get correct merge and post merge hf with merge block specified ', () => { - const c = new Common({ chain: Chain.Sepolia }) - - const mergeHf = c.hardforks().filter((hf) => hf.ttd !== undefined && hf.ttd !== null)[0] - const prevMergeBlockVal = mergeHf.block - // the terminal block on sepolia is 1450408 - mergeHf.block = 1450409 - const msg = 'should get HF correctly' - - // should get merge even without td supplied as the merge hf now has the block specified - assert.equal(c.setHardforkBy({ blockNumber: 1450409n }), Hardfork.Paris, msg) - assert.equal( - c.setHardforkBy({ blockNumber: 1450409n, td: 17000000000000000n }), - Hardfork.Paris, - msg - ) - assert.equal(c.setHardforkBy({ blockNumber: 1735371n }), Hardfork.MergeForkIdTransition, msg) - assert.equal( - 
c.setHardforkBy({ blockNumber: 1735371n, td: 17000000000000000n }), - Hardfork.MergeForkIdTransition, - msg - ) - - // Check nextHardforkBlockOrTimestamp should be MergeForkIdTransition's block on london and merge both - assert.equal( - c.nextHardforkBlockOrTimestamp(Hardfork.London), - 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` - ) - assert.equal( - c.nextHardforkBlockOrTimestamp(Hardfork.Paris), - 1735371n, - `should get nextHardforkBlockOrTimestamp correctly` - ) - - // restore value - mergeHf.block = prevMergeBlockVal - }) - - it('should throw if encounters a double ttd hardfork specification', () => { - const c = new Common({ chain: Chain.Sepolia }) - // Add the ttd to mergeForkIdTransition which occurs post merge in sepolia - c.hardforks().filter((hf) => hf.name === 'mergeForkIdTransition')[0]!['ttd'] = - '17000000000000000' - - const f = () => { - c.setHardforkBy({ blockNumber: 1735371n }) - } - assert.throws(f, undefined, undefined, 'throws error as two hardforks with ttd specified') - }) -}) diff --git a/packages/common/test/params.spec.ts b/packages/common/test/params.spec.ts index c6420e2847..5d2e086877 100644 --- a/packages/common/test/params.spec.ts +++ b/packages/common/test/params.spec.ts @@ -1,24 +1,53 @@ import { assert, describe, it } from 'vitest' -import { Chain, Common, Hardfork } from '../src/index.js' +import { Common, Hardfork, Mainnet } from '../src/index.js' + +import { paramsTest } from './data/paramsTest.js' + +describe('[Common]: Parameter instantiation / params option / Updates', () => { + it('Param option', () => { + const c = new Common({ chain: Mainnet, params: paramsTest }) + let msg = 'Should also work with parameters passed with params option' + assert.equal(c.param('ecAddGas'), BigInt(150), msg) + + const params = { + 1679: { + ecAddGas: 250, + }, + } + c.updateParams(params) + msg = 'Should update parameter on updateParams() and properly rebuild cache' + assert.equal(c.param('ecAddGas'), BigInt(250), msg) + + c.resetParams(params) + msg = 'Should reset all parameters on resetParams() and properly rebuild cache' + assert.equal(c.param('ecAddGas'), BigInt(250), msg) + assert.throws(() => { + c.param('ecMulGas'), BigInt(250) + }) + + msg = 'Should not side-manipulate the original params file during updating internally' + assert.equal(paramsTest['1679']['ecAddGas'], 150) + }) +}) describe('[Common]: Parameter access for param(), paramByHardfork()', () => { it('Basic usage', () => { - const c = new Common({ chain: Chain.Mainnet, eips: [2537] }) + const c = new Common({ chain: Mainnet, params: paramsTest, eips: [2537] }) let msg = 'Should return correct value when HF directly provided' - assert.equal(c.paramByHardfork('gasPrices', 'ecAdd', 'byzantium'), BigInt(500), msg) + assert.equal(c.paramByHardfork('ecAddGas', 'byzantium'), BigInt(500), msg) msg = 'Should return correct value for HF set in class' c.setHardfork(Hardfork.Byzantium) - assert.equal(c.param('gasPrices', 'ecAdd'), BigInt(500), msg) + assert.equal(c.param('ecAddGas'), BigInt(500), msg) c.setHardfork(Hardfork.Istanbul) - assert.equal(c.param('gasPrices', 'ecAdd'), BigInt(150), msg) + assert.equal(c.param('ecAddGas'), BigInt(150), msg) c.setHardfork(Hardfork.MuirGlacier) - assert.equal(c.param('gasPrices', 'ecAdd'), BigInt(150), msg) + assert.equal(c.param('ecAddGas'), BigInt(150), msg) - msg = 'Should return 0n for non-existing value' - assert.equal(c.param('gasPrices', 'notexistingvalue'), BigInt(0), msg) - assert.equal(c.paramByHardfork('gasPrices', 'notexistingvalue', 
'byzantium'), BigInt(0), msg) + assert.throws(() => { + c.paramByHardfork('notExistingValue', 'byzantium') + }) /* // Manual test since no test triggering EIP config available @@ -26,119 +55,80 @@ describe('[Common]: Parameter access for param(), paramByHardfork()', () => { // To run please manually add an "ecAdd" entry with value 12345 to EIP2537 config // and uncomment the test msg = 'EIP config should take precedence over HF config' - assert.equal(c.param('gasPrices', 'ecAdd'), 12345, msg) + assert.equal(c.param('ecAddGas'), 12345, msg) */ }) it('Error cases for param(), paramByHardfork()', () => { - const c = new Common({ chain: Chain.Mainnet }) - - const msg = 'Should return 0n when called with non-existing topic' - assert.equal(c.paramByHardfork('gasPrizes', 'ecAdd', 'byzantium'), 0n, msg) + const c = new Common({ chain: Mainnet, params: paramsTest }) c.setHardfork(Hardfork.Byzantium) assert.equal( - c.param('gasPrices', 'ecAdd'), + c.param('ecAddGas'), BigInt(500), - 'Should return correct value for HF set in class' + 'Should return correct value for HF set in class', ) }) it('Parameter updates', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet, params: paramsTest }) let msg = 'Should return correct value for chain start' - assert.equal( - c.paramByHardfork('pow', 'minerReward', 'chainstart'), - BigInt(5000000000000000000), - msg - ) + assert.equal(c.paramByHardfork('minerReward', 'chainstart'), BigInt(5000000000000000000), msg) msg = 'Should reflect HF update changes' - assert.equal( - c.paramByHardfork('pow', 'minerReward', 'byzantium'), - BigInt(3000000000000000000), - msg - ) + assert.equal(c.paramByHardfork('minerReward', 'byzantium'), BigInt(3000000000000000000), msg) msg = 'Should return updated sstore gas prices for constantinople' - assert.equal( - c.paramByHardfork('gasPrices', 'netSstoreNoopGas', 'constantinople'), - BigInt(200), - msg - ) + assert.equal(c.paramByHardfork('netSstoreNoopGas', 'constantinople'), BigInt(200), msg) msg = 'Should nullify SSTORE related values for petersburg' - assert.equal(c.paramByHardfork('gasPrices', 'netSstoreNoopGas', 'petersburg'), BigInt(0), msg) + assert.equal(c.paramByHardfork('netSstoreNoopGas', 'petersburg'), BigInt(0), msg) }) it('Access by block number, paramByBlock()', () => { - const c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + const c = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium, params: paramsTest }) let msg = 'Should correctly translate block numbers into HF states (updated value)' - assert.equal(c.paramByBlock('pow', 'minerReward', 4370000), BigInt(3000000000000000000), msg) + assert.equal(c.paramByBlock('minerReward', 4370000), BigInt(3000000000000000000), msg) msg = 'Should correctly translate block numbers into HF states (original value)' - assert.equal(c.paramByBlock('pow', 'minerReward', 4369999), BigInt(5000000000000000000), msg) - - msg = 'Should correctly translate total difficulty into HF states' - const td = BigInt('1196768507891266117779') - assert.equal( - c.paramByBlock('pow', 'minerReward', 4370000, td), - BigInt(3000000000000000000), - msg - ) + assert.equal(c.paramByBlock('minerReward', 4369999), BigInt(5000000000000000000), msg) }) it('Access on copied Common instances', () => { - const c = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) + const c = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai, params: paramsTest }) let msg = 'Should correctly access param with param() on original Common' 
- assert.equal(c.param('pow', 'minerReward'), BigInt(2000000000000000000), msg) + assert.equal(c.param('minerReward'), BigInt(2000000000000000000), msg) const cCopy = c.copy() cCopy.setHardfork(Hardfork.Chainstart) msg = 'Should correctly access param with param() on copied Common with hardfork changed' - assert.equal(cCopy.param('pow', 'minerReward'), BigInt(5000000000000000000), msg) + assert.equal(cCopy.param('minerReward'), BigInt(5000000000000000000), msg) msg = 'Should correctly access param with param() on original Common after copy and HF change on copied Common' - assert.equal(c.param('pow', 'minerReward'), BigInt(2000000000000000000), msg) + assert.equal(c.param('minerReward'), BigInt(2000000000000000000), msg) }) it('EIP param access, paramByEIP()', () => { - const c = new Common({ chain: Chain.Mainnet }) + const c = new Common({ chain: Mainnet, params: paramsTest }) - let msg = 'Should return undefined for non-existing value' - assert.equal(c.paramByEIP('gasConfig', 'notexistingvalue', 1559), undefined, msg) - assert.equal(c.paramByEIP('gasPrices', 'notexistingvalue', 2537), undefined, msg) + assert.throws(() => { + c.paramByEIP('notExistingValue', 1559) + }) + assert.throws(() => { + c.paramByEIP('notExistingValue', 2537) + }) const UNSUPPORTED_EIP = 1000000 - let f = function () { - c.paramByEIP('gasPrices', 'Bls12381G1AddGas', UNSUPPORTED_EIP) + const f = function () { + c.paramByEIP('Bls12381G1AddGas', UNSUPPORTED_EIP) } - msg = 'Should throw for using paramByEIP() with an unsupported EIP' + let msg = 'Should throw for using paramByEIP() with an unsupported EIP' assert.throws(f, /not supported$/, undefined, msg) - msg = 'Should return undefined for paramByEIP() with a not existing topic' - assert.equal(c.paramByEIP('notExistingTopic', 'Bls12381G1AddGas', 1559), undefined, msg) - f = function () { - c.paramByEIP('notExistingTopic', 'Bls12381G1AddGas', 2537) - } - msg = 'Should return undefined for paramByEIP() with a not existing topic' - assert.equal(f(), undefined, msg) - msg = 'Should return Bls12381G1AddGas gas price for EIP2537' - assert.equal(c.paramByEIP('gasPrices', 'Bls12381G1AddGas', 2537), BigInt(500), msg) - }) - - it('returns the right block delay for EIP3554', () => { - for (const fork of [Hardfork.MuirGlacier, Hardfork.Berlin]) { - const c = new Common({ chain: Chain.Mainnet, hardfork: fork }) - let delay = c.param('pow', 'difficultyBombDelay') - assert.equal(delay, BigInt(9000000)) - c.setEIPs([3554]) - delay = c.param('pow', 'difficultyBombDelay') - assert.equal(delay, BigInt(9500000)) - } + assert.equal(c.paramByEIP('Bls12381G1AddGas', 2537), BigInt(500), msg) }) }) diff --git a/packages/common/test/timestamp.spec.ts b/packages/common/test/timestamp.spec.ts index 7bd4008208..ffe4945eed 100644 --- a/packages/common/test/timestamp.spec.ts +++ b/packages/common/test/timestamp.spec.ts @@ -2,9 +2,9 @@ import { hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { - Chain, Common, Hardfork, + Mainnet, createCommonFromGethGenesis, createCustomCommon, } from '../src/index.js' @@ -19,17 +19,17 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 0n }), Hardfork.MergeForkIdTransition, - 'should match the HF' + 'should match the HF', ) assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699476n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699576n }), 
Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) }) @@ -44,12 +44,12 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 0n }), Hardfork.MergeForkIdTransition, - 'should match the HF' + 'should match the HF', ) assert.equal( c.nextHardforkBlockOrTimestamp(Hardfork.Shanghai), null, - 'should give null on next Hardfork block' + 'should give null on next Hardfork block', ) }) @@ -64,23 +64,23 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 0n }), Hardfork.MergeForkIdTransition, - 'should match the HF' + 'should match the HF', ) // Should give the shanghai as sharding is schedule a bit post shanghai assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699476n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) assert.equal( c.getHardforkBy({ blockNumber: 1n, timestamp: 1668699576n }), Hardfork.Shanghai, - 'should match the HF' + 'should match the HF', ) }) it('forkHash', () => { - const mainnet = new Common({ chain: Chain.Mainnet }) + const mainnet = new Common({ chain: Mainnet }) const hfs = mainnet.hardforks() const mergeIndex = hfs.findIndex((hf) => hf.name === Hardfork.Paris) const hardforks = hfs.slice(0, mergeIndex + 1).concat([ @@ -99,9 +99,9 @@ describe('[Common]: Timestamp Hardfork logic', () => { }, ]) - const c = createCustomCommon({ hardforks }, { baseChain: Chain.Mainnet }) + const c = createCustomCommon({ hardforks }, Mainnet) const mainnetGenesisHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) for (const hf of c.hardforks()) { if (typeof hf.forkHash === 'string') { @@ -118,12 +118,12 @@ describe('[Common]: Timestamp Hardfork logic', () => { assert.equal( c.hardforkForForkHash('0xc1fdf181')?.name, Hardfork.Shanghai, - 'Should be able to get Shanghai from forkHash' + 'Should be able to get Shanghai from forkHash', ) }) it('setForkHashes', () => { - const mainnet = new Common({ chain: Chain.Mainnet }) + const mainnet = new Common({ chain: Mainnet }) const hfs = mainnet.hardforks() const mergeIndex = hfs.findIndex((hf) => hf.name === Hardfork.Paris) const hardforks = hfs.slice(0, mergeIndex + 1).concat([ @@ -140,9 +140,9 @@ describe('[Common]: Timestamp Hardfork logic', () => { }, ]) - const c = createCustomCommon({ hardforks }, { baseChain: Chain.Mainnet }) + const c = createCustomCommon({ hardforks }, Mainnet) const mainnetGenesisHash = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) let noForkHashes = c.hardforks().reduce((acc, hf) => { diff --git a/packages/common/test/utils.spec.ts b/packages/common/test/utils.spec.ts index c478553469..f9a9e2a8da 100644 --- a/packages/common/test/utils.spec.ts +++ b/packages/common/test/utils.spec.ts @@ -1,11 +1,10 @@ -import { hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { createCommonFromGethGenesis } from '../src/constructors.js' +import { Mainnet } from '../src/chains.js' import { Hardfork } from '../src/enums.js' -import { getInitializedChains, parseGethGenesis } from '../src/utils.js' +import { createCommonFromGethGenesis } from '../src/index.js' +import { parseGethGenesis } from '../src/utils.js' -import * as gethGenesisKilnJSON from './data/geth-genesis/geth-genesis-kiln.json' import * 
as invalidSpuriousDragonJSON from './data/geth-genesis/invalid-spurious-dragon.json' import * as noExtraDataJSON from './data/geth-genesis/no-extra-data.json' import * as poaJSON from './data/geth-genesis/poa.json' @@ -32,22 +31,22 @@ describe('[Utils/Parse]', () => { assert.deepEqual( params.consensus, { type: 'poa', algorithm: 'clique', clique: { period: 15, epoch: 30000 } }, - 'consensus config matches' + 'consensus config matches', ) const poaJSONCopy = Object.assign({}, poaJSON) - poaJSONCopy.nonce = '00' + poaJSONCopy.default.nonce = '00' params = parseGethGenesis(poaJSONCopy, 'poa') assert.equal( params.genesis.nonce, '0x0000000000000000', - 'non-hex prefixed nonce is formatted correctly' + 'non-hex prefixed nonce is formatted correctly', ) assert.equal(params.hardfork, Hardfork.London, 'should correctly infer current hardfork') }) it('should generate expected hash with london block zero and base fee per gas defined', async () => { const params = parseGethGenesis(postMergeJSON, 'post-merge') - assert.equal(params.genesis.baseFeePerGas, postMergeJSON.baseFeePerGas) + assert.equal(params.genesis.baseFeePerGas, postMergeJSON.default.baseFeePerGas) }) it('should successfully parse genesis file with no extraData', async () => { @@ -56,122 +55,24 @@ describe('[Utils/Parse]', () => { assert.equal(params.genesis.timestamp, '0x10', 'timestamp parsed correctly') }) - it('should successfully parse kiln genesis and set forkhash', async () => { - const common = createCommonFromGethGenesis(gethGenesisKilnJSON, { - chain: 'customChain', - genesisHash: hexToBytes('0x51c7fe41be669f69c45c33a56982cbde405313342d9e2b00d7c91a7b284dd4f8'), - mergeForkIdPostMerge: false, - }) - assert.deepEqual( - common.hardforks().map((hf) => hf.name), - [ - 'chainstart', - 'homestead', - 'tangerineWhistle', - 'spuriousDragon', - 'byzantium', - 'constantinople', - 'petersburg', - 'istanbul', - 'berlin', - 'london', - 'mergeForkIdTransition', - 'paris', - ], - 'hardfork parse order should be correct' - ) - for (const hf of common.hardforks()) { - /* eslint-disable @typescript-eslint/no-use-before-define */ - assert.equal(hf.forkHash, kilnForkHashes[hf.name], `${hf.name} forkHash should match`) - } - - assert.equal(common.hardfork(), Hardfork.Paris, 'should correctly infer current hardfork') - - // Ok lets schedule shanghai at block 0, this should force merge to be scheduled at just after - // genesis if even mergeForkIdTransition is not confirmed to be post merge - // This will also check if the forks are being correctly sorted based on block - Object.assign(gethGenesisKilnJSON.config, { shanghaiTime: Math.floor(Date.now() / 1000) }) - const common1 = createCommonFromGethGenesis(gethGenesisKilnJSON, { - chain: 'customChain', - }) - // merge hardfork is now scheduled just after shanghai even if mergeForkIdTransition is not confirmed - // to be post merge - assert.deepEqual( - common1.hardforks().map((hf) => hf.name), - [ - 'chainstart', - 'homestead', - 'tangerineWhistle', - 'spuriousDragon', - 'byzantium', - 'constantinople', - 'petersburg', - 'istanbul', - 'berlin', - 'london', - 'paris', - 'mergeForkIdTransition', - 'shanghai', - ], - 'hardfork parse order should be correct' - ) - - assert.equal(common1.hardfork(), Hardfork.Shanghai, 'should correctly infer current hardfork') + it('should set merge to block 0 when terminalTotalDifficultyPassed is true', () => { + const mergeAtGenesisJson = {} as any + Object.assign(mergeAtGenesisJson, postMergeJSON) + mergeAtGenesisJson.config.terminalTotalDifficultyPassed = true + 
const common = createCommonFromGethGenesis(mergeAtGenesisJson, {}) + assert.equal(common.hardforks().slice(-1)[0].block, 0) }) - it('should successfully parse genesis with hardfork scheduled post merge', async () => { - const common = createCommonFromGethGenesis(postMergeHardforkJSON, { - chain: 'customChain', - }) - assert.deepEqual( - common.hardforks().map((hf) => hf.name), - [ - 'chainstart', - 'homestead', - 'tangerineWhistle', - 'spuriousDragon', - 'byzantium', - 'constantinople', - 'petersburg', - 'istanbul', - 'muirGlacier', - 'berlin', - 'london', - 'paris', - 'shanghai', - ], - 'hardfork parse order should be correct' - ) - - assert.equal(common.getHardforkBy({ blockNumber: 0n }), Hardfork.London, 'london at genesis') - assert.equal( - common.getHardforkBy({ blockNumber: 1n, td: 2n }), - Hardfork.Paris, - 'merge at block 1' - ) - // shanghai is at timestamp 8 - assert.equal( - common.getHardforkBy({ blockNumber: 8n }), - Hardfork.London, - 'without timestamp still london' - ) - assert.equal( - common.getHardforkBy({ blockNumber: 8n, td: 2n }), - Hardfork.Paris, - 'without timestamp at merge' - ) - assert.equal( - common.getHardforkBy({ blockNumber: 8n, timestamp: 8n }), - Hardfork.Shanghai, - 'with timestamp at shanghai' - ) - // should be post merge at shanghai - assert.equal( - common.getHardforkBy({ blockNumber: 8n, td: 2n, timestamp: 8n }), - Hardfork.Shanghai, - 'post merge shanghai' - ) - assert.equal(common.hardfork(), Hardfork.Shanghai, 'should correctly infer common hardfork') + it('should set merge to block 0 when terminalTotalDifficultyPassed is true', () => { + const mergeAtGenesisJson = {} as any + Object.assign(mergeAtGenesisJson, postMergeJSON) + mergeAtGenesisJson.config.terminalTotalDifficultyPassed = false + try { + createCommonFromGethGenesis(mergeAtGenesisJson, {}) + assert.fail('should have thrown') + } catch (err: any) { + assert.ok(err.message.includes('nonzero terminal total difficulty')) + } }) it('should successfully assign mainnet deposit contract address when none provided', async () => { @@ -179,17 +80,16 @@ describe('[Utils/Parse]', () => { chain: 'customChain', }) const depositContractAddress = - common['_chainParams'].depositContractAddress ?? - getInitializedChains().mainnet.depositContractAddress + common['_chainParams'].depositContractAddress ?? Mainnet.depositContractAddress assert.equal( depositContractAddress, - getInitializedChains().mainnet.depositContractAddress, - 'should assign mainnet deposit contract' + Mainnet.depositContractAddress, + 'should assign mainnet deposit contract', ) }) - it('should correctly parse deposit contract adddress', async () => { + it('should correctly parse deposit contract address', async () => { // clone json out to not have side effects const customJson = JSON.parse(JSON.stringify(postMergeHardforkJSON)) Object.assign(customJson.config, { @@ -200,28 +100,12 @@ describe('[Utils/Parse]', () => { chain: 'customChain', }) const depositContractAddress = - common['_chainParams'].depositContractAddress ?? - getInitializedChains().mainnet.depositContractAddress + common['_chainParams'].depositContractAddress ?? 
Mainnet.depositContractAddress assert.equal( depositContractAddress, '0x4242424242424242424242424242424242424242', - 'should parse correct address' + 'should parse correct address', ) }) }) - -const kilnForkHashes: any = { - chainstart: '0xbcadf543', - homestead: '0xbcadf543', - tangerineWhistle: '0xbcadf543', - spuriousDragon: '0xbcadf543', - byzantium: '0xbcadf543', - constantinople: '0xbcadf543', - petersburg: '0xbcadf543', - istanbul: '0xbcadf543', - berlin: '0xbcadf543', - london: '0xbcadf543', - mergeForkIdTransition: '0x013fd1b5', - paris: '0x013fd1b5', -} diff --git a/packages/common/tsconfig.lint.json b/packages/common/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/common/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/devp2p/.eslintrc.cjs b/packages/devp2p/.eslintrc.cjs index 940a7fc316..9c3e67209e 100644 --- a/packages/devp2p/.eslintrc.cjs +++ b/packages/devp2p/.eslintrc.cjs @@ -5,4 +5,13 @@ module.exports = { 'no-redeclare': 'off', 'no-undef': 'off', // temporary until fixed: 'NodeJS' is not defined }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], } diff --git a/packages/devp2p/CHANGELOG.md b/packages/devp2p/CHANGELOG.md index 17ffa2d4e5..c15c99b400 100644 --- a/packages/devp2p/CHANGELOG.md +++ b/packages/devp2p/CHANGELOG.md @@ -6,7 +6,11 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 6.1.2 - 2024-03-05 +## 6.1.3 - 2024-08-15 + +Maintenance release with downstream dependency updates, see PR [#3527](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3527) + +## 6.1.2 - 2024-03-18 - Fix a type error related to the `lru-cache` dependency, PR [#3285](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3285) - Downstream dependency updates, see PR [#3297](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3297) @@ -89,7 +93,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. 
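As a minimal sketch of what the `Uint8Array`-based API looks like in practice (it only uses the `@ethereumjs/util` helpers that already appear in the example diffs further down: `hexToBytes`, `bytesToHex`, `equalsBytes`), byte values are now plain `Uint8Array`s that round-trip through `0x`-prefixed hex strings, so the same code can run in Node.js and in the browser without a `Buffer` polyfill:

```ts
import { bytesToHex, equalsBytes, hexToBytes } from '@ethereumjs/util'

// A plain Uint8Array instead of the Node.js-specific Buffer
// (the hex value is the sample private key used in the devp2p examples below)
const key: Uint8Array = hexToBytes(
  '0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220',
)

console.log(key instanceof Uint8Array) // true
console.log(bytesToHex(key)) // round-trips back to the 0x-prefixed input string
console.log(equalsBytes(key, hexToBytes(bytesToHex(key)))) // true
```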
@@ -312,7 +316,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -320,7 +324,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -464,7 +468,7 @@ DNS discovery can be activated in the `DPT` module with the `shouldGetDnsPeers` - `maxPeers`, `dpt`, and `listenPort` are now optional in `RLPxOptions`, PR [#1019](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1019) - New `DPTOptions` interface, `DPT` type improvements, PR [#1029](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1029) - Improved `RLPx` disconnect reason debug output, PR [#1031](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1031) -- `LES`: unifiy `ETH` and `LES` `sendMessage()` signature by somewhat change payload semantics and pass in `reqId` along, PR [#1087](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1087) +- `LES`: unify `ETH` and `LES` `sendMessage()` signature by somewhat change payload semantics and pass in `reqId` along, PR [#1087](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1087) - `RLPx`: limit connection refill debug logging to a restarted interval log message to not bloat logging too much, PR [#1087](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1087) ### Connection Reliability / Bug Fixes diff --git a/packages/devp2p/README.md b/packages/devp2p/README.md index a9c7269b8d..7021acefeb 100644 --- a/packages/devp2p/README.md +++ b/packages/devp2p/README.md @@ -172,7 +172,7 @@ import { hexToBytes } from '@ethereumjs/util' const main = async () => { const common = new Common({ chain: Chain.Mainnet }) const PRIVATE_KEY = hexToBytes( - '0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220' + '0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220', ) const rlpx = new RLPx(PRIVATE_KEY, { maxPeers: 25, @@ -541,7 +541,7 @@ The following is a list of major implementations of the `devp2p` stack in other - Python: [pydevp2p](https://github.com/ethereum/pydevp2p) - Go: [Go Ethereum](https://github.com/ethereum/go-ethereum/tree/master/p2p) -- 
Elixir: [Exthereum](https://github.com/exthereum/exth_crypto) +- Elixir: [Exthereum](https://github.com/exthereum/exth_crypto) ### Links diff --git a/packages/devp2p/examples/dpt.ts b/packages/devp2p/examples/dpt.ts index c054953328..af85e5a13f 100644 --- a/packages/devp2p/examples/dpt.ts +++ b/packages/devp2p/examples/dpt.ts @@ -1,5 +1,5 @@ import { DPT } from '@ethereumjs/devp2p' -import { bytesToHex, hexToBytes, randomBytes } from '@ethereumjs/util' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' const PRIVATE_KEY = hexToBytes('0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220') const main = async () => { @@ -12,7 +12,7 @@ const main = async () => { }) console.log(`DPT is active and has id - ${bytesToHex(dpt.id!)}`) // Should log the DPT's hex ID - 0xcd80bb7a768432302d267729c15da61d172373ea036... - await dpt.destroy() + dpt.destroy() } -main() +void main() diff --git a/packages/devp2p/examples/peer-communication-les.ts b/packages/devp2p/examples/peer-communication-les.ts index f85d1ad84e..8aeb9be843 100644 --- a/packages/devp2p/examples/peer-communication-les.ts +++ b/packages/devp2p/examples/peer-communication-les.ts @@ -1,20 +1,21 @@ -import { bytesToInt, intToBytes, randomBytes, bytesToHex, hexToBytes } from '@ethereumjs/util' -import { Block, BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlockFromBytesArray, createBlockHeaderFromBytesArray } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import * as devp2p from '@ethereumjs/devp2p' +import { bytesToHex, bytesToInt, hexToBytes, intToBytes, randomBytes } from '@ethereumjs/util' import chalk from 'chalk' import ms from 'ms' -import * as devp2p from '@ethereumjs/devp2p' -import { ETH, Peer } from '@ethereumjs/devp2p' +import type { Block, BlockHeader } from '@ethereumjs/block' +import type { Peer } from '@ethereumjs/devp2p' const PRIVATE_KEY = randomBytes(32) const GENESIS_TD = 1 const GENESIS_HASH = hexToBytes( - '0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177' + '0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177', ) -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const bootstrapNodes = common.bootstrapNodes() const BOOTNODES = bootstrapNodes.map((node: any) => { return { @@ -72,8 +73,8 @@ rlpx.events.on('peer:added', (peer) => { const clientId = peer.getHelloMessage().clientId console.log( chalk.green( - `Add peer: ${addr} ${clientId} (les${les.getVersion()}) (total: ${rlpx.getPeers().length})` - ) + `Add peer: ${addr} ${clientId} (les${les.getVersion()}) (total: ${rlpx.getPeers().length})`, + ), ) les.sendStatus({ @@ -104,11 +105,11 @@ rlpx.events.on('peer:added', (peer) => { case devp2p.LES.MESSAGE_CODES.BLOCK_HEADERS: { if (payload[2].length > 1) { console.log( - `${addr} not more than one block header expected (received: ${payload[2].length})` + `${addr} not more than one block header expected (received: ${payload[2].length})`, ) break } - const header = BlockHeader.fromValuesArray(payload[2][0], { common }) + const header = createBlockHeaderFromBytesArray(payload[2][0], { common }) setTimeout(() => { les.sendMessage(devp2p.LES.MESSAGE_CODES.GET_BLOCK_BODIES, [ @@ -123,7 +124,7 @@ rlpx.events.on('peer:added', (peer) => { case devp2p.LES.MESSAGE_CODES.BLOCK_BODIES: { if (payload[2].length !== 1) { console.log( - `${addr} not 
more than one block body expected (received: ${payload[2].length})` + `${addr} not more than one block body expected (received: ${payload[2].length})`, ) break } @@ -131,7 +132,7 @@ rlpx.events.on('peer:added', (peer) => { const header2 = requests.bodies.shift() const txs = payload[2][0][0] const uncleHeaders = payload[2][0][1] - const block = createBlockFromValuesArray([header2.raw(), txs, uncleHeaders], { common }) + const block = createBlockFromBytesArray([header2.raw(), txs, uncleHeaders], { common }) const isValid = await isValidBlock(block) let isValidPayload = false if (isValid) { @@ -155,9 +156,9 @@ rlpx.events.on('peer:removed', (peer, reasonCode, disconnectWe) => { console.log( chalk.yellow( `Remove peer: ${getPeerAddr(peer)} - ${who}, reason: ${peer.getDisconnectPrefix( - reasonCode - )} (${String(reasonCode)}) (total: ${total})` - ) + reasonCode, + )} (${String(reasonCode)}) (total: ${total})`, + ), ) }) @@ -203,11 +204,11 @@ function onNewBlock(block: Block, peer: Peer) { const blockNumber = block.header.number console.log( - `----------------------------------------------------------------------------------------------------------` + `----------------------------------------------------------------------------------------------------------`, ) console.log(`block ${blockNumber} received: ${blockHashHex} (from ${getPeerAddr(peer)})`) console.log( - `----------------------------------------------------------------------------------------------------------` + `----------------------------------------------------------------------------------------------------------`, ) } @@ -229,7 +230,7 @@ setInterval(() => { console.log( chalk.yellow( - `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}` - ) + `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}`, + ), ) }, ms('30s')) diff --git a/packages/devp2p/examples/peer-communication.ts b/packages/devp2p/examples/peer-communication.ts index df602d8c72..e98808331c 100644 --- a/packages/devp2p/examples/peer-communication.ts +++ b/packages/devp2p/examples/peer-communication.ts @@ -1,26 +1,27 @@ +import { BlockHeader, createBlockFromBytesArray } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import * as devp2p from '@ethereumjs/devp2p' +import { RLP } from '@ethereumjs/rlp' +import { createTxFromBlockBodyData } from '@ethereumjs/tx' import { bytesToInt, - intToBytes, - randomBytes, bytesToUnprefixedHex, equalsBytes, hexToBytes, + intToBytes, + randomBytes, } from '@ethereumjs/util' -import { Block, BlockHeader, createBlockFromValuesArray } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { RLP } from '@ethereumjs/rlp' -import { TransactionFactory, TypedTransaction } from '@ethereumjs/tx' import chalk from 'chalk' import { LRUCache } from 'lru-cache' - import ms from 'ms' -import * as devp2p from '@ethereumjs/devp2p' -import { ETH, Peer } from '@ethereumjs/devp2p' +import type { Block } from '@ethereumjs/block' +import type { ETH, Peer } from '@ethereumjs/devp2p' +import type { TypedTransaction } from '@ethereumjs/tx' const PRIVATE_KEY = randomBytes(32) -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Berlin }) const bootstrapNodes = common.bootstrapNodes() const BOOTNODES = bootstrapNodes.map((node: any) => { return { @@ -45,7 +46,7 @@ const CHECK_BLOCK_TITLE = 
'Berlin Fork' // Only for debugging/console output const CHECK_BLOCK_NR = 12244000 const CHECK_BLOCK = '1638380ab737e0e916bd1c7f23bd2bab2a532e44b90047f045f262ee21c42b21' const CHECK_BLOCK_HEADER = RLP.decode( - '0xf90219a0d44a4d33e28d7ea9edd12b69bd32b394587eee498b0e2543ce2bad1877ffbeaca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347941ad91ee08f21be3de0ba2ba6918e714da6b45836a0fdec060ee45e55da9e36060fc95dddd0bdc47e447224666a895d9f0dc9adaa0ca0092d9fcc02ca9b372daec726704ce720d3aa366739868f4820ecaabadb9ac309a0974fee017515a46303f467b6fd50872994db1b0ea64d3455bad93ff9678aced9b90100356050004c5c89691add79838a01d4c302419252a4d3c96e9273908b7ee84660886c070607b4928c416a1800746a0d1dbb442d0baf06eea321422263726748600cc200e82aec08336863514d12d665718016989189c116bc0947046cc6718110586c11464a189000a11a41cc96991970153d88840768170244197e164c6204249b9091a0052ac85088c8108a4418dd2903690a036722623888ea14e90458a390a305a2342cb02766094f68c4100036330719848b48411614686717ab6068a46318204232429dc42020608802ceecd66c3c33a3a1fc6e82522049470328a4a81ba07c6604228ba94f008476005087a6804463696b41002650c0fdf548448a90408717ca31b6d618e883bad42083be153b83bdfbb1846078104798307834383639373636353666366532303530366636663663a0ae1de0acd35a98e211c7e276ad7524bb84a5e1b8d33dd7d1c052b095b564e8b888cca66773148b6e12' + '0xf90219a0d44a4d33e28d7ea9edd12b69bd32b394587eee498b0e2543ce2bad1877ffbeaca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347941ad91ee08f21be3de0ba2ba6918e714da6b45836a0fdec060ee45e55da9e36060fc95dddd0bdc47e447224666a895d9f0dc9adaa0ca0092d9fcc02ca9b372daec726704ce720d3aa366739868f4820ecaabadb9ac309a0974fee017515a46303f467b6fd50872994db1b0ea64d3455bad93ff9678aced9b90100356050004c5c89691add79838a01d4c302419252a4d3c96e9273908b7ee84660886c070607b4928c416a1800746a0d1dbb442d0baf06eea321422263726748600cc200e82aec08336863514d12d665718016989189c116bc0947046cc6718110586c11464a189000a11a41cc96991970153d88840768170244197e164c6204249b9091a0052ac85088c8108a4418dd2903690a036722623888ea14e90458a390a305a2342cb02766094f68c4100036330719848b48411614686717ab6068a46318204232429dc42020608802ceecd66c3c33a3a1fc6e82522049470328a4a81ba07c6604228ba94f008476005087a6804463696b41002650c0fdf548448a90408717ca31b6d618e883bad42083be153b83bdfbb1846078104798307834383639373636353666366532303530366636663663a0ae1de0acd35a98e211c7e276ad7524bb84a5e1b8d33dd7d1c052b095b564e8b888cca66773148b6e12', ) const getPeerAddr = (peer: Peer) => `${peer['_socket'].remoteAddress}:${peer['_socket'].remotePort}` @@ -88,8 +89,8 @@ rlpx.events.on('peer:added', (peer) => { const clientId = peer.getHelloMessage().clientId console.log( chalk.green( - `Add peer: ${addr} ${clientId} (eth${eth.getVersion()}) (total: ${rlpx.getPeers().length})` - ) + `Add peer: ${addr} ${clientId} (eth${eth.getVersion()}) (total: ${rlpx.getPeers().length})`, + ), ) eth.sendStatus({ @@ -141,7 +142,7 @@ rlpx.events.on('peer:added', (peer) => { if (!forkVerified) break for (const item of payload) { - const tx = TransactionFactory.fromBlockBodyData(item) + const tx = createTxFromBlockBodyData(item) if (tx.isValid()) onNewTx(tx, peer) } @@ -166,14 +167,14 @@ rlpx.events.on('peer:added', (peer) => { if (!forkVerified) { if (payload[1].length !== 1) { console.log( - `${addr} expected one header for ${CHECK_BLOCK_TITLE} verify (received: ${payload[1].length})` + `${addr} expected one header for ${CHECK_BLOCK_TITLE} verify (received: ${payload[1].length})`, ) peer.disconnect(devp2p.DISCONNECT_REASON.USELESS_PEER) break } const expectedHash = CHECK_BLOCK - const header = 
BlockHeader.fromValuesArray(payload[1][0], { common }) + const header = createBlockHeaderFromBytesArray(payload[1][0], { common }) if (bytesToUnprefixedHex(header.hash()) === expectedHash) { console.log(`${addr} verified to be on the same side of the ${CHECK_BLOCK_TITLE}`) clearTimeout(forkDrop) @@ -182,13 +183,13 @@ rlpx.events.on('peer:added', (peer) => { } else { if (payload[1].length > 1) { console.log( - `${addr} not more than one block header expected (received: ${payload[1].length})` + `${addr} not more than one block header expected (received: ${payload[1].length})`, ) break } let isValidPayload = false - const header = BlockHeader.fromValuesArray(payload[1][0], { common }) + const header = createBlockHeaderFromBytesArray(payload[1][0], { common }) while (requests.headers.length > 0) { const blockHash = requests.headers.shift() if (equalsBytes(header.hash(), blockHash)) { @@ -206,7 +207,7 @@ rlpx.events.on('peer:added', (peer) => { if (!isValidPayload) { console.log( - `${addr} received wrong block header ${bytesToUnprefixedHex(header.hash())}` + `${addr} received wrong block header ${bytesToUnprefixedHex(header.hash())}`, ) } } @@ -227,7 +228,7 @@ rlpx.events.on('peer:added', (peer) => { if (payload[1].length !== 1) { console.log( - `${addr} not more than one block body expected (received: ${payload[1].length})` + `${addr} not more than one block body expected (received: ${payload[1].length})`, ) break } @@ -237,7 +238,7 @@ rlpx.events.on('peer:added', (peer) => { const header = requests.bodies.shift() const txs = payload[1][0][0] const uncleHeaders = payload[1][0][1] - const block = createBlockFromValuesArray([header.raw(), txs, uncleHeaders], { common }) + const block = createBlockFromBytesArray([header.raw(), txs, uncleHeaders], { common }) const isValid = await isValidBlock(block) if (isValid) { isValidPayload = true @@ -256,7 +257,7 @@ rlpx.events.on('peer:added', (peer) => { case devp2p.ETH.MESSAGE_CODES.NEW_BLOCK: { if (!forkVerified) break - const newBlock = createBlockFromValuesArray(payload[0], { common }) + const newBlock = createBlockFromBytesArray(payload[0], { common }) const isValidNewBlock = await isValidBlock(newBlock) if (isValidNewBlock) onNewBlock(newBlock, peer) @@ -294,9 +295,9 @@ rlpx.events.on('peer:removed', (peer, reasonCode, disconnectWe) => { console.log( chalk.yellow( `Remove peer: ${getPeerAddr(peer)} - ${who}, reason: ${peer.getDisconnectPrefix( - reasonCode - )} (${String(reasonCode)}) (total: ${total})` - ) + reasonCode, + )} (${String(reasonCode)}) (total: ${total})`, + ), ) }) @@ -378,7 +379,7 @@ setInterval(() => { console.log( chalk.yellow( - `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}` - ) + `Total nodes in DPT: ${peersCount}, open slots: ${openSlots}, queue: ${queueLength} / ${queueLength2}`, + ), ) }, ms('30s')) diff --git a/packages/devp2p/examples/rlpx.ts b/packages/devp2p/examples/rlpx.ts index b2293763d3..dc363a0eaf 100644 --- a/packages/devp2p/examples/rlpx.ts +++ b/packages/devp2p/examples/rlpx.ts @@ -1,11 +1,11 @@ -import { Chain, Common } from '@ethereumjs/common' -import { RLPx, ETH } from '@ethereumjs/devp2p' +import { Common, Mainnet } from '@ethereumjs/common' +import { ETH, RLPx } from '@ethereumjs/devp2p' import { hexToBytes } from '@ethereumjs/util' const main = async () => { - const common = new Common({ chain: Chain.Mainnet }) + const common = new Common({ chain: Mainnet }) const PRIVATE_KEY = hexToBytes( - 
'0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220' + '0xed6df2d4b7e82d105538e4a1279925a16a84e772243e80a561e1b201f2e78220', ) const rlpx = new RLPx(PRIVATE_KEY, { maxPeers: 25, @@ -13,7 +13,7 @@ const main = async () => { common, }) console.log(`RLPx is active - ${rlpx._isAlive()}`) - await rlpx.destroy() + rlpx.destroy() } -main() +void main() diff --git a/packages/devp2p/examples/simple.ts b/packages/devp2p/examples/simple.ts index 130fbac857..817f5940f3 100644 --- a/packages/devp2p/examples/simple.ts +++ b/packages/devp2p/examples/simple.ts @@ -1,13 +1,12 @@ -import { Chain, Common } from '@ethereumjs/common' -import chalk from 'chalk' -import { bytesToHex, hexToBytes } from '@ethereumjs/util' - +import { Common, Mainnet } from '@ethereumjs/common' import { DPT } from '@ethereumjs/devp2p' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' +import chalk from 'chalk' const TIMEOUT = 5000 // 5 second timeout const PRIVATE_KEY = '0xd772e3d6a001a38064dd23964dd2836239fa0e6cec8b28972a87460a17210fe9' -const config = new Common({ chain: Chain.Mainnet }) +const config = new Common({ chain: Mainnet }) const bootstrapNodes = config.bootstrapNodes() const BOOTNODES = bootstrapNodes.map((node: any) => { return { diff --git a/packages/devp2p/package.json b/packages/devp2p/package.json index a20a1e5591..88c2e2afe8 100644 --- a/packages/devp2p/package.json +++ b/packages/devp2p/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/devp2p", - "version": "6.1.2", + "version": "6.1.3", "description": "A JavaScript implementation of ÐΞVp2p", "keywords": [ "ethereum", @@ -55,12 +55,13 @@ "lint:fix": "../../config/cli/lint-fix.sh", "prepublishOnly": "../../config/cli/prepublish.sh", "test": "vitest run", + "test:node": "npm run test", "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "@scure/base": "^1.1.7", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1", @@ -69,8 +70,8 @@ "snappyjs": "^0.6.1" }, "devDependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/tx": "^5.3.0", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/tx": "^5.4.0", "@types/debug": "^4.1.9", "@types/k-bucket": "^5.0.0", "chalk": "^4.1.2", diff --git a/packages/devp2p/src/dns/dns.ts b/packages/devp2p/src/dns/dns.ts index d2d056994e..2b0ad7226a 100644 --- a/packages/devp2p/src/dns/dns.ts +++ b/packages/devp2p/src/dns/dns.ts @@ -32,7 +32,7 @@ export class DNS { this._common = options.common this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } /** @@ -129,7 +129,7 @@ export class DNS { * * @param {string[]} branches * @param {SearchContext} context - * @return {String} subdomian + * @return {String} subdomain */ private _selectRandomPath(branches: string[], context: SearchContext): string { // Identify domains already visited in this traversal of the DNS tree. diff --git a/packages/devp2p/src/dns/enr.ts b/packages/devp2p/src/dns/enr.ts index 996ed5938c..52bea14b9e 100644 --- a/packages/devp2p/src/dns/enr.ts +++ b/packages/devp2p/src/dns/enr.ts @@ -74,7 +74,7 @@ export class ENR { const isVerified = ecdsaVerify( signature as Uint8Array, (common?.customCrypto.keccak256 ?? 
keccak256)(RLP.encode([seq, ...kvs])), - obj.secp256k1 + obj.secp256k1, ) if (!isVerified) throw new Error('Unable to verify ENR signature') @@ -100,19 +100,19 @@ export class ENR { if (!root.startsWith(this.ROOT_PREFIX)) throw new Error(`ENR root entry must start with '${this.ROOT_PREFIX}'`) - const rootVals = sscanf( + const rootValues = sscanf( root, `${this.ROOT_PREFIX}v1 e=%s l=%s seq=%d sig=%s`, 'eRoot', 'lRoot', 'seq', - 'signature' + 'signature', ) as ENRRootValues - if (!rootVals.eRoot) throw new Error("Could not parse 'e' value from ENR root entry") - if (!rootVals.lRoot) throw new Error("Could not parse 'l' value from ENR root entry") - if (!rootVals.seq) throw new Error("Could not parse 'seq' value from ENR root entry") - if (!rootVals.signature) throw new Error("Could not parse 'sig' value from ENR root entry") + if (!rootValues.eRoot) throw new Error("Could not parse 'e' value from ENR root entry") + if (!rootValues.lRoot) throw new Error("Could not parse 'l' value from ENR root entry") + if (!rootValues.seq) throw new Error("Could not parse 'seq' value from ENR root entry") + if (!rootValues.signature) throw new Error("Could not parse 'sig' value from ENR root entry") const decodedPublicKey = [...base32.decode(publicKey + '===').values()] @@ -122,7 +122,7 @@ export class ENR { const signedComponent = root.split(' sig')[0] const signedComponentBytes = utf8ToBytes(signedComponent) const signatureBytes = Uint8Array.from( - [...base64url.decode(rootVals.signature + '=').values()].slice(0, 64) + [...base64url.decode(rootValues.signature + '=').values()].slice(0, 64), ) const keyBytes = Uint8Array.from(decodedPublicKey) @@ -130,12 +130,12 @@ export class ENR { const isVerified = ecdsaVerify( signatureBytes, (common?.customCrypto.keccak256 ?? keccak256)(signedComponentBytes), - keyBytes + keyBytes, ) if (!isVerified) throw new Error('Unable to verify ENR root signature') - return rootVals.eRoot + return rootValues.eRoot } /** @@ -150,17 +150,17 @@ export class ENR { if (!tree.startsWith(this.TREE_PREFIX)) throw new Error(`ENR tree entry must start with '${this.TREE_PREFIX}'`) - const treeVals = sscanf( + const treeValues = sscanf( tree, `${this.TREE_PREFIX}//%s@%s`, 'publicKey', - 'domain' + 'domain', ) as ENRTreeValues - if (!treeVals.publicKey) throw new Error('Could not parse public key from ENR tree entry') - if (!treeVals.domain) throw new Error('Could not parse domain from ENR tree entry') + if (!treeValues.publicKey) throw new Error('Could not parse public key from ENR tree entry') + if (!treeValues.domain) throw new Error('Could not parse domain from ENR tree entry') - return treeVals + return treeValues } /** diff --git a/packages/devp2p/src/dpt/ban-list.ts b/packages/devp2p/src/dpt/ban-list.ts index 639a700e96..cae50bd8af 100644 --- a/packages/devp2p/src/dpt/ban-list.ts +++ b/packages/devp2p/src/dpt/ban-list.ts @@ -16,7 +16,7 @@ export class BanList { constructor() { this._lru = new LRUCache({ max: 10000 }) this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false } add(obj: string | Uint8Array | PeerInfo, maxAge?: number) { diff --git a/packages/devp2p/src/dpt/dpt.ts b/packages/devp2p/src/dpt/dpt.ts index 60c6f61f1e..8a6b2341c7 100644 --- a/packages/devp2p/src/dpt/dpt.ts +++ b/packages/devp2p/src/dpt/dpt.ts @@ -86,7 +86,7 @@ export class DPT { this._refreshIntervalId = setInterval(() => this.refresh(), refreshIntervalSubdivided) this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } bind(...args: any[]): void { @@ -113,7 +113,8 @@ export class DPT { }) .then(() => { if (++count < oldPeers.length) return - if (err === null) this._banlist.add(newPeer, 300000) // 5 min * 60 * 1000 + if (err === null) + this._banlist.add(newPeer, 300000) // 5 min * 60 * 1000 else this._kbucket.add(newPeer) }) } @@ -200,7 +201,7 @@ export class DPT { let peers = this._kbucket.closest(id) if (this._onlyConfirmed && this._confirmedPeers.size > 0) { peers = peers.filter((peer) => - this._confirmedPeers.has(bytesToUnprefixedHex(peer.id as Uint8Array)) ? true : false + this._confirmedPeers.has(bytesToUnprefixedHex(peer.id as Uint8Array)) ? true : false, ) } return peers @@ -231,7 +232,7 @@ export class DPT { const peers = this.getPeers() if (this.DEBUG) { this._debug( - `call .refresh() (selector ${this._refreshIntervalSelectionCounter}) (${peers.length} peers in table)` + `call .refresh() (selector ${this._refreshIntervalSelectionCounter}) (${peers.length} peers in table)`, ) } @@ -259,7 +260,7 @@ export class DPT { this._debug( `.refresh() Adding ${dnsPeers.length} from DNS tree, (${ this.getPeers().length - } current peers in table)` + } current peers in table)`, ) } diff --git a/packages/devp2p/src/dpt/message.ts b/packages/devp2p/src/dpt/message.ts index f0d09ccfcd..8e861c6519 100644 --- a/packages/devp2p/src/dpt/message.ts +++ b/packages/devp2p/src/dpt/message.ts @@ -197,7 +197,7 @@ export function decode(bytes: Uint8Array, common?: Common) { signature, recoverId, sighash, - false + false, ) return { typename, data, publicKey } } diff --git a/packages/devp2p/src/dpt/server.ts b/packages/devp2p/src/dpt/server.ts index dfb982e3f0..3fea7d07a7 100644 --- a/packages/devp2p/src/dpt/server.ts +++ b/packages/devp2p/src/dpt/server.ts @@ -63,7 +63,7 @@ export class Server { this._common = options.common this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } bind(...args: any[]) { @@ -90,8 +90,8 @@ export class Server { async ping(peer: PeerInfo): Promise { this._isAliveCheck() - const rckey = `${peer.address}:${peer.udpPort}` - const promise = this._requestsCache.get(rckey) + const rcKey = `${peer.address}:${peer.udpPort}` + const promise = this._requestsCache.get(rcKey) if (promise !== undefined) return promise const hash = this._send(peer, 'ping', { @@ -101,27 +101,27 @@ export class Server { }) const deferred = createDeferred() - const rkey = bytesToUnprefixedHex(hash) - this._requests.set(rkey, { + const rKey = bytesToUnprefixedHex(hash) + this._requests.set(rKey, { peer, deferred, timeoutId: setTimeout(() => { - if (this._requests.get(rkey) !== undefined) { + if (this._requests.get(rKey) !== undefined) { if (this.DEBUG) { this._debug( `ping timeout: ${peer.address}:${peer.udpPort} ${ peer.id ? 
formatLogId(bytesToHex(peer.id), verbose) : '-' - }` + }`, ) } - this._requests.delete(rkey) + this._requests.delete(rKey) deferred.reject(new Error(`Timeout error: ping ${peer.address}:${peer.udpPort}`)) } else { return deferred.promise } }, this._timeout), }) - this._requestsCache.set(rckey, deferred.promise) + this._requestsCache.set(rcKey, deferred.promise) return deferred.promise } @@ -140,7 +140,7 @@ export class Server { typename, `send ${typename} to ${peer.address}:${peer.udpPort} (peerId: ${ peer.id ? formatLogId(bytesToHex(peer.id), verbose) : '-' - })` + })`, ) } @@ -159,8 +159,8 @@ export class Server { info.typename.toString(), `received ${info.typename} from ${rinfo.address}:${rinfo.port} (peerId: ${formatLogId( bytesToHex(peerId), - verbose - )})` + verbose, + )})`, ) } @@ -189,10 +189,10 @@ export class Server { } case 'pong': { - const rkey = bytesToUnprefixedHex(info.data.hash) - const request = this._requests.get(rkey) + const rKey = bytesToUnprefixedHex(info.data.hash) + const request = this._requests.get(rKey) if (request !== undefined) { - this._requests.delete(rkey) + this._requests.delete(rKey) request.deferred.resolve({ id: peerId, address: request.peer.address, @@ -217,7 +217,7 @@ export class Server { case 'neighbours': { this.events.emit( 'peers', - info.data.peers.map((peer: any) => peer.endpoint) + info.data.peers.map((peer: any) => peer.endpoint), ) break } diff --git a/packages/devp2p/src/ext/kbucket.ts b/packages/devp2p/src/ext/kbucket.ts index 38f25eeab2..de33f2824f 100644 --- a/packages/devp2p/src/ext/kbucket.ts +++ b/packages/devp2p/src/ext/kbucket.ts @@ -29,7 +29,7 @@ OTHER DEALINGS IN THE SOFTWARE. // TODO: Also internalize types from Definitely Typed at some point // https://github.com/DefinitelyTyped/DefinitelyTyped/blob/266eae5148c535e6b41fe5d0adb2ad23f302bc8a/types/k-bucket/index.d.ts#L4 -// (side note: this was once done by tomonari-t dedicatedly for this library +// (side note: this was once done by tomonari-t dedicatedly for this library // cspell:disable-line // (please nevertheless include the original license reference)) import { equalsBytes, randomBytes } from '@ethereumjs/util' @@ -38,12 +38,12 @@ import { EventEmitter } from 'events' import type { Contact, KBucketOptions, PeerInfo } from '../types.js' function createNode() { - return { contacts: [], dontSplit: false, left: null, right: null } + return { contacts: [], noSplit: false, left: null, right: null } } type KBucketNode = { contacts: Contact[] | null - dontSplit: boolean + noSplit: boolean left: KBucketNode | null right: KBucketNode | null } @@ -149,12 +149,12 @@ export class KBucket { } // the bucket is full - if (node.dontSplit) { + if (node.noSplit) { // we are not allowed to split the bucket // we need to ping the first this._numberOfNodesToPing // in order to determine if they are alive // only if one of the pinged nodes does not respond, can the new contact - // be added (this prevents DoS flodding with new invalid contacts) + // be added (this prevents DoS flooding with new invalid contacts) this.events.emit('ping', node.contacts.slice(0, this._numberOfNodesToPing), contact) return this } @@ -339,10 +339,10 @@ export class KBucket { // don't split the "far away" node // we check where the local node would end up and mark the other one as - // "dontSplit" (i.e. "far away") + // "noSplit" (i.e. "far away") const detNode = this._determineNode(node, this._localNodeId, bitIndex) const otherNode = node.left === detNode ? 
node.right : node.left - otherNode.dontSplit = true + otherNode.noSplit = true } /** diff --git a/packages/devp2p/src/protocol/eth.ts b/packages/devp2p/src/protocol/eth.ts index 4e4a6deeb1..fc89e9cbb7 100644 --- a/packages/devp2p/src/protocol/eth.ts +++ b/packages/devp2p/src/protocol/eth.ts @@ -78,7 +78,7 @@ export class ETH extends Protocol { null, 'Uncontrolled status message', this.debug.bind(this), - 'STATUS' + 'STATUS', ) this._peerStatus = payload as ETH.StatusMsg const peerStatusMsg = `${ @@ -185,31 +185,31 @@ export class ETH extends Protocol { this._peerStatus[0], 'Protocol version mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status[1], this._peerStatus[1], 'NetworkId mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status[4], this._peerStatus[4], 'Genesis block mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) const status: { - networkId: Uint8Array | Uint8Array[] + chainId: Uint8Array | Uint8Array[] td: Uint8Array bestHash: Uint8Array genesisHash: Uint8Array forkId?: Uint8Array | Uint8Array[] } = { - networkId: this._peerStatus[1], + chainId: this._peerStatus[1], td: this._peerStatus[2] as Uint8Array, bestHash: this._peerStatus[3] as Uint8Array, genesisHash: this._peerStatus[4] as Uint8Array, @@ -222,7 +222,7 @@ export class ETH extends Protocol { 2, 'Incorrect forkId msg format', this.debug.bind(this), - 'STATUS' + 'STATUS', ) this._validateForkId(this._peerStatus[5] as Uint8Array[]) status.forkId = this._peerStatus[5] @@ -248,11 +248,11 @@ export class ETH extends Protocol { _getStatusString(status: ETH.StatusMsg) { let sStr = `[V:${bytesToInt(status[0] as Uint8Array)}, NID:${bytesToInt( - status[1] as Uint8Array + status[1] as Uint8Array, )}, TD:${status[2].length === 0 ? 0 : bytesToBigInt(status[2] as Uint8Array).toString()}` sStr += `, BestH:${formatLogId( bytesToHex(status[3] as Uint8Array), - this._verbose + this._verbose, )}, GenH:${formatLogId(bytesToHex(status[4] as Uint8Array), this._verbose)}` if (this._version >= 64) { sStr += `, ForkHash: ${ @@ -280,13 +280,13 @@ export class ETH extends Protocol { const latestBlock = bytesToBigInt(status.latestBlock) if (latestBlock < this._latestBlock) { throw new Error( - 'latest block provided is not matching the HF setting of the Common instance (Rlpx)' + 'latest block provided is not matching the HF setting of the Common instance (Rlpx)', ) } this._latestBlock = latestBlock } const forkHashB = hexToBytes( - isHexString(this._forkHash) ? this._forkHash : `0x${this._forkHash}` + isHexString(this._forkHash) ? this._forkHash : `0x${this._forkHash}`, ) const nextForkB = @@ -301,7 +301,7 @@ export class ETH extends Protocol { `Send STATUS message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - } (eth${this._version}): ${this._getStatusString(this._status)}` + } (eth${this._version}): ${this._getStatusString(this._status)}`, ) } diff --git a/packages/devp2p/src/protocol/les.ts b/packages/devp2p/src/protocol/les.ts index 357c8d5c6d..6c23c389c2 100644 --- a/packages/devp2p/src/protocol/les.ts +++ b/packages/devp2p/src/protocol/les.ts @@ -34,7 +34,7 @@ export class LES extends Protocol { }, 5000) // 5 sec * 1000 this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false } static les2 = { name: 'les', version: 2, length: 21, constructor: LES } @@ -50,7 +50,7 @@ export class LES extends Protocol { this.getMsgPrefix(code), `${`Received ${this.getMsgPrefix(code)} message from ${ this._peer['_socket'].remoteAddress - }:${this._peer['_socket'].remotePort}`}: ${logData}` + }:${this._peer['_socket'].remotePort}`}: ${logData}`, ) } } @@ -61,7 +61,7 @@ export class LES extends Protocol { null, 'Uncontrolled status message', this.debug.bind(this), - 'STATUS' + 'STATUS', ) const status: LES.Status = Object.assign({}) for (const value of payload as NestedUint8Array) { @@ -73,7 +73,7 @@ export class LES extends Protocol { this.getMsgPrefix(code), `${`Received ${this.getMsgPrefix(code)} message from ${ this._peer['_socket'].remoteAddress - }:${this._peer['_socket'].remotePort}`}: ${this._getStatusString(this._peerStatus)}` + }:${this._peer['_socket'].remotePort}`}: ${this._getStatusString(this._peerStatus)}`, ) } this._handleStatus() @@ -124,21 +124,21 @@ export class LES extends Protocol { this._peerStatus['protocolVersion'], 'Protocol version mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( - this._status['networkId'], - this._peerStatus['networkId'], + this._status['chainId'], + this._peerStatus['chainId'], 'NetworkId mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) assertEq( this._status['genesisHash'], this._peerStatus['genesisHash'], 'Genesis block mismatch', this.debug.bind(this), - 'STATUS' + 'STATUS', ) this.events.emit('status', this._peerStatus) @@ -153,8 +153,8 @@ export class LES extends Protocol { _getStatusString(status: LES.Status) { let sStr = `[V:${bytesToInt(status['protocolVersion'])}, ` - sStr += `NID:${bytesToInt(status['networkId'] as Uint8Array)}, HTD:${bytesToInt( - status['headTd'] + sStr += `NID:${bytesToInt(status['chainId'] as Uint8Array)}, HTD:${bytesToInt( + status['headTd'], )}, ` sStr += `HeadH:${bytesToHex(status['headHash'])}, HeadN:${bytesToInt(status['headNum'])}, ` sStr += `GenH:${bytesToHex(status['genesisHash'])}` @@ -169,7 +169,7 @@ export class LES extends Protocol { if (status['flowControl/MRC)'] !== undefined) sStr += `, flowControl/MRC set` if (status['forkID'] !== undefined) sStr += `, forkID: [crc32: ${bytesToHex(status['forkID'][0])}, nextFork: ${bytesToInt( - status['forkID'][1] + status['forkID'][1], )}]` if (status['recentTxLookup'] !== undefined) sStr += `, recentTxLookup: ${bytesToInt(status['recentTxLookup'])}` @@ -184,7 +184,7 @@ export class LES extends Protocol { status['announceType'] = intToBytes(DEFAULT_ANNOUNCE_TYPE) } status['protocolVersion'] = intToBytes(this._version) - status['networkId'] = bigIntToBytes(this._peer.common.chainId()) + status['chainId'] = bigIntToBytes(this._peer.common.chainId()) this._status = status @@ -198,7 +198,7 @@ export class LES extends Protocol { 'STATUS', `Send STATUS message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - } (les${this._version}): ${this._getStatusString(this._status)}` + } (les${this._version}): ${this._getStatusString(this._status)}`, ) } @@ -224,7 +224,7 @@ export class LES extends Protocol { this.getMsgPrefix(code), `Send ${this.getMsgPrefix(code)} message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - }: ${formatLogData(bytesToHex(RLP.encode(payload)), this._verbose)}` + }: ${formatLogData(bytesToHex(RLP.encode(payload)), this._verbose)}`, ) } @@ -284,7 +284,7 @@ export namespace LES { export interface Status { [key: string]: any protocolVersion: 
Uint8Array - networkId: Uint8Array + chainId: Uint8Array headTd: Uint8Array headHash: Uint8Array headNum: Uint8Array diff --git a/packages/devp2p/src/protocol/protocol.ts b/packages/devp2p/src/protocol/protocol.ts index 01ccd3e22e..6d54e2be4f 100644 --- a/packages/devp2p/src/protocol/protocol.ts +++ b/packages/devp2p/src/protocol/protocol.ts @@ -34,7 +34,7 @@ export abstract class Protocol { send: SendMethod, protocol: ProtocolType, version: number, - messageCodes: MessageCodes + messageCodes: MessageCodes, ) { this.events = new EventEmitter() this._peer = peer @@ -55,7 +55,7 @@ export abstract class Protocol { private initMsgDebuggers(protocol: ProtocolType) { const MESSAGE_NAMES = Object.values(this._messageCodes).filter( - (value) => typeof value === 'string' + (value) => typeof value === 'string', ) as string[] for (const name of MESSAGE_NAMES) { this.msgDebuggers[name] = devp2pDebug.extend(protocol).extend(name) diff --git a/packages/devp2p/src/protocol/snap.ts b/packages/devp2p/src/protocol/snap.ts index 084c93aa72..3fa8da2d44 100644 --- a/packages/devp2p/src/protocol/snap.ts +++ b/packages/devp2p/src/protocol/snap.ts @@ -16,7 +16,7 @@ export class SNAP extends Protocol { constructor(version: number, peer: Peer, send: SendMethod) { super(peer, send, ProtocolType.SNAP, version, SNAP.MESSAGE_CODES) this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } static snap = { name: 'snap', version: 1, length: 8, constructor: SNAP } @@ -30,7 +30,7 @@ export class SNAP extends Protocol { this.getMsgPrefix(code), `Received ${this.getMsgPrefix(code)} message from ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - }: ${formatLogData(bytesToHex(data), this._verbose)}` + }: ${formatLogData(bytesToHex(data), this._verbose)}`, ) } @@ -66,7 +66,7 @@ export class SNAP extends Protocol { this.getMsgPrefix(code), `Send ${this.getMsgPrefix(code)} message to ${this._peer['_socket'].remoteAddress}:${ this._peer['_socket'].remotePort - }: ${formatLogData(utils.bytesToHex(RLP.encode(payload)), this._verbose)}` + }: ${formatLogData(utils.bytesToHex(RLP.encode(payload)), this._verbose)}`, ) } diff --git a/packages/devp2p/src/rlpx/ecies.ts b/packages/devp2p/src/rlpx/ecies.ts index d13fa10886..c688cbbead 100644 --- a/packages/devp2p/src/rlpx/ecies.ts +++ b/packages/devp2p/src/rlpx/ecies.ts @@ -43,7 +43,7 @@ function concatKDF(keyMaterial: Uint8Array, keyLength: number) { counter += 1 new DataView(tmp.buffer).setUint32(0, counter) bytes.push( - Uint8Array.from(crypto.createHash('sha256').update(tmp).update(keyMaterial).digest()) + Uint8Array.from(crypto.createHash('sha256').update(tmp).update(keyMaterial).digest()), ) } @@ -73,7 +73,7 @@ export class ECIES { protected _keccakFunction: (msg: Uint8Array) => Uint8Array protected _ecdsaSign: ( msg: Uint8Array, - pk: Uint8Array + pk: Uint8Array, ) => { signature: Uint8Array recid: number @@ -82,7 +82,7 @@ export class ECIES { sig: Uint8Array, recId: number, hash: Uint8Array, - compressed?: boolean + compressed?: boolean, ) => Uint8Array constructor(privateKey: Uint8Array, id: Uint8Array, remoteId: Uint8Array, common?: Common) { @@ -101,14 +101,14 @@ export class ECIES { _encryptMessage( data: Uint8Array, - sharedMacData: Uint8Array | null = null + sharedMacData: Uint8Array | null = null, ): Uint8Array | undefined { const privateKey = genPrivateKey() if (!this._remotePublicKey) return const x = 
ecdhX(this._remotePublicKey, privateKey) const key = concatKDF(x, 32) const ekey = key.subarray(0, 16) // encryption key - const mkey = crypto.createHash('sha256').update(key.subarray(16, 32)).digest() // MAC key + const mKey = crypto.createHash('sha256').update(key.subarray(16, 32)).digest() // MAC key // encrypt const IV = getRandomBytesSync(16) @@ -121,7 +121,7 @@ export class ECIES { sharedMacData = Uint8Array.from([]) } const tag = Uint8Array.from( - crypto.createHmac('sha256', mkey).update(concatBytes(dataIV, sharedMacData)).digest() + crypto.createHmac('sha256', mKey).update(concatBytes(dataIV, sharedMacData)).digest(), ) const publicKey = secp256k1.getPublicKey(privateKey, false) @@ -133,7 +133,7 @@ export class ECIES { data.subarray(0, 1), hexToBytes('0x04'), 'wrong ecies header (possible cause: EIP8 upgrade)', - debug + debug, ) const publicKey = data.subarray(0, 65) @@ -144,14 +144,14 @@ export class ECIES { const x = ecdhX(publicKey, this._privateKey) const key = concatKDF(x, 32) const ekey = key.subarray(0, 16) // encryption key - const mkey = Uint8Array.from(crypto.createHash('sha256').update(key.subarray(16, 32)).digest()) // MAC key + const mKey = Uint8Array.from(crypto.createHash('sha256').update(key.subarray(16, 32)).digest()) // MAC key // check the tag if (!sharedMacData) { sharedMacData = Uint8Array.from([]) } const _tag = crypto - .createHmac('sha256', mkey) + .createHmac('sha256', mKey) .update(concatBytes(dataIV, sharedMacData)) .digest() assertEq(_tag, tag, 'should have valid tag', debug) @@ -220,7 +220,7 @@ export class ECIES { this._keccakFunction(pk2id(this._ephemeralPublicKey)), pk2id(this._publicKey), this._nonce, - Uint8Array.from([0x00]) + Uint8Array.from([0x00]), ) this._initMsg = this._encryptMessage(data) @@ -229,7 +229,7 @@ export class ECIES { parseAuthPlain( data: Uint8Array, - sharedMacData: Uint8Array | null = null + sharedMacData: Uint8Array | null = null, ): Uint8Array | undefined { const prefix = sharedMacData !== null ? 
sharedMacData : new Uint8Array() this._remoteInitMsg = concatBytes(prefix, data) @@ -237,7 +237,7 @@ export class ECIES { let signature = null let recoveryId = null - let heid = null + let heId = null let remotePublicKey = null let nonce = null @@ -246,7 +246,7 @@ export class ECIES { signature = decrypted.subarray(0, 64) recoveryId = decrypted[64] - heid = decrypted.subarray(65, 97) // 32 bytes + heId = decrypted.subarray(65, 97) // 32 bytes remotePublicKey = id2pk(decrypted.subarray(97, 161)) nonce = decrypted.subarray(161, 193) } else { @@ -272,17 +272,17 @@ export class ECIES { signature, recoveryId, xor(x, this._remoteNonce), - false + false, ) if (this._remoteEphemeralPublicKey === null) return this._ephemeralSharedSecret = ecdhX(this._remoteEphemeralPublicKey, this._ephemeralPrivateKey) - if (heid !== null && this._remoteEphemeralPublicKey !== null) { + if (heId !== null && this._remoteEphemeralPublicKey !== null) { assertEq( this._keccakFunction(pk2id(this._remoteEphemeralPublicKey)), - heid, + heId, 'the hash of the ephemeral key should match', - debug + debug, ) } } @@ -356,7 +356,7 @@ export class ECIES { this.parseAckPlain(data.subarray(2), data.subarray(0, 2)) } - createHeader(size: number): Uint8Array | undefined { + createBlockHeader(size: number): Uint8Array | undefined { const bufSize = zfill(intToBytes(size), 3) const headerData = RLP.encode([0, 0]) // [capability-id, context-id] (currently unused in spec) let header = concatBytes(bufSize, headerData) diff --git a/packages/devp2p/src/rlpx/peer.ts b/packages/devp2p/src/rlpx/peer.ts index e980c89688..65111987d1 100644 --- a/packages/devp2p/src/rlpx/peer.ts +++ b/packages/devp2p/src/rlpx/peer.ts @@ -143,7 +143,7 @@ export class Peer { this._sendAuth() } this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false } /** @@ -152,7 +152,7 @@ export class Peer { _sendAuth() { if (this._closed) return this._logger( - `Send auth (EIP8: ${this._EIP8}) to ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Send auth (EIP8: ${this._EIP8}) to ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) if (this._EIP8 === true) { const authEIP8 = this._eciesSession.createAuthEIP8() @@ -173,7 +173,7 @@ export class Peer { _sendAck() { if (this._closed) return this._logger( - `Send ack (EIP8: ${this._eciesSession['_gotEIP8Auth']}) to ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Send ack (EIP8: ${this._eciesSession['_gotEIP8Auth']}) to ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) if (this._eciesSession['_gotEIP8Auth']) { @@ -200,7 +200,7 @@ export class Peer { if (this._closed) return false const msg = concatBytes(RLP.encode(code), data) - const header = this._eciesSession.createHeader(msg.length) + const header = this._eciesSession.createBlockHeader(msg.length) if (!header || this._socket.destroyed) return this._socket.write(header) @@ -227,7 +227,7 @@ export class Peer { // TODO: Remove when we can also serve snap requests from other peers .filter((c) => c.name !== 'snap') .map((c) => `${c.name}${c.version}`) - .join(',')} clientId=${bytesToUtf8(this.clientId)}` + .join(',')} clientId=${bytesToUtf8(this.clientId)}`, ) } const payload: HelloMsg = [ @@ -260,7 +260,7 @@ export class Peer { this.debug( 'DISCONNECT', `Send DISCONNECT to ${this._socket.remoteAddress}:${this._socket.remotePort} (reason: ${reasonName})`, - reasonName + reasonName, ) } const data = RLP.encode(reason) @@ -340,7 +340,7 @@ export class Peer { if (parseData.subarray(0, 1) === hexToBytes('0x04')) { this._eciesSession.parseAckPlain(parseData) this._logger( - `Received ack (old format) from ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Received ack (old format) from ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) } else { this._eciesSession['_gotEIP8Ack'] = true @@ -350,7 +350,7 @@ export class Peer { } else { this._eciesSession.parseAckEIP8(parseData) this._logger( - `Received ack (EIP8) from ${this._socket.remoteAddress}:${this._socket.remotePort}` + `Received ack (EIP8) from ${this._socket.remoteAddress}:${this._socket.remotePort}`, ) } this._state = 'Header' @@ -380,7 +380,7 @@ export class Peer { this._hello.protocolVersion } capabilities=${(this._hello.capabilities ?? []) .map((c) => `${c.name}${c.version}`) - .join(',')} clientId=${this._hello.clientId}` + .join(',')} clientId=${this._hello.clientId}`, ) } @@ -458,7 +458,7 @@ export class Peer { `DISCONNECT reason: ${DISCONNECT_REASON[this._disconnectReason as number]} ${ this._socket.remoteAddress }:${this._socket.remotePort}`, - DISCONNECT_REASON[this._disconnectReason as number] + DISCONNECT_REASON[this._disconnectReason as number], ) } this._disconnectWe = false @@ -534,8 +534,8 @@ export class Peer { this._logger( `Received body ${this._socket.remoteAddress}:${this._socket.remotePort} ${formatLogData( bytesToHex(body), - verbose - )}` + verbose, + )}`, ) this._state = 'Header' this._nextPacketSize = 32 diff --git a/packages/devp2p/src/rlpx/rlpx.ts b/packages/devp2p/src/rlpx/rlpx.ts index 45232dcf4c..00454c1980 100644 --- a/packages/devp2p/src/rlpx/rlpx.ts +++ b/packages/devp2p/src/rlpx/rlpx.ts @@ -94,7 +94,7 @@ export class RLPx { this._dpt.events.on('peer:removed', (peer: PeerInfo) => { // remove from queue this._peersQueue = this._peersQueue.filter( - (item) => !equalsBytes(item.peer.id! 
as Uint8Array, peer.id as Uint8Array) + (item) => !equalsBytes(item.peer.id! as Uint8Array, peer.id as Uint8Array), ) }) } @@ -112,14 +112,14 @@ export class RLPx { this._peers = new Map() this._peersQueue = [] this._peersLRU = new LRUCache({ max: 25000 }) - const REFILL_INTERVALL = 10000 // 10 sec * 1000 - const refillIntervalSubdivided = Math.floor(REFILL_INTERVALL / 10) + const REFILL_INTERVAL = 10000 // 10 sec * 1000 + const refillIntervalSubdivided = Math.floor(REFILL_INTERVAL / 10) this._refillIntervalId = setInterval(() => this._refillConnections(), refillIntervalSubdivided) this._keccakFunction = options.common?.customCrypto.keccak256 ?? keccak256 this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } listen(...args: any[]) { @@ -157,7 +157,7 @@ export class RLPx { if (this.DEBUG) { this._debug( - `connect to ${peer.address}:${peer.tcpPort} (id: ${formatLogId(peerKey, verbose)})` + `connect to ${peer.address}:${peer.tcpPort} (id: ${formatLogId(peerKey, verbose)})`, ) } const deferred = createDeferred() @@ -272,7 +272,7 @@ export class RLPx { if (this.DEBUG) { this._debug( `disconnect from ${socket.remoteAddress}:${socket.remotePort}, reason: ${DISCONNECT_REASON[reason]}`, - `disconnect` + `disconnect`, ) } } @@ -309,7 +309,7 @@ export class RLPx { this._refillIntervalSelectionCounter } peers: ${this._peers.size}, queue size: ${ this._peersQueue.length - }, open slots: ${this._getOpenSlots()}` + }, open slots: ${this._getOpenSlots()}`, ) } } diff --git a/packages/devp2p/src/util.ts b/packages/devp2p/src/util.ts index 746cece35e..e1de5db726 100644 --- a/packages/devp2p/src/util.ts +++ b/packages/devp2p/src/util.ts @@ -46,7 +46,7 @@ export function assertEq( actual: assertInput, msg: string, debug: Function, - messageName?: string + messageName?: string, ): void { let fullMsg diff --git a/packages/devp2p/test/dns.spec.ts b/packages/devp2p/test/dns.spec.ts index bee07a82a7..ff20fd020d 100644 --- a/packages/devp2p/test/dns.spec.ts +++ b/packages/devp2p/test/dns.spec.ts @@ -6,7 +6,7 @@ import { DNS } from '../src/dns/index.js' import * as testdata from './testdata.json' describe('DNS', () => { - const mockData = testdata.dns + const mockData = testdata.default.dns const mockDns = td.replace('dns') let dns: DNS @@ -20,6 +20,7 @@ describe('DNS', () => { } const host = 'nodes.example.org' + // cspell:disable const rootDomain = 'JORXBYVVM7AEKETX5DGXW44EAY' const branchDomainA = 'D2SNLTAGWNQ34NTQTPHNZDECFU' const branchDomainB = 'D3SNLTAGWNQ34NTQTPHNZDECFU' @@ -27,6 +28,7 @@ describe('DNS', () => { const branchDomainD = 'D5SNLTAGWNQ34NTQTPHNZDECFU' const partialBranchA = 'AAAA' const partialBranchB = 'BBBB' + // cspell:enable const singleBranch = `enrtree-branch:${branchDomainA}` const doubleBranch = `enrtree-branch:${branchDomainA},${branchDomainB}` const multiComponentBranch = [ @@ -153,7 +155,7 @@ describe('DNS', () => { }) describe('DNS: (integration)', () => { - const publicKey = 'AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE' + const publicKey = 'AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE' // cspell:disable-line const goerliDNS = 'all.goerli.ethdisco.net' const enrTree = `enrtree://${publicKey}@${goerliDNS}` const ipTestRegex = /^\d+\.\d+\.\d+\.\d+$/ // e.g 123.44.55.77 @@ -174,6 +176,6 @@ describe('DNS: (integration)', () => { seen.push(peer!.address as string) } }, - { timeout: 10000 } + { timeout: 10000 }, ) }) diff --git 
a/packages/devp2p/test/dpt-message.spec.ts b/packages/devp2p/test/dpt-message.spec.ts index a5ed1ca801..b5c4cdf38e 100644 --- a/packages/devp2p/test/dpt-message.spec.ts +++ b/packages/devp2p/test/dpt-message.spec.ts @@ -9,7 +9,7 @@ const publicKey = publicKeyCreate(privateKey, false) describe('DPT message tests', () => { it('ping packet with version 4, additional list elements', () => { const bytes = hexToBytes( - '0xe9614ccfd9fc3e74360018522d30e1419a143407ffcce748de3e22116b7e8dc92ff74788c0b6663aaa3d67d641936511c8f8d6ad8698b820a7cf9e1be7155e9a241f556658c55428ec0563514365799a4be2be5a685a80971ddcfa80cb422cdd0101ec04cb847f000001820cfa8215a8d790000000000000000000000000000000018208ae820d058443b9a3550102' + '0xe9614ccfd9fc3e74360018522d30e1419a143407ffcce748de3e22116b7e8dc92ff74788c0b6663aaa3d67d641936511c8f8d6ad8698b820a7cf9e1be7155e9a241f556658c55428ec0563514365799a4be2be5a685a80971ddcfa80cb422cdd0101ec04cb847f000001820cfa8215a8d790000000000000000000000000000000018208ae820d058443b9a3550102', ) const msg = message.decode(bytes) @@ -20,7 +20,7 @@ describe('DPT message tests', () => { it('ping packet with version 555, additional list elements and additional random data:', () => { const bytes = hexToBytes( - '0x577be4349c4dd26768081f58de4c6f375a7a22f3f7adda654d1428637412c3d7fe917cadc56d4e5e7ffae1dbe3efffb9849feb71b262de37977e7c7a44e677295680e9e38ab26bee2fcbae207fba3ff3d74069a50b902a82c9903ed37cc993c50001f83e82022bd79020010db83c4d001500000000abcdef12820cfa8215a8d79020010db885a308d313198a2e037073488208ae82823a8443b9a355c5010203040531b9019afde696e582a78fa8d95ea13ce3297d4afb8ba6433e4154caa5ac6431af1b80ba76023fa4090c408f6b4bc3701562c031041d4702971d102c9ab7fa5eed4cd6bab8f7af956f7d565ee1917084a95398b6a21eac920fe3dd1345ec0a7ef39367ee69ddf092cbfe5b93e5e568ebc491983c09c76d922dc3' + '0x577be4349c4dd26768081f58de4c6f375a7a22f3f7adda654d1428637412c3d7fe917cadc56d4e5e7ffae1dbe3efffb9849feb71b262de37977e7c7a44e677295680e9e38ab26bee2fcbae207fba3ff3d74069a50b902a82c9903ed37cc993c50001f83e82022bd79020010db83c4d001500000000abcdef12820cfa8215a8d79020010db885a308d313198a2e037073488208ae82823a8443b9a355c5010203040531b9019afde696e582a78fa8d95ea13ce3297d4afb8ba6433e4154caa5ac6431af1b80ba76023fa4090c408f6b4bc3701562c031041d4702971d102c9ab7fa5eed4cd6bab8f7af956f7d565ee1917084a95398b6a21eac920fe3dd1345ec0a7ef39367ee69ddf092cbfe5b93e5e568ebc491983c09c76d922dc3', ) const msg = message.decode(bytes) @@ -31,7 +31,7 @@ describe('DPT message tests', () => { it('pong packet with additional list elements and additional random data', () => { const bytes = hexToBytes( - '0x09b2428d83348d27cdf7064ad9024f526cebc19e4958f0fdad87c15eb598dd61d08423e0bf66b2069869e1724125f820d851c136684082774f870e614d95a2855d000f05d1648b2d5945470bc187c2d2216fbe870f43ed0909009882e176a46b0102f846d79020010db885a308d313198a2e037073488208ae82823aa0fbc914b16819237dcd8801d7e53f69e9719adecb3cc0e790c57e91ca4461c9548443b9a355c6010203c2040506a0c969a58f6f9095004c0177a6b47f451530cab38966a25cca5cb58f055542124e' + '0x09b2428d83348d27cdf7064ad9024f526cebc19e4958f0fdad87c15eb598dd61d08423e0bf66b2069869e1724125f820d851c136684082774f870e614d95a2855d000f05d1648b2d5945470bc187c2d2216fbe870f43ed0909009882e176a46b0102f846d79020010db885a308d313198a2e037073488208ae82823aa0fbc914b16819237dcd8801d7e53f69e9719adecb3cc0e790c57e91ca4461c9548443b9a355c6010203c2040506a0c969a58f6f9095004c0177a6b47f451530cab38966a25cca5cb58f055542124e', ) const msg = message.decode(bytes) @@ -41,7 +41,7 @@ describe('DPT message tests', () => { it('findnode packet with additional list elements and additional 
random data', () => { const bytes = hexToBytes( - '0xc7c44041b9f7c7e41934417ebac9a8e1a4c6298f74553f2fcfdcae6ed6fe53163eb3d2b52e39fe91831b8a927bf4fc222c3902202027e5e9eb812195f95d20061ef5cd31d502e47ecb61183f74a504fe04c51e73df81f25c4d506b26db4517490103f84eb840ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31387574077f301b421bc84df7266c44e9e6d569fc56be00812904767bf5ccd1fc7f8443b9a35582999983999999280dc62cc8255c73471e0a61da0c89acdc0e035e260add7fc0c04ad9ebf3919644c91cb247affc82b69bd2ca235c71eab8e49737c937a2c396' + '0xc7c44041b9f7c7e41934417ebac9a8e1a4c6298f74553f2fcfdcae6ed6fe53163eb3d2b52e39fe91831b8a927bf4fc222c3902202027e5e9eb812195f95d20061ef5cd31d502e47ecb61183f74a504fe04c51e73df81f25c4d506b26db4517490103f84eb840ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31387574077f301b421bc84df7266c44e9e6d569fc56be00812904767bf5ccd1fc7f8443b9a35582999983999999280dc62cc8255c73471e0a61da0c89acdc0e035e260add7fc0c04ad9ebf3919644c91cb247affc82b69bd2ca235c71eab8e49737c937a2c396', ) const msg = message.decode(bytes) @@ -51,7 +51,7 @@ describe('DPT message tests', () => { it('neighbours packet with additional list elements and additional random data', () => { const bytes = hexToBytes( - '0xc679fc8fe0b8b12f06577f2e802d34f6fa257e6137a995f6f4cbfc9ee50ed3710faf6e66f932c4c8d81d64343f429651328758b47d3dbc02c4042f0fff6946a50f4a49037a72bb550f3a7872363a83e1b9ee6469856c24eb4ef80b7535bcf99c0004f9015bf90150f84d846321163782115c82115db8403155e1427f85f10a5c9a7755877748041af1bcd8d474ec065eb33df57a97babf54bfd2103575fa829115d224c523596b401065a97f74010610fce76382c0bf32f84984010203040101b840312c55512422cf9b8a4097e9a6ad79402e87a15ae909a4bfefa22398f03d20951933beea1e4dfa6f968212385e829f04c2d314fc2d4e255e0d3bc08792b069dbf8599020010db83c4d001500000000abcdef12820d05820d05b84038643200b172dcfef857492156971f0e6aa2c538d8b74010f8e140811d53b98c765dd2d96126051913f44582e8c199ad7c6d6819e9a56483f637feaac9448aacf8599020010db885a308d313198a2e037073488203e78203e8b8408dcab8618c3253b558d459da53bd8fa68935a719aff8b811197101a4b2b47dd2d47295286fc00cc081bb542d760717d1bdd6bec2c37cd72eca367d6dd3b9df738443b9a355010203b525a138aa34383fec3d2719a0' + '0xc679fc8fe0b8b12f06577f2e802d34f6fa257e6137a995f6f4cbfc9ee50ed3710faf6e66f932c4c8d81d64343f429651328758b47d3dbc02c4042f0fff6946a50f4a49037a72bb550f3a7872363a83e1b9ee6469856c24eb4ef80b7535bcf99c0004f9015bf90150f84d846321163782115c82115db8403155e1427f85f10a5c9a7755877748041af1bcd8d474ec065eb33df57a97babf54bfd2103575fa829115d224c523596b401065a97f74010610fce76382c0bf32f84984010203040101b840312c55512422cf9b8a4097e9a6ad79402e87a15ae909a4bfefa22398f03d20951933beea1e4dfa6f968212385e829f04c2d314fc2d4e255e0d3bc08792b069dbf8599020010db83c4d001500000000abcdef12820d05820d05b84038643200b172dcfef857492156971f0e6aa2c538d8b74010f8e140811d53b98c765dd2d96126051913f44582e8c199ad7c6d6819e9a56483f637feaac9448aacf8599020010db885a308d313198a2e037073488203e78203e8b8408dcab8618c3253b558d459da53bd8fa68935a719aff8b811197101a4b2b47dd2d47295286fc00cc081bb542d760717d1bdd6bec2c37cd72eca367d6dd3b9df738443b9a355010203b525a138aa34383fec3d2719a0', ) const msg = message.decode(bytes) diff --git a/packages/devp2p/test/dpt.spec.ts b/packages/devp2p/test/dpt.spec.ts index 4f37791f5e..0e776e1483 100644 --- a/packages/devp2p/test/dpt.spec.ts +++ b/packages/devp2p/test/dpt.spec.ts @@ -11,7 +11,7 @@ describe('DPT', () => { }) const privateKey1 = hexToBytes( - '0x012e930448c53e0b73edbbbc433e8a741e978cda79be2be039905f538d6247c2' + '0x012e930448c53e0b73edbbbc433e8a741e978cda79be2be039905f538d6247c2', ) const peers: PeerInfo[] = [] @@ 
-56,7 +56,7 @@ describe('DPT', () => { assert.equal( dpt.getClosestPeers(peers[0].id!).length, 2, - 'should return all peers on getClosestPeers()' + 'should return all peers on getClosestPeers()', ) dpt.destroy() @@ -73,7 +73,7 @@ describe('DPT', () => { await dpt.refresh() expect( spy, - 'call findneighbours on unconfirmed if no confirmed peers yet' + 'call findneighbours on unconfirmed if no confirmed peers yet', ).toHaveBeenCalledTimes(1) dpt['_refreshIntervalSelectionCounter'] = 0 @@ -86,21 +86,21 @@ describe('DPT', () => { assert.equal( dpt.getClosestPeers(peers[0].id!).length, 1, - 'should not return unconfirmed on getClosestPeers()' + 'should not return unconfirmed on getClosestPeers()', ) dpt.confirmPeer('02') assert.equal( dpt.getClosestPeers(peers[0].id!).length, 2, - 'should return confirmed on getClosestPeers()' + 'should return confirmed on getClosestPeers()', ) dpt.removePeer(peers[1]) assert.equal( dpt.getClosestPeers(peers[0].id!).length, 1, - 'should work after peers being removed' + 'should work after peers being removed', ) dpt.destroy() diff --git a/packages/devp2p/test/enr.spec.ts b/packages/devp2p/test/enr.spec.ts index 01df40ac6d..9882a41137 100644 --- a/packages/devp2p/test/enr.spec.ts +++ b/packages/devp2p/test/enr.spec.ts @@ -10,7 +10,7 @@ describe('ENR tests', () => { // Root DNS entries it('ENR (root): should parse and verify and DNS root entry', () => { const subdomain = ENR.parseAndVerifyRoot(dns.enrRoot, dns.publicKey) - assert.equal(subdomain, 'JORXBYVVM7AEKETX5DGXW44EAY', 'returns correct subdomain') + assert.equal(subdomain, 'JORXBYVVM7AEKETX5DGXW44EAY', 'returns correct subdomain') // cspell:disable-line }) it('ENR (root): should error if DNS root entry is mis-prefixed', () => { @@ -19,7 +19,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("ENR root entry must start with 'enrtree-root:'"), - 'has correct error message' + 'has correct error message', ) } }) @@ -30,7 +30,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes('Unable to verify ENR root signature'), - 'has correct error message' + 'has correct error message', ) } }) @@ -41,7 +41,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("Could not parse 'l' value from ENR root entry"), - 'has correct error message' + 'has correct error message', ) } }) @@ -60,7 +60,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("ENR tree entry must start with 'enrtree:'"), - 'has correct error message' + 'has correct error message', ) } }) @@ -71,7 +71,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes('Could not parse domain from ENR tree entry'), - 'has correct error message' + 'has correct error message', ) } }) @@ -79,9 +79,11 @@ describe('ENR tests', () => { // Branch entries it('ENR (branch): should parse and verify a single component DNS branch entry', () => { const expected = [ + // cspell:disable 'D2SNLTAGWNQ34NTQTPHNZDECFU', '67BLTJEU5R2D5S3B4QKJSBRFCY', 'A2HDMZBB4JIU53VTEGC4TG6P4A', + // cspell:enable ] const branches = ENR.parseBranch(dns.enrBranch) @@ -94,7 +96,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("ENR branch entry must start with 'enrtree-branch:'"), - 'has correct error message' + 'has correct error message', ) } }) @@ -121,7 +123,7 @@ describe('ENR tests', () => { } catch (e: any) { assert.ok( e.toString().includes("String encoded ENR must start with 'enr:'"), - 'has 
correct error message' + 'has correct error message', ) } }) diff --git a/packages/devp2p/test/integration/dpt-simulator.spec.ts b/packages/devp2p/test/integration/dpt-simulator.spec.ts index 0b2682effa..6bea867128 100644 --- a/packages/devp2p/test/integration/dpt-simulator.spec.ts +++ b/packages/devp2p/test/integration/dpt-simulator.spec.ts @@ -37,7 +37,7 @@ describe('DPT simulator tests', () => { assert.equal( dpts[0].getPeers().length, 0, - 'should have removed peer from k-bucket on peer:removed' + 'should have removed peer from k-bucket on peer:removed', ) await util.delay(500) util.destroyDPTs(dpts) @@ -60,7 +60,7 @@ describe('DPT simulator tests', () => { assert.equal( dpts[0].getPeers().length, 0, - 'should have removed peer from k-bucket on peer:removed' + 'should have removed peer from k-bucket on peer:removed', ) await util.delay(500) util.destroyDPTs(dpts) diff --git a/packages/devp2p/test/integration/eth-simulator.spec.ts b/packages/devp2p/test/integration/eth-simulator.spec.ts index 3a8d1847c5..186bea401c 100644 --- a/packages/devp2p/test/integration/eth-simulator.spec.ts +++ b/packages/devp2p/test/integration/eth-simulator.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet, Sepolia } from '@ethereumjs/common' import { hexToBytes, intToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -11,7 +11,7 @@ import type { Capabilities } from '../../src/index.js' const GENESIS_TD = 17179869184 const GENESIS_HASH = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) const capabilities = [devp2p.ETH.eth63, devp2p.ETH.eth62] @@ -49,8 +49,8 @@ describe('ETH simulator tests', () => { resolve(undefined) } - const c1 = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const c2 = new Common({ chain: Chain.Sepolia, hardfork: Hardfork.London }) + const c1 = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const c2 = new Common({ chain: Sepolia, hardfork: Hardfork.London }) util.twoPeerMsgExchange(it, opts, capabilities, [c1, c2], 27126) }) }) @@ -98,7 +98,7 @@ describe('ETH simulator tests', () => { t: typeof it, version: number, cap: Capabilities[], - expectedCode: ETH.MESSAGE_CODES + expectedCode: ETH.MESSAGE_CODES, ) { await new Promise((resolve) => { const opts: any = {} @@ -128,7 +128,7 @@ describe('ETH simulator tests', () => { async () => { await new Promise((resolve) => { const cap = [devp2p.ETH.eth65] - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) const status0: any = Object.assign({}, status) status0['latestBlock'] = intToBytes(100000) // lower than Byzantium fork block 4370000 @@ -142,7 +142,7 @@ describe('ETH simulator tests', () => { resolve(undefined) }) }) - } + }, ) it('ETH: send not-allowed eth67', async () => { @@ -163,7 +163,7 @@ describe('ETH simulator tests', () => { await new Promise((resolve) => { const opts: any = {} const cap = [devp2p.ETH.eth64] - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) const status0: any = Object.assign({}, status) // Take a latest block > next mainnet fork block (constantinople) // to trigger validation condition diff --git a/packages/devp2p/test/integration/les-simulator.spec.ts 
b/packages/devp2p/test/integration/les-simulator.spec.ts index 93091ae10d..610f85eed5 100644 --- a/packages/devp2p/test/integration/les-simulator.spec.ts +++ b/packages/devp2p/test/integration/les-simulator.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet, Sepolia } from '@ethereumjs/common' import { hexToBytes, intToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -8,7 +8,7 @@ import * as util from './util.js' const GENESIS_TD = 17179869184 const GENESIS_HASH = hexToBytes( - '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3' + '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3', ) const capabilities = [devp2p.LES.les4] @@ -63,8 +63,8 @@ describe('LES simulator tests', () => { resolve(undefined) } - const c1 = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const c2 = new Common({ chain: Chain.Sepolia, hardfork: Hardfork.London }) + const c1 = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const c2 = new Common({ chain: Sepolia, hardfork: Hardfork.London }) util.twoPeerMsgExchange(it, opts, capabilities, [c1, c2], 41599) }) }) diff --git a/packages/devp2p/test/integration/rlpx-simulator.spec.ts b/packages/devp2p/test/integration/rlpx-simulator.spec.ts index e958b39221..b35bccdc2e 100644 --- a/packages/devp2p/test/integration/rlpx-simulator.spec.ts +++ b/packages/devp2p/test/integration/rlpx-simulator.spec.ts @@ -14,7 +14,7 @@ describe('RLPx simulator tests', () => { assert.equal( peer._port, basePort + 1, - 'should have added peer on peer:added after successful handshake' + 'should have added peer on peer:added after successful handshake', ) assert.equal(rlpxs[0].getPeers().length, 1, 'peer list length should be 1') assert.equal(rlpxs[0]._getOpenSlots(), 9, 'should have maxPeers - 1 open slots left') @@ -35,12 +35,12 @@ describe('RLPx simulator tests', () => { } assert.notOk( rlpxs[0]['_dpt']!['_banlist'].has(peer), - 'should not be in ban list before bad peer discovered' + 'should not be in ban list before bad peer discovered', ) rlpxs[0]['_dpt']!.events.emit('peer:new', peer) assert.ok( rlpxs[0]['_dpt']!['_banlist'].has(peer), - 'should be in ban list after bad peer discovered' + 'should be in ban list after bad peer discovered', ) await util.delay(500) util.destroyRLPXs(rlpxs) @@ -58,7 +58,7 @@ describe('RLPx simulator tests', () => { assert.equal( reason, DISCONNECT_REASON.CLIENT_QUITTING, - 'should close with CLIENT_QUITTING disconnect reason' + 'should close with CLIENT_QUITTING disconnect reason', ) assert.equal(rlpxs[0]._getOpenSlots(), 10, 'should have maxPeers open slots left') await util.delay(500) diff --git a/packages/devp2p/test/integration/util.ts b/packages/devp2p/test/integration/util.ts index 6ebef037a6..c22e41bb4b 100644 --- a/packages/devp2p/test/integration/util.ts +++ b/packages/devp2p/test/integration/util.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { assert } from 'vitest' import { DPT, ETH, RLPx, genPrivateKey } from '../../src/index.js' @@ -44,7 +44,7 @@ export function getTestDPTsWithDns(numDPTs: number, basePort: number) { }, timeout: 1000, refreshInterval: 400, - dnsNetworks: [testdata.dns.enrTree], + dnsNetworks: [testdata.default.dns.enrTree], shouldFindNeighbours: false, shouldGetDnsPeers: true, }) @@ -72,14 +72,14 @@ export function getTestRLPXs( maxPeers: number = 10, basePort: 
number, capabilities?: Capabilities[], - common?: Object | Common + common?: Object | Common, ) { const rlpxs = [] if (typeof capabilities === 'undefined') { capabilities = [ETH.eth66, ETH.eth65, ETH.eth64, ETH.eth63, ETH.eth62] } if (!common) { - common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) } const dpts = getTestDPTs(numRLPXs, basePort) @@ -101,7 +101,7 @@ export function initTwoPeerRLPXSetup( maxPeers?: any, capabilities?: any, common?: Object | Common, - basePort = 30306 + basePort = 30306, ): RLPx[] { const rlpxs = getTestRLPXs(2, maxPeers, basePort, capabilities, common) const peer = { address: localhost, udpPort: basePort + 1, tcpPort: basePort + 1 } @@ -128,7 +128,7 @@ export function twoPeerMsgExchange( opts: any, capabilities?: Capabilities[], common?: Object | Common, - basePort = 30306 + basePort = 30306, ) { const rlpxs = initTwoPeerRLPXSetup(null, capabilities, common, basePort) rlpxs[0].events.on('peer:added', function (peer: any) { @@ -190,7 +190,7 @@ export async function twoPeerMsgExchange2( opts: any, capabilities?: any, common?: Object | Common, - basePort = 30306 + basePort = 30306, ) { const rlpxs = initTwoPeerRLPXSetup(null, capabilities, common, basePort) rlpxs[0].events.on('peer:added', function (peer: any) { @@ -223,7 +223,7 @@ export async function twoPeerMsgExchange2( assert.equal( err.message, 'Invalid Snappy bitstream', - 'unable to process snappy compressed message' + 'unable to process snappy compressed message', ) destroyRLPXs(rlpxs) opts.promise(undefined) @@ -245,7 +245,7 @@ export function twoPeerMsgExchange3( opts: any, capabilities?: any, common?: Object | Common, - basePort = 30306 + basePort = 30306, ) { const rlpxs = initTwoPeerRLPXSetup(null, capabilities, common, basePort) rlpxs[0].events.on('peer:added', function (peer: any) { diff --git a/packages/devp2p/test/rlpx-ecies.spec.ts b/packages/devp2p/test/rlpx-ecies.spec.ts index fc67c63fbf..72d9e6c2de 100644 --- a/packages/devp2p/test/rlpx-ecies.spec.ts +++ b/packages/devp2p/test/rlpx-ecies.spec.ts @@ -64,7 +64,7 @@ it( const encrypted = t.context.a._encryptMessage(message) const decrypted = t.context.b._decryptMessage(encrypted as Uint8Array) assert.deepEqual(message, decrypted, 'encryptMessage -> decryptMessage should lead to same') - }) + }), ) it( @@ -84,12 +84,12 @@ it( const body = getRandomBytesSync(600) - const header = t.context.b.parseHeader(t.context.a.createHeader(body.length) as Uint8Array) - assert.equal(header, body.length, 'createHeader -> parseHeader should lead to same') + const header = t.context.b.parseHeader(t.context.a.createBlockHeader(body.length) as Uint8Array) + assert.equal(header, body.length, 'createBlockHeader -> parseHeader should lead to same') const parsedBody = t.context.b.parseBody(t.context.a.createBody(body) as Uint8Array) assert.deepEqual(parsedBody, body, 'createBody -> parseBody should lead to same') - }) + }), ) it( @@ -106,7 +106,7 @@ it( t.context.a['_gotEIP8Ack'] = true t.context.a.parseAckEIP8(ack as Uint8Array) }, 'should not throw on ack creation/parsing') - }) + }), ) it( @@ -122,7 +122,7 @@ it( t.context.a['_gotEIP8Ack'] = false t.context.a.parseAckPlain(t.context.h0?.ack as Uint8Array) }, 'should not throw on ack parsing') - }) + }), ) it( @@ -137,5 +137,5 @@ it( t.context.a['_gotEIP8Ack'] = true t.context.a.parseAckEIP8(t.context.h1?.ack as Uint8Array) }, 'should not throw on ack parsing') - }) + }), ) diff --git a/packages/devp2p/test/rlpx.spec.ts 
b/packages/devp2p/test/rlpx.spec.ts index 457bab846e..04262069e6 100644 --- a/packages/devp2p/test/rlpx.spec.ts +++ b/packages/devp2p/test/rlpx.spec.ts @@ -1,6 +1,6 @@ // Tests written with help from CodiumAI -import { Common } from '@ethereumjs/common' +import { Common, Mainnet } from '@ethereumjs/common' import { equalsBytes, randomBytes } from '@ethereumjs/util' import assert from 'assert' import { secp256k1 } from 'ethereum-cryptography/secp256k1.js' @@ -19,7 +19,7 @@ describe('RLPx', () => { maxPeers: 10, clientId: new Uint8Array([6, 7, 8, 9, 10]), capabilities: [], - common: new Common({ chain: 1 }), + common: new Common({ chain: Mainnet }), } const rlpx = new RLPx(privateKey, options) @@ -48,7 +48,7 @@ describe('RLPx', () => { maxPeers: 10, clientId: new Uint8Array([6, 7, 8, 9, 10]), capabilities: [], - common: new Common({ chain: 1 }), + common: new Common({ chain: Mainnet }), } const rlpx = new RLPx(privateKey, options) @@ -104,7 +104,7 @@ describe('RLPx', () => { maxPeers: 10, clientId: new Uint8Array([6, 7, 8, 9, 10]), capabilities: [], - common: new Common({ chain: 1 }), + common: new Common({ chain: Mainnet }), } const rlpx = new RLPx(privateKey, options) @@ -134,7 +134,7 @@ describe('RLPx', () => { maxPeers: 10, clientId: new Uint8Array([6, 7, 8, 9, 10]), capabilities: [], - common: new Common({ chain: 1 }), + common: new Common({ chain: Mainnet }), } const rlpx = new RLPx(privateKey, options) const mockPeer = { @@ -158,19 +158,19 @@ describe('RLPx', () => { maxPeers: 10, clientId: new Uint8Array([6, 7, 8, 9, 10]), capabilities: [], - common: new Common({ chain: 1 }), + common: new Common({ chain: Mainnet }), } const rlpx = new RLPx(privateKey, options) assert.equal( rlpx['_getOpenSlots'](), 10, - 'returns default number of open slots (i.e. `max_peers`) on startup' + 'returns default number of open slots (i.e. `max_peers`) on startup', ) assert.equal( rlpx['_getOpenQueueSlots'](), 20, - 'returns default number of open queue slots on startup' + 'returns default number of open queue slots on startup', ) }) }) diff --git a/packages/devp2p/tsconfig.lint.json b/packages/devp2p/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/devp2p/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/ethash/.eslintrc.cjs b/packages/ethash/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/ethash/.eslintrc.cjs +++ b/packages/ethash/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/ethash/CHANGELOG.md b/packages/ethash/CHANGELOG.md index e6e3a6159c..c47b127690 100644 --- a/packages/ethash/CHANGELOG.md +++ b/packages/ethash/CHANGELOG.md @@ -6,7 +6,11 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 3.0.3 - 2024-03-05 +## 3.0.4 - 2024-08-15 + +Maintenance release with downstream dependency updates, see PR [#3527](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3527) + +## 3.0.3 - 2024-03-18 Maintenance release with downstream dependency updates, see PR [#3297](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3297) @@ -38,7 +42,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -174,7 +178,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -182,7 +186,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. 
So Common import and usage is changing from: @@ -327,7 +331,7 @@ See `Ethash` [README](https://github.com/ethereumjs/ethereumjs-monorepo/tree/mas We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -390,7 +394,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. 
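Editor's note on the recurring `Common` change in the devp2p and ethash test hunks above: the `Chain` enum (and bare numeric chain ids) is replaced by chain configuration objects now exported from `@ethereumjs/common`. A condensed before/after sketch, lifted from the les-simulator and rlpx test diffs above (illustrative only, not part of this PR's diff):

```ts
// Before: chain selected via the Chain enum or a numeric chain id
// import { Chain, Common, Hardfork } from '@ethereumjs/common'
// const c1 = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London })
// const c3 = new Common({ chain: 1 })

// After: chain passed as a chain configuration object
import { Common, Hardfork, Mainnet, Sepolia } from '@ethereumjs/common'

const c1 = new Common({ chain: Mainnet, hardfork: Hardfork.London })
const c2 = new Common({ chain: Sepolia, hardfork: Hardfork.London })
```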
### Other Changes diff --git a/packages/ethash/examples/example.ts b/packages/ethash/examples/example.ts index 28461ba38f..983a4007bf 100644 --- a/packages/ethash/examples/example.ts +++ b/packages/ethash/examples/example.ts @@ -1,4 +1,5 @@ import { bytesToHex, hexToBytes } from '@ethereumjs/util' + import { Ethash } from '../dist/cjs/index.js' const ethash = new Ethash() diff --git a/packages/ethash/examples/miner.ts b/packages/ethash/examples/miner.ts index ab783d6d60..39800318b5 100644 --- a/packages/ethash/examples/miner.ts +++ b/packages/ethash/examples/miner.ts @@ -1,15 +1,17 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { Ethash } from '@ethereumjs/ethash' -import { DBObject, MapDB, bytesToHex } from '@ethereumjs/util' +import { MapDB, bytesToHex } from '@ethereumjs/util' -const block = createBlockFromBlockData( +import type { DBObject } from '@ethereumjs/util' + +const block = createBlock( { header: { difficulty: BigInt(100), number: BigInt(1), }, }, - { setHardfork: true, skipConsensusFormatValidation: true } + { setHardfork: true, skipConsensusFormatValidation: true }, ) const cacheDB = new MapDB() diff --git a/packages/ethash/examples/powBlock.ts b/packages/ethash/examples/powBlock.ts index fcd54323c0..f9bbce1a4e 100644 --- a/packages/ethash/examples/powBlock.ts +++ b/packages/ethash/examples/powBlock.ts @@ -1,6 +1,8 @@ -import { Ethash } from '@ethereumjs/ethash' import { createBlockFromRLPSerializedBlock } from '@ethereumjs/block' -import { DBObject, hexToBytes, MapDB } from '@ethereumjs/util' +import { Ethash } from '@ethereumjs/ethash' +import { MapDB, hexToBytes } from '@ethereumjs/util' + +import type { DBObject } from '@ethereumjs/util' const cacheDB = new MapDB() diff --git a/packages/ethash/examples/rawExample.ts b/packages/ethash/examples/rawExample.ts index 3046119e2c..67f0f44727 100644 --- a/packages/ethash/examples/rawExample.ts +++ b/packages/ethash/examples/rawExample.ts @@ -1,5 +1,8 @@ +import { MapDB, bytesToHex, hexToBytes } from '@ethereumjs/util' + import { Ethash } from '../dist/cjs/index.js' -import { DBObject, MapDB, bytesToHex, hexToBytes } from '@ethereumjs/util' + +import type { DBObject } from '@ethereumjs/util' const ethash = new Ethash(new MapDB()) @@ -7,7 +10,7 @@ const verifySubmit = async ( ethash: Ethash, number: number, headerHash: Uint8Array, - nonce: Uint8Array + nonce: Uint8Array, ): Promise => { console.log('Verifying number: ', number) await ethash.loadEpoc(BigInt(number)) @@ -20,6 +23,6 @@ const verifySubmit = async ( const headerHash = hexToBytes('0x0e2887aa1a0668bf8254d1a6ae518927de99e3e5d7f30fd1f16096e2608fe05e') const nonce = hexToBytes('0xe360b6170c229d15') -verifySubmit(ethash, 35414, headerHash, nonce).then((result) => { +void verifySubmit(ethash, 35414, headerHash, nonce).then((result) => { console.log('Result: ', bytesToHex(result)) }) diff --git a/packages/ethash/package.json b/packages/ethash/package.json index df473dcc59..1e51df3b78 100644 --- a/packages/ethash/package.json +++ b/packages/ethash/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/ethash", - "version": "3.0.3", + "version": "3.0.4", "description": "An implementation of the Ethash consensus algorithm in JavaScript", "keywords": [ "ethash", @@ -46,14 +46,14 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/block": "^5.2.0", + "@ethereumjs/block": "^5.3.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", 
"bigint-crypto-utils": "^3.2.2", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { - "@ethereumjs/common": "^4.3.0" + "@ethereumjs/common": "^4.4.0" }, "engines": { "node": ">=18" diff --git a/packages/ethash/src/index.ts b/packages/ethash/src/index.ts index 31b8157c8d..41c3739e70 100644 --- a/packages/ethash/src/index.ts +++ b/packages/ethash/src/index.ts @@ -1,4 +1,4 @@ -import { Block, BlockHeader, createBlockFromBlockData } from '@ethereumjs/block' +import { Block, BlockHeader, createBlock, createBlockHeader } from '@ethereumjs/block' import { RLP } from '@ethereumjs/rlp' import { BIGINT_0, @@ -96,12 +96,12 @@ export class Miner { const data = this.block.toJSON() data.header!.mixHash = solution.mixHash data.header!.nonce = solution.nonce - return createBlockFromBlockData(data, { common: this.block.common }) + return createBlock(data, { common: this.block.common }) } else { const data = this.blockHeader.toJSON() data.mixHash = solution.mixHash data.nonce = solution.nonce - return BlockHeader.fromHeaderData(data, { common: this.blockHeader.common }) + return createBlockHeader(data, { common: this.blockHeader.common }) } } } @@ -227,7 +227,7 @@ export class Ethash { const p = (fnv( i ^ new DataView(s.buffer).getUint32(0, true), - new DataView(mix.buffer).getUint32((i % w) * 4, true) + new DataView(mix.buffer).getUint32((i % w) * 4, true), ) % Math.floor(n / mixhashes)) * mixhashes @@ -344,7 +344,7 @@ export class Ethash { { keyEncoding: KeyEncoding.Number, valueEncoding: ValueEncoding.JSON, - } + }, ) } else { this.cache = data.cache.map((a: Uint8Array) => { diff --git a/packages/ethash/src/util.ts b/packages/ethash/src/util.ts index eae06d2a0b..43c261c450 100644 --- a/packages/ethash/src/util.ts +++ b/packages/ethash/src/util.ts @@ -66,7 +66,7 @@ export function fnvBytes(a: Uint8Array, b: Uint8Array) { rView.setUint32( i, fnv(new DataView(a.buffer).getUint32(i, true), new DataView(b.buffer).getUint32(i, true)), - true + true, ) } return r diff --git a/packages/ethash/test/block.spec.ts b/packages/ethash/test/block.spec.ts index 86edb0bd4f..a39807ed27 100644 --- a/packages/ethash/test/block.spec.ts +++ b/packages/ethash/test/block.spec.ts @@ -1,9 +1,9 @@ import { - createBlockFromBlockData, + createBlock, + createBlockFromBytesArray, createBlockFromRLPSerializedBlock, - createBlockFromValuesArray, } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { MapDB, hexToBytes, toBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -20,9 +20,9 @@ describe('Verify POW for valid and invalid blocks', () => { it('should work', async () => { const e = new Ethash(cacheDB as any) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) - const genesis = createBlockFromBlockData({}, { common }) + const genesis = createBlock({}, { common }) const genesisResult = await e.verifyPOW(genesis) assert.ok(genesisResult, 'genesis block should be valid') @@ -35,7 +35,7 @@ describe('Verify POW for valid and invalid blocks', () => { // Put correct amount of extraData in block extraData field so block can be deserialized const values = RLP.decode(Uint8Array.from(invalidRlp)) as BlockBytes values[0][12] = new Uint8Array(32) - const invalidBlock = createBlockFromValuesArray(values, { common }) + const invalidBlock = 
createBlockFromBytesArray(values, { common }) const invalidBlockResult = await e.verifyPOW(invalidBlock) assert.ok(!invalidBlockResult, 'should be invalid') diff --git a/packages/ethash/test/ethash.spec.ts b/packages/ethash/test/ethash.spec.ts index ecbee8e2ca..d22e26823c 100644 --- a/packages/ethash/test/ethash.spec.ts +++ b/packages/ethash/test/ethash.spec.ts @@ -1,5 +1,5 @@ -import { BlockHeader } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlockHeaderFromRLP } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -10,13 +10,13 @@ const powTests = require('./ethash_tests.json') const ethash = new Ethash() const tests = Object.keys(powTests) -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) describe('POW tests', () => { it('should work', async () => { for (const key of tests) { const test = powTests[key] - const header = BlockHeader.fromRLPSerializedHeader(hexToBytes(`0x${test.header}`), { common }) + const header = createBlockHeaderFromRLP(hexToBytes(`0x${test.header}`), { common }) const headerHash = ethash.headerHash(header.raw()) assert.equal(bytesToHex(headerHash), '0x' + test.header_hash, 'generate header hash') diff --git a/packages/ethash/test/miner.spec.ts b/packages/ethash/test/miner.spec.ts index 493245da5b..1ce3af091c 100644 --- a/packages/ethash/test/miner.spec.ts +++ b/packages/ethash/test/miner.spec.ts @@ -1,5 +1,5 @@ -import { createBlockFromBlockData } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { createBlock } from '@ethereumjs/block' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { MapDB } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' @@ -9,20 +9,20 @@ import type { Block, BlockHeader } from '@ethereumjs/block' import type { DBObject } from '@ethereumjs/util' const cacheDb = new MapDB() -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) describe('Miner', () => { it('Check if miner works as expected', async () => { const e = new Ethash(cacheDb) - const block = createBlockFromBlockData( + const block = createBlock( { header: { difficulty: BigInt(100), number: BigInt(1), }, }, - { common } + { common }, ) const invalidBlockResult = await e.verifyPOW(block) @@ -37,7 +37,7 @@ describe('Miner', () => { const solution = await miner.iterate(-1) - const validBlock = createBlockFromBlockData( + const validBlock = createBlock( { header: { difficulty: block.header.difficulty, @@ -46,7 +46,7 @@ describe('Miner', () => { mixHash: solution?.mixHash, }, }, - { common } + { common }, ) const validBlockResult = await e.verifyPOW(validBlock) @@ -57,21 +57,21 @@ describe('Miner', () => { it('Check if it is possible to mine Blocks and BlockHeaders', async () => { const e = new Ethash(cacheDb as any) - const block = createBlockFromBlockData( + const block = createBlock( { header: { difficulty: BigInt(100), number: BigInt(1), }, }, - { common } + { common }, ) const miner = e.getMiner(block.header) const solution = await miner.mine(-1) assert.ok( - e.verifyPOW(createBlockFromBlockData({ header: solution.toJSON() }, { common })), - 'successfully mined block' + 
e.verifyPOW(createBlock({ header: solution.toJSON() }, { common })), + 'successfully mined block', ) const blockMiner = e.getMiner(block) @@ -83,14 +83,14 @@ describe('Miner', () => { it('Check if it is possible to stop the miner', async () => { const e = new Ethash(cacheDb as any) - const block = createBlockFromBlockData( + const block = createBlock( { header: { difficulty: BigInt(10000000000000), number: BigInt(1), }, }, - { common } + { common }, ) const miner = e.getMiner(block.header) setTimeout(function () { @@ -111,14 +111,14 @@ describe('Miner', () => { }, undefined, undefined, - 'miner constructor successfully throws if no BlockHeader or Block object is passed' + 'miner constructor successfully throws if no BlockHeader or Block object is passed', ) }) it('Should keep common when mining blocks or headers', async () => { const e = new Ethash(cacheDb as any) - const block = createBlockFromBlockData( + const block = createBlock( { header: { difficulty: BigInt(100), @@ -127,7 +127,7 @@ }, { common, - } + }, ) const miner = e.getMiner(block.header) diff --git a/packages/ethash/tsconfig.lint.json b/packages/ethash/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/ethash/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/evm/.eslintrc.cjs b/packages/evm/.eslintrc.cjs index 965b6e94e6..10790297fc 100644 --- a/packages/evm/.eslintrc.cjs +++ b/packages/evm/.eslintrc.cjs @@ -7,9 +7,10 @@ module.exports = { }, overrides: [ { - files: ['test/util.ts', 'test/tester/**/*.ts'], + files: ['test/util.ts', 'test/tester/**/*.ts', 'examples/**/*.ts'], rules: { 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', }, }, ], diff --git a/packages/evm/CHANGELOG.md b/packages/evm/CHANGELOG.md index d88163ffa6..1abb3d322f 100644 --- a/packages/evm/CHANGELOG.md +++ b/packages/evm/CHANGELOG.md @@ -6,11 +6,62 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 3.0.0 - 2024-03-05 +## 3.1.0 - 2024-08-15 + +### EIP-2537 BLS Precompiles (Prague) + +Starting with this release the EVM supports the BLS precompiles introduced with [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537). These precompiles run natively using the [@noble/curves](https://github.com/paulmillr/noble-curves) library (❤️ to `@paulmillr`!), see PRs [#3350](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3350) and [#3471](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3471). + +An alternative WASM implementation (using [bls-wasm](https://github.com/herumi/bls-wasm)) can be optionally used like this if needed for performance reasons: + +```ts +import { EVM, MCLBLS } from '@ethereumjs/evm' + +const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Prague }) +await mcl.init(mcl.BLS12_381) +const mclbls = new MCLBLS(mcl) +const evm = await EVM.create({ common, bls: mclbls }) +``` + +### Verkle Dependency Decoupling + +We have relatively light-heartedly added a new `@ethereumjs/verkle` main dependency to the VM/EVM stack in the `v7.2.1` release, which added an additional burden to the bundle size by several hundred KB and additionally draws in unnecessary WASM code. Coupling with Verkle has been refactored in PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) and the direct dependency has been removed again.
+ +An update to this release is therefore strongly recommended even if other fixes or features are not that relevant for you right now. + +### Verkle Updates + +- Adds ability to run [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) EOA code transactions (see tx library for full documentation), see PR [#3470](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3470) +- Fixes for Kaustinen4 support, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Kaustinen5 related fixes, PR [#3343](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3343) +- Kaustinen6 adjustments, `verkle-cryptography-wasm` migration, PRs [#3355](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3355) and [#3356](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3356) +- Update `kzg-wasm` to `0.4.0`, PR [#3358](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3358) +- Shift Verkle to `osaka` hardfork, PR [#3371](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3371) +- Fix `accessWitness` passing, PR [#3405](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3405) +- Remove the hacks to prevent account cleanups of system contracts, PR [#3418](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3418) +- Fix EIP-2935 address conversion issues, PR [#3447](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3447) + +### Other Features + +- Add support for retroactive [EIP-7610](https://eips.ethereum.org/EIPS/eip-7610), PR [#3480](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3480) +- Adds bundle visualizer (to be used with `npm run visualize:bundle`), PR [#3463](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3463) +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Other Changes + +- Removes support for [EIP-2315](https://eips.ethereum.org/EIPS/eip-2315) simple subroutines for EVM (deprecated with an alternative version integrated into EOF), PR [#3342](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3342) +- Update `mcl-wasm` Dependency (Esbuild Issue), PR [#3461](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3461) + +### Bugfixes + +- BLS precompile fixes, PR [#3400](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3400) +- Ignore precompile addresses for some target access events, PR [#3366](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3366) + +## 3.0.0 - 2024-03-18 ### New EVM.create() Async Static Constructor -This is an in-between breaking release on both the EVM and VM packages due to a problematic top level await() discovery in the underlying `rustbn-wasm` library (see issue [#10](https://github.com/ethereumjs/rustbn-wasm/issues/10)) generally affecting the compatiblity of our libraries. +This is an in-between breaking release on both the EVM and VM packages due to a problematic top level await() discovery in the underlying `rustbn-wasm` library (see issue [#10](https://github.com/ethereumjs/rustbn-wasm/issues/10)) generally affecting the compatibility of our libraries. 
The `EVM` direct constructor initialization with `new EVM()` now has been deprecated and replaced by an async static `create()` constructor, as it is already done in various other libraries in the EthereumJS monorepo, see PRs [#3304](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3304/) and [#3315](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3315). @@ -70,7 +121,7 @@ Since this fits well also to be placed here relatively prominently for awareness ## 2.2.1 - 2024-02-08 -- Hotfix release moving the `@ethereumjs/verkle` dependency for `@ethereumjs/statemanager` from a peer dependency to the main dependencis (note that this decision might be temporary) +- Hotfix release moving the `@ethereumjs/verkle` dependency for `@ethereumjs/statemanager` from a peer dependency to the main dependencies (note that this decision might be temporary) ## 2.2.0 - 2024-02-08 @@ -188,7 +239,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -471,7 +522,7 @@ The EVM now integrates a new point evaluation precompile at address `0x14` to "v ### Bug Fixes and Other Changes - Gas cost fixes for `EIP-3860` (experimental), PR [#2397](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2397) -- More correctly timed `nonce` updates to avoid certain consensus-critical `nonce`/`account` update constallations. PR [#2404](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2404) +- More correctly timed `nonce` updates to avoid certain consensus-critical `nonce`/`account` update constellations. 
PR [#2404](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2404) - Fixed chainstart/Frontier mainnet bug, PR [#2439](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2439) - EVM memory expansion performance optimizations, PR [#2405](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2405) - `EIP-4895` beacon chain withdrawals support (see `@ethereumjs/vm` for full documentation), PRs [#2353](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2353) and [#2401](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2401) @@ -514,7 +565,7 @@ common.setForkHashes(genesisHash) Along some deeper investigation of build errors related to the usage of the `async-eventemitter` package we finally decided to completely switch to a new async event emitter package for VM/EVM events, see PR [#2303](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2303). The old [async-eventemitter](https://github.com/ahultgren/async-eventemitter) package hasn't been updated for several years and the new [eventemitter2](https://github.com/EventEmitter2/EventEmitter2) package is more modern and maintained as well as substantially more used and therefore a future-proof choice for an async event emitter library to build the VM/EVM event emitting system upon. -The significant parts of the API of both the old and the new libraries are the same and the switch shouldn't cause too much hazzle for people upgrading. In case you nevertheless stumble upon upgrading problems regarding the event emitter package switch please feel free to open an issue, we'll be there to assist you on the upgrade! +The significant parts of the API of both the old and the new libraries are the same and the switch shouldn't cause too much hassle for people upgrading. In case you nevertheless stumble upon upgrading problems regarding the event emitter package switch please feel free to open an issue, we'll be there to assist you on the upgrade! ### Other Changes and Fixes @@ -547,7 +598,7 @@ This is the biggest EVM change in this release. The inheritance structure of the This allows for an easier typing of the `EVM` and makes the core EVM class leaner and not overloaded with various other partly unused properties. The new `events` property is optional. 
-Usage code of events needs to be slighly adopted and updated from: +Usage code of events needs to be slightly adopted and updated from: ```ts evm.on('step', (e) => { @@ -596,7 +647,7 @@ const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - Ensure EVM runs when nonce is 0, PR [#2054](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2054) - EVM/VM instantiation fixes, PR [#2078](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2078) -- Moved `@types/async-eventemitter` from devDependencis to dependencies, PR [#2077](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2077) +- Moved `@types/async-eventemitter` from devDependencies to dependencies, PR [#2077](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2077) - Added additional exports `EvmErrorMessage`, `ExecResult`, `InterpreterStep`, `Message`, PR [#2063](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2063) ## 1.0.0-beta.2 - 2022-07-15 @@ -605,7 +656,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -613,7 +664,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -855,7 +906,7 @@ Note that state in the VM is not activated by default (this also goes for accoun ### L2 Support: Custom Opcodes Option -There is now a new option `customOpcodes` for the VM which allows to add custom opcodes to the VM, see PR [#1705](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1705). This should be useful for L2s and other EVM based side chains if they come with a slighly different opcode set for bytecode execution. +There is now a new option `customOpcodes` for the VM which allows to add custom opcodes to the VM, see PR [#1705](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1705). This should be useful for L2s and other EVM based side chains if they come with a slightly different opcode set for bytecode execution. New opcodes can be passed in with its own logic function and an additional function for gas calculation. 
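Editor's sketch of what such a `customOpcodes` entry could look like, going only by the description above (a logic function plus an optional gas function). The field names (`opcode`, `opcodeName`, `baseFee`, `logicFunction`) and the exact wiring are assumptions here, not taken from this PR, so check the package's current `CustomOpcode` typings before relying on it:

```ts
import { Common, Mainnet } from '@ethereumjs/common'
import { createEVM } from '@ethereumjs/evm'

// Hypothetical custom opcode that pushes the constant 1 onto the stack.
// Field names follow the shape described in the text above (assumed, not verified).
const pushOne = {
  opcode: 0x21, // an unassigned opcode slot (assumption)
  opcodeName: 'PUSHONE',
  baseFee: 3, // static gas charged when the opcode executes
  logicFunction: (runState: any) => {
    runState.stack.push(BigInt(1))
  },
}

const main = async () => {
  const common = new Common({ chain: Mainnet })
  const evm = await createEVM({ common, customOpcodes: [pushOne] })
  const res = await evm.runCode({ code: new Uint8Array([0x21]), gasLimit: BigInt(0xffff) })
  console.log(res.executionGasUsed) // expected: 3n if the custom opcode was registered
}

void main()
```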
Additionally the new option allows for overwriting and/or deleting existing opcodes. @@ -942,7 +993,7 @@ invalid receiptTrie (vm hf=berlin -> block number=1 hash=0x8e368301586b53e30c58d The extended errors give substantial more object and chain context and should ease debugging. -**Potentially breaking**: Attention! If you do react on errors in your code and do exact errror matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisions and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). +**Potentially breaking**: Attention! If you do react on errors in your code and do exact error matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisons and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). ### Other Changes @@ -1061,7 +1112,7 @@ There is a new EVM Object Format (EOF) in preparation which will allow to valida ### StateManager: Preserve State History -This VM release bumps the `merkle-patricia-tree` dependeny to `v4.2.0`, which is used as a datastore for the default `StateManager` implementation. The new MPT version switches to a default behavior to not delete any trie nodes on checkpoint commits, which has implications on the `StateManager.commit()` function which internally calls the MPT commit. This allows to go back to older trie states by setting a new (old) state root with `StateManager.setStateRoot()`. The trie state is now guaranteed to still be consistent and complete, which has not been the case before and lead to erraneous behaviour in certain usage scenarios (e.g. reported by HardHat). +This VM release bumps the `merkle-patricia-tree` dependency to `v4.2.0`, which is used as a datastore for the default `StateManager` implementation. The new MPT version switches to a default behavior to not delete any trie nodes on checkpoint commits, which has implications on the `StateManager.commit()` function which internally calls the MPT commit. This allows to go back to older trie states by setting a new (old) state root with `StateManager.setStateRoot()`. The trie state is now guaranteed to still be consistent and complete, which has not been the case before and lead to erroneous behaviour in certain usage scenarios (e.g. reported by HardHat). See PR [#1262](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1262) @@ -1082,7 +1133,7 @@ See PR [#1168](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1168) ## 5.3.2 - 2021-04-12 -This is a hot-fix performance release, removing the `debug` functionality from PR [#1080](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1080) and follow-up PRs. While highly useful for debugging, this feature side-introduced a siginficant reduction in VM performance which went along unnoticed. For now we will remove since upstream dependencies are awaiting a new release before the `Belin` HF happening. We will try to re-introduce in a performance friendly manner in some subsequent release (we cannot promise on that though). 
+This is a hot-fix performance release, removing the `debug` functionality from PR [#1080](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1080) and follow-up PRs. While highly useful for debugging, this feature side-introduced a significant reduction in VM performance which went along unnoticed. For now we will remove since upstream dependencies are awaiting a new release before the `Berlin` HF happening. We will try to re-introduce in a performance friendly manner in some subsequent release (we cannot promise on that though). See PR [#1198](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1198). @@ -1196,7 +1247,7 @@ If you are using this library in conjunction with other EthereumJS libraries mak ### Other Features - `{ stateRoot, gasUsed, logsBloom, receiptRoot }` have been added to `RunBlockResult` and will be emitted with `afterBlock`, PR [#853](https://github.com/ethereumjs/ethereumjs-monorepo/pull/853) -- Added `vm:eei:gas` EEI gas debug looger, PR [#1124](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1124) +- Added `vm:eei:gas` EEI gas debug logger, PR [#1124](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1124) ### Other Fixes @@ -1323,7 +1374,7 @@ const common = new Common({ chain: 'mainnet', hardfork: 'spuriousDragon' }) const vm = new VM({ common }) ``` -**Breaking**: The default HF from the VM has been updated from `petersburg` to `istanbul`. The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). +**Breaking**: The default HF from the VM has been updated from `petersburg` to `istanbul`. The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). **Breaking**: Please note that the options to directly pass in `chain` and `hardfork` strings have been removed to simplify the API. Providing a `Common` instance is now the only way to change the chain setup, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863) @@ -1376,7 +1427,7 @@ The Util package also introduces a new [Address class](https://github.com/ethere We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1477,7 +1528,7 @@ const vm = new VM({ common }) ``` **Breaking**: The default HF from the VM has been updated from `petersburg` to `istanbul`. 
-The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, +The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). **Breaking**: Please note that the options to directly pass in @@ -1566,7 +1617,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1859,7 +1910,7 @@ These will be the main release notes for the `v4` feature updates, subsequent `beta` releases and the final release will just publish the delta changes and point here for reference. -Breaking changes in the release notes are preeceeded with `[BREAKING]`, do a +Breaking changes in the release notes are preceded with `[BREAKING]`, do a search for an overview. The outstanding work of [@s1na](https://github.com/s1na) has to be mentioned @@ -2015,7 +2066,7 @@ vm.runTx( // Handle errors appropriately } // Do something with the result - } + }, ) ``` @@ -2265,7 +2316,7 @@ The `StateManager` (`lib/stateManager.js`) - providing a high-level interface to This comes along with larger refactoring work throughout more-or-less the whole code base and the `StateManager` now completely encapsulates the trie structure and the cache backend used, see issue [#268](https://github.com/ethereumjs/ethereumjs-monorepo/issues/268) and associated PRs for reference. This will make it much easier in the future to bring along an own state manager serving special needs (optimized for memory and performance, run on mobile,...) by e.g. using a different trie implementation, cache or underlying storage or database backend. -We plan to completely separate the currently still integrated state manager into its own repository in one of the next releases, this will then be a breaking `v3.0.0` release. Discussion around a finalized interface (we might e.g. drop all genesis-releated methods respectively methods implemented in the `DefaultStateManager`) is still ongoing and you are very much invited to jump in and articulate your needs, just take e.g. the issue mentioned above as an entry point. +We plan to completely separate the currently still integrated state manager into its own repository in one of the next releases, this will then be a breaking `v3.0.0` release. Discussion around a finalized interface (we might e.g. drop all genesis-related methods respectively methods implemented in the `DefaultStateManager`) is still ongoing and you are very much invited to jump in and articulate your needs, just take e.g. the issue mentioned above as an entry point. Change related to the new `StateManager` interface: @@ -2331,7 +2382,7 @@ making the start being introduced in the `v2.4.0` release. Since both the scope of the `Constantinople` hardfork as well as the state of at least some of the EIPs to be included are not yet finalized, this is only meant for `EXPERIMENTAL` purposes, e.g. 
for developer -tools to give users early access and make themself familiar with dedicated features. +tools to give users early access and make themselves familiar with dedicated features. Once scope and EIPs from `Constantinople` are final we will target a `v2.5.0` release which will officially introduce `Constantinople` support with all the changes bundled together. @@ -2348,7 +2399,7 @@ All the changes from this release: **FEATURES/FUNCTIONALITY** - Improved chain and fork support, see PR [#304](https://github.com/ethereumjs/ethereumjs-monorepo/pull/304) -- Support for the `Constantinople` bitwise shifiting instructions `SHL`, `SHR` and `SAR`, see PR [#251](https://github.com/ethereumjs/ethereumjs-monorepo/pull/251) +- Support for the `Constantinople` bitwise shifting instructions `SHL`, `SHR` and `SAR`, see PR [#251](https://github.com/ethereumjs/ethereumjs-monorepo/pull/251) - New `newContract` event which can be used to do interrupting tasks on contract/address creation, see PR [#306](https://github.com/ethereumjs/ethereumjs-monorepo/pull/306) - Alignment of behavior of bloom filter hashing to go along with mainnet compatible clients _BREAKING_, see PR [#295](https://github.com/ethereumjs/ethereumjs-monorepo/pull/295) diff --git a/packages/evm/README.md b/packages/evm/README.md index 1537ecde9b..fa08770458 100644 --- a/packages/evm/README.md +++ b/packages/evm/README.md @@ -97,6 +97,59 @@ void main() Additionally this usage example shows the use of events to listen on the inner workings and procedural updates (`step` event) of the EVM. +### Precompiles + +This library support all EVM precompiles up to the `Prague` hardfork. + +The following code allows to run precompiles in isolation, e.g. for testing purposes: + +```ts +// ./examples/precompile.ts + +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { EVM, getActivePrecompiles } from '@ethereumjs/evm' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Prague }) + + // Taken from test/eips/precompiles/bls/add_G1_bls.json + const data = hexToBytes( + '0x0000000000000000000000000000000017f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb0000000000000000000000000000000008b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e100000000000000000000000000000000112b98340eee2777cc3c14163dea3ec97977ac3dc5c70da32e6e87578f44912e902ccef9efe28d4a78b8999dfbca942600000000000000000000000000000000186b28d92356c4dfec4b5201ad099dbdede3781f8998ddf929b4cd7756192185ca7b8f4ef7088f813270ac3d48868a21', + ) + const gasLimit = BigInt(5000000) + + const evm = await EVM.create({ common }) + const precompile = getActivePrecompiles(common).get('000000000000000000000000000000000000000b')! + + const callData = { + data, + gasLimit, + common, + _EVM: evm, + } + const result = await precompile(callData) + console.log(`Precompile result:${bytesToHex(result.returnValue)}`) +} + +main() +``` + +### EIP-2537 BLS Precompiles (Prague) + +Starting with `v3.1.0` the EVM support the BLS precompiles introduced with [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537). These precompiles run natively using the [@noble/curves](https://github.com/paulmillr/noble-curves) library (❤️ to `@paulmillr`!). 
+ +An alternative WASM implementation (using [bls-wasm](https://github.com/herumi/bls-wasm)) can be optionally used like this if needed for performance reasons: + +```ts +import { EVM, MCLBLS } from '@ethereumjs/evm' + +const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Prague }) +await mcl.init(mcl.BLS12_381) +const mclbls = new MCLBLS(mcl) +const evm = await EVM.create({ common, bls }) +``` + ## Examples This projects contain the following examples: @@ -218,11 +271,10 @@ Currently supported EIPs: - [EIP-1153](https://eips.ethereum.org/EIPS/eip-1153) - Transient storage opcodes (Cancun) - [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) - Fee market change for ETH 1.0 chain -- [EIP-2315](https://eips.ethereum.org/EIPS/eip-2315) - Simple subroutines for the EVM (`outdated`) - [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537) - BLS precompiles (removed in v4.0.0, see latest v3 release) - [EIP-2565](https://eips.ethereum.org/EIPS/eip-2565) - ModExp gas cost - [EIP-2718](https://eips.ethereum.org/EIPS/eip-2718) - Transaction Types -- [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Save historical block hashes in state (`experimental`) +- [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Serve historical block hashes from state (Prague) - [EIP-2929](https://eips.ethereum.org/EIPS/eip-2929) - gas cost increases for state access opcodes - [EIP-2930](https://eips.ethereum.org/EIPS/eip-2930) - Optional access list tx type - [EIP-3074](https://eips.ethereum.org/EIPS/eip-3074) - AUTH and AUTHCALL opcodes @@ -244,8 +296,14 @@ Currently supported EIPs: - [EIP-4895](https://eips.ethereum.org/EIPS/eip-4895) - Beacon chain push withdrawals as operations (Shanghai) - [EIP-5133](https://eips.ethereum.org/EIPS/eip-5133) - Delaying Difficulty Bomb to mid-September 2022 (Gray Glacier) - [EIP-5656](https://eips.ethereum.org/EIPS/eip-5656) - MCOPY - Memory copying instruction (Cancun) +- [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) - Supply validator deposits on chain (Prague) - [EIP-6780](https://eips.ethereum.org/EIPS/eip-6780) - SELFDESTRUCT only in same transaction (Cancun) +- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) - Execution layer triggerable withdrawals (Prague) +- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) - Execution layer triggerable validator consolidations (Prague) +- [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) - EOA code transactions (Prague) (`outdated`) +- [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709) - Read BLOCKHASH from storage and update cost (Osaka) - [EIP-7516](https://eips.ethereum.org/EIPS/eip-7516) - BLOBBASEFEE opcode (Cancun) +- [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) - General purpose execution layer requests (Prague) ### WASM Crypto Support @@ -415,7 +473,7 @@ Note that this is nevertheless a very theoretical value but pretty valuable for Another note: profiler results for at least some opcodes are heavily distorted, first to mention the `SSTORE` opcode where the major "cost" occurs after block execution on checkpoint commit, which is not taken into account by the profiler. -Generally all results should rather encourage and need "self thinking" 😋 and are not suited to be blindedly taken over without a deeper understanding/grasping of the underlying measurement conditions. +Generally all results should rather encourage and need "self thinking" 😋 and are not suited to be blindly taken over without a deeper understanding/grasping of the underlying measurement conditions. Happy EVM Profiling! 
🎉 🤩 diff --git a/packages/evm/docs/interfaces/EVMOpts.md b/packages/evm/docs/interfaces/EVMOpts.md index dacab84ccd..8ae2b12c6b 100644 --- a/packages/evm/docs/interfaces/EVMOpts.md +++ b/packages/evm/docs/interfaces/EVMOpts.md @@ -67,11 +67,10 @@ Use a Common instance for EVM instantiation. - [EIP-1153](https://eips.ethereum.org/EIPS/eip-1153) - Transient storage opcodes (Cancun) - [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) - Fee market change for ETH 1.0 chain -- [EIP-2315](https://eips.ethereum.org/EIPS/eip-2315) - Simple subroutines for the EVM (`outdated`) - [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537) - BLS precompiles (removed in v4.0.0, see latest v3 release) - [EIP-2565](https://eips.ethereum.org/EIPS/eip-2565) - ModExp gas cost - [EIP-2718](https://eips.ethereum.org/EIPS/eip-2565) - Transaction Types -- [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Save historical block hashes in state (`experimental`) +- [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Serve historical block hashes from state (Prague) - [EIP-2929](https://eips.ethereum.org/EIPS/eip-2929) - gas cost increases for state access opcodes - [EIP-2930](https://eips.ethereum.org/EIPS/eip-2930) - Optional access list tx type - [EIP-3074](https://eips.ethereum.org/EIPS/eip-3074) - AUTH and AUTHCALL opcodes @@ -93,8 +92,14 @@ Use a Common instance for EVM instantiation. - [EIP-4895](https://eips.ethereum.org/EIPS/eip-4895) - Beacon chain push withdrawals as operations (Shanghai) - [EIP-5133](https://eips.ethereum.org/EIPS/eip-5133) - Delaying Difficulty Bomb to mid-September 2022 (Gray Glacier) - [EIP-5656](https://eips.ethereum.org/EIPS/eip-5656) - MCOPY - Memory copying instruction (Cancun) +- [EIP-6110](https://eips.ethereum.org/EIPS/eip-5656) - Supply validator deposits on chain (Prague) - [EIP-6780](https://eips.ethereum.org/EIPS/eip-6780) - SELFDESTRUCT only in same transaction (Cancun) +- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) - Execution layer triggerable withdrawals (Prague) +- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) - Execution layer triggerable validator consolidations (Prague) +- [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) - EOA code transactions (Prague) (`outdated`) +- [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709) - Read BLOCKHASH from storage and update cost (Osaka) - [EIP-7516](https://eips.ethereum.org/EIPS/eip-7516) - BLOBBASEFEE opcode (Cancun) +- [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) - General purpose execution layer requests (Prague) *Annotations:* diff --git a/packages/evm/examples/4844.ts b/packages/evm/examples/4844.ts index 180bf1d2a5..cdf9380144 100644 --- a/packages/evm/examples/4844.ts +++ b/packages/evm/examples/4844.ts @@ -1,3 +1,3 @@ -import { Common, Chain, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai, eips: [4844] }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai, eips: [4844] }) diff --git a/packages/evm/examples/decode-opcodes.ts b/packages/evm/examples/decode-opcodes.ts index ff551679e3..429dd9560a 100644 --- a/packages/evm/examples/decode-opcodes.ts +++ b/packages/evm/examples/decode-opcodes.ts @@ -2,15 +2,14 @@ // // 1. 
Takes binary EVM code and decodes it into opcodes -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { getOpcodesForHF, paramsEVM } from '@ethereumjs/evm' import { bytesToHex, hexToBytes } from '@ethereumjs/util' -import { getOpcodesForHF } from '../dist/cjs/opcodes/index.js' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul, params: paramsEVM }) const opcodes = getOpcodesForHF(common).opcodes -const data = - '0x6107608061000e6000396000f30060003560e060020a90048063141961bc1461006e57806319ac74bd146100cf578063278ecde1146100e75780632c0f7b6f146100f8578063a87430ba1461010a578063ac273aa21461011f578063c06f4c1d14610133578063c1cbbca714610159578063e11523431461016a57005b610079600435610183565b8b6000528a60205289600160a060020a031660405288600160a060020a0316606052876080528660a0528560c0528460e05283610100528261012052816101405280600160a060020a0316610160526101806000f35b6100dd6004356024356106e8565b8060005260206000f35b6100f2600435610454565b60006000f35b61010061017c565b8060005260206000f35b6101156004356101da565b8060005260206000f35b61012d600435602435610729565b60006000f35b61015360043560243560443560643560843560a43560c43560e4356101ee565b60006000f35b610164600435610302565b60006000f35b6101756004356105dd565b60006000f35b5b60005481565b5b6000526001602052604060002080549080600101549080600201549080600301549080600401549080600501549080600601549080600701549080600801549080600901549080600c01549080600d015490508c565b5b600052600260205260406000208054905081565b600060006000600060008811801561020557504287115b61020e576102f4565b600080549081600101905593506001600085815260200190815260200160002092508b83819055508a83600101819055503383600201819055508883600301819055508783600501819055508683600401819055508583600701819055508983600c01819055508483600d01819055506002600033600160a060020a03168152602001908152602001600020915081805490816001019055905083826001016000838152602001908152602001600020819055508333600160a060020a03167f882da991e52c8933ce57314c9ba3f934798d912d862790c40d0feeb7025af08a60006000a35b505050505050505050505050565b600060006000600034116103155761044e565b600160008581526020019081526020016000209250428360040154101561033b5761044d565b82600901805490816001019055915082600a0160008381526020019081526020016000209050338181905550348160010181905550806001015483600601818154019150819055508183600b01600033600160a060020a03168152602001908152602001600020819055508333600160a060020a03167fc5e578961e5bd7481ccf1d1bdfbad97b9f1ddfad520f061ca764a57018f3febe6000866006015481526020016000a3600083600d0154600160a060020a031614156103fc5761044c565b82600d0154600160a060020a03166249f068600060008260e060020a02600052600488815260200133600160a060020a03168152602001348152602001600060008660325a03f161044957005b50505b5b5b50505050565b60006000600160008481526020019081526020016000209150816004015442118015610487575081600501548260060154105b8015610497575060008260060154115b6104a0576105d8565b81600a01600083600b01600033600160a060020a03168152602001908152602001600020548152602001908152602001600020905060008160010154116104e6576105d7565b8054600160a060020a0316600082600101546000600060006000848787f161050a57005b505050806001015482600601818154039150819055508233600160a060020a03167fe139691e7435f1fb40ec50ed3729009226be49087fd00e9e5bac276c2a8f40cf6000846001015481526020016000a360008160010181905550600082600d0154600160a060020a03161415610580576105d6565b81600d0154600160a060020a031663b71f3cde600060008260e060020a0260005260048781526020018554600160a060020a03168
15260200185600101548152602001600060008660325a03f16105d357005b50505b5b5b505050565b6000600160008381526020019081526020016000209050806005015481600601541015610609576106e4565b8060030154600160a060020a0316600082600601546000600060006000848787f161063057005b5050508133600160a060020a03167f6be92574b1386f424263a096e8b66ff6cc223ab0f9d18702563aa339a372cf986000846006015481526020016000a36000816006018190555060018160080181905550600081600d0154600160a060020a0316141561069d576106e3565b80600d0154600160a060020a031663484ec26c600060008260e060020a02600052600486815260200185600601548152602001600060008660325a03f16106e057005b50505b5b5050565b600060006002600085600160a060020a0316815260200190815260200160002090508060010160008481526020019081526020016000205491505092915050565b6000600060016000858152602001908152602001600020905080600a0160008481526020019081526020016000209150509291505056' +const data = '0x6107608061000e6000396000f30060003560e060020a90048063141961bc1461006e57806319ac74bd' nameOpCodes(hexToBytes(data)) @@ -33,7 +32,7 @@ function nameOpCodes(raw: Uint8Array) { ' ' + curOpCode + ' ' + - (pushData?.length > 0 ? bytesToHex(pushData as Uint8Array) : '') + (pushData?.length > 0 ? bytesToHex(pushData as Uint8Array) : ''), ) pushData = new Uint8Array() diff --git a/packages/evm/examples/eips.ts b/packages/evm/examples/eips.ts index 91c15891d0..4d405bcf12 100644 --- a/packages/evm/examples/eips.ts +++ b/packages/evm/examples/eips.ts @@ -1,10 +1,10 @@ -import { Chain, Common } from '@ethereumjs/common' -import { EVM } from '@ethereumjs/evm' +import { Common, Mainnet } from '@ethereumjs/common' +import { createEVM } from '@ethereumjs/evm' const main = async () => { - const common = new Common({ chain: Chain.Mainnet, eips: [3074] }) - const evm = await EVM.create({ common }) - console.log(`EIP 3074 is active - ${evm.common.isActivatedEIP(3074)}`) + const common = new Common({ chain: Mainnet, eips: [7702] }) + const evm = await createEVM({ common }) + console.log(`EIP 7702 is active - ${evm.common.isActivatedEIP(7702)}`) } -main() +void main() diff --git a/packages/evm/examples/precompile.ts b/packages/evm/examples/precompile.ts new file mode 100644 index 0000000000..e6ee2083ba --- /dev/null +++ b/packages/evm/examples/precompile.ts @@ -0,0 +1,27 @@ +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createEVM, getActivePrecompiles } from '@ethereumjs/evm' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' + +const main = async () => { + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Prague }) + + // Taken from test/eips/precompiles/bls/add_G1_bls.json + const data = hexToBytes( + '0x0000000000000000000000000000000017f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb0000000000000000000000000000000008b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e100000000000000000000000000000000112b98340eee2777cc3c14163dea3ec97977ac3dc5c70da32e6e87578f44912e902ccef9efe28d4a78b8999dfbca942600000000000000000000000000000000186b28d92356c4dfec4b5201ad099dbdede3781f8998ddf929b4cd7756192185ca7b8f4ef7088f813270ac3d48868a21', + ) + const gasLimit = BigInt(5000000) + + const evm = await createEVM({ common }) + const precompile = getActivePrecompiles(common).get('000000000000000000000000000000000000000b')! 
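+  // Illustrative note: under EIP-2537 (active via the Prague hardfork selected above), the
+  // address 0x0b is the BLS12-381 G1ADD precompile, matching the add_G1_bls test vector.
+  // `get()` returns undefined for precompiles that are not active on the configured common,
+  // so the non-null assertion depends on the hardfork chosen here.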
+ + const callData = { + data, + gasLimit, + common, + _EVM: evm, + } + const result = await precompile(callData) + console.log(`Precompile result:${bytesToHex(result.returnValue)}`) +} + +void main() diff --git a/packages/evm/examples/runCode.ts b/packages/evm/examples/runCode.ts index d442043e37..aeca0d4d34 100644 --- a/packages/evm/examples/runCode.ts +++ b/packages/evm/examples/runCode.ts @@ -1,13 +1,15 @@ import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { EVM } from '@ethereumjs/evm' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createEVM } from '@ethereumjs/evm' import { bytesToHex, hexToBytes } from '@ethereumjs/util' +import type { PrefixedHexString } from '@ethereumjs/util' + const main = async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const blockchain = await createBlockchain() - const evm = await EVM.create({ + const evm = await createEVM({ common, blockchain, }) @@ -26,7 +28,7 @@ const main = async () => { evm .runCode({ - code: hexToBytes('0x' + code.join('')), + code: hexToBytes(('0x' + code.join('')) as PrefixedHexString), gasLimit: BigInt(0xffff), }) .then((results) => { diff --git a/packages/evm/examples/simple.ts b/packages/evm/examples/simple.ts index 06bc94176f..66cf71b510 100644 --- a/packages/evm/examples/simple.ts +++ b/packages/evm/examples/simple.ts @@ -1,10 +1,10 @@ +import { createEVM } from '@ethereumjs/evm' import { hexToBytes } from '@ethereumjs/util' -import { EVM } from '@ethereumjs/evm' const main = async () => { - const evm = await EVM.create() + const evm = await createEVM() const res = await evm.runCode({ code: hexToBytes('0x6001') }) // PUSH1 01 -- simple bytecode to push 1 onto the stack console.log(res.executionGasUsed) // 3n } -main() +void main() diff --git a/packages/evm/examples/withBlockchain.ts b/packages/evm/examples/withBlockchain.ts index f7ba32049e..4bdda1dc48 100644 --- a/packages/evm/examples/withBlockchain.ts +++ b/packages/evm/examples/withBlockchain.ts @@ -1,15 +1,17 @@ import { createBlockchain } from '@ethereumjs/blockchain' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { EVM } from '@ethereumjs/evm' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createEVM } from '@ethereumjs/evm' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { bytesToHex } from '@ethereumjs/util' +import { bytesToHex, hexToBytes } from '@ethereumjs/util' + +import type { PrefixedHexString } from '@ethereumjs/util' const main = async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) const stateManager = new DefaultStateManager() const blockchain = await createBlockchain() - const evm = await EVM.create({ + const evm = await createEVM({ common, stateManager, blockchain, @@ -28,7 +30,7 @@ const main = async () => { }) const results = await evm.runCode({ - code: Buffer.from(code.join(''), 'hex'), + code: hexToBytes(('0x' + code.join('')) as PrefixedHexString), gasLimit: BigInt(0xffff), }) diff --git a/packages/evm/package.json b/packages/evm/package.json index bdd1015585..793af7a204 100644 --- a/packages/evm/package.json +++ b/packages/evm/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/evm", - "version": "3.0.0", + "version": "3.1.0", "description": 
"JavaScript Ethereum Virtual Machine (EVM) implementation", "keywords": [ "ethereum", @@ -56,15 +56,14 @@ "visualize:bundle": "npx vite build --config=./vite.config.bundler.ts --emptyOutDir=false --outDir ." }, "dependencies": { - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/statemanager": "^2.3.0", - "@ethereumjs/tx": "^5.3.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/common": "^4.4.0", + "@ethereumjs/statemanager": "^2.4.0", + "@ethereumjs/tx": "^5.4.0", + "@ethereumjs/util": "^9.1.0", + "@noble/curves": "^1.5.0", "@types/debug": "^4.1.9", "debug": "^4.3.3", - "ethereum-cryptography": "^2.2.1", - "@noble/curves": "^1.4.2", - "rustbn-wasm": "^0.4.0" + "ethereum-cryptography": "^2.2.1" }, "devDependencies": { "@ethersproject/abi": "^5.0.12", @@ -81,7 +80,9 @@ "minimist": "^1.2.5", "node-dir": "^0.1.17", "rollup-plugin-visualizer": "^5.12.0", - "solc": "^0.8.1" + "rustbn-wasm": "^0.4.0", + "solc": "^0.8.1", + "split": "^1.0.1" }, "engines": { "node": ">=18" diff --git a/packages/evm/scripts/eofContainerValidator.ts b/packages/evm/scripts/eofContainerValidator.ts new file mode 100644 index 0000000000..7a15bf0076 --- /dev/null +++ b/packages/evm/scripts/eofContainerValidator.ts @@ -0,0 +1,36 @@ +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { unprefixedHexToBytes } from '@ethereumjs/util' +import split from 'split' + +import { createEVM, validateEOF } from '../src/index.js' + +/** + * This script reads hex strings (either prefixed or non-prefixed with 0x) from stdin + * It tries to validate the EOF container, if it is valid, it will print "OK" + * If there is a validation error, it will print "err: " + * If the input is empty, the program will exit + */ + +const common = new Common({ chain: Mainnet }) +common.setHardfork(Hardfork.Prague) +common.setEIPs([663, 3540, 3670, 4200, 4750, 5450, 6206, 7069, 7480, 7620, 7692, 7698]) +const evm = await createEVM({ common }) + +function processLine(line) { + if (line.length === 0) { + process.exit() + } + let trimmed = line + if (line.startsWith('0x')) { + trimmed = line.slice(2) + } + const bytes = unprefixedHexToBytes(trimmed) + try { + validateEOF(bytes, evm) + console.log('OK') + } catch (e: any) { + console.log('err: ' + e.message) + } +} + +process.stdin.pipe(split()).on('data', processLine) diff --git a/packages/evm/scripts/stackDeltaGenerator.ts b/packages/evm/scripts/stackDeltaGenerator.ts new file mode 100644 index 0000000000..27b12a0c4c --- /dev/null +++ b/packages/evm/scripts/stackDeltaGenerator.ts @@ -0,0 +1,302 @@ +const stackDelta: any = {} + +class OpcodeInfo { + static terminalOpcode( + instr: string, + opcode: number, + inputs: number, + outputs: number, + opSize: number, + ) { + return this.parse(instr, opcode, inputs, outputs, opSize) + } + + static validOpcode( + instr: string, + opcode: number, + inputs: number, + outputs: number, + opSize: number, + ) { + return this.parse(instr, opcode, inputs, outputs, opSize) + } + static unallocatedOpcode(opcode: number) { + return undefined + } + + static invalidOpcode(instr: string, opcode: number) { + return undefined + } + + static parse(instr: string, opcode: number, inputs: number, outputs: number, opSize: number) { + const hexStr = '0x' + opcode.toString(16).padStart(2, '0') + stackDelta[hexStr] = { + inputs, + outputs, + name: instr, + intermediates: opSize - 1, + } + } +} + +// This code is from Besu: https://github.com/hyperledger/besu/blob/ac5d03f91d4c9e938ff5b4ba90abae1bb4afa997/evm/src/main/java/org/hyperledger/besu/evm/code/OpcodeInfo.java#L79 
+const tbl = [ + OpcodeInfo.terminalOpcode('STOP', 0x00, 0, 0, 1), + OpcodeInfo.validOpcode('ADD', 0x01, 2, 1, 1), + OpcodeInfo.validOpcode('MUL', 0x02, 2, 1, 1), + OpcodeInfo.validOpcode('SUB', 0x03, 2, 1, 1), + OpcodeInfo.validOpcode('DIV', 0x04, 2, 1, 1), + OpcodeInfo.validOpcode('SDIV', 0x05, 2, 1, 1), + OpcodeInfo.validOpcode('MOD', 0x06, 2, 1, 1), + OpcodeInfo.validOpcode('SMOD', 0x07, 2, 1, 1), + OpcodeInfo.validOpcode('ADDMOD', 0x08, 3, 1, 1), + OpcodeInfo.validOpcode('MULMOD', 0x09, 3, 1, 1), + OpcodeInfo.validOpcode('EXP', 0x0a, 2, 1, 1), + OpcodeInfo.validOpcode('SIGNEXTEND', 0x0b, 2, 1, 1), + OpcodeInfo.unallocatedOpcode(0x0c), + OpcodeInfo.unallocatedOpcode(0x0d), + OpcodeInfo.unallocatedOpcode(0x0e), + OpcodeInfo.unallocatedOpcode(0x0f), + OpcodeInfo.validOpcode('LT', 0x10, 2, 1, 1), + OpcodeInfo.validOpcode('GT', 0x11, 2, 1, 1), + OpcodeInfo.validOpcode('SLT', 0x12, 2, 1, 1), + OpcodeInfo.validOpcode('SGT', 0x13, 2, 1, 1), + OpcodeInfo.validOpcode('EQ', 0x14, 2, 1, 1), + OpcodeInfo.validOpcode('ISZERO', 0x15, 1, 1, 1), + OpcodeInfo.validOpcode('AND', 0x16, 2, 1, 1), + OpcodeInfo.validOpcode('OR', 0x17, 2, 1, 1), + OpcodeInfo.validOpcode('XOR', 0x18, 2, 1, 1), + OpcodeInfo.validOpcode('NOT', 0x19, 1, 1, 1), + OpcodeInfo.validOpcode('BYTE', 0x1a, 2, 1, 1), + OpcodeInfo.validOpcode('SHL', 0x1b, 2, 1, 1), + OpcodeInfo.validOpcode('SHR', 0x1c, 2, 1, 1), + OpcodeInfo.validOpcode('SAR', 0x1d, 2, 1, 1), + OpcodeInfo.unallocatedOpcode(0x1e), + OpcodeInfo.unallocatedOpcode(0x1f), + OpcodeInfo.validOpcode('SHA3', 0x20, 2, 1, 1), + OpcodeInfo.unallocatedOpcode(0x21), + OpcodeInfo.unallocatedOpcode(0x22), + OpcodeInfo.unallocatedOpcode(0x23), + OpcodeInfo.unallocatedOpcode(0x24), + OpcodeInfo.unallocatedOpcode(0x25), + OpcodeInfo.unallocatedOpcode(0x26), + OpcodeInfo.unallocatedOpcode(0x27), + OpcodeInfo.unallocatedOpcode(0x28), + OpcodeInfo.unallocatedOpcode(0x29), + OpcodeInfo.unallocatedOpcode(0x2a), + OpcodeInfo.unallocatedOpcode(0x2b), + OpcodeInfo.unallocatedOpcode(0x2c), + OpcodeInfo.unallocatedOpcode(0x2d), + OpcodeInfo.unallocatedOpcode(0x2e), + OpcodeInfo.unallocatedOpcode(0x2f), + OpcodeInfo.validOpcode('ADDRESS', 0x30, 0, 1, 1), + OpcodeInfo.validOpcode('BALANCE', 0x31, 1, 1, 1), + OpcodeInfo.validOpcode('ORIGIN', 0x32, 0, 1, 1), + OpcodeInfo.validOpcode('CALLER', 0x33, 0, 1, 1), + OpcodeInfo.validOpcode('CALLVALUE', 0x34, 0, 1, 1), + OpcodeInfo.validOpcode('CALLDATALOAD', 0x35, 1, 1, 1), + OpcodeInfo.validOpcode('CALLDATASIZE', 0x36, 0, 1, 1), + OpcodeInfo.validOpcode('CALLDATACOPY', 0x37, 3, 0, 1), + OpcodeInfo.invalidOpcode('CODESIZE', 0x38), + OpcodeInfo.invalidOpcode('CODECOPY', 0x39), + OpcodeInfo.validOpcode('GASPRICE', 0x3a, 0, 1, 1), + OpcodeInfo.invalidOpcode('EXTCODESIZE', 0x3b), + OpcodeInfo.invalidOpcode('EXTCODECOPY', 0x3c), + OpcodeInfo.validOpcode('RETURNDATASIZE', 0x3d, 0, 1, 1), + OpcodeInfo.validOpcode('RETURNDATACOPY', 0x3e, 3, 0, 1), + OpcodeInfo.invalidOpcode('EXTCODEHASH', 0x3f), + OpcodeInfo.validOpcode('BLOCKHASH', 0x40, 1, 1, 1), + OpcodeInfo.validOpcode('COINBASE', 0x41, 0, 1, 1), + OpcodeInfo.validOpcode('TIMESTAMP', 0x42, 0, 1, 1), + OpcodeInfo.validOpcode('NUMBER', 0x43, 0, 1, 1), + OpcodeInfo.validOpcode('PREVRANDAO', 0x44, 0, 1, 1), // was DIFFICULTY + OpcodeInfo.validOpcode('GASLIMIT', 0x45, 0, 1, 1), + OpcodeInfo.validOpcode('CHAINID', 0x46, 0, 1, 1), + OpcodeInfo.validOpcode('SELFBALANCE', 0x47, 0, 1, 1), + OpcodeInfo.validOpcode('BASEFEE', 0x48, 0, 1, 1), + OpcodeInfo.validOpcode('BLOBAHASH', 0x49, 1, 1, 1), + 
OpcodeInfo.validOpcode('BLOBBASEFEE', 0x4a, 0, 1, 1), + OpcodeInfo.unallocatedOpcode(0x4b), + OpcodeInfo.unallocatedOpcode(0x4c), + OpcodeInfo.unallocatedOpcode(0x4d), + OpcodeInfo.unallocatedOpcode(0x4e), + OpcodeInfo.unallocatedOpcode(0x4f), + OpcodeInfo.validOpcode('POP', 0x50, 1, 0, 1), + OpcodeInfo.validOpcode('MLOAD', 0x51, 1, 1, 1), + OpcodeInfo.validOpcode('MSTORE', 0x52, 2, 0, 1), + OpcodeInfo.validOpcode('MSTORE8', 0x53, 2, 0, 1), + OpcodeInfo.validOpcode('SLOAD', 0x54, 1, 1, 1), + OpcodeInfo.validOpcode('SSTORE', 0x55, 2, 0, 1), + OpcodeInfo.invalidOpcode('JUMP', 0x56), + OpcodeInfo.invalidOpcode('JUMPI', 0x57), + OpcodeInfo.invalidOpcode('PC', 0x58), + OpcodeInfo.validOpcode('MSIZE', 0x59, 0, 1, 1), + OpcodeInfo.invalidOpcode('GAS', 0x5a), + OpcodeInfo.validOpcode('NOOP', 0x5b, 0, 0, 1), // was JUMPDEST + OpcodeInfo.validOpcode('TLOAD', 0x5c, 1, 1, 1), + OpcodeInfo.validOpcode('TSTORE', 0x5d, 2, 0, 1), + OpcodeInfo.validOpcode('MCOPY', 0x5e, 3, 0, 1), + OpcodeInfo.validOpcode('PUSH0', 0x5f, 0, 1, 1), + OpcodeInfo.validOpcode('PUSH1', 0x60, 0, 1, 2), + OpcodeInfo.validOpcode('PUSH2', 0x61, 0, 1, 3), + OpcodeInfo.validOpcode('PUSH3', 0x62, 0, 1, 4), + OpcodeInfo.validOpcode('PUSH4', 0x63, 0, 1, 5), + OpcodeInfo.validOpcode('PUSH5', 0x64, 0, 1, 6), + OpcodeInfo.validOpcode('PUSH6', 0x65, 0, 1, 7), + OpcodeInfo.validOpcode('PUSH7', 0x66, 0, 1, 8), + OpcodeInfo.validOpcode('PUSH8', 0x67, 0, 1, 9), + OpcodeInfo.validOpcode('PUSH9', 0x68, 0, 1, 10), + OpcodeInfo.validOpcode('PUSH10', 0x69, 0, 1, 11), + OpcodeInfo.validOpcode('PUSH11', 0x6a, 0, 1, 12), + OpcodeInfo.validOpcode('PUSH12', 0x6b, 0, 1, 13), + OpcodeInfo.validOpcode('PUSH13', 0x6c, 0, 1, 14), + OpcodeInfo.validOpcode('PUSH14', 0x6d, 0, 1, 15), + OpcodeInfo.validOpcode('PUSH15', 0x6e, 0, 1, 16), + OpcodeInfo.validOpcode('PUSH16', 0x6f, 0, 1, 17), + OpcodeInfo.validOpcode('PUSH17', 0x70, 0, 1, 18), + OpcodeInfo.validOpcode('PUSH18', 0x71, 0, 1, 19), + OpcodeInfo.validOpcode('PUSH19', 0x72, 0, 1, 20), + OpcodeInfo.validOpcode('PUSH20', 0x73, 0, 1, 21), + OpcodeInfo.validOpcode('PUSH21', 0x74, 0, 1, 22), + OpcodeInfo.validOpcode('PUSH22', 0x75, 0, 1, 23), + OpcodeInfo.validOpcode('PUSH23', 0x76, 0, 1, 24), + OpcodeInfo.validOpcode('PUSH24', 0x77, 0, 1, 25), + OpcodeInfo.validOpcode('PUSH25', 0x78, 0, 1, 26), + OpcodeInfo.validOpcode('PUSH26', 0x79, 0, 1, 27), + OpcodeInfo.validOpcode('PUSH27', 0x7a, 0, 1, 28), + OpcodeInfo.validOpcode('PUSH28', 0x7b, 0, 1, 29), + OpcodeInfo.validOpcode('PUSH29', 0x7c, 0, 1, 30), + OpcodeInfo.validOpcode('PUSH30', 0x7d, 0, 1, 31), + OpcodeInfo.validOpcode('PUSH31', 0x7e, 0, 1, 32), + OpcodeInfo.validOpcode('PUSH32', 0x7f, 0, 1, 33), + OpcodeInfo.validOpcode('DUP1', 0x80, 1, 2, 1), + OpcodeInfo.validOpcode('DUP2', 0x81, 2, 3, 1), + OpcodeInfo.validOpcode('DUP3', 0x82, 3, 4, 1), + OpcodeInfo.validOpcode('DUP4', 0x83, 4, 5, 1), + OpcodeInfo.validOpcode('DUP5', 0x84, 5, 6, 1), + OpcodeInfo.validOpcode('DUP6', 0x85, 6, 7, 1), + OpcodeInfo.validOpcode('DUP7', 0x86, 7, 8, 1), + OpcodeInfo.validOpcode('DUP8', 0x87, 8, 9, 1), + OpcodeInfo.validOpcode('DUP9', 0x88, 9, 10, 1), + OpcodeInfo.validOpcode('DUP10', 0x89, 10, 11, 1), + OpcodeInfo.validOpcode('DUP11', 0x8a, 11, 12, 1), + OpcodeInfo.validOpcode('DUP12', 0x8b, 12, 13, 1), + OpcodeInfo.validOpcode('DUP13', 0x8c, 13, 14, 1), + OpcodeInfo.validOpcode('DUP14', 0x8d, 14, 15, 1), + OpcodeInfo.validOpcode('DUP15', 0x8e, 15, 16, 1), + OpcodeInfo.validOpcode('DUP16', 0x8f, 16, 17, 1), + OpcodeInfo.validOpcode('SWAP1', 0x90, 2, 2, 1), + 
OpcodeInfo.validOpcode('SWAP2', 0x91, 3, 3, 1), + OpcodeInfo.validOpcode('SWAP3', 0x92, 4, 4, 1), + OpcodeInfo.validOpcode('SWAP4', 0x93, 5, 5, 1), + OpcodeInfo.validOpcode('SWAP5', 0x94, 6, 6, 1), + OpcodeInfo.validOpcode('SWAP6', 0x95, 7, 7, 1), + OpcodeInfo.validOpcode('SWAP7', 0x96, 8, 8, 1), + OpcodeInfo.validOpcode('SWAP8', 0x97, 9, 9, 1), + OpcodeInfo.validOpcode('SWAP9', 0x98, 10, 10, 1), + OpcodeInfo.validOpcode('SWAP10', 0x99, 11, 11, 1), + OpcodeInfo.validOpcode('SWAP11', 0x9a, 12, 12, 1), + OpcodeInfo.validOpcode('SWAP12', 0x9b, 13, 13, 1), + OpcodeInfo.validOpcode('SWAP13', 0x9c, 14, 14, 1), + OpcodeInfo.validOpcode('SWAP14', 0x9d, 15, 15, 1), + OpcodeInfo.validOpcode('SWAP15', 0x9e, 16, 16, 1), + OpcodeInfo.validOpcode('SWAP16', 0x9f, 17, 17, 1), + OpcodeInfo.validOpcode('LOG0', 0xa0, 2, 0, 1), + OpcodeInfo.validOpcode('LOG1', 0xa1, 3, 0, 1), + OpcodeInfo.validOpcode('LOG2', 0xa2, 4, 0, 1), + OpcodeInfo.validOpcode('LOG3', 0xa3, 5, 0, 1), + OpcodeInfo.validOpcode('LOG4', 0xa4, 6, 0, 1), + OpcodeInfo.unallocatedOpcode(0xa5), + OpcodeInfo.unallocatedOpcode(0xa6), + OpcodeInfo.unallocatedOpcode(0xa7), + OpcodeInfo.unallocatedOpcode(0xa8), + OpcodeInfo.unallocatedOpcode(0xa9), + OpcodeInfo.unallocatedOpcode(0xaa), + OpcodeInfo.unallocatedOpcode(0xab), + OpcodeInfo.unallocatedOpcode(0xac), + OpcodeInfo.unallocatedOpcode(0xad), + OpcodeInfo.unallocatedOpcode(0xae), + OpcodeInfo.unallocatedOpcode(0xaf), + OpcodeInfo.unallocatedOpcode(0xb0), + OpcodeInfo.unallocatedOpcode(0xb1), + OpcodeInfo.unallocatedOpcode(0xb2), + OpcodeInfo.unallocatedOpcode(0xb3), + OpcodeInfo.unallocatedOpcode(0xb4), + OpcodeInfo.unallocatedOpcode(0xb5), + OpcodeInfo.unallocatedOpcode(0xb6), + OpcodeInfo.unallocatedOpcode(0xb7), + OpcodeInfo.unallocatedOpcode(0xb8), + OpcodeInfo.unallocatedOpcode(0xb9), + OpcodeInfo.unallocatedOpcode(0xba), + OpcodeInfo.unallocatedOpcode(0xbb), + OpcodeInfo.unallocatedOpcode(0xbc), + OpcodeInfo.unallocatedOpcode(0xbd), + OpcodeInfo.unallocatedOpcode(0xbe), + OpcodeInfo.unallocatedOpcode(0xbf), + OpcodeInfo.unallocatedOpcode(0xc0), + OpcodeInfo.unallocatedOpcode(0xc1), + OpcodeInfo.unallocatedOpcode(0xc2), + OpcodeInfo.unallocatedOpcode(0xc3), + OpcodeInfo.unallocatedOpcode(0xc4), + OpcodeInfo.unallocatedOpcode(0xc5), + OpcodeInfo.unallocatedOpcode(0xc6), + OpcodeInfo.unallocatedOpcode(0xc7), + OpcodeInfo.unallocatedOpcode(0xc8), + OpcodeInfo.unallocatedOpcode(0xc9), + OpcodeInfo.unallocatedOpcode(0xca), + OpcodeInfo.unallocatedOpcode(0xcb), + OpcodeInfo.unallocatedOpcode(0xcc), + OpcodeInfo.unallocatedOpcode(0xcd), + OpcodeInfo.unallocatedOpcode(0xce), + OpcodeInfo.unallocatedOpcode(0xcf), + OpcodeInfo.validOpcode('DATALOAD', 0xd0, 1, 1, 1), + OpcodeInfo.validOpcode('DATALOADN', 0xd1, 0, 1, 3), + OpcodeInfo.validOpcode('DATASIZE', 0xd2, 0, 1, 1), + OpcodeInfo.validOpcode('DATACOPY', 0xd3, 3, 0, 1), + OpcodeInfo.unallocatedOpcode(0xd4), + OpcodeInfo.unallocatedOpcode(0xd5), + OpcodeInfo.unallocatedOpcode(0xd6), + OpcodeInfo.unallocatedOpcode(0xd7), + OpcodeInfo.unallocatedOpcode(0xd8), + OpcodeInfo.unallocatedOpcode(0xd9), + OpcodeInfo.unallocatedOpcode(0xda), + OpcodeInfo.unallocatedOpcode(0xdb), + OpcodeInfo.unallocatedOpcode(0xdc), + OpcodeInfo.unallocatedOpcode(0xdd), + OpcodeInfo.unallocatedOpcode(0xde), + OpcodeInfo.unallocatedOpcode(0xdf), + OpcodeInfo.terminalOpcode('RJUMP', 0xe0, 0, 0, 3), + OpcodeInfo.validOpcode('RJUMPI', 0xe1, 1, 0, 3), + OpcodeInfo.validOpcode('RJUMPV', 0xe2, 1, 0, 2), + OpcodeInfo.validOpcode('CALLF', 0xe3, 0, 0, 3), + 
OpcodeInfo.terminalOpcode('RETF', 0xe4, 0, 0, 1), + OpcodeInfo.terminalOpcode('JUMPF', 0xe5, 0, 0, 3), + OpcodeInfo.validOpcode('DUPN', 0xe6, 0, 1, 2), + OpcodeInfo.validOpcode('SWAPN', 0xe7, 0, 0, 2), + OpcodeInfo.validOpcode('EXCHANGE', 0xe8, 0, 0, 2), + OpcodeInfo.unallocatedOpcode(0xe9), + OpcodeInfo.unallocatedOpcode(0xea), + OpcodeInfo.unallocatedOpcode(0xeb), + OpcodeInfo.validOpcode('EOFCREATE', 0xec, 4, 1, 2), + OpcodeInfo.unallocatedOpcode(0xed), + OpcodeInfo.terminalOpcode('RETURNCONTRACT', 0xee, 2, 1, 2), + OpcodeInfo.unallocatedOpcode(0xef), + OpcodeInfo.invalidOpcode('CREATE', 0xf0), + OpcodeInfo.invalidOpcode('CALL', 0xf1), + OpcodeInfo.invalidOpcode('CALLCODE', 0xf2), + OpcodeInfo.terminalOpcode('RETURN', 0xf3, 2, 0, 1), + OpcodeInfo.invalidOpcode('DELEGATECALL', 0xf4), + OpcodeInfo.invalidOpcode('CREATE2', 0xf5), + OpcodeInfo.unallocatedOpcode(0xf6), + OpcodeInfo.validOpcode('RETURNDATALOAD', 0xf7, 1, 1, 1), + OpcodeInfo.validOpcode('EXTCALL', 0xf8, 4, 1, 1), + OpcodeInfo.validOpcode('EXTDELEGATECALL', 0xf9, 3, 1, 1), + OpcodeInfo.invalidOpcode('STATICCALL', 0xfa), + OpcodeInfo.validOpcode('EXTSTATICCALL', 0xfb, 3, 1, 1), + OpcodeInfo.unallocatedOpcode(0xfc), + OpcodeInfo.terminalOpcode('REVERT', 0xfd, 2, 0, 1), + OpcodeInfo.terminalOpcode('INVALID', 0xfe, 0, 0, 1), + OpcodeInfo.invalidOpcode('SELFDESTRUCT', 0xff), +] + +console.log(JSON.stringify(stackDelta)) diff --git a/packages/evm/src/constructors.ts b/packages/evm/src/constructors.ts new file mode 100644 index 0000000000..381a542806 --- /dev/null +++ b/packages/evm/src/constructors.ts @@ -0,0 +1,36 @@ +import { Common, Mainnet } from '@ethereumjs/common' +import { SimpleStateManager } from '@ethereumjs/statemanager' + +import { NobleBN254 } from './precompiles/index.js' +import { DefaultBlockchain } from './types.js' + +import { EVM } from './index.js' + +import type { EVMOpts } from './index.js' + +/** + * Use this async static constructor for the initialization + * of an EVM object + * + * @param createOpts The EVM options + * @returns A new EVM + */ +export async function createEVM(createOpts?: EVMOpts) { + const opts = createOpts ?? 
({} as EVMOpts) + + opts.bn254 = new NobleBN254() + + if (opts.common === undefined) { + opts.common = new Common({ chain: Mainnet }) + } + + if (opts.blockchain === undefined) { + opts.blockchain = new DefaultBlockchain() + } + + if (opts.stateManager === undefined) { + opts.stateManager = new SimpleStateManager() + } + + return new EVM(opts) +} diff --git a/packages/evm/src/eof.ts b/packages/evm/src/eof.ts deleted file mode 100644 index 17a8ff29d6..0000000000 --- a/packages/evm/src/eof.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { handlers } from './opcodes/index.js' - -export const FORMAT = 0xef -export const MAGIC = 0x00 -export const VERSION = 0x01 - -/** - * - * @param container A `Uint8Array` containing bytecode to be checked for EOF1 compliance - * @returns an object containing the size of the code section and data sections for a valid - * EOF1 container or else undefined if `container` is not valid EOF1 bytecode - * - * Note: See https://eips.ethereum.org/EIPS/eip-3540 for further details - */ -export const codeAnalysis = (container: Uint8Array) => { - const secCode = 0x01 - const secData = 0x02 - const secTerminator = 0x00 - let computedContainerSize = 0 - const sectionSizes = { - code: 0, - data: 0, - } - if (container[0] !== FORMAT || container[1] !== MAGIC || container[2] !== VERSION) - // Bytecode does not contain EOF1 "magic" or version number in expected positions - return - - if ( - // EOF1 bytecode must be more than 7 bytes long for EOF1 header plus code section (but no data section) - container.length > 7 && - // EOF1 code section indicator - container[3] === secCode && - // EOF1 header terminator - container[6] === secTerminator - ) { - sectionSizes.code = (container[4] << 8) | container[5] - // Calculate expected length of EOF1 container based on code section - computedContainerSize = 7 + sectionSizes.code - // EOF1 code section must be at least 1 byte long - if (sectionSizes.code < 1) return - } else if ( - // EOF1 container must be more than 10 bytes long if data section is included - container.length > 10 && - // EOF1 code section indicator - container[3] === secCode && - // EOF1 data section indicator - container[6] === secData && - // EOF1 header terminator - container[9] === secTerminator - ) { - sectionSizes.code = (container[4] << 8) | container[5] - sectionSizes.data = (container[7] << 8) | container[8] - // Calculate expected length of EOF1 container based on code and data sections - computedContainerSize = 10 + sectionSizes.code + sectionSizes.data - // Code & Data sizes cannot be 0 - if (sectionSizes.code < 1 || sectionSizes.data < 1) return - } - if (container.length !== computedContainerSize) { - // Computed container length based on section details does not match length of actual bytecode - return - } - return sectionSizes -} - -export const validOpcodes = (code: Uint8Array) => { - // EIP-3670 - validate all opcodes - const opcodes = new Set(handlers.keys()) - opcodes.add(0xfe) // Add INVALID opcode to set - - let x = 0 - while (x < code.length) { - const opcode = code[x] - x++ - if (!opcodes.has(opcode)) { - // No invalid/undefined opcodes - return false - } - if (opcode >= 0x60 && opcode <= 0x7f) { - // Skip data block following push - x += opcode - 0x5f - if (x > code.length - 1) { - // Push blocks must not exceed end of code section - return false - } - } - } - const terminatingOpcodes = new Set([0x00, 0xf3, 0xfd, 0xfe, 0xff]) - // Per EIP-3670, the final opcode of a code section must be STOP, RETURN, REVERT, INVALID, or SELFDESTRUCT - if 
(!terminatingOpcodes.has(code[code.length - 1])) { - return false - } - return true -} - -export const getEOFCode = (code: Uint8Array) => { - const sectionSizes = codeAnalysis(code) - if (sectionSizes === undefined) { - return code - } else { - const codeStart = sectionSizes.data > 0 ? 10 : 7 - return code.subarray(codeStart, codeStart + sectionSizes.code) - } -} - -export const EOF = { FORMAT, MAGIC, VERSION, codeAnalysis, validOpcodes } diff --git a/packages/evm/src/eof/constants.ts b/packages/evm/src/eof/constants.ts new file mode 100644 index 0000000000..42ec1cc74b --- /dev/null +++ b/packages/evm/src/eof/constants.ts @@ -0,0 +1,36 @@ +// Constants, which are taken from https://eips.ethereum.org/EIPS/eip-3540 + +// The "starting bytes" of an EOF contract +export const FORMAT = 0xef +export const MAGIC = 0x00 +export const VERSION = 0x01 + +// The min/max sizes of valid headers +export const MIN_HEADER_SIZE = 15 // This min size is used to invalidate an invalid container quickly +export const MAX_HEADER_SIZE = 49152 // Max initcode size, EIP 3860 + +export const KIND_TYPE = 0x01 // The type byte of the types section +export const KIND_CODE = 0x02 // The type byte of the code section +export const KIND_CONTAINER = 0x03 // The type byte of the container section (this is the only optional section in the header) +export const KIND_DATA = 0x04 // The type byte of the data section +export const TERMINATOR = 0x00 // The terminator byte of the header + +export const TYPE_MIN = 0x0004 // The minimum size of the types section +export const TYPE_MAX = 0x1000 // The maximum size of the types section +export const TYPE_DIVISOR = 4 // The divisor of types: the type section size should be a multiple of this + +export const CODE_MIN = 0x0001 // The minimum size of the code section + +export const CODE_SIZE_MIN = 1 // The minimum size of a code section in the body (the actual code) + +export const CONTAINER_MIN = 0x0001 // The minimum size of the container section +export const CONTAINER_MAX = 0x0100 // The maximum size of the container section + +export const CONTAINER_SIZE_MIN = 1 // The minimum size of a container in the body + +// Constants regarding the type section in the body of the container +export const INPUTS_MAX = 0x7f // The maximum amounts of inputs to a code section in the body +export const OUTPUTS_MAX = 0x80 // The maximum amounts of outputs of a code section in the body +// Note: 0x80 is a special amount of outputs, this marks the code section as "terminating". +// A terminating section will exit the current call frame, such as RETURN / STOP opcodes. 
It will not RETF to another code section +export const MAX_STACK_HEIGHT = 0x03ff // The maximum stack height of a code section (this enforces that the stack of this section cannot overflow) diff --git a/packages/evm/src/eof/container.ts b/packages/evm/src/eof/container.ts new file mode 100644 index 0000000000..a2be68d86c --- /dev/null +++ b/packages/evm/src/eof/container.ts @@ -0,0 +1,461 @@ +import { + CODE_MIN, + CODE_SIZE_MIN, + CONTAINER_MAX, + CONTAINER_MIN, + CONTAINER_SIZE_MIN, + FORMAT, + INPUTS_MAX, + KIND_CODE, + KIND_CONTAINER, + KIND_DATA, + KIND_TYPE, + MAGIC, + MAX_HEADER_SIZE, + MAX_STACK_HEIGHT, + OUTPUTS_MAX, + TERMINATOR, + TYPE_DIVISOR, + TYPE_MAX, + TYPE_MIN, + VERSION, +} from './constants.js' +import { EOFError, validationError } from './errors.js' +import { ContainerSectionType, verifyCode } from './verify.js' + +import type { EVM } from '../evm.js' + +/* + This file creates EOF Containers + EOF Containers are described in EIP-3540. + A container consists of a header and a body. The header describes the layout of the body. + The body has the actual "interesting" contents, such as the bytecode to run, the data section, + and possibly yet-to-be-deployed containers (via EOFCREATE, to create new EOF contracts from an existing one) +*/ + +// This enum marks the "mode" of a container +// Depending on this mode, certain extra checks for validity have to be done, or some checks can be skipped +export enum EOFContainerMode { + Default, // Default container validation + Initmode, // Initmode container validation (for subcontainers pointed to by EOFCreate) + TxInitmode, // Tx initmode container validation (for txs deploying EOF contracts) +} + +// The StreamReader is a helper class to help reading byte arrays +class StreamReader { + private data: Uint8Array // Stream to read + private ptr: number // Current pointer to where the stream is being read + constructor(stream: Uint8Array) { + this.data = stream + this.ptr = 0 + } + + /** + * Read `amount` bytes from the stream. Throws when trying to read out of bounds with an optional error string. + * This also updates the internal pointer + * @param amount Bytes to read + * @param errorStr Optional error string to throw when trying to read out-of-bounds + * @returns The byte array with length `amount` + */ + readBytes(amount: number, errorStr?: string) { + const end = this.ptr + amount + if (end > this.data.length) { + validationError(EOFError.OutOfBounds, this.ptr, errorStr) + } + const ptr = this.ptr + this.ptr += amount + return this.data.slice(ptr, end) + } + + /** + * Reads an Uint8. Also updates the pointer. 
+ * @param errorStr Optional error string + * @returns The uint8 + */ + readUint(errorStr?: string) { + if (this.ptr >= this.data.length) { + validationError(EOFError.OutOfBounds, this.ptr, errorStr) + } + return this.data[this.ptr++] + } + + /** + * Verify that the current uint8 pointed to by the pointer is the expected uint8 + * Also updates the pointer + * @param expect The uint to expect + * @param errorStr Optional error string when the read uint is not the expected uint + */ + verifyUint(expect: number, errorStr?: string) { + if (this.readUint() !== expect) { + validationError(EOFError.VerifyUint, this.ptr - 1, errorStr) + } + } + + /** + * Same as readUint, except this reads an uint16 + * @param errorStr + * @returns + */ + readUint16(errorStr?: string) { + const end = this.ptr + 2 + if (end > this.data.length) { + validationError(EOFError.OutOfBounds, this.ptr, errorStr) + } + const ptr = this.ptr + this.ptr += 2 + return new DataView(this.data.buffer).getUint16(ptr) + } + + /** + * Get the current pointer of the stream + * @returns The pointer + */ + getPtr() { + return this.ptr + } + + // Get the remainder bytes of the current stream + readRemainder() { + return this.data.slice(this.ptr) + } + + // Returns `true` if the stream is fully read, or false if there are dangling bytes + isAtEnd() { + return this.ptr === this.data.length + } +} + +// TODO add initcode flags (isEOFContract) +// TODO validation: mark sections as either initcode or runtime code to validate + +/** + * The EOFHeader, describing the header of the EOF container + */ +class EOFHeader { + typeSize: number // Size of the types section + codeSizes: number[] // Sizes of the code sections + containerSizes: number[] // Sizes of the containers + dataSize: number // Size of the data section + dataSizePtr: number // Used to edit the dataSize in RETURNCONTRACT + buffer: Uint8Array // The raw buffer of the entire header + + private codeStartPos: number[] // Internal array to track at which byte of the container the code starts (per section) + + /** + * Create an EOF header. 
Performs various validation checks inside the constructor + * @param input The input should either be a raw header, or a complete container + */ + constructor(input: Uint8Array) { + if (input.length > MAX_HEADER_SIZE) { + throw new Error('err: container size more than maximum valid size') + } + const stream = new StreamReader(input) + // Verify that the header starts with 0xEF0001 + stream.verifyUint(FORMAT, EOFError.FORMAT) + stream.verifyUint(MAGIC, EOFError.MAGIC) + stream.verifyUint(VERSION, EOFError.VERSION) + if (input.length < 15) { + throw new Error('err: container size less than minimum valid size') + } + // Verify that the types section is present, and verify that the type section length is valid + stream.verifyUint(KIND_TYPE, EOFError.KIND_TYPE) + const typeSize = stream.readUint16(EOFError.TypeSize) + if (typeSize < TYPE_MIN) { + validationError(EOFError.InvalidTypeSize, typeSize) + } + if (typeSize % TYPE_DIVISOR !== 0) { + validationError(EOFError.InvalidTypeSize, typeSize) + } + if (typeSize > TYPE_MAX) { + throw new Error(`err: number of code sections must not exceed 1024 (got ${typeSize})`) + } + // Verify that the code section is present, and verify that the code section size is valid + stream.verifyUint(KIND_CODE, EOFError.KIND_CODE) + const codeSize = stream.readUint16(EOFError.CodeSize) + if (codeSize < CODE_MIN) { + validationError(EOFError.MinCodeSections) + } + if (codeSize !== typeSize / TYPE_DIVISOR) { + validationError(EOFError.TypeSections, typeSize / TYPE_DIVISOR, codeSize) + } + // Read the actual code sizes in the code section, and verify that each code section has the minimum size + const codeSizes = [] + for (let i = 0; i < codeSize; i++) { + const codeSectionSize = stream.readUint16(EOFError.CodeSection) + if (codeSectionSize < CODE_SIZE_MIN) { + validationError(EOFError.CodeSectionSize) + } + codeSizes.push(codeSectionSize) + } + + // Check if there are container sections + let nextSection = stream.readUint() + const containerSizes: number[] = [] + if (nextSection === KIND_CONTAINER) { + // The optional container section is present, validate that the size is within bounds + const containerSectionSize = stream.readUint16(EOFError.ContainerSize) + + if (containerSectionSize < CONTAINER_MIN) { + validationError(EOFError.ContainerSectionSize) + } + if (containerSectionSize > CONTAINER_MAX) { + validationError(EOFError.ContainerSectionSize) + } + + // Read the actual container sections, and validate that each container section has the minimum size + for (let i = 0; i < containerSectionSize; i++) { + const containerSize = stream.readUint16(EOFError.ContainerSection) + + if (containerSize < CONTAINER_SIZE_MIN) { + validationError(EOFError.ContainerSectionMin) + } + + containerSizes.push(containerSize) + } + + nextSection = stream.readUint() + } + + // Verify that the next section is of the data type + if (nextSection !== KIND_DATA) { + validationError(EOFError.KIND_DATA) + } + + this.dataSizePtr = stream.getPtr() + + const dataSize = stream.readUint16(EOFError.DataSize) + + // Verify that the header ends with the TERMINATOR byte + stream.verifyUint(TERMINATOR, EOFError.TERMINATOR) + + // Write all values to the header object + this.typeSize = typeSize + this.codeSizes = codeSizes + this.containerSizes = containerSizes + this.dataSize = dataSize + // Slice the input such that `this.buffer` is now the complete header + // If there are dangling bytes in the stream, this is OK: this is the body section of the container + this.buffer = input.slice(0, stream.getPtr()) 
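+    // Worked example (illustrative): the smallest valid container is
+    //   0xef0001 010004 0200010001 040000 00 00800000 00
+    // Its header is the first 15 bytes (MIN_HEADER_SIZE): typeSize = 4, codeSizes = [1],
+    // containerSizes = [] and dataSize = 0. The body is one 4-byte type entry
+    // (0 inputs, 0x80 outputs, max stack height 0) followed by a single STOP byte.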
+ const relativeOffset = this.buffer.length + this.typeSize + // Write the start of the first code section into `codeStartPos` + // Note: in EVM, if one would set the Program Counter to this byte, it would start executing the bytecode of the first code section + this.codeStartPos = [relativeOffset] + } + + sections() { + return [this.typeSize, this.codeSizes, this.containerSizes, this.dataSize] + } + sectionSizes() { + return [1, this.codeSizes.length, this.containerSizes.length, 1] + } + + // Returns the code position in the container for the requested section + // Setting the Program Counter in the EVM to a number of this array would start executing the bytecode of the indexed section + getCodePosition(section: number) { + if (this.codeStartPos[section]) { + return this.codeStartPos[section] + } + const start = this.codeStartPos.length + let offset = this.codeStartPos[start - 1] + for (let i = start; i <= section; i++) { + offset += this.codeSizes[i - 1] + this.codeStartPos[i] = offset + } + return offset + } +} + +export interface TypeSection { + inputs: number + outputs: number + maxStackHeight: number +} + +/** + * The EOF body holds the contents of the EOF container, such as the code sections (bytecode), + * the subcontainers (EOF containers to be deployed via EOFCREATE) and the data section + */ +class EOFBody { + typeSections: TypeSection[] // Array of type sections, used to index the inputs/outputs/max stack height of each section + codeSections: Uint8Array[] // The bytecode of each code section + containerSections: Uint8Array[] // The raw container bytes of each subcontainer + entireCode: Uint8Array // The `entireCode` are all code sections concatenated + dataSection: Uint8Array // The bytes of the data section + buffer: Uint8Array // The raw bytes of the body + + txCallData?: Uint8Array // Only available in TxInitmode. The `txCallData` are the dangling bytes after parsing the container, + // and these are used for the CALLDATA in the EVM when trying to create a contract via a transaction, and the deployment code is an EOF container + + constructor( + buf: Uint8Array, // The buffer of the body. This should be the entire body. 
It is not valid to pass an entire EOF container in here + header: EOFHeader, // The EOFHeader corresponding to this body + eofMode: EOFContainerMode = EOFContainerMode.Default, // The container mode of EOF + dataSectionAllowedSmaller = false, // Only for validation: Deployment containers are allowed to have smaller data section size + ) { + const stream = new StreamReader(buf) + const typeSections: TypeSection[] = [] + // Read and parse each type section, and validate that the type section values are within valid bounds + for (let i = 0; i < header.typeSize / 4; i++) { + const inputs = stream.readUint(EOFError.Inputs) + const outputs = stream.readUint(EOFError.Outputs) + const maxStackHeight = stream.readUint16(EOFError.MaxStackHeight) + if (i === 0) { + if (inputs !== 0) { + validationError(EOFError.Code0Inputs) + } + if (outputs !== 0x80) { + validationError(EOFError.Code0Outputs) + } + } + if (inputs > INPUTS_MAX) { + validationError(EOFError.MaxInputs, i, inputs) + } + if (outputs > OUTPUTS_MAX) { + validationError(EOFError.MaxOutputs, i, outputs) + } + if (maxStackHeight > MAX_STACK_HEIGHT) { + validationError(EOFError.MaxStackHeightLimit, i, maxStackHeight) + } + typeSections.push({ + inputs, + outputs, + maxStackHeight, + }) + } + // Read each code section + const codeStartPtr = stream.getPtr() + const codes = [] + for (const [i, codeSize] of header.codeSizes.entries()) { + try { + const code = stream.readBytes(codeSize) + codes.push(code) + } catch { + validationError(EOFError.CodeSection, i) + } + } + // Write the entire code section to the entireCodeSection + const entireCodeSection = buf.slice(codeStartPtr, stream.getPtr()) + + // Read all raw subcontainers and push those to the containers array + const containers = [] + for (const [i, containerSize] of header.containerSizes.entries()) { + try { + const container = stream.readBytes(containerSize) + containers.push(container) + } catch { + validationError(EOFError.ContainerSection, i) + } + } + + // Data section of the body + // Note: for EOF containers in Initmode (these are Subcontainers) it is allowed + // to have a data section of size lower than what is written in the header + // For details, see "Data section lifecycle" of EIP 7620 + let dataSection: Uint8Array + + // Edge case: deployment code validation + if (eofMode !== EOFContainerMode.Initmode && !dataSectionAllowedSmaller) { + dataSection = stream.readBytes(header.dataSize, EOFError.DataSection) + + if (eofMode === EOFContainerMode.Default) { + if (!stream.isAtEnd()) { + // If there are dangling bytes in default container mode, this is invalid + validationError(EOFError.DanglingBytes) + } + } else { + // Tx init mode: the remaining bytes (if any) are used as CALLDATA in the EVM, in case of a Tx init + this.txCallData = stream.readRemainder() + } + } else { + dataSection = stream.readRemainder() + } + + // Write all data to the object + this.typeSections = typeSections + this.codeSections = codes + this.containerSections = containers + this.entireCode = entireCodeSection + this.dataSection = dataSection + this.buffer = buf + } + sections() { + return [this.typeSections, this.codeSections, this.dataSection] + } + size() { + return { + typeSize: this.typeSections.length, + codeSize: this.codeSections.length, + dataSize: this.dataSection.length, + } + } + sectionSizes() { + return [ + this.typeSections.map(() => 4), + this.codeSections.map((b) => b.length), + this.dataSection.length, + ] + } +} + +/** + * Main constructor for the EOFContainer + */ +export class 
EOFContainer { + header: EOFHeader + body: EOFBody + buffer: Uint8Array + eofMode: EOFContainerMode + + /** + * + * @param buf Entire container buffer + * @param eofMode Container mode to validate the container on + * @param dataSectionAllowedSmaller `true` if the data section is allowed to be smaller than the data section size in the header + */ + constructor( + buf: Uint8Array, + eofMode: EOFContainerMode = EOFContainerMode.Default, + dataSectionAllowedSmaller = false, + ) { + this.eofMode = eofMode + this.header = new EOFHeader(buf) + this.body = new EOFBody( + buf.slice(this.header.buffer.length), + this.header, + eofMode, + dataSectionAllowedSmaller, + ) + this.buffer = buf + } +} + +/** + * This method validates the EOF. It also performs deeper validation of the body, such as stack/opcode validation + * This is ONLY necessary when trying to deploy contracts from a transaction: these can submit containers which are invalid + * Since all deployed EOF containers are valid by definition, `validateEOF` does not need to be called each time an EOF contract is called + * @param input Full container buffer + * @param evm EVM, to read opcodes from + * @param containerMode Container mode to validate on + * @param eofMode EOF mode to run in + * @returns + */ +export function validateEOF( + input: Uint8Array, + evm: EVM, + containerMode: ContainerSectionType = ContainerSectionType.RuntimeCode, + eofMode: EOFContainerMode = EOFContainerMode.Default, +) { + const container = new EOFContainer( + input, + eofMode, + containerMode === ContainerSectionType.DeploymentCode, + ) + const containerMap = verifyCode(container, evm, containerMode) + // Recursively validate the containerSections + for (let i = 0; i < container.body.containerSections.length; i++) { + const subContainer = container.body.containerSections[i] + const mode = containerMap.get(i)! 
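+    // verifyCode classifies each subcontainer by how it is referenced (EOFCREATE vs.
+    // RETURNCONTRACT); the recursive call below re-validates it under that section type,
+    // e.g. deployment-code targets may carry a smaller-than-declared data section.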
+ validateEOF(subContainer, evm, mode) + } + return container +} diff --git a/packages/evm/src/eof/errors.ts b/packages/evm/src/eof/errors.ts new file mode 100644 index 0000000000..e3182f1c97 --- /dev/null +++ b/packages/evm/src/eof/errors.ts @@ -0,0 +1,254 @@ +export enum EOFError { + // Stream Reader + OutOfBounds = 'Trying to read out of bounds', + VerifyUint = 'Uint does not match expected value ', + VerifyBytes = 'Bytes do not match expected value', + + // Section Markers + FORMAT = 'err: invalid format', + MAGIC = 'err: invalid magic', + VERSION = `err: invalid eof version`, + KIND_TYPE = `err: expected kind types`, + KIND_CODE = `err: expected kind code`, + KIND_DATA = `err: expected kind data`, + TERMINATOR = `err: expected terminator`, + + // Section Sizes + TypeSize = `missing type size`, + InvalidTypeSize = `err: type section size invalid`, + CodeSize = `missing code size`, + CodeSectionSize = `code section should be at least one byte`, + InvalidCodeSize = `code size does not match type size`, + DataSize = `missing data size`, + ContainerSize = 'missing container size', + ContainerSectionSize = 'container section should at least contain one section and at most 255 sections', + + // Type Section + TypeSections = `err: mismatch of code sections count and type signatures`, + Inputs = 'expected inputs', + Outputs = 'expected outputs', + MaxInputs = 'inputs exceeds 127, the maximum, got: ', + MaxOutputs = 'outputs exceeds 127, the maximum, got: ', + Code0Inputs = 'first code section should have 0 inputs', + Code0Outputs = 'first code section should have 0x80 (terminating section) outputs', + MaxStackHeight = `expected maxStackHeight`, + MaxStackHeightLimit = `stack height limit of 1024 exceeded: `, + + // Code/Data Section + MinCodeSections = `should have at least 1 code section`, + MaxCodeSections = `can have at most 1024 code sections`, + CodeSection = `expected a code section`, + DataSection = `Expected data section`, + + // Container section + ContainerSection = 'expected a container section', + ContainerSectionMin = 'container section should be at least 1 byte', + InvalidEOFCreateTarget = 'EOFCREATE targets an undefined container', + InvalidRETURNContractTarget = 'RETURNCONTRACT targets an undefined container', + ContainerDoubleType = 'Container is targeted by both EOFCREATE and RETURNCONTRACT', + UnreachableContainerSections = 'Unreachable containers (by both EOFCREATE and RETURNCONTRACT)', + ContainerTypeError = 'Container contains opcodes which this mode (deployment mode / init code / runtime mode) cannot have', + + // Dangling Bytes + DanglingBytes = 'got dangling bytes in body', + + // Code verification + InvalidOpcode = 'invalid opcode', + InvalidTerminator = 'invalid terminating opcode', + OpcodeIntermediatesOOB = 'invalid opcode: intermediates out-of-bounds', + + InvalidRJUMP = 'invalid rjump* target', + InvalidCallTarget = 'invalid callf/jumpf target', + InvalidCALLFReturning = 'invalid callf: calls to non-returning function', + InvalidStackHeight = 'invalid stack height', + InvalidJUMPF = 'invalid jumpf target (output count)', + InvalidReturningSection = 'invalid returning code section: section is not returning', + RJUMPVTableSize0 = 'invalid RJUMPV: table size 0', + UnreachableCodeSections = 'unreachable code sections', + UnreachableCode = 'unreachable code (by forward jumps)', + DataLoadNOutOfBounds = 'DATALOADN reading out of bounds', + MaxStackHeightViolation = 'Max stack height does not match the reported max stack height', + StackUnderflow = 'Stack underflow', + 
StackOverflow = 'Stack overflow', + UnstableStack = 'Unstable stack (can reach stack under/overflow by jumps)', + RetfNoReturn = 'Trying to return to undefined function', // This should never happen (this is a return stack underflow) + ReturnStackOverflow = 'Return stack overflow', + InvalidExtcallTarget = 'invalid extcall target: address > 20 bytes', + InvalidReturnContractDataSize = 'invalid RETURNCONTRACT: data size lower than expected', + + InvalidEofFormat = 'invalid EOF format', +} + +export enum SimpleErrors { + minContainerSize = 'err: container size less than minimum valid size', + invalidContainerSize = 'err: invalid container size', + typeSize = 'err: type section size invalid', + code0msh = 'err: computed max stack height for code section 0 does not match expect', + underflow = 'err: stack underflow', + code0IO = 'err: input and output of first code section must be 0', + + // Stream Reader + // OutOfBounds = 'err: relative offset out-of-bounds: ', + VerifyUint = 'Uint does not match expected value ', + VerifyBytes = 'Bytes do not match expected value', + + // Section Sizes + TypeSize = `missing type size`, + InvalidTypeSize = `err: type section invalid`, + CodeSize = `missing code size`, + CodeSectionSize = `code section should be at least one byte`, + InvalidCodeSize = `code size does not match type size`, + DataSize = `missing data size`, + + // Type Section + TypeSections = `need to have a type section for each code section`, + Inputs = 'expected inputs', + Outputs = 'expected outputs', + MaxInputs = 'inputs exceeds 127, the maximum, got: ', + MaxOutputs = 'outputs exceeds 127, the maximum, got: ', + Code0Inputs = 'first code section should have 0 inputs', + Code0Outputs = 'first code section should have 0 outputs', + MaxStackHeight = `expected maxStackHeight`, + MaxStackHeightLimit = `stack height limit of 1024 exceeded: `, + + // Code/Data Section + MinCodeSections = `should have at least 1 code section`, + MaxCodeSections = `can have at most 1024 code sections`, + CodeSection = `expected a code section`, + DataSection = `Expected data section`, + + // Dangling Bytes + DanglingBytes = 'got dangling bytes in body', +} + +export function validationErrorMsg(type: EOFError, ...args: any) { + switch (type) { + case EOFError.OutOfBounds: { + return EOFError.OutOfBounds + ` at pos: ${args[0]}: ${args[1]}` + } + case EOFError.VerifyBytes: { + return EOFError.VerifyBytes + ` at pos: ${args[0]}: ${args[1]}` + } + case EOFError.VerifyUint: { + return EOFError.VerifyUint + `at pos: ${args[0]}: ${args[1]}` + } + case EOFError.TypeSize: { + return EOFError.TypeSize + args[0] + } + case EOFError.InvalidTypeSize: { + return EOFError.InvalidTypeSize + args[0] + } + case EOFError.InvalidCodeSize: { + return EOFError.InvalidCodeSize + args[0] + } + case EOFError.Inputs: { + return `${EOFError.Inputs} - typeSection ${args[0]}` + } + case EOFError.Outputs: { + return `${EOFError.Outputs} - typeSection ${args[0]}` + } + case EOFError.Code0Inputs: { + return `first code section should have 0 inputs` + } + case EOFError.Code0Outputs: { + return `first code section should have 0 outputs` + } + case EOFError.MaxInputs: { + return EOFError.MaxInputs + `${args[1]} - code section ${args[0]}` + } + case EOFError.MaxOutputs: { + return EOFError.MaxOutputs + `${args[1]} - code section ${args[0]}` + } + case EOFError.CodeSection: { + return `expected code: codeSection ${args[0]}: ` + } + case EOFError.DataSection: { + return EOFError.DataSection + } + case EOFError.MaxStackHeight: { + return 
`${EOFError.MaxStackHeight} - typeSection ${args[0]}: ` + } + case EOFError.MaxStackHeightLimit: { + return `${EOFError.MaxStackHeightLimit}, got: ${args[1]} - typeSection ${args[0]}` + } + case EOFError.DanglingBytes: { + return EOFError.DanglingBytes + } + default: { + return type + } + } +} +export function validationError(type: EOFError, ...args: any): never { + switch (type) { + case EOFError.OutOfBounds: { + const pos = args[0] + if (pos === 0 || pos === 2 || pos === 3 || pos === 6) { + throw new Error(args[1]) + } + throw new Error(EOFError.OutOfBounds + ` `) + } + case EOFError.VerifyBytes: { + const pos = args[0] + if (pos === 0 || pos === 2 || pos === 3 || pos === 6) { + throw new Error(args[1]) + } + throw new Error(EOFError.VerifyBytes + ` at pos: ${args[0]}: ${args[1]}`) + } + case EOFError.VerifyUint: { + const pos = args[0] + if (pos === 0 || pos === 2 || pos === 3 || pos === 6 || pos === 18) { + throw new Error(args[1]) + } + throw new Error(EOFError.VerifyUint + `at pos: ${args[0]}: ${args[1]}`) + } + case EOFError.TypeSize: { + throw new Error(EOFError.TypeSize + args[0]) + } + case EOFError.TypeSections: { + throw new Error(`${EOFError.TypeSections} (types ${args[0]} code ${args[1]})`) + } + case EOFError.InvalidTypeSize: { + throw new Error(EOFError.InvalidTypeSize) + } + case EOFError.InvalidCodeSize: { + throw new Error(EOFError.InvalidCodeSize + args[0]) + } + case EOFError.Inputs: { + throw new Error(`${EOFError.Inputs} - typeSection ${args[0]}`) + } + case EOFError.Outputs: { + throw new Error(`${EOFError.Outputs} - typeSection ${args[0]}`) + } + case EOFError.Code0Inputs: { + throw new Error(`first code section should have 0 inputs`) + } + case EOFError.Code0Outputs: { + throw new Error(`first code section should have 0 outputs`) + } + case EOFError.MaxInputs: { + throw new Error(EOFError.MaxInputs + `${args[1]} - code section ${args[0]}`) + } + case EOFError.MaxOutputs: { + throw new Error(EOFError.MaxOutputs + `${args[1]} - code section ${args[0]}`) + } + case EOFError.CodeSection: { + throw new Error(`expected code: codeSection ${args[0]}: `) + } + case EOFError.DataSection: { + throw new Error(EOFError.DataSection) + } + case EOFError.MaxStackHeight: { + throw new Error(`${EOFError.MaxStackHeight} - typeSection ${args[0]}: `) + } + case EOFError.MaxStackHeightLimit: { + throw new Error(`${EOFError.MaxStackHeightLimit}, got: ${args[1]} - typeSection ${args[0]}`) + } + case EOFError.DanglingBytes: { + throw new Error(EOFError.DanglingBytes) + } + default: { + throw new Error(type) + } + } +} diff --git a/packages/evm/src/eof/setup.ts b/packages/evm/src/eof/setup.ts new file mode 100644 index 0000000000..6d977ecfb3 --- /dev/null +++ b/packages/evm/src/eof/setup.ts @@ -0,0 +1,27 @@ +import { EOFContainer, EOFContainerMode } from './container.js' + +import type { RunState } from '../interpreter.js' + +/** + * This method setups the EOF inside the EVM. 
It prepares the `RunState` to start running EVM in EOF mode + * @param runState Current run state + * @param eofMode EOF mode to run in (only changes in case of EOFCREATE) + */ +export function setupEOF(runState: RunState, eofMode: EOFContainerMode = EOFContainerMode.Default) { + runState.env.eof = { + container: new EOFContainer(runState.code, eofMode), + eofRunState: { + returnStack: [], // Return stack for RETF/CALLF/JUMPF + }, + } + + // In case that txCallData is set, then set the `callData` of the `env` to this calldata + // This ensures that CALLDATA can be read when deploying EOF contracts using transactions + if (runState.env.eof.container.body.txCallData !== undefined) { + runState.env.callData = runState.env.eof.container.body.txCallData + } + + // Set the program counter to the first code section + const pc = runState.env.eof.container.header.getCodePosition(0) + runState.programCounter = pc +} diff --git a/packages/evm/src/eof/stackDelta.ts b/packages/evm/src/eof/stackDelta.ts new file mode 100644 index 0000000000..70b37ecb93 --- /dev/null +++ b/packages/evm/src/eof/stackDelta.ts @@ -0,0 +1,168 @@ +// Generated using a script, which can be found in ./evm/scripts/stackDeltaGenerator.ts + +export const stackDelta: { + [key: number]: { + inputs: number // Number of inputs to this operation + outputs: number // Number of outputs after this operation + name: string // Name of the opcode + intermediates: number // Intermediate bytes (such as 2 intermediates after a PUSH2) + terminating?: boolean // Marks the opcode as terminating. This opcode will exit the current CALL frame. (Such as STOP/RETURN) + } +} = { + 0x00: { inputs: 0, outputs: 0, name: 'STOP', intermediates: 0, terminating: true }, + 0x01: { inputs: 2, outputs: 1, name: 'ADD', intermediates: 0 }, + 0x02: { inputs: 2, outputs: 1, name: 'MUL', intermediates: 0 }, + 0x03: { inputs: 2, outputs: 1, name: 'SUB', intermediates: 0 }, + 0x04: { inputs: 2, outputs: 1, name: 'DIV', intermediates: 0 }, + 0x05: { inputs: 2, outputs: 1, name: 'SDIV', intermediates: 0 }, + 0x06: { inputs: 2, outputs: 1, name: 'MOD', intermediates: 0 }, + 0x07: { inputs: 2, outputs: 1, name: 'SMOD', intermediates: 0 }, + 0x08: { inputs: 3, outputs: 1, name: 'ADDMOD', intermediates: 0 }, + 0x09: { inputs: 3, outputs: 1, name: 'MULMOD', intermediates: 0 }, + 0x0a: { inputs: 2, outputs: 1, name: 'EXP', intermediates: 0 }, + 0x0b: { inputs: 2, outputs: 1, name: 'SIGNEXTEND', intermediates: 0 }, + 0x10: { inputs: 2, outputs: 1, name: 'LT', intermediates: 0 }, + 0x11: { inputs: 2, outputs: 1, name: 'GT', intermediates: 0 }, + 0x12: { inputs: 2, outputs: 1, name: 'SLT', intermediates: 0 }, + 0x13: { inputs: 2, outputs: 1, name: 'SGT', intermediates: 0 }, + 0x14: { inputs: 2, outputs: 1, name: 'EQ', intermediates: 0 }, + 0x15: { inputs: 1, outputs: 1, name: 'ISZERO', intermediates: 0 }, + 0x16: { inputs: 2, outputs: 1, name: 'AND', intermediates: 0 }, + 0x17: { inputs: 2, outputs: 1, name: 'OR', intermediates: 0 }, + 0x18: { inputs: 2, outputs: 1, name: 'XOR', intermediates: 0 }, + 0x19: { inputs: 1, outputs: 1, name: 'NOT', intermediates: 0 }, + 0x1a: { inputs: 2, outputs: 1, name: 'BYTE', intermediates: 0 }, + 0x1b: { inputs: 2, outputs: 1, name: 'SHL', intermediates: 0 }, + 0x1c: { inputs: 2, outputs: 1, name: 'SHR', intermediates: 0 }, + 0x1d: { inputs: 2, outputs: 1, name: 'SAR', intermediates: 0 }, + 0x20: { inputs: 2, outputs: 1, name: 'SHA3', intermediates: 0 }, + 0x30: { inputs: 0, outputs: 1, name: 'ADDRESS', intermediates: 0 }, + 0x31: { inputs: 1, 
outputs: 1, name: 'BALANCE', intermediates: 0 }, + 0x32: { inputs: 0, outputs: 1, name: 'ORIGIN', intermediates: 0 }, + 0x33: { inputs: 0, outputs: 1, name: 'CALLER', intermediates: 0 }, + 0x34: { inputs: 0, outputs: 1, name: 'CALLVALUE', intermediates: 0 }, + 0x35: { inputs: 1, outputs: 1, name: 'CALLDATALOAD', intermediates: 0 }, + 0x36: { inputs: 0, outputs: 1, name: 'CALLDATASIZE', intermediates: 0 }, + 0x37: { inputs: 3, outputs: 0, name: 'CALLDATACOPY', intermediates: 0 }, + 0x3a: { inputs: 0, outputs: 1, name: 'GASPRICE', intermediates: 0 }, + 0x3d: { inputs: 0, outputs: 1, name: 'RETURNDATASIZE', intermediates: 0 }, + 0x3e: { inputs: 3, outputs: 0, name: 'RETURNDATACOPY', intermediates: 0 }, + 0x40: { inputs: 1, outputs: 1, name: 'BLOCKHASH', intermediates: 0 }, + 0x41: { inputs: 0, outputs: 1, name: 'COINBASE', intermediates: 0 }, + 0x42: { inputs: 0, outputs: 1, name: 'TIMESTAMP', intermediates: 0 }, + 0x43: { inputs: 0, outputs: 1, name: 'NUMBER', intermediates: 0 }, + 0x44: { inputs: 0, outputs: 1, name: 'PREVRANDAO', intermediates: 0 }, + 0x45: { inputs: 0, outputs: 1, name: 'GASLIMIT', intermediates: 0 }, + 0x46: { inputs: 0, outputs: 1, name: 'CHAINID', intermediates: 0 }, + 0x47: { inputs: 0, outputs: 1, name: 'SELFBALANCE', intermediates: 0 }, + 0x48: { inputs: 0, outputs: 1, name: 'BASEFEE', intermediates: 0 }, + 0x49: { inputs: 1, outputs: 1, name: 'BLOBHASH', intermediates: 0 }, + 0x4a: { inputs: 0, outputs: 1, name: 'BLOBBASEFEE', intermediates: 0 }, + 0x50: { inputs: 1, outputs: 0, name: 'POP', intermediates: 0 }, + 0x51: { inputs: 1, outputs: 1, name: 'MLOAD', intermediates: 0 }, + 0x52: { inputs: 2, outputs: 0, name: 'MSTORE', intermediates: 0 }, + 0x53: { inputs: 2, outputs: 0, name: 'MSTORE8', intermediates: 0 }, + 0x54: { inputs: 1, outputs: 1, name: 'SLOAD', intermediates: 0 }, + 0x55: { inputs: 2, outputs: 0, name: 'SSTORE', intermediates: 0 }, + 0x59: { inputs: 0, outputs: 1, name: 'MSIZE', intermediates: 0 }, + 0x5b: { inputs: 0, outputs: 0, name: 'NOOP', intermediates: 0 }, + 0x5c: { inputs: 1, outputs: 1, name: 'TLOAD', intermediates: 0 }, + 0x5d: { inputs: 2, outputs: 0, name: 'TSTORE', intermediates: 0 }, + 0x5e: { inputs: 3, outputs: 0, name: 'MCOPY', intermediates: 0 }, + 0x5f: { inputs: 0, outputs: 1, name: 'PUSH0', intermediates: 0 }, + 0x60: { inputs: 0, outputs: 1, name: 'PUSH1', intermediates: 1 }, + 0x61: { inputs: 0, outputs: 1, name: 'PUSH2', intermediates: 2 }, + 0x62: { inputs: 0, outputs: 1, name: 'PUSH3', intermediates: 3 }, + 0x63: { inputs: 0, outputs: 1, name: 'PUSH4', intermediates: 4 }, + 0x64: { inputs: 0, outputs: 1, name: 'PUSH5', intermediates: 5 }, + 0x65: { inputs: 0, outputs: 1, name: 'PUSH6', intermediates: 6 }, + 0x66: { inputs: 0, outputs: 1, name: 'PUSH7', intermediates: 7 }, + 0x67: { inputs: 0, outputs: 1, name: 'PUSH8', intermediates: 8 }, + 0x68: { inputs: 0, outputs: 1, name: 'PUSH9', intermediates: 9 }, + 0x69: { inputs: 0, outputs: 1, name: 'PUSH10', intermediates: 10 }, + 0x6a: { inputs: 0, outputs: 1, name: 'PUSH11', intermediates: 11 }, + 0x6b: { inputs: 0, outputs: 1, name: 'PUSH12', intermediates: 12 }, + 0x6c: { inputs: 0, outputs: 1, name: 'PUSH13', intermediates: 13 }, + 0x6d: { inputs: 0, outputs: 1, name: 'PUSH14', intermediates: 14 }, + 0x6e: { inputs: 0, outputs: 1, name: 'PUSH15', intermediates: 15 }, + 0x6f: { inputs: 0, outputs: 1, name: 'PUSH16', intermediates: 16 }, + 0x70: { inputs: 0, outputs: 1, name: 'PUSH17', intermediates: 17 }, + 0x71: { inputs: 0, outputs: 1, name: 'PUSH18', intermediates: 18
}, + 0x72: { inputs: 0, outputs: 1, name: 'PUSH19', intermediates: 19 }, + 0x73: { inputs: 0, outputs: 1, name: 'PUSH20', intermediates: 20 }, + 0x74: { inputs: 0, outputs: 1, name: 'PUSH21', intermediates: 21 }, + 0x75: { inputs: 0, outputs: 1, name: 'PUSH22', intermediates: 22 }, + 0x76: { inputs: 0, outputs: 1, name: 'PUSH23', intermediates: 23 }, + 0x77: { inputs: 0, outputs: 1, name: 'PUSH24', intermediates: 24 }, + 0x78: { inputs: 0, outputs: 1, name: 'PUSH25', intermediates: 25 }, + 0x79: { inputs: 0, outputs: 1, name: 'PUSH26', intermediates: 26 }, + 0x7a: { inputs: 0, outputs: 1, name: 'PUSH27', intermediates: 27 }, + 0x7b: { inputs: 0, outputs: 1, name: 'PUSH28', intermediates: 28 }, + 0x7c: { inputs: 0, outputs: 1, name: 'PUSH29', intermediates: 29 }, + 0x7d: { inputs: 0, outputs: 1, name: 'PUSH30', intermediates: 30 }, + 0x7e: { inputs: 0, outputs: 1, name: 'PUSH31', intermediates: 31 }, + 0x7f: { inputs: 0, outputs: 1, name: 'PUSH32', intermediates: 32 }, + 0x80: { inputs: 1, outputs: 2, name: 'DUP1', intermediates: 0 }, + 0x81: { inputs: 2, outputs: 3, name: 'DUP2', intermediates: 0 }, + 0x82: { inputs: 3, outputs: 4, name: 'DUP3', intermediates: 0 }, + 0x83: { inputs: 4, outputs: 5, name: 'DUP4', intermediates: 0 }, + 0x84: { inputs: 5, outputs: 6, name: 'DUP5', intermediates: 0 }, + 0x85: { inputs: 6, outputs: 7, name: 'DUP6', intermediates: 0 }, + 0x86: { inputs: 7, outputs: 8, name: 'DUP7', intermediates: 0 }, + 0x87: { inputs: 8, outputs: 9, name: 'DUP8', intermediates: 0 }, + 0x88: { inputs: 9, outputs: 10, name: 'DUP9', intermediates: 0 }, + 0x89: { inputs: 10, outputs: 11, name: 'DUP10', intermediates: 0 }, + 0x8a: { inputs: 11, outputs: 12, name: 'DUP11', intermediates: 0 }, + 0x8b: { inputs: 12, outputs: 13, name: 'DUP12', intermediates: 0 }, + 0x8c: { inputs: 13, outputs: 14, name: 'DUP13', intermediates: 0 }, + 0x8d: { inputs: 14, outputs: 15, name: 'DUP14', intermediates: 0 }, + 0x8e: { inputs: 15, outputs: 16, name: 'DUP15', intermediates: 0 }, + 0x8f: { inputs: 16, outputs: 17, name: 'DUP16', intermediates: 0 }, + 0x90: { inputs: 2, outputs: 2, name: 'SWAP1', intermediates: 0 }, + 0x91: { inputs: 3, outputs: 3, name: 'SWAP2', intermediates: 0 }, + 0x92: { inputs: 4, outputs: 4, name: 'SWAP3', intermediates: 0 }, + 0x93: { inputs: 5, outputs: 5, name: 'SWAP4', intermediates: 0 }, + 0x94: { inputs: 6, outputs: 6, name: 'SWAP5', intermediates: 0 }, + 0x95: { inputs: 7, outputs: 7, name: 'SWAP6', intermediates: 0 }, + 0x96: { inputs: 8, outputs: 8, name: 'SWAP7', intermediates: 0 }, + 0x97: { inputs: 9, outputs: 9, name: 'SWAP8', intermediates: 0 }, + 0x98: { inputs: 10, outputs: 10, name: 'SWAP9', intermediates: 0 }, + 0x99: { inputs: 11, outputs: 11, name: 'SWAP10', intermediates: 0 }, + 0x9a: { inputs: 12, outputs: 12, name: 'SWAP11', intermediates: 0 }, + 0x9b: { inputs: 13, outputs: 13, name: 'SWAP12', intermediates: 0 }, + 0x9c: { inputs: 14, outputs: 14, name: 'SWAP13', intermediates: 0 }, + 0x9d: { inputs: 15, outputs: 15, name: 'SWAP14', intermediates: 0 }, + 0x9e: { inputs: 16, outputs: 16, name: 'SWAP15', intermediates: 0 }, + 0x9f: { inputs: 17, outputs: 17, name: 'SWAP16', intermediates: 0 }, + 0xa0: { inputs: 2, outputs: 0, name: 'LOG0', intermediates: 0 }, + 0xa1: { inputs: 3, outputs: 0, name: 'LOG1', intermediates: 0 }, + 0xa2: { inputs: 4, outputs: 0, name: 'LOG2', intermediates: 0 }, + 0xa3: { inputs: 5, outputs: 0, name: 'LOG3', intermediates: 0 }, + 0xa4: { inputs: 6, outputs: 0, name: 'LOG4', intermediates: 0 }, + 0xd0: { inputs: 1, outputs: 
1, name: 'DATALOAD', intermediates: 0 }, + 0xd1: { inputs: 0, outputs: 1, name: 'DATALOADN', intermediates: 2 }, + 0xd2: { inputs: 0, outputs: 1, name: 'DATASIZE', intermediates: 0 }, + 0xd3: { inputs: 3, outputs: 0, name: 'DATACOPY', intermediates: 0 }, + 0xe0: { inputs: 0, outputs: 0, name: 'RJUMP', intermediates: 2 }, + 0xe1: { inputs: 1, outputs: 0, name: 'RJUMPI', intermediates: 2 }, + // NOTE: for RJUMPV the intermediate byte is set to 0, this has to do with the validation algorithm specifics + // This has to do with the dynamic intermediate size of RJUMPV, which depends upon the table size byte right after RJUMPV + 0xe2: { inputs: 1, outputs: 0, name: 'RJUMPV', intermediates: 0 }, + // CALLF special case for stack validation algorithm: the inputs and outputs MUST stay 0 + // (this is currently the case also in EVM) + 0xe3: { inputs: 0, outputs: 0, name: 'CALLF', intermediates: 2 }, + 0xe4: { inputs: 0, outputs: 0, name: 'RETF', intermediates: 0, terminating: true }, + 0xe5: { inputs: 0, outputs: 0, name: 'JUMPF', intermediates: 2, terminating: true }, + 0xe6: { inputs: 0, outputs: 1, name: 'DUPN', intermediates: 1 }, + 0xe7: { inputs: 0, outputs: 0, name: 'SWAPN', intermediates: 1 }, + 0xe8: { inputs: 0, outputs: 0, name: 'EXCHANGE', intermediates: 1 }, + 0xec: { inputs: 4, outputs: 1, name: 'EOFCREATE', intermediates: 1 }, + 0xee: { inputs: 2, outputs: 0, name: 'RETURNCONTRACT', intermediates: 1, terminating: true }, + 0xf3: { inputs: 2, outputs: 0, name: 'RETURN', intermediates: 0, terminating: true }, + 0xf7: { inputs: 1, outputs: 1, name: 'RETURNDATALOAD', intermediates: 0 }, + 0xf8: { inputs: 4, outputs: 1, name: 'EXTCALL', intermediates: 0 }, + 0xf9: { inputs: 3, outputs: 1, name: 'EXTDELEGATECALL', intermediates: 0 }, + 0xfb: { inputs: 3, outputs: 1, name: 'EXTSTATICCALL', intermediates: 0 }, + 0xfd: { inputs: 2, outputs: 0, name: 'REVERT', intermediates: 0, terminating: true }, + 0xfe: { inputs: 0, outputs: 0, name: 'INVALID', intermediates: 0, terminating: true }, +} diff --git a/packages/evm/src/eof/util.ts b/packages/evm/src/eof/util.ts new file mode 100644 index 0000000000..b28a5943e0 --- /dev/null +++ b/packages/evm/src/eof/util.ts @@ -0,0 +1,16 @@ +import { keccak256 } from 'ethereum-cryptography/keccak.js' +import { equalsBytes } from 'ethereum-cryptography/utils' + +import { FORMAT, MAGIC } from './constants.js' + +export const EOFBYTES = new Uint8Array([FORMAT, MAGIC]) +export const EOFHASH = keccak256(EOFBYTES) + +/** + * Returns `true` if `code` is an EOF contract, returns `false` otherwise + * @param code Code to test if it is EOF + */ +export function isEOF(code: Uint8Array): boolean { + const check = code.subarray(0, EOFBYTES.length) + return equalsBytes(EOFBYTES, check) +} diff --git a/packages/evm/src/eof/verify.ts b/packages/evm/src/eof/verify.ts new file mode 100644 index 0000000000..988f1adc04 --- /dev/null +++ b/packages/evm/src/eof/verify.ts @@ -0,0 +1,519 @@ +import { EOFError, validationError } from './errors.js' +import { stackDelta } from './stackDelta.js' + +import type { EVM } from '../evm.js' +import type { EOFContainer } from './container.js' + +/** + * Note for reviewers regarding these flags: these only reside inside `verify.ts` (this file) + * and `container.ts`. For `container.ts`, the only behavior which ever changes is in the `DeploymentCode` mode + * This `DeploymentCode` mode means that the subcontainer is flagged in such way that this container is launched + * in a "deployment" mode. 
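// Illustrative sketch (not part of this patch): isEOF() above only inspects the two-byte
// 0xEF00 prefix (FORMAT, MAGIC per EIP-3540); it does not validate the rest of the
// container, which is the job of verify.ts that follows. The import path assumes a sibling module.
import { isEOF } from './util.js'

const eofPrefixed = new Uint8Array([0xef, 0x00, 0x01]) // prefix only; the container may still be invalid
const legacy = new Uint8Array([0x60, 0x00, 0x60, 0x00, 0xf3]) // PUSH1 00 PUSH1 00 RETURN

isEOF(eofPrefixed) // true
isEOF(legacy) // false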
This means, that the data section of the body is actually allowed to contain + * less data than is written in the header. However, once the target container (by the container in deployment) + * mode is returned by RETURNCONTRACT it should have at least the header amount of data. + * See also "data section lifecycle" + * Note: the subcontainers of a container can be marked "InitCode" or "DeploymentCode". + * InitCode cannot contain the instructions RETURN / STOP + * InitCode is the only container type which can contain RETURNCONTRACT + * A container can also be marked DeploymentCode, this is a subcontainer targeted by RETURNCONTRACT + * A container cannot be marked both InitCode and DeploymentCode + * This flag is thus to distinguish between subcontainers, and also thus also allows for data section sizes + * lower than the size in the header in case of `InitCode` + */ +export enum ContainerSectionType { + InitCode, // Targeted by EOFCreate + DeploymentCode, // Targeted by RETURNCONTRACT + RuntimeCode, // "Default" runtime code +} + +/** + * This method validates an EOF container deeply. It will validate the opcodes, validate the stack, and performs + * various checks such as checking for forbidden opcodes in certain modes, jumps to invalid places, etc. + * For more information, see "Code validation" of https://github.com/ipsilon/eof/blob/main/spec/eof.md + * This is a compilation of all the extra validation rules introduced by the various EIPs + * In particular, the stack validation EIP https://eips.ethereum.org/EIPS/eip-5450 is a big part here + * @param container EOFContainer to verify + * @param evm The EVM to run in (pulls opcodes from here) + * @param mode The validation mode to run in + * @returns Returns a Map which marks what ContainerSectionType each container is + * NOTE: this should likely not be a map, since a container section can only be of a single type, not multiple + */ +export function verifyCode( + container: EOFContainer, + evm: EVM, + mode: ContainerSectionType = ContainerSectionType.RuntimeCode, +) { + return validateOpcodes(container, evm, mode) +} + +// Helper methods to read Int16s / Uint16s +function readInt16(code: Uint8Array, start: number) { + return new DataView(code.buffer).getInt16(start) +} + +function readUint16(code: Uint8Array, start: number) { + return new DataView(code.buffer).getUint16(start) +} + +function validateOpcodes( + container: EOFContainer, + evm: EVM, + mode: ContainerSectionType = ContainerSectionType.RuntimeCode, +) { + // Track the intermediate bytes + const intermediateBytes = new Set() + // Track the jump locations (for forward jumps it is unknown at the first pass if the byte is intermediate) + const jumpLocations = new Set() + + // Track the type of the container targets + // Should at the end of the analysis have all the containers + const containerTypeMap = new Map() + + function addJump(location: number) { + if (intermediateBytes.has(location)) { + // When trying to JUMP into an intermediate byte: this is invalid + validationError(EOFError.InvalidRJUMP) + } + jumpLocations.add(location) + } + + function addIntermediate(location: number) { + if (jumpLocations.has(location)) { + // When trying to add an intermediate to a location already JUMPed to: this is invalid + validationError(EOFError.InvalidRJUMP) + } + intermediateBytes.add(location) + } + + // TODO (?) -> stackDelta currently only has active EOF opcodes, can use it directly (?) 
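// Illustrative sketch (not part of this patch) of the table lookup the TODO above refers
// to: stackDelta gives the net stack effect of an opcode and how many immediate bytes to
// skip, which is all a simple linear pass needs. The helper name is hypothetical.
import { stackDelta } from './stackDelta.js'

function netStackEffect(code: Uint8Array): number {
  let ptr = 0
  let height = 0
  while (ptr < code.length) {
    const entry = stackDelta[code[ptr]]
    if (entry === undefined) throw new Error('opcode not in stackDelta')
    height += entry.outputs - entry.inputs
    ptr += 1 + entry.intermediates // skip the opcode plus its immediate bytes
    if (entry.terminating === true) break
  }
  return height
}

netStackEffect(new Uint8Array([0x60, 0x01, 0x60, 0x02, 0x01, 0x00])) // PUSH1 1, PUSH1 2, ADD, STOP -> 1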
+ // (so no need to generate the valid opcodeNumbers) + + // Validate each code section + const opcodes = evm.getActiveOpcodes() + + const opcodeNumbers = new Set() + + for (const [key] of opcodes) { + opcodeNumbers.add(key) + } + + // Add INVALID as valid + opcodeNumbers.add(0xfe) + + // Remove CODESIZE, CODECOPY, EXTCODESIZE, EXTCODECOPY, EXTCODEHASH, GAS + opcodeNumbers.delete(0x38) + opcodeNumbers.delete(0x39) + opcodeNumbers.delete(0x5a) + opcodeNumbers.delete(0x3b) + opcodeNumbers.delete(0x3c) + opcodeNumbers.delete(0x3f) + + // Remove CALLCODE and SELFDESTRUCT + opcodeNumbers.delete(0xf2) + opcodeNumbers.delete(0xff) + + // TODO omnibus https://github.com/ipsilon/eof/blob/main/spec/eof.md states + // JUMP / JUMPI / PC / CREATE / CREATE2 also banned + // This is not in the EIPs yet + // Add these opcodes here + + opcodeNumbers.delete(0x56) // JUMP + opcodeNumbers.delete(0x57) // JUMPI + + opcodeNumbers.delete(0x58) // PC + + opcodeNumbers.delete(0xf0) // CREATE + opcodeNumbers.delete(0xf5) // CREATE2 + + // Note: this name might be misleading since this is the list of opcodes which are OK as final opcodes in a code section + // TODO if using stackDelta for EOF it is possible to add a "termination" boolean for the opcode to mark it as terminating + // (so no need to generate this set here) + const terminatingOpcodes = new Set() + + terminatingOpcodes.add(0x00) // STOP + terminatingOpcodes.add(0xf3) // RETURN + terminatingOpcodes.add(0xfd) // REVERT + terminatingOpcodes.add(0xfe) // INVALID + + terminatingOpcodes.add(0xee) // RETURNCONTRACT + + terminatingOpcodes.add(0xe4) // RETF + terminatingOpcodes.add(0xe5) // JUMPF + + terminatingOpcodes.add(0xe0) // RJUMPing back into code section is OK + + for (const opcode of terminatingOpcodes) { + if (!opcodeNumbers.has(opcode)) { + terminatingOpcodes.delete(opcode) + } + } + + const validJumps = new Set() + + // Add all reachable code sections + const reachableSections: { [key: number]: Set } = {} + + let codeSection = -1 + for (const code of container.body.codeSections) { + codeSection++ + + reachableSections[codeSection] = new Set() + + const returningFunction = container.body.typeSections[codeSection].outputs === 0x80 + + // Tracking set of reachable opcodes + const reachableOpcodes = new Set() + reachableOpcodes.add(0) + + // Validate that each opcode is defined + let ptr = 0 + let lastOpcode: number = 0 // Note: code sections cannot be empty, so this number will always be set + + // Implement the EIP 5450 stack validation algorithm + const inputs = container.body.typeSections[codeSection].inputs + let maxStackHeight = inputs + // These arrays track the min/max stack height **before** executing the instruction + const stackHeightMin: number[] = [inputs] + const stackHeightMax: number[] = [inputs] + + // This loop will loop over the entire code section and will validate various rules + // For (most) validation rules, see https://github.com/ipsilon/eof/blob/main/spec/eof.md + // For all validation rules per opcode, find the corresponding EIP, the rules are there + while (ptr < code.length) { + // This set tracks the successor opcodes of this opcode (for stack purposes) + const successorSet = new Set() + + // ReachableOpcodes: this can likely be deleted after implementing the 5450 algorithm + if (!reachableOpcodes.has(ptr)) { + validationError(EOFError.UnreachableCode) + } + + if (stackHeightMin[ptr] === undefined || stackHeightMax[ptr] === undefined) { + // This error either means that the code is unreachable, + // or it is possible that 
it is only reachable via a backwards jump + validationError(EOFError.UnreachableCode) + } + + validJumps.add(ptr) + const opcode = code[ptr] + + const minStackCurrent = stackHeightMin[ptr] + const maxStackCurrent = stackHeightMax[ptr] + + const opcodeInputs = stackDelta[opcode].inputs + const opcodeOutputs = stackDelta[opcode].outputs + + if (minStackCurrent - opcodeInputs < 0) { + validationError(EOFError.StackUnderflow) + } + + const delta = opcodeOutputs - opcodeInputs + + let minStackNext = minStackCurrent + delta + let maxStackNext = maxStackCurrent + delta + + if (maxStackNext > 1023) { + // TODO verify if 1023 or 1024 is the right constant + validationError(EOFError.StackOverflow) + } + + if (returningFunction && opcode === 0xe4) { + validationError(EOFError.InvalidReturningSection) + } + + lastOpcode = opcode + if (!opcodeNumbers.has(opcode)) { + validationError(EOFError.InvalidOpcode) + } + + if (opcode === 0xe0 || opcode === 0xe1) { + // RJUMP / RJUMPI + const target = readInt16(code, ptr + 1) + ptr + 3 + if (target < 0 || target >= code.length) { + validationError(EOFError.InvalidRJUMP) + } + + successorSet.add(target) + + addJump(target) + reachableOpcodes.add(target) + + if (opcode === 0xe0) { + // For RJUMP check that the instruction after RJUMP is reachable + // If this is not the case, then it is not yet targeted by a forward jump + // And hence violates the spec + if (!reachableOpcodes.has(ptr + 3) && ptr + 3 < code.length) { + // Note: the final condition above ensures that the bytes after ptr are there + // This is an edge case, if the container ends with RJUMP (which is valid) + validationError(EOFError.UnreachableCode) + } + } + } else if (opcode === 0xe2) { + // RJUMPV + const tableSize = code[ptr + 1] + 1 + + if (tableSize === undefined) { + validationError(EOFError.OpcodeIntermediatesOOB) + } else if (tableSize === 0) { + validationError(EOFError.RJUMPVTableSize0) + } + + if (ptr + tableSize * 2 + 2 >= code.length) { + // Fall-through case + validationError(EOFError.OpcodeIntermediatesOOB) + } + + const newPc = ptr + 2 + tableSize * 2 + + for (let i = 0; i < tableSize; i++) { + const newPtr = ptr + 2 + i * 2 + // Add the table bytes to intermediates + addIntermediate(newPtr) + addIntermediate(newPtr + 1) + const target = readInt16(code, newPtr) + newPc + if (target < 0 || target >= code.length) { + validationError(EOFError.OpcodeIntermediatesOOB) + } + + successorSet.add(target) + + addJump(target) + reachableOpcodes.add(target) + } + + // Special case for RJUMPV: move ptr over the table (the immediate starting byte will be added later) + // In this special case, add the immediate starting byte + addIntermediate(ptr + 1) + ptr += 2 * tableSize + 1 + } else if (opcode === 0xe3 || opcode === 0xe5) { + // CALLF / JUMPF + const target = readUint16(code, ptr + 1) + reachableSections[codeSection].add(target) + if (target >= container.header.codeSizes.length) { + validationError(EOFError.InvalidCallTarget) + } + if (opcode === 0xe3) { + // CALLF + const targetOutputs = container.body.typeSections[target].outputs + const targetInputs = container.body.typeSections[target].inputs + if (targetOutputs === 0x80) { + // CALLF points to non-returning function which is not allowed + validationError(EOFError.InvalidCALLFReturning) + } + + if (minStackCurrent < targetInputs) { + validationError(EOFError.StackUnderflow) + } + + if ( + maxStackCurrent + container.body.typeSections[target].maxStackHeight - targetInputs > + 1024 + ) { + validationError(EOFError.StackOverflow) + } + + 
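// Illustrative sketch (not part of this patch) of the relative-jump arithmetic used for
// RJUMP/RJUMPI above: the signed big-endian 16-bit immediate is relative to the byte right
// after the two immediate bytes, hence `readInt16(code, ptr + 1) + ptr + 3`. The bytes
// below only illustrate the arithmetic, not a fully valid code section.
const rjumpCode = new Uint8Array([0xe0, 0x00, 0x02, 0x00, 0x00, 0x5b])
const rjumpPtr = 0
const rjumpOffset = new DataView(rjumpCode.buffer).getInt16(rjumpPtr + 1) // 2 (big-endian is the DataView default)
const rjumpTarget = rjumpOffset + rjumpPtr + 3 // 5, i.e. the trailing 0x5b
// A negative immediate (e.g. 0xff 0xfd = -3) encodes a backwards jump, which the validator
// only accepts when the stack bounds at the target already match (see the successor checks below).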
minStackNext += targetOutputs - targetInputs + maxStackNext += targetOutputs - targetInputs + } else { + // JUMPF + const currentOutputs = container.body.typeSections[codeSection].outputs + const targetOutputs = container.body.typeSections[target].outputs + const targetInputs = container.body.typeSections[target].inputs + const targetNonReturning = targetOutputs === 0x80 + + if (targetOutputs > currentOutputs && !targetNonReturning) { + // Spec rule: + // JUMPF operand must point to a code section with equal or fewer number of outputs as + // the section in which it resides, or to a section with 0x80 as outputs (non-returning) + validationError(EOFError.InvalidJUMPF) + } + + if (returningFunction && targetOutputs <= 0x7f) { + // Current function is returning, but target is not, cannot jump into this + validationError(EOFError.InvalidReturningSection) + } + + if (targetNonReturning) { + // Target is returning + if (minStackCurrent < targetInputs) { + validationError(EOFError.StackUnderflow) + } + } else { + // Target is returning + const expectedStack = currentOutputs + targetInputs - targetOutputs + if (!(minStackCurrent === maxStackCurrent && maxStackCurrent === expectedStack)) { + validationError(EOFError.InvalidStackHeight) + } + } + if ( + maxStackCurrent + container.body.typeSections[target].maxStackHeight - targetInputs > + 1024 + ) { + //console.log(maxStackCurrent, targetOutputs, targetInputs, targetNonReturning) + validationError(EOFError.StackOverflow) + } + } + } else if (opcode === 0xe4) { + // RETF + // Stack height must match the outputs of current code section + const outputs = container.body.typeSections[codeSection].outputs + if (!(minStackCurrent === maxStackCurrent && maxStackCurrent === outputs)) { + validationError(EOFError.InvalidStackHeight) + } + } else if (opcode === 0xe6) { + // DUPN + const toDup = code[ptr + 1] + if (toDup + 1 > minStackCurrent) { + validationError(EOFError.StackUnderflow) + } + } else if (opcode === 0xe7) { + // SWAPN + const toSwap = code[ptr + 1] + // TODO: EVMONEs test wants this to be `toSwap + 2`, but that seems to be incorrect + // Will keep `toSwap + 1` for now + if (toSwap + 1 > minStackCurrent) { + validationError(EOFError.StackUnderflow) + } + } else if (opcode === 0xe8) { + // EXCHANGE + const exchangeRaw = code[ptr + 1] + const n = (exchangeRaw >> 4) + 1 + const m = (exchangeRaw & 0x0f) + 1 + if (n + m + 1 > minStackCurrent) { + validationError(EOFError.StackUnderflow) + } + } else if (opcode === 0xec) { + // EOFCREATE + const target = code[ptr + 1] + if (target >= container.header.containerSizes.length) { + validationError(EOFError.InvalidEOFCreateTarget) + } + if (containerTypeMap.has(target)) { + if (containerTypeMap.get(target) !== ContainerSectionType.InitCode) { + validationError(EOFError.ContainerDoubleType) + } + } + containerTypeMap.set(target, ContainerSectionType.InitCode) + } else if (opcode === 0xee) { + // RETURNCONTRACT + + if (mode !== ContainerSectionType.InitCode) { + validationError(EOFError.ContainerTypeError) + } + + const target = code[ptr + 1] + if (target >= container.header.containerSizes.length) { + validationError(EOFError.InvalidRETURNContractTarget) + } + if (containerTypeMap.has(target)) { + if (containerTypeMap.get(target) !== ContainerSectionType.DeploymentCode) { + validationError(EOFError.ContainerDoubleType) + } + } + containerTypeMap.set(target, ContainerSectionType.DeploymentCode) + } else if (opcode === 0xd1) { + // DATALOADN + const dataTarget = readUint16(code, ptr + 1) + const endOfSlice = 
dataTarget + 32 + if (container.header.dataSize < endOfSlice) { + validationError(EOFError.DataLoadNOutOfBounds) + } + } else if (opcode === 0x00 || opcode === 0xf3) { + // STOP / RETURN + + if (mode === ContainerSectionType.InitCode) { + validationError(EOFError.ContainerTypeError) + } + } + + // Move ptr forward over any intermediates (if any) + // Note: for EOF this stackDelta is guaranteed to exist + const intermediates = stackDelta[opcode].intermediates + if (intermediates > 0) { + for (let i = 1; i <= intermediates; i++) { + addIntermediate(ptr + i) + } + ptr += intermediates // If the opcode has any intermediates, jump over it + } + if (ptr >= code.length) { + validationError(EOFError.OpcodeIntermediatesOOB) + } + ptr++ // Move to next opcode + if (stackDelta[opcode].terminating === undefined) { + // If the opcode is not terminating we can add the next opcode to the reachable opcodes + // It can be reached by sequential instruction flow + reachableOpcodes.add(ptr) + + // Add next opcode to successorSet + // NOTE: these are all opcodes except RJUMP + if (opcode !== 0xe0) { + successorSet.add(ptr) + } + } + + // TODO here validate stack / reachability and stack overflow check + + for (const successor of successorSet) { + if (successor < ptr) { + // Reached via backwards jump + if ( + stackHeightMin[successor] !== minStackNext || + stackHeightMax[successor] !== maxStackNext + ) { + validationError(EOFError.UnstableStack) + } + } + + if (stackHeightMax[successor] === undefined) { + // Target is seen for first time + stackHeightMin[successor] = minStackNext + stackHeightMax[successor] = maxStackNext + } else { + stackHeightMin[successor] = Math.min(stackHeightMin[successor], minStackNext) + stackHeightMax[successor] = Math.max(stackHeightMax[successor], maxStackNext) + } + } + + maxStackHeight = Math.max(maxStackNext, maxStackHeight) + } + + // Validate that the final opcode terminates + if (!terminatingOpcodes.has(lastOpcode)) { + validationError(EOFError.InvalidTerminator) + } + + if (container.body.typeSections[codeSection].maxStackHeight !== maxStackHeight) { + validationError(EOFError.MaxStackHeightViolation) + } + if (maxStackHeight > 1023) { + // TODO verify if 1023 or 1024 is the right constant + validationError(EOFError.MaxStackHeightLimit) + } + } + + // Verify that each code section can be reached from code section 0 + const sectionAccumulator = new Set() + sectionAccumulator.add(0) // 0 is always reachable + const toCheck = [0] + + while (toCheck.length > 0) { + const checkArray = reachableSections[toCheck.pop()!] 
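// Illustrative worked example (not part of this patch) of the EIP-5450 bounds recorded
// above, for a code section with 0 inputs:
//
//   pos 0: PUSH1 x    before: min=0 max=0   after: min=1 max=1
//   pos 2: RJUMPI +1  before: min=1 max=1   after: min=0 max=0, successors 5 (fallthrough) and 6
//   pos 5: PUSH0      before: min=0 max=0   after: min=1 max=1
//   pos 6: STOP       before: min=0 max=1   (merged from the jump at pos 2 and the fallthrough of pos 5)
//
// The section's maxStackHeight is the largest post-instruction max seen (1 here) and must
// equal the value declared in its type section, which is exactly what the checks above enforce.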
+ for (const checkSection of checkArray) { + if (!sectionAccumulator.has(checkSection)) { + // Only check the reachable section if + sectionAccumulator.add(checkSection) + toCheck.push(checkSection) + } + } + } + + if (sectionAccumulator.size !== container.header.codeSizes.length) { + validationError(EOFError.UnreachableCodeSections) + } + + if (containerTypeMap.size !== container.header.containerSizes.length) { + validationError(EOFError.UnreachableContainerSections) + } + + return containerTypeMap +} diff --git a/packages/evm/src/evm.ts b/packages/evm/src/evm.ts index 9cb4b6611c..15b0e8fc85 100644 --- a/packages/evm/src/evm.ts +++ b/packages/evm/src/evm.ts @@ -1,5 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { SimpleStateManager } from '@ethereumjs/statemanager' +import { Hardfork } from '@ethereumjs/common' import { Account, Address, @@ -11,6 +10,7 @@ import { MAX_INTEGER, bigIntToBytes, bytesToUnprefixedHex, + createZeroAddress, equalsBytes, generateAddress, generateAddress2, @@ -18,18 +18,18 @@ import { zeros, } from '@ethereumjs/util' import debugDefault from 'debug' -import { initRustBN } from 'rustbn-wasm' -import { EOF, getEOFCode } from './eof.js' +import { FORMAT } from './eof/constants.js' +import { isEOF } from './eof/util.js' import { ERROR, EvmError } from './exceptions.js' import { Interpreter } from './interpreter.js' import { Journal } from './journal.js' import { EVMPerformanceLogger } from './logger.js' import { Message } from './message.js' import { getOpcodesForHF } from './opcodes/index.js' +import { paramsEVM } from './params.js' import { NobleBLS, getActivePrecompiles, getPrecompileName } from './precompiles/index.js' import { TransientStorage } from './transientStorage.js' -import { DefaultBlockchain } from './types.js' import type { InterpreterOpts } from './interpreter.js' import type { Timer } from './logger.js' @@ -42,6 +42,7 @@ import type { Blockchain, CustomOpcode, EVMBLSInterface, + EVMBN254Interface, EVMEvents, EVMInterface, EVMOpts, @@ -49,16 +50,13 @@ import type { EVMRunCallOpts, EVMRunCodeOpts, ExecResult, - bn128, } from './types.js' -import type { EVMStateManagerInterface } from '@ethereumjs/common' +import type { Common, StateManagerInterface } from '@ethereumjs/common' const debug = debugDefault('evm:evm') const debugGas = debugDefault('evm:gas') const debugPrecompiles = debugDefault('evm:precompiles') -let initializedRustBN: bn128 | undefined = undefined - /** * EVM is responsible for executing an EVM message fully * (including any nested calls and creates), processing the results @@ -97,7 +95,7 @@ export class EVM implements EVMInterface { public readonly common: Common public readonly events: AsyncEventEmitter - public stateManager: EVMStateManagerInterface + public stateManager: StateManagerInterface public blockchain: Blockchain public journal: Journal @@ -145,34 +143,7 @@ export class EVM implements EVMInterface { protected readonly _emit: (topic: string, data: any) => Promise - private _bn128: bn128 - - /** - * Use this async static constructor for the initialization - * of an EVM object - * - * @param createOpts The EVM options - * @returns A new EVM - */ - static async create(createOpts?: EVMOpts) { - const opts = createOpts ?? ({} as EVMOpts) - const bn128 = initializedRustBN ?? 
((await initRustBN()) as bn128) - initializedRustBN = bn128 - - if (opts.common === undefined) { - opts.common = new Common({ chain: Chain.Mainnet }) - } - - if (opts.blockchain === undefined) { - opts.blockchain = new DefaultBlockchain() - } - - if (opts.stateManager === undefined) { - opts.stateManager = new SimpleStateManager() - } - - return new EVM(opts, bn128) - } + private _bn254: EVMBN254Interface /** * @@ -180,25 +151,35 @@ export class EVM implements EVMInterface { * * @deprecated The direct usage of this constructor is replaced since * non-finalized async initialization lead to side effects. Please - * use the async {@link EVM.create} constructor instead (same API). + * use the async {@link createEVM} constructor instead (same API). * * @param opts The EVM options * @param bn128 Initialized bn128 WASM object for precompile usage (internal) */ - protected constructor(opts: EVMOpts, bn128: bn128) { + constructor(opts: EVMOpts) { this.common = opts.common! this.blockchain = opts.blockchain! this.stateManager = opts.stateManager! - this._bn128 = bn128 + if (this.common.isActivatedEIP(6800)) { + const mandatory = ['checkChunkWitnessPresent'] + for (const m of mandatory) { + if (!(m in this.stateManager)) { + throw new Error( + `State manager used must implement ${m} if Verkle (EIP-6800) is activated`, + ) + } + } + } + this.events = new AsyncEventEmitter() this._optsCached = opts // Supported EIPs const supportedEIPs = [ - 1153, 1559, 2537, 2565, 2718, 2929, 2930, 2935, 3074, 3198, 3529, 3540, 3541, 3607, 3651, - 3670, 3855, 3860, 4399, 4895, 4788, 4844, 5133, 5656, 6110, 6780, 6800, 7002, 7251, 7516, - 7685, 7702, 7709, + 663, 1153, 1559, 2537, 2565, 2718, 2929, 2930, 2935, 3198, 3529, 3540, 3541, 3607, 3651, 3670, + 3855, 3860, 4200, 4399, 4750, 4788, 4844, 4895, 5133, 5450, 5656, 6110, 6206, 6780, 6800, + 7002, 7069, 7251, 7480, 7516, 7620, 7685, 7692, 7698, 7702, 7709, ] for (const eip of this.common.eips()) { @@ -209,10 +190,12 @@ export class EVM implements EVMInterface { if (!EVM.supportedHardforks.includes(this.common.hardfork() as Hardfork)) { throw new Error( - `Hardfork ${this.common.hardfork()} not set as supported in supportedHardforks` + `Hardfork ${this.common.hardfork()} not set as supported in supportedHardforks`, ) } + this.common.updateParams(opts.params ?? paramsEVM) + this.allowUnlimitedContractSize = opts.allowUnlimitedContractSize ?? false this.allowUnlimitedInitCodeSize = opts.allowUnlimitedInitCodeSize ?? false this._customOpcodes = opts.customOpcodes @@ -230,10 +213,12 @@ export class EVM implements EVMInterface { this.getActiveOpcodes() this._precompiles = getActivePrecompiles(this.common, this._customPrecompiles) + // Precompile crypto libraries if (this.common.isActivatedEIP(2537)) { this._bls = opts.bls ?? new NobleBLS() this._bls.init?.() } + this._bn254 = opts.bn254! this._emit = async (topic: string, data: any): Promise => { return new Promise((resolve) => this.events.emit(topic as keyof EVMEvents, data, resolve)) @@ -244,7 +229,7 @@ export class EVM implements EVMInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
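// Illustrative sketch (not part of this patch) of the async construction path the
// deprecation note above points to. createEVM is assumed to accept the same EVMOpts and
// to fill in default common / state manager / blockchain / bn254 bindings when omitted.
import { createEVM } from '@ethereumjs/evm'

const evm = await createEVM()
const result = await evm.runCode({
  code: new Uint8Array([0x60, 0x01, 0x60, 0x01, 0x01, 0x00]), // PUSH1 1, PUSH1 1, ADD, STOP
})
result.executionGasUsed // gas consumed by the run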
false) : false } /** @@ -262,7 +247,7 @@ export class EVM implements EVMInterface { protected async _executeCall(message: MessageWithTo): Promise { let gasLimit = message.gasLimit - const fromAddress = message.authcallOrigin ?? message.caller + const fromAddress = message.caller if (this.common.isActivatedEIP(6800)) { const sendsValue = message.value !== BIGINT_0 @@ -312,19 +297,19 @@ export class EVM implements EVMInterface { if (!toAccount) { if (this.common.isActivatedEIP(6800)) { const absenceProofAccessGas = message.accessWitness!.touchAndChargeProofOfAbsence( - message.to + message.to, ) gasLimit -= absenceProofAccessGas if (gasLimit < BIGINT_0) { if (this.DEBUG) { debugGas( - `Proof of absense access charged(${absenceProofAccessGas}) caused OOG (-> ${gasLimit})` + `Proof of absence access charged(${absenceProofAccessGas}) caused OOG (-> ${gasLimit})`, ) } return { execResult: OOGResult(message.gasLimit) } } else { if (this.DEBUG) { - debugGas(`Proof of absense access used (${absenceProofAccessGas} gas (-> ${gasLimit}))`) + debugGas(`Proof of absence access used (${absenceProofAccessGas} gas (-> ${gasLimit}))`) } } } @@ -408,7 +393,7 @@ export class EVM implements EVMInterface { protected async _executeCreate(message: Message): Promise { let gasLimit = message.gasLimit - const fromAddress = message.authcallOrigin ?? message.caller + const fromAddress = message.caller if (this.common.isActivatedEIP(6800)) { if (message.depth === 0) { @@ -426,7 +411,7 @@ export class EVM implements EVMInterface { if (this.common.isActivatedEIP(3860)) { if ( - message.data.length > Number(this.common.param('vm', 'maxInitCodeSize')) && + message.data.length > Number(this.common.param('maxInitCodeSize')) && !this.allowUnlimitedInitCodeSize ) { return { @@ -440,8 +425,9 @@ export class EVM implements EVMInterface { } } + // TODO at some point, figure out why we swapped out data to code in the first place message.code = message.data - message.data = new Uint8Array(0) + message.data = message.eofCallData ?? 
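// Illustrative sketch (not part of this patch) of the parameter-lookup change applied
// throughout this file: the topic argument ('vm', 'gasPrices') is gone and EVM defaults are
// registered via common.updateParams(opts.params ?? paramsEVM) in the constructor above.
import type { Common } from '@ethereumjs/common'

function initCodeSizeLimit(common: Common): number {
  // was: common.param('vm', 'maxInitCodeSize')
  return Number(common.param('maxInitCodeSize'))
}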
new Uint8Array() message.to = await this._generateAddress(message) if (this.common.isActivatedEIP(6780)) { @@ -458,13 +444,13 @@ export class EVM implements EVMInterface { if (this.common.isActivatedEIP(6800)) { const contractCreateAccessGas = message.accessWitness!.touchAndChargeContractCreateInit( - message.to + message.to, ) gasLimit -= contractCreateAccessGas if (gasLimit < BIGINT_0) { if (this.DEBUG) { debugGas( - `ContractCreateInit charge(${contractCreateAccessGas}) caused OOG (-> ${gasLimit})` + `ContractCreateInit charge(${contractCreateAccessGas}) caused OOG (-> ${gasLimit})`, ) } return { execResult: OOGResult(message.gasLimit) } @@ -496,7 +482,7 @@ export class EVM implements EVMInterface { } await this.journal.putAccount(message.to, toAccount) - await this.stateManager.clearContractStorage(message.to) + await this.stateManager.clearStorage(message.to) const newContractEvent = { address: message.to, @@ -547,13 +533,13 @@ export class EVM implements EVMInterface { if (gasLimit < BIGINT_0) { if (this.DEBUG) { debug( - `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})` + `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})`, ) } return { execResult: OOGResult(message.gasLimit) } } else { debug( - `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})` + `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})`, ) } } @@ -574,15 +560,14 @@ export class EVM implements EVMInterface { } // run the message with the updated gas limit and add accessed gas used to the result - let result = await this.runInterpreter({ ...message, gasLimit } as Message) + let result = await this.runInterpreter({ ...message, gasLimit, isCreate: true } as Message) result.executionGasUsed += message.gasLimit - gasLimit // fee for size of the return value let totalGas = result.executionGasUsed let returnFee = BIGINT_0 if (!result.exceptionError && !this.common.isActivatedEIP(6800)) { - returnFee = - BigInt(result.returnValue.length) * BigInt(this.common.param('gasPrices', 'createData')) + returnFee = BigInt(result.returnValue.length) * BigInt(this.common.param('createDataGas')) totalGas = totalGas + returnFee if (this.DEBUG) { debugGas(`Add return value size fee (${returnFee} to gas used (-> ${totalGas}))`) @@ -594,7 +579,7 @@ export class EVM implements EVMInterface { if ( !result.exceptionError && this.common.gteHardfork(Hardfork.SpuriousDragon) && - result.returnValue.length > Number(this.common.param('vm', 'maxCodeSize')) + result.returnValue.length > Number(this.common.param('maxCodeSize')) ) { allowedCodeSize = false } @@ -602,36 +587,22 @@ export class EVM implements EVMInterface { // If enough gas and allowed code size let CodestoreOOG = false if (totalGas <= message.gasLimit && (this.allowUnlimitedContractSize || allowedCodeSize)) { - if (this.common.isActivatedEIP(3541) && result.returnValue[0] === EOF.FORMAT) { + if (this.common.isActivatedEIP(3541) && result.returnValue[0] === FORMAT) { if (!this.common.isActivatedEIP(3540)) { result = { ...result, ...INVALID_BYTECODE_RESULT(message.gasLimit) } - } - // Begin EOF1 contract code checks - // EIP-3540 EOF1 header check - const eof1CodeAnalysisResults = EOF.codeAnalysis(result.returnValue) - if (typeof eof1CodeAnalysisResults?.code === 'undefined') { - result = { - ...result, - ...INVALID_EOF_RESULT(message.gasLimit), - } - } else if (this.common.isActivatedEIP(3670)) { - // EIP-3670 EOF1 opcode check - const 
codeStart = eof1CodeAnalysisResults.data > 0 ? 10 : 7 - // The start of the code section of an EOF1 compliant contract will either be - // index 7 (if no data section is present) or index 10 (if a data section is present) - // in the bytecode of the contract - if ( - !EOF.validOpcodes( - result.returnValue.subarray(codeStart, codeStart + eof1CodeAnalysisResults.code) - ) - ) { - result = { - ...result, - ...INVALID_EOF_RESULT(message.gasLimit), - } - } else { - result.executionGasUsed = totalGas - } + } else if ( + // TODO check if this is correct + // Also likely cleanup this eofCallData stuff + /*(message.depth > 0 && message.eofCallData === undefined) || + (message.depth === 0 && !isEOF(message.code))*/ + !isEOF(message.code) + ) { + // TODO the message.eof was flagged for this to work for this first + // Running into Legacy mode: unable to deploy EOF contract + result = { ...result, ...INVALID_BYTECODE_RESULT(message.gasLimit) } + } else { + // 3541 is active and current runtime mode is EOF + result.executionGasUsed = totalGas } } else { result.executionGasUsed = totalGas @@ -671,19 +642,19 @@ export class EVM implements EVMInterface { gasLimit = message.gasLimit - result.executionGasUsed if (!result.exceptionError && this.common.isActivatedEIP(6800)) { const createCompleteAccessGas = message.accessWitness!.touchAndChargeContractCreateCompleted( - message.to + message.to, ) gasLimit -= createCompleteAccessGas if (gasLimit < BIGINT_0) { if (this.DEBUG) { debug( - `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})` + `ContractCreateComplete access gas (${createCompleteAccessGas}) caused OOG (-> ${gasLimit})`, ) } result = { ...result, ...OOGResult(message.gasLimit) } } else { debug( - `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})` + `ContractCreateComplete access used (${createCompleteAccessGas}) gas (-> ${gasLimit})`, ) result.executionGasUsed += createCompleteAccessGas } @@ -701,13 +672,13 @@ export class EVM implements EVMInterface { message.accessWitness!.touchCodeChunksRangeOnWriteAndChargeGas( message.to, 0, - result.returnValue.length - 1 + result.returnValue.length - 1, ) gasLimit -= byteCodeWriteAccessfee if (gasLimit < BIGINT_0) { if (this.DEBUG) { debug( - `byteCodeWrite access gas (${byteCodeWriteAccessfee}) caused OOG (-> ${gasLimit})` + `byteCodeWrite access gas (${byteCodeWriteAccessfee}) caused OOG (-> ${gasLimit})`, ) } result = { ...result, ...OOGResult(message.gasLimit) } @@ -717,7 +688,7 @@ export class EVM implements EVMInterface { } } - await this.stateManager.putContractCode(message.to, result.returnValue) + await this.stateManager.putCode(message.to, result.returnValue) if (this.DEBUG) { debug(`Code saved on new contract creation`) } @@ -744,28 +715,28 @@ export class EVM implements EVMInterface { */ protected async runInterpreter( message: Message, - opts: InterpreterOpts = {} + opts: InterpreterOpts = {}, ): Promise { - let contract = await this.stateManager.getAccount(message.to ?? Address.zero()) + let contract = await this.stateManager.getAccount(message.to ?? createZeroAddress()) if (!contract) { contract = new Account() } const env = { - address: message.to ?? Address.zero(), - caller: message.caller ?? Address.zero(), + address: message.to ?? createZeroAddress(), + caller: message.caller ?? createZeroAddress(), callData: message.data ?? Uint8Array.from([0]), callValue: message.value ?? BIGINT_0, code: message.code as Uint8Array, isStatic: message.isStatic ?? 
false, + isCreate: message.isCreate ?? false, depth: message.depth ?? 0, gasPrice: this._tx!.gasPrice, - origin: this._tx!.origin ?? message.caller ?? Address.zero(), + origin: this._tx!.origin ?? message.caller ?? createZeroAddress(), block: this._block ?? defaultBlock(), contract, codeAddress: message.codeAddress, gasRefund: message.gasRefund, - containerCode: message.containerCode, chargeCodeAccesses: message.chargeCodeAccesses, blobVersionedHashes: message.blobVersionedHashes ?? [], accessWitness: message.accessWitness, @@ -780,7 +751,7 @@ export class EVM implements EVMInterface { message.gasLimit, this.journal, this.performanceLogger, - this._optsCached.profiler + this._optsCached.profiler, ) if (message.selfdestruct) { interpreter._result.selfdestruct = message.selfdestruct @@ -844,9 +815,9 @@ export class EVM implements EVMInterface { this._block = opts.block ?? defaultBlock() this._tx = { gasPrice: opts.gasPrice ?? BIGINT_0, - origin: opts.origin ?? opts.caller ?? Address.zero(), + origin: opts.origin ?? opts.caller ?? createZeroAddress(), } - const caller = opts.caller ?? Address.zero() + const caller = opts.caller ?? createZeroAddress() const value = opts.value ?? BIGINT_0 if (opts.skipBalance === true) { @@ -914,7 +885,7 @@ export class EVM implements EVMInterface { debug( `New message caller=${caller} gasLimit=${gasLimit} to=${ to?.toString() ?? 'none' - } value=${value} delegatecall=${delegatecall ? 'yes' : 'no'}` + } value=${value} delegatecall=${delegatecall ? 'yes' : 'no'}`, ) } if (message.to) { @@ -933,7 +904,7 @@ export class EVM implements EVMInterface { debug( `Received message execResult: [ gasUsed=${executionGasUsed} exceptionError=${ exceptionError ? `'${exceptionError.error}'` : 'none' - } returnValue=${short(returnValue)} gasRefund=${result.execResult.gasRefund ?? 0} ]` + } returnValue=${short(returnValue)} gasRefund=${result.execResult.gasRefund ?? 0} ]`, ) } const err = result.execResult.exceptionError @@ -982,14 +953,14 @@ export class EVM implements EVMInterface { this._tx = { gasPrice: opts.gasPrice ?? BIGINT_0, - origin: opts.origin ?? opts.caller ?? Address.zero(), + origin: opts.origin ?? opts.caller ?? createZeroAddress(), } const message = new Message({ code: opts.code, data: opts.data, gasLimit: opts.gasLimit ?? BigInt(0xffffff), - to: opts.to ?? Address.zero(), + to: opts.to ?? createZeroAddress(), caller: opts.caller, value: opts.value, depth: opts.depth, @@ -1015,7 +986,7 @@ export class EVM implements EVMInterface { protected runPrecompile( code: PrecompileFunc, data: Uint8Array, - gasLimit: bigint + gasLimit: bigint, ): Promise | ExecResult { if (typeof code !== 'function') { throw new Error('Invalid precompile') @@ -1040,14 +1011,9 @@ export class EVM implements EVMInterface { message.code = precompile message.isCompiled = true } else { - message.containerCode = await this.stateManager.getContractCode(message.codeAddress) + message.code = await this.stateManager.getCode(message.codeAddress) message.isCompiled = false message.chargeCodeAccesses = true - if (this.common.isActivatedEIP(3540)) { - message.code = getEOFCode(message.containerCode) - } else { - message.code = message.containerCode - } } } } @@ -1072,7 +1038,7 @@ export class EVM implements EVMInterface { if (account.balance < BIGINT_0) { throw new EvmError(ERROR.INSUFFICIENT_BALANCE) } - const result = this.journal.putAccount(message.authcallOrigin ?? 
message.caller, account) + const result = this.journal.putAccount(message.caller, account) if (this.DEBUG) { debug(`Reduced sender (${message.caller}) balance (-> ${account.balance})`) } @@ -1119,7 +1085,7 @@ export class EVM implements EVMInterface { stateManager: this.stateManager.shallowCopy(), } ;(opts.stateManager as any).common = common - return new EVM(opts, this._bn128) + return new EVM(opts) } public getPerformanceLogs() { @@ -1183,8 +1149,7 @@ export function defaultBlock(): Block { return { header: { number: BIGINT_0, - cliqueSigner: () => Address.zero(), - coinbase: Address.zero(), + coinbase: createZeroAddress(), timestamp: BIGINT_0, difficulty: BIGINT_0, prevRandao: zeros(32), diff --git a/packages/evm/src/exceptions.ts b/packages/evm/src/exceptions.ts index 72ede8f493..9063f818ed 100644 --- a/packages/evm/src/exceptions.ts +++ b/packages/evm/src/exceptions.ts @@ -19,11 +19,9 @@ export enum ERROR { INVALID_RETURNSUB = 'invalid RETURNSUB', INVALID_JUMPSUB = 'invalid JUMPSUB', INVALID_BYTECODE_RESULT = 'invalid bytecode deployed', - INVALID_EOF_FORMAT = 'invalid EOF format', INITCODE_SIZE_VIOLATION = 'initcode exceeds max initcode size', INVALID_INPUT_LENGTH = 'invalid input length', - - AUTHCALL_UNSET = 'attempting to AUTHCALL without AUTH set', + INVALID_EOF_FORMAT = 'invalid EOF format', // BLS errors BLS_12_381_INVALID_INPUT_LENGTH = 'invalid input length', @@ -31,6 +29,9 @@ export enum ERROR { BLS_12_381_INPUT_EMPTY = 'input is empty', BLS_12_381_FP_NOT_IN_FIELD = 'fp point not in field', + // BN254 errors + BN254_FP_NOT_IN_FIELD = 'fp point not in field', + // Point Evaluation Errors INVALID_COMMITMENT = 'kzg commitment does not match versioned hash', INVALID_INPUTS = 'kzg inputs invalid', diff --git a/packages/evm/src/index.ts b/packages/evm/src/index.ts index 3c2303d301..0733f6c8ba 100644 --- a/packages/evm/src/index.ts +++ b/packages/evm/src/index.ts @@ -1,4 +1,4 @@ -import { EOF } from './eof.js' +import { EOFContainer, validateEOF } from './eof/container.js' import { EVM } from './evm.js' import { ERROR as EVMErrorMessage, EvmError } from './exceptions.js' import { Message } from './message.js' @@ -6,12 +6,16 @@ import { getOpcodesForHF } from './opcodes/index.js' import { MCLBLS, NobleBLS, + NobleBN254, type PrecompileInput, + RustBN254, getActivePrecompiles, } from './precompiles/index.js' import type { InterpreterStep } from './interpreter.js' import type { + EVMBLSInterface, + EVMBN254Interface, EVMInterface, EVMOpts, EVMResult, @@ -19,12 +23,12 @@ import type { EVMRunCodeOpts, ExecResult, Log, - bn128, } from './types.js' export * from './logger.js' export type { - bn128, + EVMBLSInterface, + EVMBN254Interface, EVMInterface, EVMOpts, EVMResult, @@ -37,7 +41,7 @@ export type { } export { - EOF, + EOFContainer, EVM, EvmError, EVMErrorMessage, @@ -46,4 +50,10 @@ export { MCLBLS, Message, NobleBLS, + NobleBN254, + RustBN254, + validateEOF, } + +export * from './constructors.js' +export * from './params.js' diff --git a/packages/evm/src/interpreter.ts b/packages/evm/src/interpreter.ts index ec46ecb3e1..6b8a3362c4 100644 --- a/packages/evm/src/interpreter.ts +++ b/packages/evm/src/interpreter.ts @@ -3,6 +3,7 @@ import { Account, BIGINT_0, BIGINT_1, + BIGINT_2, MAX_UINT64, bigIntToHex, bytesToBigInt, @@ -11,7 +12,10 @@ import { } from '@ethereumjs/util' import debugDefault from 'debug' -import { EOF } from './eof.js' +import { FORMAT, MAGIC, VERSION } from './eof/constants.js' +import { EOFContainerMode, validateEOF } from './eof/container.js' +import { setupEOF } from 
'./eof/setup.js' +import { ContainerSectionType } from './eof/verify.js' import { ERROR, EvmError } from './exceptions.js' import { type EVMPerformanceLogger, type Timer } from './logger.js' import { Memory } from './memory.js' @@ -22,8 +26,8 @@ import { Stack } from './stack.js' import type { EVM } from './evm.js' import type { Journal } from './journal.js' import type { AsyncOpHandler, Opcode, OpcodeMapEntry } from './opcodes/index.js' -import type { Block, Blockchain, EVMProfilerOpts, EVMResult, Log } from './types.js' -import type { AccessWitnessInterface, Common, EVMStateManagerInterface } from '@ethereumjs/common' +import type { Block, Blockchain, EOFEnv, EVMProfilerOpts, EVMResult, Log } from './types.js' +import type { AccessWitnessInterface, Common, StateManagerInterface } from '@ethereumjs/common' import type { Address, PrefixedHexString } from '@ethereumjs/util' const debugGas = debugDefault('evm:gas') @@ -56,6 +60,7 @@ export interface Env { callValue: bigint code: Uint8Array isStatic: boolean + isCreate: boolean depth: number gasPrice: bigint origin: Address @@ -63,7 +68,7 @@ export interface Env { contract: Account codeAddress: Address /* Different than address for DELEGATECALL and CALLCODE */ gasRefund: bigint /* Current value (at begin of the frame) of the gas refund */ - containerCode?: Uint8Array /** Full container code for EOF1 contracts */ + eof?: EOFEnv /* Optional EOF environment in case of EOF execution */ blobVersionedHashes: Uint8Array[] /** Versioned hashes for blob transactions */ createdAddresses?: Set accessWitness?: AccessWitnessInterface @@ -81,14 +86,13 @@ export interface RunState { shouldDoJumpAnalysis: boolean validJumps: Uint8Array // array of values where validJumps[index] has value 0 (default), 1 (jumpdest), 2 (beginsub) cachedPushes: { [pc: number]: bigint } - stateManager: EVMStateManagerInterface + stateManager: StateManagerInterface blockchain: Blockchain env: Env messageGasLimit?: bigint // Cache value from `gas.ts` to save gas limit for a message call interpreter: Interpreter gasRefund: bigint // Tracks the current refund gasLeft: bigint // Current gas left - auth?: Address /** EIP-3074 AUTH parameter */ returnBytes: Uint8Array /* Current bytes in the return Uint8Array. Cleared each time a CALL/CREATE is made in the current frame. 
*/ } @@ -100,7 +104,7 @@ export interface InterpreterResult { export interface InterpreterStep { gasLeft: bigint gasRefund: bigint - stateManager: EVMStateManagerInterface + stateManager: StateManagerInterface stack: bigint[] pc: number depth: number @@ -123,7 +127,7 @@ export interface InterpreterStep { export class Interpreter { protected _vm: any protected _runState: RunState - protected _stateManager: EVMStateManagerInterface + protected _stateManager: StateManagerInterface protected common: Common public _evm: EVM public journal: Journal @@ -142,17 +146,26 @@ export class Interpreter { // TODO remove gasLeft as constructor argument constructor( evm: EVM, - stateManager: EVMStateManagerInterface, + stateManager: StateManagerInterface, blockchain: Blockchain, env: Env, gasLeft: bigint, journal: Journal, performanceLogs: EVMPerformanceLogger, - profilerOpts?: EVMProfilerOpts + profilerOpts?: EVMProfilerOpts, ) { this._evm = evm this._stateManager = stateManager this.common = this._evm.common + + if ( + this.common.consensusType() === 'poa' && + this._evm['_optsCached'].cliqueSigner === undefined + ) + throw new Error( + 'Must include cliqueSigner function if clique/poa is being used for consensus type', + ) + this._runState = { programCounter: 0, opCode: 0xfe, // INVALID opcode @@ -184,40 +197,54 @@ export class Interpreter { } async run(code: Uint8Array, opts: InterpreterOpts = {}): Promise { - if (!this.common.isActivatedEIP(3540) || code[0] !== EOF.FORMAT) { + if (!this.common.isActivatedEIP(3540) || code[0] !== FORMAT) { // EIP-3540 isn't active and first byte is not 0xEF - treat as legacy bytecode this._runState.code = code } else if (this.common.isActivatedEIP(3540)) { - if (code[1] !== EOF.MAGIC) { + if (code[1] !== MAGIC) { // Bytecode contains invalid EOF magic byte return { runState: this._runState, exceptionError: new EvmError(ERROR.INVALID_BYTECODE_RESULT), } } - if (code[2] !== EOF.VERSION) { + if (code[2] !== VERSION) { // Bytecode contains invalid EOF version number return { runState: this._runState, exceptionError: new EvmError(ERROR.INVALID_EOF_FORMAT), } } - // Code is EOF1 format - const codeSections = EOF.codeAnalysis(code) - if (!codeSections) { - // Code is invalid EOF1 format if `codeSections` is falsy + this._runState.code = code + + const isTxCreate = this._env.isCreate && this._env.depth === 0 + const eofMode = isTxCreate ? EOFContainerMode.TxInitmode : EOFContainerMode.Default + + try { + setupEOF(this._runState, eofMode) + } catch (e) { return { runState: this._runState, - exceptionError: new EvmError(ERROR.INVALID_EOF_FORMAT), + exceptionError: new EvmError(ERROR.INVALID_EOF_FORMAT), // TODO: verify if all gas should be consumed } } - if (codeSections.data) { - // Set code to EOF container code section which starts at byte position 10 if data section is present - this._runState.code = code.subarray(10, 10 + codeSections!.code) - } else { - // Set code to EOF container code section which starts at byte position 7 if no data section is present - this._runState.code = code.subarray(7, 7 + codeSections!.code) + if (isTxCreate) { + // Tx tries to deploy container + try { + validateEOF( + this._runState.code, + this._evm, + ContainerSectionType.InitCode, + EOFContainerMode.TxInitmode, + ) + } catch (e) { + // Trying to deploy an invalid EOF container + return { + runState: this._runState, + exceptionError: new EvmError(ERROR.INVALID_EOF_FORMAT), // TODO: verify if all gas should be consumed + } + } } } this._runState.programCounter = opts.pc ?? 
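// Illustrative sketch (not part of this patch) of the three prefix bytes checked above,
// with the values defined by EIP-3540 (the constants themselves live in ./eof/constants.ts):
//   code[0] === FORMAT  (0xef)
//   code[1] === MAGIC   (0x00)
//   code[2] === VERSION (0x01)
const looksLikeEOF1 = (code: Uint8Array): boolean =>
  code[0] === 0xef && code[1] === 0x00 && code[2] === 0x01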
this._runState.programCounter @@ -247,7 +274,9 @@ export class Interpreter { opCode = this._runState.code[programCounter] // Only run the jump destination analysis if `code` actually contains a JUMP/JUMPI/JUMPSUB opcode if (opCode === 0x56 || opCode === 0x57 || opCode === 0x5e) { - const { jumps, pushes, opcodesCached } = this._getValidJumpDests(this._runState.code) + const { jumps, pushes, opcodesCached } = this._getValidJumpDestinations( + this._runState.code, + ) this._runState.validJumps = jumps this._runState.cachedPushes = pushes this._runState.shouldDoJumpAnalysis = false @@ -260,7 +289,7 @@ export class Interpreter { } // if its an invalid opcode with verkle activated, then check if its because of a missing code - // chunk in the witness, and throw appropriate error to distinguish from an actual invalid opcod + // chunk in the witness, and throw appropriate error to distinguish from an actual invalid opcode if ( opCode === 0xfe && this.common.isActivatedEIP(6800) && @@ -341,7 +370,7 @@ export class Interpreter { this._runState.env.accessWitness!.touchCodeChunksRangeOnReadAndChargeGas( contract, this._runState.programCounter, - this._runState.programCounter + this._runState.programCounter, ) gas += statelessGas debugGas(`codechunk accessed statelessGas=${statelessGas} (-> ${gas})`) @@ -379,7 +408,7 @@ export class Interpreter { Number(gas), 'opcodes', opInfo.fee, - Number(gas) - opInfo.fee + Number(gas) - opInfo.fee, ) } } @@ -466,7 +495,7 @@ export class Interpreter { } // Returns all valid jump and jumpsub destinations. - _getValidJumpDests(code: Uint8Array) { + _getValidJumpDestinations(code: Uint8Array) { const jumps = new Uint8Array(code.length).fill(0) const pushes: { [pc: number]: bigint } = {} @@ -500,13 +529,13 @@ export class Interpreter { useGas(amount: bigint, context?: string | Opcode): void { this._runState.gasLeft -= amount if (this._evm.DEBUG) { - let tstr = '' + let tempString = '' if (typeof context === 'string') { - tstr = context + ': ' + tempString = context + ': ' } else if (context !== undefined) { - tstr = `${context.name} fee: ` + tempString = `${context.name} fee: ` } - debugGas(`${tstr}used ${amount} gas (-> ${this._runState.gasLeft})`) + debugGas(`${tempString}used ${amount} gas (-> ${this._runState.gasLeft})`) } if (this._runState.gasLeft < BIGINT_0) { this._runState.gasLeft = BIGINT_0 @@ -524,7 +553,7 @@ export class Interpreter { debugGas( `${typeof context === 'string' ? context + ': ' : ''}refund ${amount} gas (-> ${ this._runState.gasRefund - })` + })`, ) } this._runState.gasRefund += amount @@ -540,7 +569,7 @@ export class Interpreter { debugGas( `${typeof context === 'string' ? context + ': ' : ''}sub gas refund ${amount} (-> ${ this._runState.gasRefund - })` + })`, ) } this._runState.gasRefund -= amount @@ -582,7 +611,7 @@ export class Interpreter { * Store 256-bit a value in memory to persistent storage. 
*/ async storageStore(key: Uint8Array, value: Uint8Array): Promise { - await this._stateManager.putContractStorage(this._env.address, key, value) + await this._stateManager.putStorage(this._env.address, key, value) const account = await this._stateManager.getAccount(this._env.address) if (!account) { throw new Error('could not read account while persisting memory') @@ -599,7 +628,7 @@ export class Interpreter { if (original) { return this._stateManager.originalStorageCache.get(this._env.address, key) } else { - return this._stateManager.getContractStorage(this._env.address, key) + return this._stateManager.getStorage(this._env.address, key) } } @@ -691,14 +720,14 @@ export class Interpreter { * Returns the size of code running in current environment. */ getCodeSize(): bigint { - return BigInt(this._env.containerCode ? this._env.containerCode.length : this._env.code.length) + return BigInt(this._env.code.length) } /** * Returns the code running in current environment. */ getCode(): Uint8Array { - return this._env.containerCode ?? this._env.code + return this._env.code } /** @@ -762,7 +791,7 @@ export class Interpreter { getBlockCoinbase(): bigint { let coinbase: Address if (this.common.consensusAlgorithm() === ConsensusAlgorithm.Clique) { - coinbase = this._env.block.header.cliqueSigner() + coinbase = this._evm['_optsCached'].cliqueSigner!(this._env.block.header) } else { coinbase = this._env.block.header.coinbase } @@ -848,31 +877,6 @@ export class Interpreter { return this._baseCall(msg) } - /** - * Sends a message with arbitrary data to a given address path. - */ - async authcall( - gasLimit: bigint, - address: Address, - value: bigint, - data: Uint8Array - ): Promise { - const msg = new Message({ - caller: this._runState.auth, - gasLimit, - to: address, - value, - data, - isStatic: this._env.isStatic, - depth: this._env.depth + 1, - authcallOrigin: this._env.address, - blobVersionedHashes: this._env.blobVersionedHashes, - accessWitness: this._env.accessWitness, - }) - - return this._baseCall(msg) - } - /** * Message-call into this account with an alternative account's code. 
*/ @@ -880,7 +884,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._env.address, @@ -907,7 +911,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._env.address, @@ -932,7 +936,7 @@ export class Interpreter { gasLimit: bigint, address: Address, value: bigint, - data: Uint8Array + data: Uint8Array, ): Promise { const msg = new Message({ caller: this._env.caller, @@ -969,7 +973,7 @@ export class Interpreter { // Check if account has enough ether and max depth not exceeded if ( - this._env.depth >= Number(this.common.param('vm', 'stackLimit')) || + this._env.depth >= Number(this.common.param('stackLimit')) || (msg.delegatecall !== true && this._env.contract.balance < msg.value) ) { return BIGINT_0 @@ -1021,8 +1025,9 @@ export class Interpreter { async create( gasLimit: bigint, value: bigint, - data: Uint8Array, - salt?: Uint8Array + codeToRun: Uint8Array, + salt?: Uint8Array, + eofCallData?: Uint8Array, ): Promise { const selfdestruct = new Set(this._result.selfdestruct) const caller = this._env.address @@ -1033,7 +1038,7 @@ export class Interpreter { // Check if account has enough ether and max depth not exceeded if ( - this._env.depth >= Number(this.common.param('vm', 'stackLimit')) || + this._env.depth >= Number(this.common.param('stackLimit')) || this._env.contract.balance < value ) { return BIGINT_0 @@ -1049,7 +1054,7 @@ export class Interpreter { if (this.common.isActivatedEIP(3860)) { if ( - data.length > Number(this.common.param('vm', 'maxInitCodeSize')) && + codeToRun.length > Number(this.common.param('maxInitCodeSize')) && this._evm.allowUnlimitedInitCodeSize === false ) { return BIGINT_0 @@ -1060,7 +1065,8 @@ export class Interpreter { caller, gasLimit, value, - data, + data: codeToRun, + eofCallData, salt, depth, selfdestruct, @@ -1118,7 +1124,7 @@ export class Interpreter { } } - return this._getReturnCode(results) + return this._getReturnCode(results, true) } /** @@ -1129,11 +1135,25 @@ export class Interpreter { gasLimit: bigint, value: bigint, data: Uint8Array, - salt: Uint8Array + salt: Uint8Array, ): Promise { return this.create(gasLimit, value, data, salt) } + /** + * Creates a new contract with a given value. Generates + * a deterministic address via EOFCREATE rules. + */ + async eofcreate( + gasLimit: bigint, + value: bigint, + containerData: Uint8Array, + salt: Uint8Array, + callData: Uint8Array, + ): Promise { + return this.create(gasLimit, value, containerData, salt, callData) + } + /** * Mark account for later deletion and give the remaining balance to the * specified beneficiary address. 
This will cause a trap and the @@ -1147,7 +1167,7 @@ export class Interpreter { async _selfDestruct(toAddress: Address): Promise { // only add to refund if this is the first selfdestruct for the address if (!this._result.selfdestruct.has(bytesToHex(this._env.address.bytes))) { - this.refundGas(this.common.param('gasPrices', 'selfdestructRefund')) + this.refundGas(this.common.param('selfdestructRefundGas')) } this._result.selfdestruct.add(bytesToHex(this._env.address.bytes)) @@ -1205,11 +1225,25 @@ export class Interpreter { this._result.logs.push(log) } - private _getReturnCode(results: EVMResult) { - if (results.execResult.exceptionError) { - return BIGINT_0 + private _getReturnCode(results: EVMResult, isEOFCreate = false) { + if (this._runState.env.eof === undefined || isEOFCreate) { + if (results.execResult.exceptionError) { + return BIGINT_0 + } else { + return BIGINT_1 + } } else { - return BIGINT_1 + // EOF mode, call was either EXTCALL / EXTDELEGATECALL / EXTSTATICCALL + if (results.execResult.exceptionError !== undefined) { + if (results.execResult.exceptionError.error === ERROR.REVERT) { + // Revert + return BIGINT_1 + } else { + // Failure + return BIGINT_2 + } + } + return BIGINT_0 } } } diff --git a/packages/evm/src/journal.ts b/packages/evm/src/journal.ts index 60dad80bd3..c07884f620 100644 --- a/packages/evm/src/journal.ts +++ b/packages/evm/src/journal.ts @@ -10,7 +10,7 @@ import { import debugDefault from 'debug' import { hexToBytes } from 'ethereum-cryptography/utils' -import type { Common, EVMStateManagerInterface } from '@ethereumjs/common' +import type { Common, StateManagerInterface } from '@ethereumjs/common' import type { Account, PrefixedHexString } from '@ethereumjs/util' import type { Debugger } from 'debug' @@ -32,7 +32,7 @@ type JournalDiffItem = [Set, Map>, type JournalHeight = number export class Journal { - private stateManager: EVMStateManagerInterface + private stateManager: StateManagerInterface private common: Common private DEBUG: boolean private _debug: Debugger @@ -47,11 +47,11 @@ export class Journal { public accessList?: Map> public preimages?: Map - constructor(stateManager: EVMStateManagerInterface, common: Common) { + constructor(stateManager: StateManagerInterface, common: Common) { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false this._debug = debugDefault('statemanager:statemanager') @@ -100,7 +100,7 @@ export class Journal { const bytesAddress = unprefixedHexToBytes(address) if (this.stateManager.getAppliedKey === undefined) { throw new Error( - 'touchAccount: stateManager.getAppliedKey can not be undefined if preimage storing is enabled' + 'touchAccount: stateManager.getAppliedKey can not be undefined if preimage storing is enabled', ) } const hashedKey = this.stateManager.getAppliedKey(bytesAddress) diff --git a/packages/evm/src/logger.ts b/packages/evm/src/logger.ts index cd81bba043..13f383e009 100644 --- a/packages/evm/src/logger.ts +++ b/packages/evm/src/logger.ts @@ -146,7 +146,7 @@ export class EVMPerformanceLogger { gasUsed: number, targetTimer: 'precompiles' | 'opcodes' = 'opcodes', staticGas?: number, - dynamicGas?: number + dynamicGas?: number, ) { if (this.currentTimer === undefined || this.currentTimer !== timer) { throw new Error('Cannot stop timer: another timer is already running') diff --git a/packages/evm/src/message.ts b/packages/evm/src/message.ts index 85cefdf583..c22d74ce3f 100644 --- a/packages/evm/src/message.ts +++ b/packages/evm/src/message.ts @@ -1,12 +1,13 @@ -import { Address, BIGINT_0 } from '@ethereumjs/util' +import { BIGINT_0, createZeroAddress } from '@ethereumjs/util' import type { PrecompileFunc } from './precompiles/index.js' +import type { EOFEnv } from './types.js' import type { AccessWitnessInterface } from '@ethereumjs/common' -import type { PrefixedHexString } from '@ethereumjs/util' +import type { Address, PrefixedHexString } from '@ethereumjs/util' const defaults = { value: BIGINT_0, - caller: Address.zero(), + caller: createZeroAddress(), data: new Uint8Array(0), depth: 0, isStatic: false, @@ -21,6 +22,7 @@ interface MessageOpts { caller?: Address gasLimit: bigint data?: Uint8Array + eofCallData?: Uint8Array depth?: number code?: Uint8Array | PrecompileFunc codeAddress?: Address @@ -36,7 +38,6 @@ interface MessageOpts { */ createdAddresses?: Set delegatecall?: boolean - authcallOrigin?: Address gasRefund?: bigint blobVersionedHashes?: Uint8Array[] accessWitness?: AccessWitnessInterface @@ -48,13 +49,15 @@ export class Message { caller: Address gasLimit: bigint data: Uint8Array + eofCallData?: Uint8Array // Only used in EOFCreate to signal an EOF contract to be created with this calldata (via EOFCreate) + isCreate?: boolean depth: number code?: Uint8Array | PrecompileFunc _codeAddress?: Address isStatic: boolean isCompiled: boolean salt?: Uint8Array - containerCode?: Uint8Array /** container code for EOF1 contracts - used by CODECOPY/CODESIZE */ + eof?: EOFEnv chargeCodeAccesses?: boolean /** * Set of addresses to selfdestruct. Key is the unprefixed address. @@ -65,11 +68,6 @@ export class Message { */ createdAddresses?: Set delegatecall: boolean - /** - * This is used to store the origin of the AUTHCALL, - * the purpose is to figure out where `value` should be taken from (not from `caller`) - */ - authcallOrigin?: Address gasRefund: bigint // Keeps track of the gasRefund at the start of the frame (used for journaling purposes) /** * List of versioned hashes if message is a blob transaction in the outer VM @@ -83,6 +81,7 @@ export class Message { this.caller = opts.caller ?? defaults.caller this.gasLimit = opts.gasLimit this.data = opts.data ?? defaults.data + this.eofCallData = opts.eofCallData this.depth = opts.depth ?? 
defaults.depth this.code = opts.code this._codeAddress = opts.codeAddress @@ -92,7 +91,6 @@ export class Message { this.selfdestruct = opts.selfdestruct this.createdAddresses = opts.createdAddresses this.delegatecall = opts.delegatecall ?? defaults.delegatecall - this.authcallOrigin = opts.authcallOrigin this.gasRefund = opts.gasRefund ?? defaults.gasRefund this.blobVersionedHashes = opts.blobVersionedHashes this.accessWitness = opts.accessWitness diff --git a/packages/evm/src/opcodes/EIP1283.ts b/packages/evm/src/opcodes/EIP1283.ts index 63e6eae0da..be1412d3f0 100644 --- a/packages/evm/src/opcodes/EIP1283.ts +++ b/packages/evm/src/opcodes/EIP1283.ts @@ -17,28 +17,28 @@ export function updateSstoreGasEIP1283( currentStorage: Uint8Array, originalStorage: Uint8Array, value: Uint8Array, - common: Common + common: Common, ) { if (equalsBytes(currentStorage, value)) { // If current value equals new value (this is a no-op), 200 gas is deducted. - return common.param('gasPrices', 'netSstoreNoopGas') + return common.param('netSstoreNoopGas') } // If current value does not equal new value if (equalsBytes(originalStorage, currentStorage)) { // If original value equals current value (this storage slot has not been changed by the current execution context) if (originalStorage.length === 0) { // If original value is 0, 20000 gas is deducted. - return common.param('gasPrices', 'netSstoreInitGas') + return common.param('netSstoreInitGas') } if (value.length === 0) { // If new value is 0, add 15000 gas to refund counter. runState.interpreter.refundGas( - common.param('gasPrices', 'netSstoreClearRefund'), - 'EIP-1283 -> netSstoreClearRefund' + common.param('netSstoreClearRefundGas'), + 'EIP-1283 -> netSstoreClearRefund', ) } // Otherwise, 5000 gas is deducted. - return common.param('gasPrices', 'netSstoreCleanGas') + return common.param('netSstoreCleanGas') } // If original value does not equal current value (this storage slot is dirty), 200 gas is deducted. Apply both of the following clauses. if (originalStorage.length !== 0) { @@ -46,14 +46,14 @@ export function updateSstoreGasEIP1283( if (currentStorage.length === 0) { // If current value is 0 (also means that new value is not 0), remove 15000 gas from refund counter. We can prove that refund counter will never go below 0. runState.interpreter.subRefund( - common.param('gasPrices', 'netSstoreClearRefund'), - 'EIP-1283 -> netSstoreClearRefund' + common.param('netSstoreClearRefundGas'), + 'EIP-1283 -> netSstoreClearRefund', ) } else if (value.length === 0) { // If new value is 0 (also means that current value is not 0), add 15000 gas to refund counter. runState.interpreter.refundGas( - common.param('gasPrices', 'netSstoreClearRefund'), - 'EIP-1283 -> netSstoreClearRefund' + common.param('netSstoreClearRefundGas'), + 'EIP-1283 -> netSstoreClearRefund', ) } } @@ -62,16 +62,16 @@ export function updateSstoreGasEIP1283( if (originalStorage.length === 0) { // If original value is 0, add 19800 gas to refund counter. runState.interpreter.refundGas( - common.param('gasPrices', 'netSstoreResetClearRefund'), - 'EIP-1283 -> netSstoreResetClearRefund' + common.param('netSstoreResetClearRefundGas'), + 'EIP-1283 -> netSstoreResetClearRefund', ) } else { // Otherwise, add 4800 gas to refund counter. 
runState.interpreter.refundGas( - common.param('gasPrices', 'netSstoreResetRefund'), - 'EIP-1283 -> netSstoreResetRefund' + common.param('netSstoreResetRefundGas'), + 'EIP-1283 -> netSstoreResetRefund', ) } } - return common.param('gasPrices', 'netSstoreDirtyGas') + return common.param('netSstoreDirtyGas') } diff --git a/packages/evm/src/opcodes/EIP2200.ts b/packages/evm/src/opcodes/EIP2200.ts index f1c4656740..5f08305d09 100644 --- a/packages/evm/src/opcodes/EIP2200.ts +++ b/packages/evm/src/opcodes/EIP2200.ts @@ -23,65 +23,65 @@ export function updateSstoreGasEIP2200( originalStorage: Uint8Array, value: Uint8Array, key: Uint8Array, - common: Common + common: Common, ) { // Fail if not enough gas is left - if (runState.interpreter.getGasLeft() <= common.param('gasPrices', 'sstoreSentryGasEIP2200')) { + if (runState.interpreter.getGasLeft() <= common.param('sstoreSentryEIP2200Gas')) { trap(ERROR.OUT_OF_GAS) } // Noop if (equalsBytes(currentStorage, value)) { - const sstoreNoopCost = common.param('gasPrices', 'sstoreNoopGasEIP2200') + const sstoreNoopCost = common.param('sstoreNoopEIP2200Gas') return adjustSstoreGasEIP2929(runState, key, sstoreNoopCost, 'noop', common) } if (equalsBytes(originalStorage, currentStorage)) { // Create slot if (originalStorage.length === 0) { - return common.param('gasPrices', 'sstoreInitGasEIP2200') + return common.param('sstoreInitEIP2200Gas') } // Delete slot if (value.length === 0) { runState.interpreter.refundGas( - common.param('gasPrices', 'sstoreClearRefundEIP2200'), - 'EIP-2200 -> sstoreClearRefundEIP2200' + common.param('sstoreClearRefundEIP2200Gas'), + 'EIP-2200 -> sstoreClearRefundEIP2200', ) } // Write existing slot - return common.param('gasPrices', 'sstoreCleanGasEIP2200') + return common.param('sstoreCleanEIP2200Gas') } if (originalStorage.length > 0) { if (currentStorage.length === 0) { // Recreate slot runState.interpreter.subRefund( - common.param('gasPrices', 'sstoreClearRefundEIP2200'), - 'EIP-2200 -> sstoreClearRefundEIP2200' + common.param('sstoreClearRefundEIP2200Gas'), + 'EIP-2200 -> sstoreClearRefundEIP2200', ) } else if (value.length === 0) { // Delete slot runState.interpreter.refundGas( - common.param('gasPrices', 'sstoreClearRefundEIP2200'), - 'EIP-2200 -> sstoreClearRefundEIP2200' + common.param('sstoreClearRefundEIP2200Gas'), + 'EIP-2200 -> sstoreClearRefundEIP2200', ) } } if (equalsBytes(originalStorage, value)) { if (originalStorage.length === 0) { // Reset to original non-existent slot - const sstoreInitRefund = common.param('gasPrices', 'sstoreInitRefundEIP2200') + const sstoreInitRefund = common.param('sstoreInitRefundEIP2200Gas') runState.interpreter.refundGas( adjustSstoreGasEIP2929(runState, key, sstoreInitRefund, 'initRefund', common), - 'EIP-2200 -> initRefund' + 'EIP-2200 -> initRefund', ) } else { // Reset to original existing slot - const sstoreCleanRefund = common.param('gasPrices', 'sstoreCleanRefundEIP2200') + const sstoreCleanRefund = common.param('sstoreCleanRefundEIP2200Gas') runState.interpreter.refundGas( BigInt(adjustSstoreGasEIP2929(runState, key, sstoreCleanRefund, 'cleanRefund', common)), - 'EIP-2200 -> cleanRefund' + 'EIP-2200 -> cleanRefund', ) } } // Dirty update - return common.param('gasPrices', 'sstoreDirtyGasEIP2200') + return common.param('sstoreDirtyEIP2200Gas') } diff --git a/packages/evm/src/opcodes/EIP2929.ts b/packages/evm/src/opcodes/EIP2929.ts index f249c1e7dd..a273d96baa 100644 --- a/packages/evm/src/opcodes/EIP2929.ts +++ b/packages/evm/src/opcodes/EIP2929.ts @@ -18,7 +18,7 @@ export 
function accessAddressEIP2929( address: Uint8Array, common: Common, chargeGas = true, - isSelfdestructOrAuthcall = false + isSelfdestruct = false, ): bigint { if (!common.isActivatedEIP(2929)) return BIGINT_0 @@ -30,11 +30,11 @@ export function accessAddressEIP2929( // selfdestruct beneficiary address reads are charged an *additional* cold access // if verkle not activated if (chargeGas && !common.isActivatedEIP(6800)) { - return common.param('gasPrices', 'coldaccountaccess') + return common.param('coldaccountaccessGas') } // Warm: (selfdestruct beneficiary address reads are not charged when warm) - } else if (chargeGas && !isSelfdestructOrAuthcall) { - return common.param('gasPrices', 'warmstorageread') + } else if (chargeGas && !isSelfdestruct) { + return common.param('warmstoragereadGas') } return BIGINT_0 } @@ -52,7 +52,7 @@ export function accessStorageEIP2929( key: Uint8Array, isSstore: boolean, common: Common, - chargeGas = true + chargeGas = true, ): bigint { if (!common.isActivatedEIP(2929)) return BIGINT_0 @@ -63,10 +63,10 @@ export function accessStorageEIP2929( if (slotIsCold) { runState.interpreter.journal.addWarmedStorage(address, key) if (chargeGas && !common.isActivatedEIP(6800)) { - return common.param('gasPrices', 'coldsload') + return common.param('coldsloadGas') } } else if (chargeGas && (!isSstore || common.isActivatedEIP(6800))) { - return common.param('gasPrices', 'warmstorageread') + return common.param('warmstoragereadGas') } return BIGINT_0 } @@ -86,22 +86,22 @@ export function adjustSstoreGasEIP2929( key: Uint8Array, defaultCost: bigint, costName: string, - common: Common + common: Common, ): bigint { if (!common.isActivatedEIP(2929)) return defaultCost const address = runState.interpreter.getAddress().bytes - const warmRead = common.param('gasPrices', 'warmstorageread') - const coldSload = common.param('gasPrices', 'coldsload') + const warmRead = common.param('warmstoragereadGas') + const coldSload = common.param('coldsloadGas') if (runState.interpreter.journal.isWarmedStorage(address, key)) { switch (costName) { case 'noop': return warmRead case 'initRefund': - return common.param('gasPrices', 'sstoreInitGasEIP2200') - warmRead + return common.param('sstoreInitEIP2200Gas') - warmRead case 'cleanRefund': - return common.param('gasPrices', 'sstoreReset') - coldSload - warmRead + return common.param('sstoreResetGas') - coldSload - warmRead } } diff --git a/packages/evm/src/opcodes/codes.ts b/packages/evm/src/opcodes/codes.ts index 5b24d27c28..f782c54560 100644 --- a/packages/evm/src/opcodes/codes.ts +++ b/packages/evm/src/opcodes/codes.ts @@ -267,6 +267,14 @@ const hardforkOpcodes: { hardfork: Hardfork; opcodes: OpcodeEntry }[] = [ ] const eipOpcodes: { eip: number; opcodes: OpcodeEntry }[] = [ + { + eip: 663, + opcodes: { + 0xe6: { name: 'DUPN', isAsync: false, dynamicGas: false }, + 0xe7: { name: 'SWAPN', isAsync: false, dynamicGas: false }, + 0xe8: { name: 'EXCHANGE', isAsync: false, dynamicGas: false }, + }, + }, { eip: 1153, opcodes: { @@ -287,10 +295,18 @@ const eipOpcodes: { eip: number; opcodes: OpcodeEntry }[] = [ }, }, { - eip: 3074, + eip: 4200, + opcodes: { + 0xe0: { name: 'RJUMP', isAsync: false, dynamicGas: false }, + 0xe1: { name: 'RJUMPI', isAsync: false, dynamicGas: false }, + 0xe2: { name: 'RJUMPV', isAsync: false, dynamicGas: false }, + }, + }, + { + eip: 4750, opcodes: { - 0xf6: { name: 'AUTH', isAsync: true, dynamicGas: true }, - 0xf7: { name: 'AUTHCALL', isAsync: true, dynamicGas: true }, + 0xe3: { name: 'CALLF', isAsync: false, dynamicGas: 
false }, + 0xe4: { name: 'RETF', isAsync: false, dynamicGas: false }, }, }, { @@ -305,12 +321,43 @@ const eipOpcodes: { eip: number; opcodes: OpcodeEntry }[] = [ 0x5e: { name: 'MCOPY', isAsync: false, dynamicGas: true }, }, }, + { + eip: 6206, + opcodes: { + 0xe5: { name: 'JUMPF', isAsync: false, dynamicGas: false }, + }, + }, + { + eip: 7069, + opcodes: { + 0xf7: { name: 'RETURNDATALOAD', isAsync: false, dynamicGas: false }, + 0xf8: { name: 'EXTCALL', isAsync: true, dynamicGas: true }, + 0xf9: { name: 'EXTDELEGATECALL', isAsync: true, dynamicGas: true }, + 0xfb: { name: 'EXTSTATICCALL', isAsync: true, dynamicGas: true }, + }, + }, + { + eip: 7480, + opcodes: { + 0xd0: { name: 'DATALOAD', isAsync: false, dynamicGas: false }, + 0xd1: { name: 'DATALOADN', isAsync: false, dynamicGas: false }, + 0xd2: { name: 'DATASIZE', isAsync: false, dynamicGas: false }, + 0xd3: { name: 'DATACOPY', isAsync: false, dynamicGas: true }, + }, + }, { eip: 7516, opcodes: { 0x4a: { name: 'BLOBBASEFEE', isAsync: false, dynamicGas: false }, }, }, + { + eip: 7620, + opcodes: { + 0xec: { name: 'EOFCREATE', isAsync: true, dynamicGas: true }, + 0xee: { name: 'RETURNCONTRACT', isAsync: true, dynamicGas: true }, + }, + }, ] /** @@ -330,7 +377,7 @@ function createOpcodes(opcodes: OpcodeEntryFee): OpcodeList { code, fullName: getFullname(code, value.name), ...value, - }) + }), ) } return result @@ -375,7 +422,7 @@ export function getOpcodesForHF(common: Common, customOpcodes?: CustomOpcode[]): } for (const key in opcodeBuilder) { - const baseFee = Number(common.param('gasPrices', opcodeBuilder[key].name.toLowerCase())) + const baseFee = Number(common.param(`${opcodeBuilder[key].name.toLowerCase()}Gas`)) // explicitly verify that we have defined a base fee if (baseFee === undefined) { throw new Error(`base fee not defined for: ${opcodeBuilder[key].name}`) @@ -394,7 +441,7 @@ export function getOpcodesForHF(common: Common, customOpcodes?: CustomOpcode[]): // Sanity checks if (code.opcodeName === undefined || code.baseFee === undefined) { throw new Error( - `Custom opcode ${code.opcode} does not have the required values: opcodeName and baseFee are required` + `Custom opcode ${code.opcode} does not have the required values: opcodeName and baseFee are required`, ) } const entry = { diff --git a/packages/evm/src/opcodes/functions.ts b/packages/evm/src/opcodes/functions.ts index 7685432e24..65cda002e1 100644 --- a/packages/evm/src/opcodes/functions.ts +++ b/packages/evm/src/opcodes/functions.ts @@ -1,5 +1,4 @@ import { - Account, Address, BIGINT_0, BIGINT_1, @@ -8,7 +7,6 @@ import { BIGINT_224, BIGINT_255, BIGINT_256, - BIGINT_27, BIGINT_2EXP160, BIGINT_2EXP224, BIGINT_2EXP96, @@ -18,26 +16,25 @@ import { BIGINT_8, BIGINT_96, MAX_INTEGER_BIGINT, - SECP256K1_ORDER_DIV_2, TWO_POW256, bigIntToAddressBytes, bigIntToBytes, bytesToBigInt, bytesToHex, + bytesToInt, concatBytes, - ecrecover, getVerkleTreeIndexesForStorageSlot, - hexToBytes, - publicToAddress, setLengthLeft, - setLengthRight, } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' +import { EOFContainer, EOFContainerMode } from '../eof/container.js' +import { EOFError } from '../eof/errors.js' +import { EOFBYTES, EOFHASH, isEOF } from '../eof/util.js' import { ERROR } from '../exceptions.js' import { - addresstoBytes, + createAddressFromStackBigInt, describeLocation, exponentiation, fromTwos, @@ -52,8 +49,6 @@ import { import type { RunState } from '../interpreter.js' import type { Common } from '@ethereumjs/common' -const EIP3074MAGIC = 
hexToBytes('0x04') - export interface SyncOpHandler { (runState: RunState, common: Common): void } @@ -424,7 +419,7 @@ export const handlers: Map = new Map([ 0x31, async function (runState) { const addressBigInt = runState.stack.pop() - const address = new Address(addresstoBytes(addressBigInt)) + const address = createAddressFromStackBigInt(addressBigInt) const balance = await runState.interpreter.getExternalBalance(address) runState.stack.push(balance) }, @@ -518,21 +513,19 @@ export const handlers: Map = new Map([ 0x3b, async function (runState) { const addressBigInt = runState.stack.pop() - - let size - if (typeof runState.stateManager.getContractCodeSize === 'function') { - size = BigInt( - await runState.stateManager.getContractCodeSize( - new Address(addresstoBytes(addressBigInt)) - ) - ) - } else { - size = BigInt( - (await runState.stateManager.getContractCode(new Address(addresstoBytes(addressBigInt)))) - .length - ) + const address = createAddressFromStackBigInt(addressBigInt) + // EOF check + const code = await runState.stateManager.getCode(address) + if (isEOF(code)) { + // In legacy code, the target code is treated as to be "EOFBYTES" code + runState.stack.push(BigInt(EOFBYTES.length)) + return } + const size = BigInt( + await runState.stateManager.getCodeSize(createAddressFromStackBigInt(addressBigInt)), + ) + runState.stack.push(size) }, ], @@ -543,9 +536,12 @@ export const handlers: Map = new Map([ const [addressBigInt, memOffset, codeOffset, dataLength] = runState.stack.popN(4) if (dataLength !== BIGINT_0) { - const code = await runState.stateManager.getContractCode( - new Address(addresstoBytes(addressBigInt)) - ) + let code = await runState.stateManager.getCode(createAddressFromStackBigInt(addressBigInt)) + + if (isEOF(code)) { + // In legacy code, the target code is treated as to be "EOFBYTES" code + code = EOFBYTES + } const data = getDataSlice(code, codeOffset, dataLength) const memOffsetNum = Number(memOffset) @@ -559,7 +555,17 @@ export const handlers: Map = new Map([ 0x3f, async function (runState) { const addressBigInt = runState.stack.pop() - const address = new Address(addresstoBytes(addressBigInt)) + const address = createAddressFromStackBigInt(addressBigInt) + + // EOF check + const code = await runState.stateManager.getCode(address) + if (isEOF(code)) { + // In legacy code, the target code is treated as to be "EOFBYTES" code + // Therefore, push the hash of EOFBYTES to the stack + runState.stack.push(bytesToBigInt(EOFHASH)) + return + } + const account = await runState.stateManager.getAccount(address) if (!account || account.isEmpty()) { runState.stack.push(BIGINT_0) @@ -586,7 +592,7 @@ export const handlers: Map = new Map([ const data = getDataSlice( runState.interpreter.getReturnData(), returnDataOffset, - dataLength + dataLength, ) const memOffsetNum = Number(memOffset) const lengthNum = Number(dataLength) @@ -623,9 +629,9 @@ export const handlers: Map = new Map([ } const historyAddress = new Address( - bigIntToAddressBytes(common.param('vm', 'historyStorageAddress')) + bigIntToAddressBytes(common.param('historyStorageAddress')), ) - const historyServeWindow = common.param('vm', 'historyServeWindow') + const historyServeWindow = common.param('historyServeWindow') const key = setLengthLeft(bigIntToBytes(number % historyServeWindow), 32) if (common.isActivatedEIP(6800)) { @@ -634,11 +640,11 @@ export const handlers: Map = new Map([ const statelessGas = runState.env.accessWitness!.touchAddressOnReadAndComputeGas( historyAddress, treeIndex, - subIndex + 
subIndex, ) runState.interpreter.useGas(statelessGas, `BLOCKHASH`) } - const storage = await runState.stateManager.getContractStorage(historyAddress, key) + const storage = await runState.stateManager.getStorage(historyAddress, key) runState.stack.push(bytesToBigInt(storage)) } else { @@ -930,7 +936,7 @@ export const handlers: Map = new Map([ const statelessGas = runState.env.accessWitness!.touchCodeChunksRangeOnReadAndChargeGas( contract, startOffset, - endOffset + endOffset, ) runState.interpreter.useGas(statelessGas, `PUSH`) } @@ -940,7 +946,7 @@ export const handlers: Map = new Map([ runState.programCounter += numToPush } else { const loaded = bytesToBigInt( - runState.code.subarray(runState.programCounter, runState.programCounter + numToPush) + runState.code.subarray(runState.programCounter, runState.programCounter + numToPush), ) runState.programCounter += numToPush runState.stack.push(loaded) @@ -984,6 +990,346 @@ export const handlers: Map = new Map([ runState.interpreter.log(mem, topicsCount, topicsBuf) }, ], + // 0xd0: DATALOAD + [ + 0xd0, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const pos = runState.stack.pop() + if (pos > runState.env.eof!.container.body.dataSection.length) { + runState.stack.push(BIGINT_0) + return + } + + const i = Number(pos) + let loaded = runState.env.eof!.container.body.dataSection.subarray(i, i + 32) + loaded = loaded.length ? loaded : Uint8Array.from([0]) + let r = bytesToBigInt(loaded) + // Pad the loaded length with 0 bytes in case it is smaller than 32 + if (loaded.length < 32) { + r = r << (BIGINT_8 * BigInt(32 - loaded.length)) + } + runState.stack.push(r) + }, + ], + // 0xd1: DATALOADN + [ + 0xd1, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const toLoad = Number( + bytesToBigInt(runState.code.subarray(runState.programCounter, runState.programCounter + 2)), + ) + const data = bytesToBigInt( + runState.env.eof!.container.body.dataSection.subarray(toLoad, toLoad + 32), + ) + runState.stack.push(data) + runState.programCounter += 2 + }, + ], + // 0xd2: DATASIZE + [ + 0xd2, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + runState.stack.push(BigInt(runState.env.eof!.container.body.dataSection.length)) + }, + ], + // 0xd3: DATACOPY + [ + 0xd3, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const [memOffset, offset, size] = runState.stack.popN(3) + if (size !== BIGINT_0) { + const data = getDataSlice(runState.env.eof!.container.body.dataSection, offset, size) + const memOffsetNum = Number(memOffset) + const dataLengthNum = Number(size) + runState.memory.write(memOffsetNum, dataLengthNum, data) + } + }, + ], + // 0xe0: RJUMP + [ + 0xe0, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + const code = runState.env.code + const rjumpDest = new DataView(code.buffer).getInt16(runState.programCounter) + runState.programCounter += 2 + rjumpDest + } + }, + ], + // 0xe1: RJUMPI + [ + 0xe1, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + 
trap(ERROR.INVALID_OPCODE) + } else { + const cond = runState.stack.pop() + // Move PC to the PC post instruction + if (cond > 0) { + const code = runState.env.code + const rjumpDest = new DataView(code.buffer).getInt16(runState.programCounter) + runState.programCounter += rjumpDest + } + // In all cases, increment PC with 2 (also in the case if `cond` is `0`) + runState.programCounter += 2 + } + }, + ], + // 0xe2: RJUMPV + [ + 0xe2, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + const code = runState.env.code + const jumptableEntries = code[runState.programCounter] + // Note: if the size of the immediate is `0`, this thus means that the actual size is `2` + // This allows for 256 entries in the table instead of 255 + const jumptableSize = (jumptableEntries + 1) * 2 + // Move PC to start of the jump table + runState.programCounter += 1 + const jumptableCase = runState.stack.pop() + if (jumptableCase <= jumptableEntries) { + const rjumpDest = new DataView(code.buffer).getInt16( + runState.programCounter + Number(jumptableCase) * 2, + ) + runState.programCounter += jumptableSize + rjumpDest + } else { + runState.programCounter += jumptableSize + } + } + }, + ], + // 0xe3: CALLF + [ + 0xe3, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const sectionTarget = bytesToInt( + runState.code.slice(runState.programCounter, runState.programCounter + 2), + ) + const stackItems = runState.stack.length + const typeSection = runState.env.eof!.container.body.typeSections[sectionTarget] + if (1024 < stackItems + typeSection?.inputs - typeSection?.maxStackHeight) { + trap(EOFError.StackOverflow) + } + if (runState.env.eof!.eofRunState.returnStack.length >= 1024) { + trap(EOFError.ReturnStackOverflow) + } + runState.env.eof?.eofRunState.returnStack.push(runState.programCounter + 2) + + // Find out the opcode we should jump into + runState.programCounter = runState.env.eof!.container.header.getCodePosition(sectionTarget) + }, + ], + // 0xe4: RETF + [ + 0xe4, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const newPc = runState.env.eof!.eofRunState.returnStack.pop() + if (newPc === undefined) { + // This should NEVER happen since it is validated that functions either terminate (the call frame) or return + trap(EOFError.RetfNoReturn) + } + runState.programCounter = newPc! 
+ }, + ], + // 0xe5: JUMPF + [ + 0xe5, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + // NOTE: (and also TODO) this code is exactly the same as CALLF, except pushing to the return stack is now skipped + // (and also the return stack overflow check) + // It is commented out here + const sectionTarget = bytesToInt( + runState.code.slice(runState.programCounter, runState.programCounter + 2), + ) + const stackItems = runState.stack.length + const typeSection = runState.env.eof!.container.body.typeSections[sectionTarget] + if (1024 < stackItems + typeSection?.inputs - typeSection?.maxStackHeight) { + trap(EOFError.StackOverflow) + } + /*if (runState.env.eof!.eofRunState.returnStack.length >= 1024) { + trap(EOFError.ReturnStackOverflow) + } + runState.env.eof?.eofRunState.returnStack.push(runState.programCounter + 2)*/ + + // Find out the opcode we should jump into + runState.programCounter = runState.env.eof!.container.header.getCodePosition(sectionTarget) + }, + ], + // 0xe6: DUPN + [ + 0xe6, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const toDup = + Number( + bytesToBigInt( + runState.code.subarray(runState.programCounter, runState.programCounter + 1), + ), + ) + 1 + runState.stack.dup(toDup) + runState.programCounter++ + }, + ], + // 0xe7: SWAPN + [ + 0xe7, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const toSwap = + Number( + bytesToBigInt( + runState.code.subarray(runState.programCounter, runState.programCounter + 1), + ), + ) + 1 + runState.stack.swap(toSwap) + runState.programCounter++ + }, + ], + // 0xe8: EXCHANGE + [ + 0xe8, + function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } + const toExchange = Number( + bytesToBigInt(runState.code.subarray(runState.programCounter, runState.programCounter + 1)), + ) + const n = (toExchange >> 4) + 1 + const m = (toExchange & 0x0f) + 1 + runState.stack.exchange(n, n + m) + runState.programCounter++ + }, + ], + // 0xec: EOFCREATE + [ + 0xec, + async function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + // Read container index + const containerIndex = runState.env.code[runState.programCounter] + const containerCode = runState.env.eof!.container.body.containerSections[containerIndex] + + // Pop stack values + const [value, salt, inputOffset, inputSize] = runState.stack.popN(4) + + const gasLimit = runState.messageGasLimit! 
+ runState.messageGasLimit = undefined + + let data = new Uint8Array(0) + if (inputSize !== BIGINT_0) { + data = runState.memory.read(Number(inputOffset), Number(inputSize), true) + } + + runState.programCounter++ // Jump over the immediate byte + + const ret = await runState.interpreter.eofcreate( + gasLimit, + value, + containerCode, + setLengthLeft(bigIntToBytes(salt), 32), + data, + ) + runState.stack.push(ret) + } + }, + ], + // 0xee: RETURNCONTRACT + [ + 0xee, + async function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + // Read container index + const containerIndex = runState.env.code[runState.programCounter] + const containerCode = runState.env.eof!.container.body.containerSections[containerIndex] + + // Read deployContainer as EOFCreate (initcode) container + const deployContainer = new EOFContainer(containerCode, EOFContainerMode.Initmode) + + // Pop stack values + const [auxDataOffset, auxDataSize] = runState.stack.popN(2) + + let auxData = new Uint8Array(0) + if (auxDataSize !== BIGINT_0) { + auxData = runState.memory.read(Number(auxDataOffset), Number(auxDataSize)) + } + + const originalDataSize = deployContainer.header.dataSize + const preDeployDataSectionSize = deployContainer.body.dataSection.length + const actualSectionSize = preDeployDataSectionSize + Number(auxDataSize) + + if (actualSectionSize < originalDataSize) { + trap(EOFError.InvalidReturnContractDataSize) + } + + if (actualSectionSize > 0xffff) { + // Data section size is now larger than the max data section size + // Temp: trap OOG? + trap(ERROR.OUT_OF_GAS) + } + + const newSize = setLengthLeft(bigIntToBytes(BigInt(actualSectionSize)), 2) + + // Write the bytes to the containerCode + const dataSizePtr = deployContainer.header.dataSizePtr + containerCode[dataSizePtr] = newSize[0] + containerCode[dataSizePtr + 1] = newSize[1] + + const returnContainer = concatBytes(containerCode, auxData) + + runState.interpreter.finish(returnContainer) + } + }, + ], // '0xf0' range - closures // 0xf0: CREATE [ @@ -993,7 +1339,7 @@ export const handlers: Map = new Map([ if ( common.isActivatedEIP(3860) && - length > Number(common.param('vm', 'maxInitCodeSize')) && + length > Number(common.param('maxInitCodeSize')) && !runState.interpreter._evm.allowUnlimitedInitCodeSize ) { trap(ERROR.INITCODE_SIZE_VIOLATION) @@ -1007,6 +1353,12 @@ export const handlers: Map = new Map([ data = runState.memory.read(Number(offset), Number(length), true) } + if (isEOF(data)) { + // Legacy cannot deploy EOF code + runState.stack.push(BIGINT_0) + return + } + const ret = await runState.interpreter.create(gasLimit, value, data) runState.stack.push(ret) }, @@ -1023,7 +1375,7 @@ export const handlers: Map = new Map([ if ( common.isActivatedEIP(3860) && - length > Number(common.param('vm', 'maxInitCodeSize')) && + length > Number(common.param('maxInitCodeSize')) && !runState.interpreter._evm.allowUnlimitedInitCodeSize ) { trap(ERROR.INITCODE_SIZE_VIOLATION) @@ -1037,11 +1389,17 @@ export const handlers: Map = new Map([ data = runState.memory.read(Number(offset), Number(length), true) } + if (isEOF(data)) { + // Legacy cannot deploy EOF code + runState.stack.push(BIGINT_0) + return + } + const ret = await runState.interpreter.create2( gasLimit, value, data, - setLengthLeft(bigIntToBytes(salt), 32) + setLengthLeft(bigIntToBytes(salt), 32), ) runState.stack.push(ret) }, @@ -1052,7 +1410,7 @@ export const handlers: Map = new Map([ async function (runState: 
RunState, common: Common) { const [_currentGasLimit, toAddr, value, inOffset, inLength, outOffset, outLength] = runState.stack.popN(7) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) let data = new Uint8Array(0) if (inLength !== BIGINT_0) { @@ -1061,7 +1419,7 @@ export const handlers: Map = new Map([ let gasLimit = runState.messageGasLimit! if (value !== BIGINT_0) { - const callStipend = common.param('gasPrices', 'callStipend') + const callStipend = common.param('callStipendGas') runState.interpreter.addStipend(callStipend) gasLimit += callStipend } @@ -1080,11 +1438,11 @@ export const handlers: Map = new Map([ async function (runState: RunState, common: Common) { const [_currentGasLimit, toAddr, value, inOffset, inLength, outOffset, outLength] = runState.stack.popN(7) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) let gasLimit = runState.messageGasLimit! if (value !== BIGINT_0) { - const callStipend = common.param('gasPrices', 'callStipend') + const callStipend = common.param('callStipendGas') runState.interpreter.addStipend(callStipend) gasLimit += callStipend } @@ -1109,7 +1467,7 @@ export const handlers: Map = new Map([ const value = runState.interpreter.getCallValue() const [_currentGasLimit, toAddr, inOffset, inLength, outOffset, outLength] = runState.stack.popN(6) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) let data = new Uint8Array(0) if (inLength !== BIGINT_0) { @@ -1125,106 +1483,79 @@ export const handlers: Map = new Map([ runState.stack.push(ret) }, ], - // 0xf6: AUTH + // 0xf8: EXTCALL [ - 0xf6, - async function (runState) { - // eslint-disable-next-line prefer-const - let [authority, memOffset, memLength] = runState.stack.popN(3) + 0xf8, + async function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + const [toAddr, inOffset, inLength, value] = runState.stack.popN(4) - if (memLength > BigInt(97)) { - memLength = BigInt(97) - } + const gasLimit = runState.messageGasLimit! + runState.messageGasLimit = undefined - let mem = runState.memory.read(Number(memOffset), Number(memLength)) - if (mem.length < 97) { - mem = setLengthRight(mem, 97) - } + if (gasLimit === -BIGINT_1) { + // Special case, abort doing any logic (this logic is defined in `gas.ts`), and put `1` on stack per spec + runState.stack.push(BIGINT_1) + runState.returnBytes = new Uint8Array(0) + return + } - const yParity = BigInt(mem[0]) - const r = mem.subarray(1, 33) - const s = mem.subarray(33, 65) - const commit = mem.subarray(65, 97) + const toAddress = createAddressFromStackBigInt(toAddr) - if (bytesToBigInt(s) > SECP256K1_ORDER_DIV_2) { - runState.stack.push(BIGINT_0) - runState.auth = undefined - return - } - if (yParity > BIGINT_1) { - runState.stack.push(BIGINT_0) - runState.auth = undefined - return - } + let data = new Uint8Array(0) + if (inLength !== BIGINT_0) { + data = runState.memory.read(Number(inOffset), Number(inLength), true) + } - // we don't want strick check here on authority being in address range just last 20 bytes - const expectedAddress = new Address(bigIntToAddressBytes(authority, false)) - const account = (await runState.stateManager.getAccount(expectedAddress)) ?? 
new Account() + const ret = await runState.interpreter.call(gasLimit, toAddress, value, data) + // Write return data to memory - if (account.isContract()) { - // EXTCODESIZE > 0 - runState.stack.push(BIGINT_0) - runState.auth = undefined - return + runState.stack.push(ret) } + }, + ], + // 0xf9: EXTDELEGATECALL + [ + 0xf9, + async function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + const value = runState.interpreter.getCallValue() + const [toAddr, inOffset, inLength] = runState.stack.popN(3) - const accountNonce = account.nonce - - const invokedAddress = setLengthLeft(runState.interpreter._env.address.bytes, 32) - const chainId = setLengthLeft(bigIntToBytes(runState.interpreter.getChainId()), 32) - const nonce = setLengthLeft(bigIntToBytes(accountNonce), 32) - const message = concatBytes(EIP3074MAGIC, chainId, nonce, invokedAddress, commit) - - const keccakFunction = runState.interpreter._evm.common.customCrypto.keccak256 ?? keccak256 - const msgHash = keccakFunction(message) + const gasLimit = runState.messageGasLimit! + runState.messageGasLimit = undefined - let recover - const ecrecoverFunction = runState.interpreter._evm.common.customCrypto.ecrecover ?? ecrecover - try { - recover = ecrecoverFunction(msgHash, yParity + BIGINT_27, r, s) - } catch (e) { - // Malformed signature, push 0 on stack, clear auth variable - runState.stack.push(BIGINT_0) - runState.auth = undefined - return - } + if (gasLimit === -BIGINT_1) { + // Special case, abort doing any logic (this logic is defined in `gas.ts`), and put `1` on stack per spec + runState.stack.push(BIGINT_1) + runState.returnBytes = new Uint8Array(0) + return + } - const addressBuffer = publicToAddress(recover) - const address = new Address(addressBuffer) - runState.auth = address + const toAddress = createAddressFromStackBigInt(toAddr) - if (!expectedAddress.equals(address)) { - // expected address does not equal the recovered address, clear auth variable - runState.stack.push(BIGINT_0) - runState.auth = undefined - return - } + const code = await runState.stateManager.getCode(toAddress) - runState.auth = address - runState.stack.push(BIGINT_1) - }, - ], - // 0xf7: AUTHCALL - [ - 0xf7, - async function (runState) { - const [_currentGasLimit, addr, value, argsOffset, argsLength, retOffset, retLength] = - runState.stack.popN(7) - - const toAddress = new Address(addresstoBytes(addr)) + if (!isEOF(code)) { + // EXTDELEGATECALL cannot call legacy contracts + runState.stack.push(BIGINT_1) + return + } - const gasLimit = runState.messageGasLimit! 
- runState.messageGasLimit = undefined + let data = new Uint8Array(0) + if (inLength !== BIGINT_0) { + data = runState.memory.read(Number(inOffset), Number(inLength), true) + } - let data = new Uint8Array(0) - if (argsLength !== BIGINT_0) { - data = runState.memory.read(Number(argsOffset), Number(argsLength)) + const ret = await runState.interpreter.callDelegate(gasLimit, toAddress, value, data) + runState.stack.push(ret) } - - const ret = await runState.interpreter.authcall(gasLimit, toAddress, value, data) - // Write return data to memory - writeCallOutput(runState, retOffset, retLength) - runState.stack.push(ret) }, ], // 0xfa: STATICCALL @@ -1234,7 +1565,7 @@ export const handlers: Map = new Map([ const value = BIGINT_0 const [_currentGasLimit, toAddr, inOffset, inLength, outOffset, outLength] = runState.stack.popN(6) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) const gasLimit = runState.messageGasLimit! runState.messageGasLimit = undefined @@ -1250,6 +1581,39 @@ export const handlers: Map = new Map([ runState.stack.push(ret) }, ], + // 0xfb: EXTSTATICCALL + [ + 0xfb, + async function (runState, _common) { + if (runState.env.eof === undefined) { + // Opcode not available in legacy contracts + trap(ERROR.INVALID_OPCODE) + } else { + const value = BIGINT_0 + const [toAddr, inOffset, inLength] = runState.stack.popN(3) + + const gasLimit = runState.messageGasLimit! + runState.messageGasLimit = undefined + + if (gasLimit === -BIGINT_1) { + // Special case, abort doing any logic (this logic is defined in `gas.ts`), and put `1` on stack per spec + runState.stack.push(BIGINT_1) + runState.returnBytes = new Uint8Array(0) + return + } + + const toAddress = createAddressFromStackBigInt(toAddr) + + let data = new Uint8Array(0) + if (inLength !== BIGINT_0) { + data = runState.memory.read(Number(inOffset), Number(inLength), true) + } + + const ret = await runState.interpreter.callStatic(gasLimit, toAddress, value, data) + runState.stack.push(ret) + } + }, + ], // 0xf3: RETURN [ 0xf3, @@ -1280,7 +1644,7 @@ export const handlers: Map = new Map([ 0xff, async function (runState) { const selfdestructToAddressBigInt = runState.stack.pop() - const selfdestructToAddress = new Address(addresstoBytes(selfdestructToAddressBigInt)) + const selfdestructToAddress = createAddressFromStackBigInt(selfdestructToAddressBigInt) return runState.interpreter.selfDestruct(selfdestructToAddress) }, ], diff --git a/packages/evm/src/opcodes/gas.ts b/packages/evm/src/opcodes/gas.ts index ade64847b8..c06583f663 100644 --- a/packages/evm/src/opcodes/gas.ts +++ b/packages/evm/src/opcodes/gas.ts @@ -1,12 +1,11 @@ import { Hardfork } from '@ethereumjs/common' import { - Account, - Address, BIGINT_0, BIGINT_1, BIGINT_3, BIGINT_31, BIGINT_32, + BIGINT_64, VERKLE_BASIC_DATA_LEAF_KEY, VERKLE_CODE_HASH_LEAF_KEY, bigIntToBytes, @@ -14,13 +13,14 @@ import { setLengthLeft, } from '@ethereumjs/util' +import { EOFError } from '../eof/errors.js' import { ERROR } from '../exceptions.js' import { updateSstoreGasEIP1283 } from './EIP1283.js' import { updateSstoreGasEIP2200 } from './EIP2200.js' import { accessAddressEIP2929, accessStorageEIP2929 } from './EIP2929.js' import { - addresstoBytes, + createAddressFromStackBigInt, divCeil, maxCallGas, setLengthLeftStorage, @@ -32,6 +32,8 @@ import { import type { RunState } from '../interpreter.js' import type { Common } from '@ethereumjs/common' +const EXTCALL_TARGET_MAX = BigInt(2) ** BigInt(8 * 20) - BigInt(1) + /** * This file 
returns the dynamic parts of opcodes which have dynamic gas * These are not pure functions: some edit the size of the memory @@ -66,7 +68,7 @@ export const dynamicGasHandlers: Map 32) { trap(ERROR.OUT_OF_RANGE) } - const expPricePerByte = common.param('gasPrices', 'expByte') + const expPricePerByte = common.param('expByteGas') gas += BigInt(byteLength) * expPricePerByte return gas }, @@ -77,7 +79,7 @@ export const dynamicGasHandlers: Map { const [offset, length] = runState.stack.peek(2) gas += subMemUsage(runState, offset, length, common) - gas += common.param('gasPrices', 'keccak256Word') * divCeil(length, BIGINT_32) + gas += common.param('keccak256WordGas') * divCeil(length, BIGINT_32) return gas }, ], @@ -85,14 +87,13 @@ export const dynamicGasHandlers: Map { - const address = addresstoBytes(runState.stack.peek()[0]) + const address = createAddressFromStackBigInt(runState.stack.peek()[0]) let charge2929Gas = true if (common.isActivatedEIP(6800)) { - const balanceAddress = new Address(address) const coldAccessGas = runState.env.accessWitness!.touchAddressOnReadAndComputeGas( - balanceAddress, + address, 0, - VERKLE_BASIC_DATA_LEAF_KEY + VERKLE_BASIC_DATA_LEAF_KEY, ) gas += coldAccessGas @@ -100,7 +101,7 @@ export const dynamicGasHandlers: Map { - const addressBytes = addresstoBytes(runState.stack.peek()[0]) - const address = new Address(addressBytes) + const address = createAddressFromStackBigInt(runState.stack.peek()[0]) let charge2929Gas = true if ( @@ -163,7 +163,7 @@ export const dynamicGasHandlers: Map { const [addressBigInt, memOffset, _codeOffset, dataLength] = runState.stack.peek(4) - const addressBytes = addresstoBytes(addressBigInt) - const address = new Address(addressBytes) + const address = createAddressFromStackBigInt(addressBigInt) gas += subMemUsage(runState, memOffset, dataLength, common) @@ -197,7 +196,7 @@ export const dynamicGasHandlers: Map codeSize) { codeEnd = codeSize } @@ -222,7 +221,7 @@ export const dynamicGasHandlers: Map runState.interpreter.getReturnDataSize()) { - trap(ERROR.OUT_OF_GAS) + // For an EOF contract, the behavior is changed (see EIP 7069) + // RETURNDATACOPY in that case does not throw OOG when reading out-of-bounds + if (runState.env.eof === undefined) { + trap(ERROR.OUT_OF_GAS) + } } gas += subMemUsage(runState, memOffset, dataLength, common) if (dataLength !== BIGINT_0) { - gas += common.param('gasPrices', 'copy') * divCeil(dataLength, BIGINT_32) + gas += common.param('copyGas') * divCeil(dataLength, BIGINT_32) } return gas }, @@ -251,17 +254,15 @@ export const dynamicGasHandlers: Map { - const address = addresstoBytes(runState.stack.peek()[0]) + const address = createAddressFromStackBigInt(runState.stack.peek()[0]) let charge2929Gas = true if (common.isActivatedEIP(6800)) { - const codeAddress = new Address(address) - let coldAccessGas = BIGINT_0 coldAccessGas += runState.env.accessWitness!.touchAddressOnReadAndComputeGas( - codeAddress, + address, 0, - VERKLE_CODE_HASH_LEAF_KEY + VERKLE_CODE_HASH_LEAF_KEY, ) gas += coldAccessGas @@ -269,7 +270,7 @@ export const dynamicGasHandlers: Map { + // Note: TX_CREATE_COST is in the base fee (this is 32000 and same as CREATE / CREATE2) + + // Note: in `gas.ts` programCounter is not yet incremented (which it is in `functions.ts`) + // So have to manually add to programCounter here to get the right container index + + // Read container index + const containerIndex = runState.env.code[runState.programCounter + 1] + + // Pop stack values + const [_value, _salt, inputOffset, inputSize] = 
runState.stack.peek(4) + + //if (common.isActivatedEIP(2929)) { + // TODO: adding or not adding this makes test + // --test=tests/prague/eip7692_eof_v1/eip7620_eof_create/test_eofcreate.py::test_eofcreate_then_call[fork_CancunEIP7692-blockchain_test] + // still succeed. This only warms the current address?? This is also in CREATE/CREATE2 + // Can this be removed in both? + /*gas += accessAddressEIP2929( + runState, + runState.interpreter.getAddress().bytes, + common, + false + ) + }*/ + + // Expand memory + gas += subMemUsage(runState, inputOffset, inputSize, common) + + // Read container + const container = runState.env.eof!.container.body.containerSections[containerIndex] + + // Charge for hashing cost + gas += common.param('keccak256WordGas') * divCeil(BigInt(container.length), BIGINT_32) + + const gasLeft = runState.interpreter.getGasLeft() - gas + runState.messageGasLimit = maxCallGas(gasLeft, gasLeft, runState, common) + + return gas + }, + ], + /* RETURNCONTRACT */ + [ + 0xee, + async function (runState, gas, common): Promise { + // Pop stack values + const [auxDataOffset, auxDataSize] = runState.stack.peek(2) + + // Expand memory + gas += subMemUsage(runState, auxDataOffset, auxDataSize, common) + return gas }, ], @@ -450,12 +519,12 @@ export const dynamicGasHandlers: Map { const [currentGasLimit, toAddr, value, inOffset, inLength, outOffset, outLength] = runState.stack.peek(7) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) if (runState.interpreter.isStatic() && value !== BIGINT_0) { trap(ERROR.STATIC_STATE_CHANGE) @@ -492,7 +561,7 @@ export const dynamicGasHandlers: Map { const [currentGasLimit, toAddr, value, inOffset, inLength, outOffset, outLength] = runState.stack.peek(7) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) gas += subMemUsage(runState, inOffset, inLength, common) gas += subMemUsage(runState, outOffset, outLength, common) @@ -570,18 +639,23 @@ export const dynamicGasHandlers: Map { const [currentGasLimit, toAddr, inOffset, inLength, outOffset, outLength] = runState.stack.peek(6) - const toAddress = new Address(addresstoBytes(toAddr)) + const toAddress = createAddressFromStackBigInt(toAddr) gas += subMemUsage(runState, inOffset, inLength, common) gas += subMemUsage(runState, outOffset, outLength, common) @@ -626,14 +700,19 @@ export const dynamicGasHandlers: Map { - const [address, memOffset, memLength] = runState.stack.peek(3) - // Note: 2929 is always active if AUTH can be reached, - // since it needs London as minimum hardfork - gas += accessAddressEIP2929(runState, bigIntToBytes(address), common) - gas += subMemUsage(runState, memOffset, memLength, common) - return gas - }, - ], - [ - /* AUTHCALL */ - 0xf7, - async function (runState, gas, common): Promise { - if (runState.auth === undefined) { - trap(ERROR.AUTHCALL_UNSET) - } + // Charge WARM_STORAGE_READ_COST (100) -> done in accessAddressEIP2929 - const [currentGasLimit, addr, value, argsOffset, argsLength, retOffset, retLength] = - runState.stack.peek(7) + // Peek stack values + const [toAddr, inOffset, inLength, value] = runState.stack.peek(4) + + // If value is nonzero and in static mode, throw: + if (runState.interpreter.isStatic() && value !== BIGINT_0) { + trap(ERROR.STATIC_STATE_CHANGE) + } - const toAddress = new Address(addresstoBytes(addr)) + // If value > 0, charge CALL_VALUE_COST + if (value > BIGINT_0) { + gas += common.param('callValueTransferGas') + } - gas += 
common.param('gasPrices', 'warmstorageread') + // Check if the target address > 20 bytes + if (toAddr > EXTCALL_TARGET_MAX) { + trap(EOFError.InvalidExtcallTarget) + } - gas += accessAddressEIP2929(runState, toAddress.bytes, common, true, true) + // Charge for memory expansion + gas += subMemUsage(runState, inOffset, inLength, common) - gas += subMemUsage(runState, argsOffset, argsLength, common) - gas += subMemUsage(runState, retOffset, retLength, common) + const toAddress = createAddressFromStackBigInt(toAddr) + // Charge to make address warm (2600 gas) + // (in case if address is already warm, this charges the 100 gas) + gas += accessAddressEIP2929(runState, toAddress.bytes, common) + // Charge account creation cost if value is nonzero if (value > BIGINT_0) { - gas += common.param('gasPrices', 'authcallValueTransfer') const account = await runState.stateManager.getAccount(toAddress) - if (!account) { - gas += common.param('gasPrices', 'callNewAccount') + const deadAccount = account === undefined || account.isEmpty() + + if (deadAccount) { + gas += common.param('callNewAccountGas') } } - let gasLimit = maxCallGas( - runState.interpreter.getGasLeft() - gas, - runState.interpreter.getGasLeft() - gas, - runState, - common - ) - if (currentGasLimit !== BIGINT_0) { - if (currentGasLimit > gasLimit) { - trap(ERROR.OUT_OF_GAS) - } - gasLimit = currentGasLimit + const minRetainedGas = common.param('minRetainedGas') + const minCalleeGas = common.param('minCalleeGas') + + const currentGasAvailable = runState.interpreter.getGasLeft() - gas + const reducedGas = currentGasAvailable / BIGINT_64 + // Calculate the gas limit for the callee + // (this is the gas available for the next call frame) + let gasLimit: bigint + if (reducedGas < minRetainedGas) { + gasLimit = currentGasAvailable - minRetainedGas + } else { + gasLimit = currentGasAvailable - reducedGas + } + + if ( + runState.env.depth >= Number(common.param('stackLimit')) || + runState.env.contract.balance < value || + gasLimit < minCalleeGas + ) { + // Note: this is a hack, TODO: get around this hack and clean this up + // This special case will ensure that the actual EXT*CALL is being ran, + // But, the code in `function.ts` will note that `runState.messageGasLimit` is set to a negative number + // This special number signals that `1` should be put on the stack (per spec) + gasLimit = -BIGINT_1 } runState.messageGasLimit = gasLimit - if (value > BIGINT_0) { - const account = (await runState.stateManager.getAccount(runState.auth!)) ?? new Account() - if (account.balance < value) { - trap(ERROR.OUT_OF_GAS) - } - account.balance -= value + return gas + }, + ], + /* EXTDELEGATECALL */ + [ + 0xf9, + async function (runState, gas, common): Promise { + // Charge WARM_STORAGE_READ_COST (100) -> done in accessAddressEIP2929 + + // Peek stack values + const [toAddr, inOffset, inLength] = runState.stack.peek(3) - const toAddr = new Address(addresstoBytes(addr)) - const target = (await runState.stateManager.getAccount(toAddr)) ?? 
new Account() - target.balance += value + // Check if the target address > 20 bytes + if (toAddr > EXTCALL_TARGET_MAX) { + trap(EOFError.InvalidExtcallTarget) + } + + // Charge for memory expansion + gas += subMemUsage(runState, inOffset, inLength, common) - await runState.stateManager.putAccount(toAddr, target) + const toAddress = createAddressFromStackBigInt(toAddr) + // Charge to make address warm (2600 gas) + // (in case if address is already warm, this charges the 100 gas) + gas += accessAddressEIP2929(runState, toAddress.bytes, common) + + const minRetainedGas = common.param('minRetainedGas') + const minCalleeGas = common.param('minCalleeGas') + + const currentGasAvailable = runState.interpreter.getGasLeft() - gas + const reducedGas = currentGasAvailable / BIGINT_64 + // Calculate the gas limit for the callee + // (this is the gas available for the next call frame) + let gasLimit: bigint + if (reducedGas < minRetainedGas) { + gasLimit = currentGasAvailable - minRetainedGas + } else { + gasLimit = currentGasAvailable - reducedGas } + if (runState.env.depth >= Number(common.param('stackLimit')) || gasLimit < minCalleeGas) { + // Note: this is a hack, TODO: get around this hack and clean this up + // This special case will ensure that the actual EXT*CALL is being ran, + // But, the code in `function.ts` will note that `runState.messageGasLimit` is set to a negative number + // This special number signals that `1` should be put on the stack (per spec) + gasLimit = -BIGINT_1 + } + + runState.messageGasLimit = gasLimit + return gas }, ], @@ -761,7 +890,7 @@ export const dynamicGasHandlers: Map { + // Charge WARM_STORAGE_READ_COST (100) -> done in accessAddressEIP2929 + + // Peek stack values + const [toAddr, inOffset, inLength] = runState.stack.peek(3) + + // Check if the target address > 20 bytes + if (toAddr > EXTCALL_TARGET_MAX) { + trap(EOFError.InvalidExtcallTarget) + } + + // Charge for memory expansion + gas += subMemUsage(runState, inOffset, inLength, common) + + const toAddress = createAddressFromStackBigInt(toAddr) + // Charge to make address warm (2600 gas) + // (in case if address is already warm, this charges the 100 gas) + gas += accessAddressEIP2929(runState, toAddress.bytes, common) + + const minRetainedGas = common.param('minRetainedGas') + const minCalleeGas = common.param('minCalleeGas') + + const currentGasAvailable = runState.interpreter.getGasLeft() - gas + const reducedGas = currentGasAvailable / BIGINT_64 + // Calculate the gas limit for the callee + // (this is the gas available for the next call frame) + let gasLimit: bigint + if (reducedGas < minRetainedGas) { + gasLimit = currentGasAvailable - minRetainedGas + } else { + gasLimit = currentGasAvailable - reducedGas + } + + if (runState.env.depth >= Number(common.param('stackLimit')) || gasLimit < minCalleeGas) { + // Note: this is a hack, TODO: get around this hack and clean this up + // This special case will ensure that the actual EXT*CALL is being ran, + // But, the code in `function.ts` will note that `runState.messageGasLimit` is set to a negative number + // This special number signals that `1` should be put on the stack (per spec) + gasLimit = -BIGINT_1 + } + + runState.messageGasLimit = gasLimit + + return gas + }, + ], [ /* REVERT */ 0xfd, @@ -802,7 +985,7 @@ export const dynamicGasHandlers: Map BIGINT_0) { gas += runState.env.accessWitness!.touchAddressOnWriteAndComputeGas( contractAddress, 0, - VERKLE_BASIC_DATA_LEAF_KEY + VERKLE_BASIC_DATA_LEAF_KEY, ) } @@ -857,14 +1031,14 @@ export const 
dynamicGasHandlers: Map BIGINT_0) { selfDestructToColdAccessGas += runState.env.accessWitness!.touchAddressOnWriteAndComputeGas( selfdestructToAddress, 0, - VERKLE_BASIC_DATA_LEAF_KEY + VERKLE_BASIC_DATA_LEAF_KEY, ) } @@ -878,7 +1052,7 @@ export const dynamicGasHandlers: Map runState.highestMemCost) { const currentHighestMemCost = runState.highestMemCost @@ -209,17 +212,17 @@ export function updateSstoreGas( runState: RunState, currentStorage: Uint8Array, value: Uint8Array, - common: Common + common: Common, ): bigint { if ( (value.length === 0 && currentStorage.length === 0) || (value.length > 0 && currentStorage.length > 0) ) { - const gas = common.param('gasPrices', 'sstoreReset') + const gas = common.param('sstoreResetGas') return gas } else if (value.length === 0 && currentStorage.length > 0) { - const gas = common.param('gasPrices', 'sstoreReset') - runState.interpreter.refundGas(common.param('gasPrices', 'sstoreRefund'), 'updateSstoreGas') + const gas = common.param('sstoreResetGas') + runState.interpreter.refundGas(common.param('sstoreRefundGas'), 'updateSstoreGas') return gas } else { /* @@ -229,7 +232,7 @@ export function updateSstoreGas( -> Value is zero, but slot is nonzero Thus, the remaining case is where value is nonzero, but slot is zero, which is this clause */ - return common.param('gasPrices', 'sstoreSet') + return common.param('sstoreSetGas') } } diff --git a/packages/evm/src/params.ts b/packages/evm/src/params.ts new file mode 100644 index 0000000000..27a607511f --- /dev/null +++ b/packages/evm/src/params.ts @@ -0,0 +1,420 @@ +import type { ParamsDict } from '@ethereumjs/common' + +export const paramsEVM: ParamsDict = { + /** + * Frontier/Chainstart + */ + 1: { + // gasConfig + maxRefundQuotient: 2, // Maximum refund quotient; max tx refund is min(tx.gasUsed/maxRefundQuotient, tx.gasRefund) + // gasPrices + basefeeGas: 2, // Gas base cost, used e.g. for ChainID opcode (Istanbul) + expGas: 10, // Base fee of the EXP opcode + expByteGas: 10, // Times ceil(log256(exponent)) for the EXP instruction + keccak256Gas: 30, // Base fee of the SHA3 opcode + keccak256WordGas: 6, // Once per word of the SHA3 operation's data + sloadGas: 50, // Base fee of the SLOAD opcode + sstoreSetGas: 20000, // Once per SSTORE operation if the zeroness changes from zero + sstoreResetGas: 5000, // Once per SSTORE operation if the zeroness does not change from zero + sstoreRefundGas: 15000, // Once per SSTORE operation if the zeroness changes to zero + jumpdestGas: 1, // Base fee of the JUMPDEST opcode + logGas: 375, // Base fee of the LOG opcode + logDataGas: 8, // Per byte in a LOG* operation's data + logTopicGas: 375, // Multiplied by the * of the LOG*, per LOG transaction. e.g. LOG0 incurs 0 * c_txLogTopicGas, LOG4 incurs 4 * c_txLogTopicGas + createGas: 32000, // Base fee of the CREATE opcode + callGas: 40, // Base fee of the CALL opcode + callStipendGas: 2300, // Free gas given at beginning of call + callValueTransferGas: 9000, // Paid for CALL when the value transfer is non-zero + callNewAccountGas: 25000, // Paid for CALL when the destination address didn't exist prior + selfdestructRefundGas: 24000, // Refunded following a selfdestruct operation + memoryGas: 3, // Times the address of the (highest referenced byte in memory + 1). 
NOTE: referencing happens on read, write and in instructions such as RETURN and CALL + quadCoefficientDivGas: 512, // Divisor for the quadratic particle of the memory cost equation + createDataGas: 200, // + copyGas: 3, // Multiplied by the number of 32-byte words that are copied (round up) for any *COPY operation and added + ecRecoverGas: 3000, + sha256Gas: 60, + sha256WordGas: 12, + ripemd160Gas: 600, + ripemd160WordGas: 120, + identityGas: 15, + identityWordGas: 3, + stopGas: 0, // Base fee of the STOP opcode + addGas: 3, // Base fee of the ADD opcode + mulGas: 5, // Base fee of the MUL opcode + subGas: 3, // Base fee of the SUB opcode + divGas: 5, // Base fee of the DIV opcode + sdivGas: 5, // Base fee of the SDIV opcode + modGas: 5, // Base fee of the MOD opcode + smodGas: 5, // Base fee of the SMOD opcode + addmodGas: 8, // Base fee of the ADDMOD opcode + mulmodGas: 8, // Base fee of the MULMOD opcode + signextendGas: 5, // Base fee of the SIGNEXTEND opcode + ltGas: 3, // Base fee of the LT opcode + gtGas: 3, // Base fee of the GT opcode + sltGas: 3, // Base fee of the SLT opcode + sgtGas: 3, // Base fee of the SGT opcode + eqGas: 3, // Base fee of the EQ opcode + iszeroGas: 3, // Base fee of the ISZERO opcode + andGas: 3, // Base fee of the AND opcode + orGas: 3, // Base fee of the OR opcode + xorGas: 3, // Base fee of the XOR opcode + notGas: 3, // Base fee of the NOT opcode + byteGas: 3, // Base fee of the BYTE opcode + addressGas: 2, // Base fee of the ADDRESS opcode + balanceGas: 20, // Base fee of the BALANCE opcode + originGas: 2, // Base fee of the ORIGIN opcode + callerGas: 2, // Base fee of the CALLER opcode + callvalueGas: 2, // Base fee of the CALLVALUE opcode + calldataloadGas: 3, // Base fee of the CALLDATALOAD opcode + calldatasizeGas: 2, // Base fee of the CALLDATASIZE opcode + calldatacopyGas: 3, // Base fee of the CALLDATACOPY opcode + codesizeGas: 2, // Base fee of the CODESIZE opcode + codecopyGas: 3, // Base fee of the CODECOPY opcode + gaspriceGas: 2, // Base fee of the GASPRICE opcode + extcodesizeGas: 20, // Base fee of the EXTCODESIZE opcode + extcodecopyGas: 20, // Base fee of the EXTCODECOPY opcode + blockhashGas: 20, // Base fee of the BLOCKHASH opcode + coinbaseGas: 2, // Base fee of the COINBASE opcode + timestampGas: 2, // Base fee of the TIMESTAMP opcode + numberGas: 2, // Base fee of the NUMBER opcode + difficultyGas: 2, // Base fee of the DIFFICULTY opcode + gaslimitGas: 2, // Base fee of the GASLIMIT opcode + popGas: 2, // Base fee of the POP opcode + mloadGas: 3, // Base fee of the MLOAD opcode + mstoreGas: 3, // Base fee of the MSTORE opcode + mstore8Gas: 3, // Base fee of the MSTORE8 opcode + sstoreGas: 0, // Base fee of the SSTORE opcode + jumpGas: 8, // Base fee of the JUMP opcode + jumpiGas: 10, // Base fee of the JUMPI opcode + pcGas: 2, // Base fee of the PC opcode + msizeGas: 2, // Base fee of the MSIZE opcode + gasGas: 2, // Base fee of the GAS opcode + pushGas: 3, // Base fee of the PUSH opcode + dupGas: 3, // Base fee of the DUP opcode + swapGas: 3, // Base fee of the SWAP opcode + callcodeGas: 40, // Base fee of the CALLCODE opcode + returnGas: 0, // Base fee of the RETURN opcode + invalidGas: 0, // Base fee of the INVALID opcode + selfdestructGas: 0, // Base fee of the SELFDESTRUCT opcode + prevrandaoGas: 0, // TODO: these below 0-gas additions might also point to non-clean implementations in the code base + // evm + stackLimit: 1024, // Maximum size of VM stack allowed + callCreateDepth: 1024, // Maximum depth of call/create stack + 
}, + /** +. * Homestead HF Meta EIP +. */ + 606: { + // gasPrices + delegatecallGas: 40, // Base fee of the DELEGATECALL opcode + }, + /** +. * TangerineWhistle HF Meta EIP +. */ + 608: { + // gasPrices + sloadGas: 200, // Once per SLOAD operation + callGas: 700, // Once per CALL operation & message call transaction + extcodesizeGas: 700, // Base fee of the EXTCODESIZE opcode + extcodecopyGas: 700, // Base fee of the EXTCODECOPY opcode + balanceGas: 400, // Base fee of the BALANCE opcode + delegatecallGas: 700, // Base fee of the DELEGATECALL opcode + callcodeGas: 700, // Base fee of the CALLCODE opcode + selfdestructGas: 5000, // Base fee of the SELFDESTRUCT opcode + }, + /** +. * Spurious Dragon HF Meta EIP +. */ + 607: { + // gasPrices + expByteGas: 50, // Times ceil(log256(exponent)) for the EXP instruction + // evm + maxCodeSize: 24576, // Maximum length of contract code + }, + /** +. * Byzantium HF Meta EIP +. */ + 609: { + // gasPrices + modexpGquaddivisorGas: 20, // Gquaddivisor from modexp precompile for gas calculation + ecAddGas: 500, // Gas costs for curve addition precompile + ecMulGas: 40000, // Gas costs for curve multiplication precompile + ecPairingGas: 100000, // Base gas costs for curve pairing precompile + ecPairingWordGas: 80000, // Gas costs regarding curve pairing precompile input length + revertGas: 0, // Base fee of the REVERT opcode + staticcallGas: 700, // Base fee of the STATICCALL opcode + returndatasizeGas: 2, // Base fee of the RETURNDATASIZE opcode + returndatacopyGas: 3, // Base fee of the RETURNDATACOPY opcode + }, + /** +. * Constantinople HF Meta EIP +. */ + 1013: { + // gasPrices + netSstoreNoopGas: 200, // Once per SSTORE operation if the value doesn't change + netSstoreInitGas: 20000, // Once per SSTORE operation from clean zero + netSstoreCleanGas: 5000, // Once per SSTORE operation from clean non-zero + netSstoreDirtyGas: 200, // Once per SSTORE operation from dirty + netSstoreClearRefundGas: 15000, // Once per SSTORE operation for clearing an originally existing storage slot + netSstoreResetRefundGas: 4800, // Once per SSTORE operation for resetting to the original non-zero value + netSstoreResetClearRefundGas: 19800, // Once per SSTORE operation for resetting to the original zero value + shlGas: 3, // Base fee of the SHL opcode + shrGas: 3, // Base fee of the SHR opcode + sarGas: 3, // Base fee of the SAR opcode + extcodehashGas: 400, // Base fee of the EXTCODEHASH opcode + create2Gas: 32000, // Base fee of the CREATE2 opcode + }, + /** +. * Petersburg HF Meta EIP +. */ + 1716: { + // gasPrices + netSstoreNoopGas: null, // Removed along EIP-1283 + netSstoreInitGas: null, // Removed along EIP-1283 + netSstoreCleanGas: null, // Removed along EIP-1283 + netSstoreDirtyGas: null, // Removed along EIP-1283 + netSstoreClearRefundGas: null, // Removed along EIP-1283 + netSstoreResetRefundGas: null, // Removed along EIP-1283 + netSstoreResetClearRefundGas: null, // Removed along EIP-1283 + }, + /** +. * Istanbul HF Meta EIP +. 
*/ + 1679: { + // gasPrices + blake2RoundGas: 1, // Gas cost per round for the Blake2 F precompile + ecAddGas: 150, // Gas costs for curve addition precompile + ecMulGas: 6000, // Gas costs for curve multiplication precompile + ecPairingGas: 45000, // Base gas costs for curve pairing precompile + ecPairingWordGas: 34000, // Gas costs regarding curve pairing precompile input length + sstoreSentryEIP2200Gas: 2300, // Minimum gas required to be present for an SSTORE call, not consumed + sstoreNoopEIP2200Gas: 800, // Once per SSTORE operation if the value doesn't change + sstoreDirtyEIP2200Gas: 800, // Once per SSTORE operation if a dirty value is changed + sstoreInitEIP2200Gas: 20000, // Once per SSTORE operation from clean zero to non-zero + sstoreInitRefundEIP2200Gas: 19200, // Once per SSTORE operation for resetting to the original zero value + sstoreCleanEIP2200Gas: 5000, // Once per SSTORE operation from clean non-zero to something else + sstoreCleanRefundEIP2200Gas: 4200, // Once per SSTORE operation for resetting to the original non-zero value + sstoreClearRefundEIP2200Gas: 15000, // Once per SSTORE operation for clearing an originally existing storage slot + balanceGas: 700, // Base fee of the BALANCE opcode + extcodehashGas: 700, // Base fee of the EXTCODEHASH opcode + chainidGas: 2, // Base fee of the CHAINID opcode + selfbalanceGas: 5, // Base fee of the SELFBALANCE opcode + sloadGas: 800, // Base fee of the SLOAD opcode + }, + + /** +. * SWAPN, DUPN and EXCHANGE instructions +. */ + 663: { + // gasPrices + dupnGas: 3, // Base fee of the DUPN opcode + swapnGas: 3, // Base fee of the SWAPN opcode + exchangeGas: 3, // Base fee of the EXCHANGE opcode + }, + /** +. * Transient storage opcodes +. */ + 1153: { + // gasPrices + tstoreGas: 100, // Base fee of the TSTORE opcode + tloadGas: 100, // Base fee of the TLOAD opcode + }, + 1559: { + elasticityMultiplier: 2, // Maximum block gas target elasticity + }, + /** +. * ModExp gas cost +. */ + 2565: { + // gasPrices + modexpGquaddivisorGas: 3, // Gquaddivisor from modexp precompile for gas calculation + }, + /** + * BLS12-381 precompiles + */ + 2537: { + // gasPrices + Bls12381G1AddGas: 500, // Gas cost of a single BLS12-381 G1 addition precompile-call + Bls12381G1MulGas: 12000, // Gas cost of a single BLS12-381 G1 multiplication precompile-call + Bls12381G2AddGas: 800, // Gas cost of a single BLS12-381 G2 addition precompile-call + Bls12381G2MulGas: 45000, // Gas cost of a single BLS12-381 G2 multiplication precompile-call + Bls12381PairingBaseGas: 65000, // Base gas cost of BLS12-381 pairing check + Bls12381PairingPerPairGas: 43000, // Per-pair gas cost of BLS12-381 pairing check + Bls12381MapG1Gas: 5500, // Gas cost of BLS12-381 map field element to G1 + Bls12381MapG2Gas: 75000, // Gas cost of BLS12-381 map field element to G2 + }, + /** +. * Gas cost increases for state access opcodes +. 
*/ + 2929: { + // gasPrices + coldsloadGas: 2100, // Gas cost of the first read of storage from a given location (per transaction) + coldaccountaccessGas: 2600, // Gas cost of the first read of a given address (per transaction) + warmstoragereadGas: 100, // Gas cost of reading storage locations which have already loaded 'cold' + sstoreCleanEIP2200Gas: 2900, // Once per SSTORE operation from clean non-zero to something else + sstoreNoopEIP2200Gas: 100, // Once per SSTORE operation if the value doesn't change + sstoreDirtyEIP2200Gas: 100, // Once per SSTORE operation if a dirty value is changed + sstoreInitRefundEIP2200Gas: 19900, // Once per SSTORE operation for resetting to the original zero value + sstoreCleanRefundEIP2200Gas: 4900, // Once per SSTORE operation for resetting to the original non-zero value + callGas: 0, // Base fee of the CALL opcode + callcodeGas: 0, // Base fee of the CALLCODE opcode + delegatecallGas: 0, // Base fee of the DELEGATECALL opcode + staticcallGas: 0, // Base fee of the STATICCALL opcode + balanceGas: 0, // Base fee of the BALANCE opcode + extcodesizeGas: 0, // Base fee of the EXTCODESIZE opcode + extcodecopyGas: 0, // Base fee of the EXTCODECOPY opcode + extcodehashGas: 0, // Base fee of the EXTCODEHASH opcode + sloadGas: 0, // Base fee of the SLOAD opcode + sstoreGas: 0, // Base fee of the SSTORE opcode + }, + /** + * Save historical block hashes in state (Verkle related usage, UNSTABLE) + */ + 2935: { + // evm + historyStorageAddress: '0x0aae40965e6800cd9b1f4b05ff21581047e3f91e', // The address where the historical blockhashes are stored + historyServeWindow: 8192, // The amount of blocks to be served by the historical blockhash contract + }, + /** +. * BASEFEE opcode +. */ + 3198: { + // gasPrices + basefeeGas: 2, // Gas cost of the BASEFEE opcode + }, + /** +. * Reduction in refunds +. */ + 3529: { + // gasConfig + maxRefundQuotient: 5, // Maximum refund quotient; max tx refund is min(tx.gasUsed/maxRefundQuotient, tx.gasRefund) + // gasPrices + selfdestructRefundGas: 0, // Refunded following a selfdestruct operation + sstoreClearRefundEIP2200Gas: 4800, // Once per SSTORE operation for clearing an originally existing storage slot + }, + /** +. * PUSH0 instruction +. */ + 3855: { + // gasPrices + push0Gas: 2, // Base fee of the PUSH0 opcode + }, + /** +. * Limit and meter initcode +. */ + 3860: { + // gasPrices + initCodeWordGas: 2, // Gas to pay for each word (32 bytes) of initcode when creating a contract + // vm + maxInitCodeSize: 49152, // Maximum length of initialization code when creating a contract + }, + /** + * EOF - Static relative jumps + */ + 4200: { + // gasPrices + rjumpGas: 2, // Base fee of the RJUMP opcode + rjumpiGas: 4, // Base fee of the RJUMPI opcode + rjumpvGas: 4, // Base fee of the RJUMPV opcode + }, + /** +. * Supplant DIFFICULTY opcode with PREVRANDAO +. */ + 4399: { + // gasPrices + prevrandaoGas: 2, // Base fee of the PREVRANDAO opcode (previously DIFFICULTY) + }, + /** + * EOF - Functions + */ + 4750: { + // gasPrices + callfGas: 5, // Base fee of the CALLF opcode + retfGas: 3, // Base fee of the RETF opcode + }, + /** +. * Shard Blob Transactions +. 
*/ + 4844: { + kzgPointEvaluationPrecompileGas: 50000, // The fee associated with the point evaluation precompile + blobhashGas: 3, // Base fee of the BLOBHASH opcode + // sharding + blobCommitmentVersionKzg: 1, // The number indicated a versioned hash is a KZG commitment + fieldElementsPerBlob: 4096, // The number of field elements allowed per blob + }, + /** + * MCOPY - Memory copying instruction + */ + 5656: { + // gasPrices + mcopyGas: 3, // Base fee of the MCOPY opcode + }, + /** + * EOF - JUMPF and non-returning functions + */ + 6206: { + // gasPrices + jumpfGas: 5, // Base fee of the JUMPF opcode + }, + /** + * Ethereum state using a unified verkle tree (experimental) + */ + 6800: { + // gasPrices + createGas: 1000, // Base fee of the CREATE opcode + coldsloadGas: 0, // Gas cost of the first read of storage from a given location (per transaction) + }, + /** +. * Revamped CALL instructions +. */ + 7069: { + /* Note: per EIP these are the additionally required EIPs: + EIP 150 - This is the entire Tangerine Whistle hardfork + EIP 211 - (RETURNDATASIZE / RETURNDATACOPY) - Included in Byzantium + EIP 214 - (STATICCALL) - Included in Byzantium + */ + // gasPrices + extcallGas: 0, // Base fee of the EXTCALL opcode + extdelegatecallGas: 0, // Base fee of the EXTDELEGATECALL opcode + extstaticcallGas: 0, // Base fee of the EXTSTATICCALL opcode + returndataloadGas: 3, // Base fee of the RETURNDATALOAD opcode + minRetainedGas: 5000, // Minimum gas retained prior to executing an EXT*CALL opcode (this is the minimum gas available after performing the EXT*CALL) + minCalleeGas: 2300, //Minimum gas available to the the address called by an EXT*CALL opcode + }, + /** + * EOF - Data section access instructions + */ + 7480: { + // gasPrices + dataloadGas: 4, // Base fee of the DATALOAD opcode + dataloadnGas: 3, // Base fee of the DATALOADN opcode + datasizeGas: 2, // Base fee of the DATASIZE opcode + datacopyGas: 3, // Base fee of the DATACOPY opcode + }, + /** +. * BLOBBASEFEE opcode +. */ + 7516: { + // gasPrices + blobbasefeeGas: 2, // Gas cost of the BLOBBASEFEE opcode + }, + /** +. * EOF Contract Creation +. */ + 7620: { + /* Note: per EIP these are the additionally required EIPs: + EIP 170 - (Max contract size) - Included in Spurious Dragon + */ + // gasPrices + eofcreateGas: 32000, // Base fee of the EOFCREATE opcode (Same as CREATE/CREATE2) + returncontractGas: 0, // Base fee of the RETURNCONTRACT opcode + }, + /** +. * Set EOA account code for one transaction +. */ + 7702: { + // TODO: Set correct minimum hardfork + // gasPrices + perAuthBaseGas: 2500, // Gas cost of each authority item + }, +} diff --git a/packages/evm/src/precompiles/01-ecrecover.ts b/packages/evm/src/precompiles/01-ecrecover.ts index a6a7d609d5..e395d17b78 100644 --- a/packages/evm/src/precompiles/01-ecrecover.ts +++ b/packages/evm/src/precompiles/01-ecrecover.ts @@ -17,12 +17,12 @@ import type { PrecompileInput } from './types.js' export function precompile01(opts: PrecompileInput): ExecResult { const ecrecoverFunction = opts.common.customCrypto.ecrecover ?? 
ecrecover - const gasUsed = opts.common.param('gasPrices', 'ecRecover') + const gasUsed = opts.common.param('ecRecoverGas') if (opts._debug !== undefined) { opts._debug( `Run ECRECOVER (0x01) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}`, ) } @@ -60,8 +60,8 @@ export function precompile01(opts: PrecompileInput): ExecResult { if (opts._debug !== undefined) { opts._debug( `ECRECOVER (0x01): PK recovery with msgHash=${bytesToHex(msgHash)} v=${bytesToHex( - v - )} r=${bytesToHex(r)}s=${bytesToHex(s)}}` + v, + )} r=${bytesToHex(r)}s=${bytesToHex(s)}}`, ) } publicKey = ecrecoverFunction(msgHash, bytesToBigInt(v), r, s) diff --git a/packages/evm/src/precompiles/02-sha256.ts b/packages/evm/src/precompiles/02-sha256.ts index e6ee57fe48..f6b1240885 100644 --- a/packages/evm/src/precompiles/02-sha256.ts +++ b/packages/evm/src/precompiles/02-sha256.ts @@ -9,14 +9,14 @@ import type { PrecompileInput } from './types.js' export function precompile02(opts: PrecompileInput): ExecResult { const data = opts.data const sha256Function = opts.common.customCrypto.sha256 ?? sha256 - let gasUsed = opts.common.param('gasPrices', 'sha256') - gasUsed += opts.common.param('gasPrices', 'sha256Word') * BigInt(Math.ceil(data.length / 32)) + let gasUsed = opts.common.param('sha256Gas') + gasUsed += opts.common.param('sha256WordGas') * BigInt(Math.ceil(data.length / 32)) if (opts._debug !== undefined) { opts._debug( `Run KECCAK256 (0x02) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}`, ) } diff --git a/packages/evm/src/precompiles/03-ripemd160.ts b/packages/evm/src/precompiles/03-ripemd160.ts index d244563509..366508ae0d 100644 --- a/packages/evm/src/precompiles/03-ripemd160.ts +++ b/packages/evm/src/precompiles/03-ripemd160.ts @@ -9,14 +9,14 @@ import type { PrecompileInput } from './types.js' export function precompile03(opts: PrecompileInput): ExecResult { const data = opts.data - let gasUsed = opts.common.param('gasPrices', 'ripemd160') - gasUsed += opts.common.param('gasPrices', 'ripemd160Word') * BigInt(Math.ceil(data.length / 32)) + let gasUsed = opts.common.param('ripemd160Gas') + gasUsed += opts.common.param('ripemd160WordGas') * BigInt(Math.ceil(data.length / 32)) if (opts._debug !== undefined) { opts._debug( `Run RIPEMD160 (0x03) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}`, ) } diff --git a/packages/evm/src/precompiles/04-identity.ts b/packages/evm/src/precompiles/04-identity.ts index e5a73f1771..b06d5357f5 100644 --- a/packages/evm/src/precompiles/04-identity.ts +++ b/packages/evm/src/precompiles/04-identity.ts @@ -8,13 +8,13 @@ import type { PrecompileInput } from './types.js' export function precompile04(opts: PrecompileInput): ExecResult { const data = opts.data - let gasUsed = opts.common.param('gasPrices', 'identity') - gasUsed += opts.common.param('gasPrices', 'identityWord') * BigInt(Math.ceil(data.length / 32)) + let gasUsed = opts.common.param('identityGas') + gasUsed += opts.common.param('identityWordGas') * BigInt(Math.ceil(data.length / 32)) if (opts._debug !== undefined) { opts._debug( `Run IDENTITY (0x04) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} 
gasUsed=${gasUsed}`, ) } diff --git a/packages/evm/src/precompiles/05-modexp.ts b/packages/evm/src/precompiles/05-modexp.ts index 29f5970157..c472a19fed 100644 --- a/packages/evm/src/precompiles/05-modexp.ts +++ b/packages/evm/src/precompiles/05-modexp.ts @@ -31,7 +31,7 @@ const BIGINT_199680 = BigInt(199680) const maxInt = BigInt(Number.MAX_SAFE_INTEGER) const maxSize = BigInt(2147483647) // @ethereumjs/util setLengthRight limitation -function multComplexity(x: bigint): bigint { +function multiplicationComplexity(x: bigint): bigint { let fac1 let fac2 if (x <= BIGINT_64) { @@ -49,7 +49,7 @@ function multComplexity(x: bigint): bigint { } } -function multComplexityEIP2565(x: bigint): bigint { +function multiplicationComplexityEIP2565(x: bigint): bigint { const words = (x + BIGINT_7) / BIGINT_8 return words * words } @@ -89,7 +89,7 @@ function getAdjustedExponentLength(data: Uint8Array): bigint { return adjustedExpLen } -export function expmod(a: bigint, power: bigint, modulo: bigint) { +export function expMod(a: bigint, power: bigint, modulo: bigint) { if (power === BIGINT_0) { return BIGINT_1 % modulo } @@ -118,7 +118,7 @@ export function precompile05(opts: PrecompileInput): ExecResult { if (maxLen < mLen) { maxLen = mLen } - const Gquaddivisor = opts.common.param('gasPrices', 'modexpGquaddivisor') + const Gquaddivisor = opts.common.param('modexpGquaddivisorGas') let gasUsed const bStart = BIGINT_96 @@ -129,9 +129,9 @@ export function precompile05(opts: PrecompileInput): ExecResult { const mEnd = mStart + mLen if (!opts.common.isActivatedEIP(2565)) { - gasUsed = (adjustedELen * multComplexity(maxLen)) / Gquaddivisor + gasUsed = (adjustedELen * multiplicationComplexity(maxLen)) / Gquaddivisor } else { - gasUsed = (adjustedELen * multComplexityEIP2565(maxLen)) / Gquaddivisor + gasUsed = (adjustedELen * multiplicationComplexityEIP2565(maxLen)) / Gquaddivisor if (gasUsed < BIGINT_200) { gasUsed = BIGINT_200 } @@ -140,7 +140,7 @@ export function precompile05(opts: PrecompileInput): ExecResult { opts._debug( `Run MODEXP (0x05) precompile data=${short(opts.data)} length=${opts.data.length} gasLimit=${ opts.gasLimit - } gasUsed=${gasUsed}` + } gasUsed=${gasUsed}`, ) } @@ -180,7 +180,7 @@ export function precompile05(opts: PrecompileInput): ExecResult { if (M === BIGINT_0) { R = new Uint8Array() } else { - R = expmod(B, E, M) + R = expMod(B, E, M) if (R === BIGINT_0) { R = new Uint8Array() } else { diff --git a/packages/evm/src/precompiles/06-ecadd.ts b/packages/evm/src/precompiles/06-ecadd.ts index 08a8a49fac..0c498f15f1 100644 --- a/packages/evm/src/precompiles/06-ecadd.ts +++ b/packages/evm/src/precompiles/06-ecadd.ts @@ -1,20 +1,18 @@ -import { bytesToHex, bytesToUnprefixedHex, hexToBytes, short } from '@ethereumjs/util' +import { bytesToHex, setLengthRight, short } from '@ethereumjs/util' -import { OOGResult } from '../evm.js' +import { EvmErrorResult, OOGResult } from '../evm.js' import type { EVM } from '../evm.js' import type { ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' export function precompile06(opts: PrecompileInput): ExecResult { - const inputData = bytesToUnprefixedHex(opts.data.subarray(0, 128)) - - const gasUsed = opts.common.param('gasPrices', 'ecAdd') + const gasUsed = opts.common.param('ecAddGas') if (opts._debug !== undefined) { opts._debug( `Run ECADD (0x06) precompile data=${short(opts.data)} length=${opts.data.length} gasLimit=${ opts.gasLimit - } gasUsed=${gasUsed}` + } gasUsed=${gasUsed}`, ) } if (opts.gasLimit < gasUsed) { @@ -24,7 
+22,19 @@ export function precompile06(opts: PrecompileInput): ExecResult { return OOGResult(opts.gasLimit) } - const returnData = hexToBytes((opts._EVM as EVM)['_bn128'].ec_add(inputData)) + // > 128 bytes: chop off extra bytes + // < 128 bytes: right-pad with 0-s + const input = setLengthRight(opts.data.subarray(0, 128), 128) + + let returnData + try { + returnData = (opts._EVM as EVM)['_bn254'].add(input) + } catch (e: any) { + if (opts._debug !== undefined) { + opts._debug(`ECADD (0x06) failed: ${e.message}`) + } + return EvmErrorResult(e, opts.gasLimit) + } // check ecadd success or failure by comparing the output length if (returnData.length !== 64) { diff --git a/packages/evm/src/precompiles/07-ecmul.ts b/packages/evm/src/precompiles/07-ecmul.ts index 12a2da11fc..a1d3dda857 100644 --- a/packages/evm/src/precompiles/07-ecmul.ts +++ b/packages/evm/src/precompiles/07-ecmul.ts @@ -1,19 +1,18 @@ -import { bytesToHex, bytesToUnprefixedHex, hexToBytes, short } from '@ethereumjs/util' +import { bytesToHex, setLengthRight, short } from '@ethereumjs/util' -import { OOGResult } from '../evm.js' +import { EvmErrorResult, OOGResult } from '../evm.js' import type { EVM } from '../evm.js' import type { ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' export function precompile07(opts: PrecompileInput): ExecResult { - const inputData = bytesToUnprefixedHex(opts.data.subarray(0, 128)) - const gasUsed = opts.common.param('gasPrices', 'ecMul') + const gasUsed = opts.common.param('ecMulGas') if (opts._debug !== undefined) { opts._debug( `Run ECMUL (0x07) precompile data=${short(opts.data)} length=${opts.data.length} gasLimit=${ opts.gasLimit - } gasUsed=${gasUsed}` + } gasUsed=${gasUsed}`, ) } @@ -24,7 +23,19 @@ export function precompile07(opts: PrecompileInput): ExecResult { return OOGResult(opts.gasLimit) } - const returnData = hexToBytes((opts._EVM as EVM)['_bn128'].ec_mul(inputData)) + // > 128 bytes: chop off extra bytes + // < 128 bytes: right-pad with 0-s + const input = setLengthRight(opts.data.subarray(0, 128), 128) + + let returnData + try { + returnData = (opts._EVM as EVM)['_bn254'].mul(input) + } catch (e: any) { + if (opts._debug !== undefined) { + opts._debug(`ECMUL (0x07) failed: ${e.message}`) + } + return EvmErrorResult(e, opts.gasLimit) + } // check ecmul success or failure by comparing the output length if (returnData.length !== 64) { diff --git a/packages/evm/src/precompiles/08-ecpairing.ts b/packages/evm/src/precompiles/08-ecpairing.ts index cf398be205..3715d5082b 100644 --- a/packages/evm/src/precompiles/08-ecpairing.ts +++ b/packages/evm/src/precompiles/08-ecpairing.ts @@ -1,23 +1,27 @@ -import { bytesToHex, bytesToUnprefixedHex, hexToBytes, short } from '@ethereumjs/util' +import { bytesToHex, short } from '@ethereumjs/util' -import { OOGResult } from '../evm.js' +import { EvmErrorResult, OOGResult } from '../evm.js' +import { ERROR, EvmError } from '../exceptions.js' + +import { moduloLengthCheck } from './util.js' import type { EVM } from '../evm.js' import type { ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' export function precompile08(opts: PrecompileInput): ExecResult { - const inputData = opts.data - // no need to care about non-divisible-by-192, because bn128.pairing will properly fail in that case - const inputDataSize = BigInt(Math.floor(inputData.length / 192)) + if (!moduloLengthCheck(opts, 192, 'ECPAIRING (0x08)')) { + return EvmErrorResult(new EvmError(ERROR.INVALID_INPUT_LENGTH), opts.gasLimit) 
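+  // The `moduloLengthCheck` used here is assumed to be the generic helper this PR moves into
+  // `./util.ts`; judging from the bls12_381-specific version removed further below, it presumably
+  // looks roughly like the sketch that follows. For ECPAIRING (0x08) `length` is 192, since each
+  // pair is a 64-byte G1 point followed by a 128-byte G2 point.
+  //
+  //   import type { PrecompileInput } from './types.js'
+  //
+  //   export const moduloLengthCheck = (opts: PrecompileInput, length: number, pName: string): boolean => {
+  //     if (opts.data.length % length !== 0) {
+  //       if (opts._debug !== undefined) {
+  //         opts._debug(
+  //           `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length}*k bytes)`,
+  //         )
+  //       }
+  //       return false // the caller then returns an INVALID_INPUT_LENGTH error result
+  //     }
+  //     return true
+  //   }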
+ } + + const inputDataSize = BigInt(Math.floor(opts.data.length / 192)) const gasUsed = - opts.common.param('gasPrices', 'ecPairing') + - inputDataSize * opts.common.param('gasPrices', 'ecPairingWord') + opts.common.param('ecPairingGas') + inputDataSize * opts.common.param('ecPairingWordGas') if (opts._debug !== undefined) { opts._debug( `Run ECPAIRING (0x08) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}`, ) } @@ -28,9 +32,15 @@ export function precompile08(opts: PrecompileInput): ExecResult { return OOGResult(opts.gasLimit) } - const returnData = hexToBytes( - (opts._EVM as EVM)['_bn128'].ec_pairing(bytesToUnprefixedHex(inputData)) - ) + let returnData + try { + returnData = (opts._EVM as EVM)['_bn254'].pairing(opts.data) + } catch (e: any) { + if (opts._debug !== undefined) { + opts._debug(`ECPAIRING (0x08) failed: ${e.message}`) + } + return EvmErrorResult(e, opts.gasLimit) + } // check ecpairing success or failure by comparing the output length if (returnData.length !== 32) { diff --git a/packages/evm/src/precompiles/09-blake2f.ts b/packages/evm/src/precompiles/09-blake2f.ts index 9ac57f0a58..8fce5e8c0c 100644 --- a/packages/evm/src/precompiles/09-blake2f.ts +++ b/packages/evm/src/precompiles/09-blake2f.ts @@ -55,7 +55,7 @@ function B2B_G( c: number, d: number, ix: number, - iy: number + iy: number, ) { const x0 = mw[ix] const x1 = mw[ix + 1] @@ -110,7 +110,7 @@ const SIGMA8 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 14, 10, 4, const SIGMA82 = new Uint8Array( SIGMA8.map(function (x) { return x * 2 - }) + }), ) export function F(h: Uint32Array, m: Uint32Array, t: Uint32Array, f: boolean, rounds: number) { @@ -188,13 +188,13 @@ export function precompile09(opts: PrecompileInput): ExecResult { // final const f = lastByte === 1 - let gasUsed = opts.common.param('gasPrices', 'blake2Round') + let gasUsed = opts.common.param('blake2RoundGas') gasUsed *= BigInt(rounds) if (opts._debug !== undefined) { opts._debug( `Run BLAKE2F (0x09) precompile data=${short(opts.data)} length=${opts.data.length} gasLimit=${ opts.gasLimit - } gasUsed=${gasUsed}` + } gasUsed=${gasUsed}`, ) } diff --git a/packages/evm/src/precompiles/0a-kzg-point-evaluation.ts b/packages/evm/src/precompiles/0a-kzg-point-evaluation.ts index c9fb679678..e5ff2a5074 100644 --- a/packages/evm/src/precompiles/0a-kzg-point-evaluation.ts +++ b/packages/evm/src/precompiles/0a-kzg-point-evaluation.ts @@ -14,7 +14,7 @@ import type { ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' export const BLS_MODULUS = BigInt( - '52435875175126190479447740508185965837690552500527637822603658699938581184513' + '52435875175126190479447740508185965837690552500527637822603658699938581184513', ) const modulusBuffer = setLengthLeft(bigIntToBytes(BLS_MODULUS), 32) @@ -23,12 +23,12 @@ export async function precompile0a(opts: PrecompileInput): Promise { if (opts.common.customCrypto?.kzg === undefined) { throw new Error('kzg not initialized') } - const gasUsed = opts.common.param('gasPrices', 'kzgPointEvaluationGasPrecompilePrice') + const gasUsed = opts.common.param('kzgPointEvaluationPrecompileGas') if (opts._debug !== undefined) { opts._debug( `Run KZG_POINT_EVALUATION (0x14) precompile data=${short(opts.data)} length=${ opts.data.length - } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}` + } gasLimit=${opts.gasLimit} gasUsed=${gasUsed}`, ) } @@ -43,8 +43,8 @@ export async function precompile0a(opts: 
PrecompileInput): Promise { return EvmErrorResult(new EvmError(ERROR.INVALID_INPUT_LENGTH), opts.gasLimit) } - const version = Number(opts.common.param('sharding', 'blobCommitmentVersionKzg')) - const fieldElementsPerBlob = opts.common.param('sharding', 'fieldElementsPerBlob') + const version = Number(opts.common.param('blobCommitmentVersionKzg')) + const fieldElementsPerBlob = opts.common.param('fieldElementsPerBlob') const versionedHash = opts.data.subarray(0, 32) const z = opts.data.subarray(32, 64) const y = opts.data.subarray(64, 96) @@ -61,8 +61,8 @@ export async function precompile0a(opts: PrecompileInput): Promise { if (opts._debug !== undefined) { opts._debug( `KZG_POINT_EVALUATION (0x14): proof verification with commitment=${bytesToHex( - commitment - )} z=${bytesToHex(z)} y=${bytesToHex(y)} kzgProof=${bytesToHex(kzgProof)}` + commitment, + )} z=${bytesToHex(z)} y=${bytesToHex(y)} kzgProof=${bytesToHex(kzgProof)}`, ) } try { @@ -89,8 +89,8 @@ export async function precompile0a(opts: PrecompileInput): Promise { if (opts._debug !== undefined) { opts._debug( `KZG_POINT_EVALUATION (0x14) return fieldElements=${bytesToHex( - fieldElementsBuffer - )} modulus=${bytesToHex(modulusBuffer)}` + fieldElementsBuffer, + )} modulus=${bytesToHex(modulusBuffer)}`, ) } diff --git a/packages/evm/src/precompiles/0b-bls12-g1add.ts b/packages/evm/src/precompiles/0b-bls12-g1add.ts index b6357ff4e1..34be7c5309 100644 --- a/packages/evm/src/precompiles/0b-bls12-g1add.ts +++ b/packages/evm/src/precompiles/0b-bls12-g1add.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { equalityLengthCheck, gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { equalityLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -12,7 +13,7 @@ export async function precompile0b(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface // note: the gas used is constant; even if the input is incorrect. - const gasUsed = opts.common.paramByEIP('gasPrices', 'Bls12381G1AddGas', 2537) ?? BigInt(0) + const gasUsed = opts.common.paramByEIP('Bls12381G1AddGas', 2537) ?? BigInt(0) if (!gasCheck(opts, gasUsed, 'BLS12G1ADD (0x0b)')) { return OOGResult(opts.gasLimit) } diff --git a/packages/evm/src/precompiles/0c-bls12-g1mul.ts b/packages/evm/src/precompiles/0c-bls12-g1mul.ts index 0fdc07cd14..92dd9ab8ab 100644 --- a/packages/evm/src/precompiles/0c-bls12-g1mul.ts +++ b/packages/evm/src/precompiles/0c-bls12-g1mul.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { equalityLengthCheck, gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { equalityLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -12,7 +13,7 @@ export async function precompile0c(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface // note: the gas used is constant; even if the input is incorrect. - const gasUsed = opts.common.paramByEIP('gasPrices', 'Bls12381G1MulGas', 2537) ?? 
BigInt(0) + const gasUsed = opts.common.paramByEIP('Bls12381G1MulGas', 2537) ?? BigInt(0) if (!gasCheck(opts, gasUsed, 'BLS12G1MUL (0x0c)')) { return OOGResult(opts.gasLimit) } diff --git a/packages/evm/src/precompiles/0d-bls12-g1msm.ts b/packages/evm/src/precompiles/0d-bls12-g1msm.ts index 229b46bbeb..44e667f693 100644 --- a/packages/evm/src/precompiles/0d-bls12-g1msm.ts +++ b/packages/evm/src/precompiles/0d-bls12-g1msm.ts @@ -3,12 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { - gasCheck, - leading16ZeroBytesCheck, - moduloLengthCheck, - msmGasUsed, -} from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck, msmGasUsed } from './bls12_381/index.js' +import { moduloLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -22,14 +18,14 @@ export async function precompile0d(opts: PrecompileInput): Promise { if (opts._debug !== undefined) { opts._debug(`BLS12G1MSM (0x0d) failed: Empty input`) } - return EvmErrorResult(new EvmError(ERROR.BLS_12_381_INPUT_EMPTY), opts.gasLimit) // follow Geths implementation + return EvmErrorResult(new EvmError(ERROR.BLS_12_381_INPUT_EMPTY), opts.gasLimit) // follow Geth's implementation } // TODO: Double-check respectively confirm that this order is really correct that the gas check // on this eventually to be "floored" pair number should happen before the input length modulo // validation (same for g2msm) const numPairs = Math.floor(inputData.length / 160) - const gasUsedPerPair = opts.common.paramByEIP('gasPrices', 'Bls12381G1MulGas', 2537) ?? BigInt(0) + const gasUsedPerPair = opts.common.paramByEIP('Bls12381G1MulGas', 2537) ?? BigInt(0) const gasUsed = msmGasUsed(numPairs, gasUsedPerPair) if (!gasCheck(opts, gasUsed, 'BLS12G1MSM (0x0d)')) { diff --git a/packages/evm/src/precompiles/0e-bls12-g2add.ts b/packages/evm/src/precompiles/0e-bls12-g2add.ts index ce1a25cf3a..019f815075 100644 --- a/packages/evm/src/precompiles/0e-bls12-g2add.ts +++ b/packages/evm/src/precompiles/0e-bls12-g2add.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { equalityLengthCheck, gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { equalityLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -12,7 +13,7 @@ export async function precompile0e(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface // note: the gas used is constant; even if the input is incorrect. - const gasUsed = opts.common.paramByEIP('gasPrices', 'Bls12381G2AddGas', 2537) ?? BigInt(0) + const gasUsed = opts.common.paramByEIP('Bls12381G2AddGas', 2537) ?? 
BigInt(0) if (!gasCheck(opts, gasUsed, 'BLS12G2ADD (0x0e)')) { return OOGResult(opts.gasLimit) } diff --git a/packages/evm/src/precompiles/0f-bls12-g2mul.ts b/packages/evm/src/precompiles/0f-bls12-g2mul.ts index 7e83542b94..a8817e0a65 100644 --- a/packages/evm/src/precompiles/0f-bls12-g2mul.ts +++ b/packages/evm/src/precompiles/0f-bls12-g2mul.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { equalityLengthCheck, gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { equalityLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -12,7 +13,7 @@ export async function precompile0f(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface // note: the gas used is constant; even if the input is incorrect. - const gasUsed = opts.common.paramByEIP('gasPrices', 'Bls12381G2MulGas', 2537) ?? BigInt(0) + const gasUsed = opts.common.paramByEIP('Bls12381G2MulGas', 2537) ?? BigInt(0) if (!gasCheck(opts, gasUsed, 'BLS12G2MUL (0x0f)')) { return OOGResult(opts.gasLimit) } diff --git a/packages/evm/src/precompiles/10-bls12-g2msm.ts b/packages/evm/src/precompiles/10-bls12-g2msm.ts index aa3b75fea1..d62d0d0c52 100644 --- a/packages/evm/src/precompiles/10-bls12-g2msm.ts +++ b/packages/evm/src/precompiles/10-bls12-g2msm.ts @@ -3,12 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { - gasCheck, - leading16ZeroBytesCheck, - moduloLengthCheck, - msmGasUsed, -} from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck, msmGasUsed } from './bls12_381/index.js' +import { moduloLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -20,11 +16,11 @@ export async function precompile10(opts: PrecompileInput): Promise { if (opts._debug !== undefined) { opts._debug(`BLS12G2MSM (0x10) failed: Empty input`) } - return EvmErrorResult(new EvmError(ERROR.BLS_12_381_INPUT_EMPTY), opts.gasLimit) // follow Geths implementation + return EvmErrorResult(new EvmError(ERROR.BLS_12_381_INPUT_EMPTY), opts.gasLimit) // follow Geth's implementation } const numPairs = Math.floor(opts.data.length / 288) - const gasUsedPerPair = opts.common.paramByEIP('gasPrices', 'Bls12381G2MulGas', 2537) ?? BigInt(0) + const gasUsedPerPair = opts.common.paramByEIP('Bls12381G2MulGas', 2537) ?? 
BigInt(0) const gasUsed = msmGasUsed(numPairs, gasUsedPerPair) if (!gasCheck(opts, gasUsed, 'BLS12G2MSM (0x10)')) { diff --git a/packages/evm/src/precompiles/11-bls12-pairing.ts b/packages/evm/src/precompiles/11-bls12-pairing.ts index d24fdc95ed..7d7eb44eed 100644 --- a/packages/evm/src/precompiles/11-bls12-pairing.ts +++ b/packages/evm/src/precompiles/11-bls12-pairing.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { gasCheck, leading16ZeroBytesCheck, moduloLengthCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { moduloLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -11,7 +12,7 @@ import type { PrecompileInput } from './types.js' export async function precompile11(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface - const baseGas = opts.common.paramByEIP('gasPrices', 'Bls12381PairingBaseGas', 2537) ?? BigInt(0) + const baseGas = opts.common.paramByEIP('Bls12381PairingBaseGas', 2537) ?? BigInt(0) // TODO: confirm that this is not a thing for the other precompiles if (opts.data.length === 0) { @@ -21,8 +22,7 @@ export async function precompile11(opts: PrecompileInput): Promise { return EvmErrorResult(new EvmError(ERROR.BLS_12_381_INPUT_EMPTY), opts.gasLimit) } - const gasUsedPerPair = - opts.common.paramByEIP('gasPrices', 'Bls12381PairingPerPairGas', 2537) ?? BigInt(0) + const gasUsedPerPair = opts.common.paramByEIP('Bls12381PairingPerPairGas', 2537) ?? BigInt(0) // TODO: For this precompile it is the only exception that the length check is placed before the // gas check. I will keep it there to not side-change the existing implementation, but we should diff --git a/packages/evm/src/precompiles/12-bls12-map-fp-to-g1.ts b/packages/evm/src/precompiles/12-bls12-map-fp-to-g1.ts index fc07d753a7..f95792d895 100644 --- a/packages/evm/src/precompiles/12-bls12-map-fp-to-g1.ts +++ b/packages/evm/src/precompiles/12-bls12-map-fp-to-g1.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { equalityLengthCheck, gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { equalityLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -12,7 +13,7 @@ export async function precompile12(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface // note: the gas used is constant; even if the input is incorrect. - const gasUsed = opts.common.paramByEIP('gasPrices', 'Bls12381MapG1Gas', 2537) ?? BigInt(0) + const gasUsed = opts.common.paramByEIP('Bls12381MapG1Gas', 2537) ?? 
BigInt(0) if (!gasCheck(opts, gasUsed, 'BLS12MAPFPTOG1 (0x12)')) { return OOGResult(opts.gasLimit) } diff --git a/packages/evm/src/precompiles/13-bls12-map-fp2-to-g2.ts b/packages/evm/src/precompiles/13-bls12-map-fp2-to-g2.ts index b1a39f5f8a..2e49d12a18 100644 --- a/packages/evm/src/precompiles/13-bls12-map-fp2-to-g2.ts +++ b/packages/evm/src/precompiles/13-bls12-map-fp2-to-g2.ts @@ -3,7 +3,8 @@ import { bytesToHex } from '@ethereumjs/util' import { EvmErrorResult, OOGResult } from '../evm.js' import { ERROR, EvmError } from '../exceptions.js' -import { equalityLengthCheck, gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { gasCheck, leading16ZeroBytesCheck } from './bls12_381/index.js' +import { equalityLengthCheck } from './util.js' import type { EVMBLSInterface, ExecResult } from '../types.js' import type { PrecompileInput } from './types.js' @@ -12,7 +13,7 @@ export async function precompile13(opts: PrecompileInput): Promise { const bls = (opts._EVM)._bls! as EVMBLSInterface // note: the gas used is constant; even if the input is incorrect. - const gasUsed = opts.common.paramByEIP('gasPrices', 'Bls12381MapG2Gas', 2537) ?? BigInt(0) + const gasUsed = opts.common.paramByEIP('Bls12381MapG2Gas', 2537) ?? BigInt(0) if (!gasCheck(opts, gasUsed, 'BLS12MAPFP2TOG2 (0x13)')) { return OOGResult(opts.gasLimit) } diff --git a/packages/evm/src/precompiles/bls12_381/constants.ts b/packages/evm/src/precompiles/bls12_381/constants.ts index f5f19a77b6..0ce107c5f3 100644 --- a/packages/evm/src/precompiles/bls12_381/constants.ts +++ b/packages/evm/src/precompiles/bls12_381/constants.ts @@ -2,7 +2,7 @@ import { concatBytes, hexToBytes } from '@ethereumjs/util' // base field modulus as described in the EIP export const BLS_FIELD_MODULUS = BigInt( - '0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab' + '0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab', ) export const BLS_G1_POINT_BYTE_LENGTH = 128 diff --git a/packages/evm/src/precompiles/bls12_381/mcl.ts b/packages/evm/src/precompiles/bls12_381/mcl.ts index e006a0a263..133ef18cbe 100644 --- a/packages/evm/src/precompiles/bls12_381/mcl.ts +++ b/packages/evm/src/precompiles/bls12_381/mcl.ts @@ -226,7 +226,7 @@ export class MCLBLS implements EVMBLSInterface { const p2 = BLS12_381_ToG1Point( input.subarray(BLS_G1_POINT_BYTE_LENGTH, BLS_G1_POINT_BYTE_LENGTH * 2), this._mcl, - false + false, ) const result = this._mcl.add(p1, p2) @@ -250,7 +250,7 @@ export class MCLBLS implements EVMBLSInterface { const p2 = BLS12_381_ToG2Point( input.subarray(BLS_G2_POINT_BYTE_LENGTH, BLS_G2_POINT_BYTE_LENGTH * 2), this._mcl, - false + false, ) const result = this._mcl.add(p1, p2) @@ -296,11 +296,11 @@ export class MCLBLS implements EVMBLSInterface { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) const Fr = BLS12_381_ToFrPoint( input.subarray(pairStart + BLS_G1_POINT_BYTE_LENGTH, pairStart + pairLength), - this._mcl + this._mcl, ) G1Array.push(G1) @@ -322,11 +322,11 @@ export class MCLBLS implements EVMBLSInterface { const pairStart = pairLength * k const G2 = BLS12_381_ToG2Point( input.subarray(pairStart, pairStart + BLS_G2_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) const Fr = BLS12_381_ToFrPoint( input.subarray(pairStart + BLS_G2_POINT_BYTE_LENGTH, pairStart + pairLength), - this._mcl + this._mcl, ) G2Array.push(G2) @@ -344,13 +344,13 @@ 
export class MCLBLS implements EVMBLSInterface { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) const g2start = pairStart + BLS_G1_POINT_BYTE_LENGTH const G2 = BLS12_381_ToG2Point( input.subarray(g2start, g2start + BLS_G2_POINT_BYTE_LENGTH), - this._mcl + this._mcl, ) pairs.push([G1, G2]) diff --git a/packages/evm/src/precompiles/bls12_381/noble.ts b/packages/evm/src/precompiles/bls12_381/noble.ts index 1d2ad640be..0742095477 100644 --- a/packages/evm/src/precompiles/bls12_381/noble.ts +++ b/packages/evm/src/precompiles/bls12_381/noble.ts @@ -66,7 +66,8 @@ function BLS12_381_FromG1Point(input: AffinePoint): Uint8Array { * @param input Input Uint8Array. Should be 256 bytes * @returns Noble G2 point */ -function BLS12_381_ToG2Point(input: Uint8Array) { +function BLS12_381_ToG2Point(input: Uint8Array): any { + // TODO: remove any type, temporary fix due to conflicting @noble/curves versions if (equalsBytes(input, BLS_G2_INFINITY_POINT_BYTES)) { return bls12_381.G2.ProjectivePoint.ZERO } @@ -110,10 +111,10 @@ function BLS12_381_ToFrPoint(input: Uint8Array): bigint { // It should be nevertheless validated if this is (fully) correct, // especially if ">" or ">=" should be applied. // - // Unfortunately the skalar in both test vectors is significantly + // Unfortunately the scalar in both test vectors is significantly // greater than the ORDER threshold, here are th values from both tests: // - // Skalar / Order + // Scalar / Order // 69732848789442042582239751384143889712113271203482973843852656394296700715236n // 52435875175126190479447740508185965837690552500527637822603658699938581184513n // @@ -141,7 +142,8 @@ function BLS12_381_ToFpPoint(fpCoordinate: Uint8Array) { return FP } -function BLS12_381_ToFp2Point(fpXCoordinate: Uint8Array, fpYCoordinate: Uint8Array) { +function BLS12_381_ToFp2Point(fpXCoordinate: Uint8Array, fpYCoordinate: Uint8Array): any { + // TODO: remove any type, temporary fix due to conflicting @noble/curves versions // check if the coordinates are in the field if (bytesToBigInt(fpXCoordinate) >= BLS_FIELD_MODULUS) { throw new EvmError(ERROR.BLS_12_381_FP_NOT_IN_FIELD) @@ -166,7 +168,7 @@ export class NobleBLS implements EVMBLSInterface { addG1(input: Uint8Array): Uint8Array { const p1 = BLS12_381_ToG1Point(input.subarray(0, BLS_G1_POINT_BYTE_LENGTH)) const p2 = BLS12_381_ToG1Point( - input.subarray(BLS_G1_POINT_BYTE_LENGTH, BLS_G1_POINT_BYTE_LENGTH * 2) + input.subarray(BLS_G1_POINT_BYTE_LENGTH, BLS_G1_POINT_BYTE_LENGTH * 2), ) const p = p1.add(p2) @@ -178,19 +180,19 @@ export class NobleBLS implements EVMBLSInterface { mulG1(input: Uint8Array): Uint8Array { // convert input to G1 points, add them, and convert the output to a Uint8Array. 
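+    // For reference (a sketch, not part of this PR): the mulG1/mulG2 inputs are laid out as
+    // <padded point bytes><32-byte big-endian scalar>; for G1 that is 128 + 32 = 160 bytes and for
+    // G2 it is 256 + 32 = 288 bytes, hence the subarray(..., 160) / subarray(..., 288) slices below.
+    // Assembling such an input could look like this, assuming the @ethereumjs/util helpers used
+    // elsewhere in this PR (bigIntToBytes, concatBytes, setLengthLeft):
+    //
+    //   import { bigIntToBytes, concatBytes, setLengthLeft } from '@ethereumjs/util'
+    //
+    //   function buildG1MulInput(pointBytes: Uint8Array, scalar: bigint): Uint8Array {
+    //     if (pointBytes.length !== 128) throw new Error('expected a 128-byte padded G1 point')
+    //     // left-pad the scalar to exactly 32 bytes, as BLS12_381_ToFrPoint reads input[128..160)
+    //     return concatBytes(pointBytes, setLengthLeft(bigIntToBytes(scalar), 32))
+    //   }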
const p = BLS12_381_ToG1Point(input.subarray(0, BLS_G1_POINT_BYTE_LENGTH)) - const skalar = BLS12_381_ToFrPoint(input.subarray(BLS_G1_POINT_BYTE_LENGTH, 160)) + const scalar = BLS12_381_ToFrPoint(input.subarray(BLS_G1_POINT_BYTE_LENGTH, 160)) - if (skalar === BIGINT_0) { + if (scalar === BIGINT_0) { return BLS_G1_INFINITY_POINT_BYTES } - const result = p.multiply(skalar) + const result = p.multiply(scalar) return BLS12_381_FromG1Point(result) } addG2(input: Uint8Array): Uint8Array { const p1 = BLS12_381_ToG2Point(input.subarray(0, BLS_G2_POINT_BYTE_LENGTH)) const p2 = BLS12_381_ToG2Point( - input.subarray(BLS_G2_POINT_BYTE_LENGTH, BLS_G2_POINT_BYTE_LENGTH * 2) + input.subarray(BLS_G2_POINT_BYTE_LENGTH, BLS_G2_POINT_BYTE_LENGTH * 2), ) const p = p1.add(p2) const result = BLS12_381_FromG2Point(p) @@ -201,12 +203,12 @@ export class NobleBLS implements EVMBLSInterface { mulG2(input: Uint8Array): Uint8Array { // convert input to G2 point/Fr point, add them, and convert the output to a Uint8Array. const p = BLS12_381_ToG2Point(input.subarray(0, BLS_G2_POINT_BYTE_LENGTH)) - const skalar = BLS12_381_ToFrPoint(input.subarray(BLS_G2_POINT_BYTE_LENGTH, 288)) + const scalar = BLS12_381_ToFrPoint(input.subarray(BLS_G2_POINT_BYTE_LENGTH, 288)) - if (skalar === BIGINT_0) { + if (scalar === BIGINT_0) { return BLS_G2_INFINITY_POINT_BYTES } - const result = p.multiply(skalar) + const result = p.multiply(scalar) return BLS12_381_FromG2Point(result) } @@ -240,10 +242,10 @@ export class NobleBLS implements EVMBLSInterface { for (let k = 0; k < numPairs; k++) { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( - input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH) + input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), ) const Fr = BLS12_381_ToFrPoint( - input.subarray(pairStart + BLS_G1_POINT_BYTE_LENGTH, pairStart + pairLength) + input.subarray(pairStart + BLS_G1_POINT_BYTE_LENGTH, pairStart + pairLength), ) let pMul if (Fr === BIGINT_0) { @@ -272,10 +274,10 @@ export class NobleBLS implements EVMBLSInterface { for (let k = 0; k < numPairs; k++) { const pairStart = pairLength * k const G2 = BLS12_381_ToG2Point( - input.subarray(pairStart, pairStart + BLS_G2_POINT_BYTE_LENGTH) + input.subarray(pairStart, pairStart + BLS_G2_POINT_BYTE_LENGTH), ) const Fr = BLS12_381_ToFrPoint( - input.subarray(pairStart + BLS_G2_POINT_BYTE_LENGTH, pairStart + pairLength) + input.subarray(pairStart + BLS_G2_POINT_BYTE_LENGTH, pairStart + pairLength), ) let pMul if (Fr === BIGINT_0) { @@ -291,12 +293,13 @@ export class NobleBLS implements EVMBLSInterface { } pairingCheck(input: Uint8Array): Uint8Array { + // Extract the pairs from the input const pairLength = 384 const pairs = [] for (let k = 0; k < input.length / pairLength; k++) { const pairStart = pairLength * k const G1 = BLS12_381_ToG1Point( - input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH) + input.subarray(pairStart, pairStart + BLS_G1_POINT_BYTE_LENGTH), ) const g2start = pairStart + BLS_G1_POINT_BYTE_LENGTH diff --git a/packages/evm/src/precompiles/bls12_381/util.ts b/packages/evm/src/precompiles/bls12_381/util.ts index ca09ef1271..9d1b6b2a94 100644 --- a/packages/evm/src/precompiles/bls12_381/util.ts +++ b/packages/evm/src/precompiles/bls12_381/util.ts @@ -19,7 +19,7 @@ export const gasCheck = (opts: PrecompileInput, gasUsed: bigint, pName: string) opts._debug( `Run ${pName} precompile data=${short(opts.data)} length=${opts.data.length} gasLimit=${ opts.gasLimit - } gasUsed=${gasUsed}` + } gasUsed=${gasUsed}`, ) } if 
(opts.gasLimit < gasUsed) { @@ -56,47 +56,6 @@ export const msmGasUsed = (numPairs: number, gasUsedPerPair: bigint) => { return (BigInt(numPairs) * gasUsedPerPair * BigInt(gasDiscountMultiplier)) / BigInt(1000) } -/** - * Checks that the length of the provided data is equal to `length`. - * - * @param opts - * @param length - * @param pName - * @returns - */ -export const equalityLengthCheck = (opts: PrecompileInput, length: number, pName: string) => { - if (opts.data.length !== length) { - if (opts._debug !== undefined) { - opts._debug( - `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length})` - ) - } - return false - } - return true -} - -/** - * Checks that the total length of the provided data input can be subdivided into k equal parts - * with `length` (without leaving some remainder bytes). - * - * @param opts - * @param length - * @param pName - * @returns - */ -export const moduloLengthCheck = (opts: PrecompileInput, length: number, pName: string) => { - if (opts.data.length % length !== 0) { - if (opts._debug !== undefined) { - opts._debug( - `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length}*k bytes)` - ) - } - return false - } - return true -} - /** * BLS-specific zero check to check that the top 16 bytes of a 64 byte field element provided * are always zero (see EIP notes on field element encoding). @@ -123,12 +82,12 @@ export const leading16ZeroBytesCheck = ( opts: PrecompileInput, zeroByteRanges: number[][], pName: string, - pairStart = 0 + pairStart = 0, ) => { for (const index in zeroByteRanges) { const slicedBuffer = opts.data.subarray( zeroByteRanges[index][0] + pairStart, - zeroByteRanges[index][1] + pairStart + zeroByteRanges[index][1] + pairStart, ) if (!(equalsBytes(slicedBuffer, ZERO_BYTES_16) === true)) { if (opts._debug !== undefined) { diff --git a/packages/evm/src/precompiles/bn254/index.ts b/packages/evm/src/precompiles/bn254/index.ts new file mode 100644 index 0000000000..d54b65c7e0 --- /dev/null +++ b/packages/evm/src/precompiles/bn254/index.ts @@ -0,0 +1,2 @@ +export { NobleBN254 } from './noble.js' +export { RustBN254 } from './rustbn.js' diff --git a/packages/evm/src/precompiles/bn254/noble.ts b/packages/evm/src/precompiles/bn254/noble.ts new file mode 100644 index 0000000000..6ebe092f00 --- /dev/null +++ b/packages/evm/src/precompiles/bn254/noble.ts @@ -0,0 +1,168 @@ +import { + BIGINT_0, + bigIntToBytes, + bytesToBigInt, + concatBytes, + equalsBytes, + hexToBytes, + setLengthLeft, +} from '@ethereumjs/util' +import { bn254 } from '@noble/curves/bn254' + +import { ERROR, EvmError } from '../../exceptions.js' + +import type { EVMBN254Interface } from '../../types.js' +import type { AffinePoint } from '@noble/curves/abstract/weierstrass' + +const G1_INFINITY_POINT_BYTES = new Uint8Array(64) +const G2_INFINITY_POINT_BYTES = new Uint8Array(128) +const G1_POINT_BYTE_LENGTH = 64 +const G1_ELEMENT_BYTE_LENGTH = 32 +const G2_POINT_BYTE_LENGTH = 128 + +const ZERO_BUFFER = new Uint8Array(32) +const ONE_BUFFER = concatBytes(new Uint8Array(31), hexToBytes('0x01')) + +/** + * Converts an Uint8Array to a Noble G1 point. + * @param input Input Uint8Array. 
Should be 64 bytes + * @returns Noble G1 point + */ +function toG1Point(input: Uint8Array) { + if (equalsBytes(input, G1_INFINITY_POINT_BYTES)) { + return bn254.G1.ProjectivePoint.ZERO + } + + const x = bytesToBigInt(input.subarray(0, G1_ELEMENT_BYTE_LENGTH)) + const y = bytesToBigInt(input.subarray(G1_ELEMENT_BYTE_LENGTH, G1_POINT_BYTE_LENGTH)) + + const G1 = bn254.G1.ProjectivePoint.fromAffine({ + x, + y, + }) + G1.assertValidity() + return G1 +} + +function fromG1Point(input: AffinePoint): Uint8Array { + const xBytes = setLengthLeft(bigIntToBytes(input.x), G1_ELEMENT_BYTE_LENGTH) + const yBytes = setLengthLeft(bigIntToBytes(input.y), G1_ELEMENT_BYTE_LENGTH) + + return concatBytes(xBytes, yBytes) +} + +// input: a 32-byte hex scalar Uint8Array +// output: a Noble Fr point + +function toFrPoint(input: Uint8Array): bigint { + const Fr = bn254.fields.Fr.fromBytes(input) + if (Fr >= bn254.fields.Fr.ORDER) { + return Fr % bn254.fields.Fr.ORDER + } + return Fr +} + +/** + * Converts an Uint8Array to a Noble G2 point. Raises errors if the point is not on the curve + * and (if activated) if the point is in the subgroup / order check. + * @param input Input Uint8Array. Should be 256 bytes + * @returns Noble G2 point + */ +function toG2Point(input: Uint8Array): any { + // TODO: remove any type, temporary fix due to conflicting @noble/curves versions + if (equalsBytes(input, G2_INFINITY_POINT_BYTES)) { + return bn254.G2.ProjectivePoint.ZERO + } + + const p_x_2 = input.subarray(0, G1_ELEMENT_BYTE_LENGTH) + const p_x_1 = input.subarray(G1_ELEMENT_BYTE_LENGTH, G1_ELEMENT_BYTE_LENGTH * 2) + const start2 = G1_ELEMENT_BYTE_LENGTH * 2 + const p_y_2 = input.subarray(start2, start2 + G1_ELEMENT_BYTE_LENGTH) + const p_y_1 = input.subarray(start2 + G1_ELEMENT_BYTE_LENGTH, start2 + G1_ELEMENT_BYTE_LENGTH * 2) + + for (const p of [p_x_1, p_x_2, p_y_1, p_y_2]) { + const pB = bytesToBigInt(p) + if (bn254.fields.Fp.create(pB) !== pB) { + throw new EvmError(ERROR.BN254_FP_NOT_IN_FIELD) + } + } + + const Fp2X = toFp2Point(p_x_1, p_x_2) + const Fp2Y = toFp2Point(p_y_1, p_y_2) + + const pG2 = bn254.G2.ProjectivePoint.fromAffine({ + x: Fp2X, + y: Fp2Y, + }) + + pG2.assertValidity() + + return pG2 +} + +function toFp2Point(fpXCoordinate: Uint8Array, fpYCoordinate: Uint8Array) { + if (bytesToBigInt(fpXCoordinate) >= bn254.fields.Fp2.ORDER) { + throw new EvmError(ERROR.BN254_FP_NOT_IN_FIELD) + } + if (bytesToBigInt(fpYCoordinate) >= bn254.fields.Fp2.ORDER) { + throw new EvmError(ERROR.BN254_FP_NOT_IN_FIELD) + } + + const fpBytes = concatBytes(fpXCoordinate, fpYCoordinate) + + const FP = bn254.fields.Fp2.fromBytes(fpBytes) + return FP +} + +/** + * Implementation of the `EVMBN254Interface` using the `@noble/curves` JS library, + * see https://github.com/paulmillr/noble-curves. + * + * This is the EVM default implementation. 
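For orientation on how this default wiring is exercised end to end, here is a minimal, self-contained sketch (the point values, precompile address literal and gas limit are illustrative) that calls the ECADD precompile (0x06) through `runCall` on an EVM created with the default `NobleBN254` backend:

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import {
  Address,
  bigIntToBytes,
  bytesToHex,
  concatBytes,
  hexToBytes,
  setLengthLeft,
} from '@ethereumjs/util'
import { createEVM } from '@ethereumjs/evm'

// ECADD takes two uncompressed G1 points, each encoded as 32-byte x || 32-byte y.
// Here the bn254 generator (1, 2) is added to itself.
const common = new Common({ chain: Mainnet, hardfork: Hardfork.Cancun })
const evm = await createEVM({ common }) // NobleBN254 is wired in by default

const g = concatBytes(
  setLengthLeft(bigIntToBytes(BigInt(1)), 32),
  setLengthLeft(bigIntToBytes(BigInt(2)), 32),
)
const res = await evm.runCall({
  to: new Address(hexToBytes('0x0000000000000000000000000000000000000006')),
  data: concatBytes(g, g),
  gasLimit: BigInt(100000),
})
console.log(bytesToHex(res.execResult.returnValue)) // 64-byte encoding of 2·G
```

Because the precompile only sees bytes in and bytes out, swapping the backend (for example to the WASM-based `RustBN254`) leaves this call site unchanged.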
+ */ +export class NobleBN254 implements EVMBN254Interface { + add(input: Uint8Array): Uint8Array { + const p1 = toG1Point(input.slice(0, G1_POINT_BYTE_LENGTH)) + const p2 = toG1Point(input.slice(G1_POINT_BYTE_LENGTH, G1_POINT_BYTE_LENGTH * 2)) + + const result = fromG1Point(p1.add(p2)) + return result + } + + mul(input: Uint8Array): Uint8Array { + const p1 = toG1Point(input.slice(0, G1_POINT_BYTE_LENGTH)) + const scalar = toFrPoint(input.slice(G1_POINT_BYTE_LENGTH, 96)) + + if (scalar === BIGINT_0) { + return G1_INFINITY_POINT_BYTES + } + + const result = fromG1Point(p1.multiply(scalar)) + return result + } + pairing(input: Uint8Array): Uint8Array { + // Extract the pairs from the input + const pairLength = 192 + const pairs = [] + for (let k = 0; k < input.length / pairLength; k++) { + const pairStart = pairLength * k + const G1 = toG1Point(input.subarray(pairStart, pairStart + G1_POINT_BYTE_LENGTH)) + + const g2start = pairStart + G1_POINT_BYTE_LENGTH + const G2 = toG2Point(input.subarray(g2start, g2start + G2_POINT_BYTE_LENGTH)) + + if (G1 === bn254.G1.ProjectivePoint.ZERO || G2 === bn254.G2.ProjectivePoint.ZERO) { + continue + } + + pairs.push({ g1: G1, g2: G2 }) + } + + const res = bn254.pairingBatch(pairs) + if (bn254.fields.Fp12.eql(res, bn254.fields.Fp12.ONE) === true) { + return ONE_BUFFER + } else { + return ZERO_BUFFER + } + } +} diff --git a/packages/evm/src/precompiles/bn254/rustbn.ts b/packages/evm/src/precompiles/bn254/rustbn.ts new file mode 100644 index 0000000000..a6a9877d77 --- /dev/null +++ b/packages/evm/src/precompiles/bn254/rustbn.ts @@ -0,0 +1,32 @@ +import { bytesToUnprefixedHex, hexToBytes } from '@ethereumjs/util' + +import type { EVMBN254Interface } from '../../types.js' + +/** + * Implementation of the `EVMBN254Interface` using a WASM wrapper https://github.com/ethereumjs/rustbn.js + * around the Parity fork of the Zcash bn pairing cryptography library. + * + * This can be optionally used to replace the built-in Noble implementation (`NobleBN254`) with + * a more performant WASM variant. See EVM `bn254` constructor option on how to use.
+ */ +export class RustBN254 implements EVMBN254Interface { + protected readonly _rustbn: any + + constructor(rustbn: any) { + this._rustbn = rustbn + } + + add(input: Uint8Array): Uint8Array { + const inputStr = bytesToUnprefixedHex(input) + return hexToBytes(this._rustbn.ec_add(inputStr)) + } + + mul(input: Uint8Array): Uint8Array { + const inputHex = bytesToUnprefixedHex(input) + return hexToBytes(this._rustbn.ec_mul(inputHex)) + } + pairing(input: Uint8Array): Uint8Array { + const inputStr = bytesToUnprefixedHex(input) + return hexToBytes(this._rustbn.ec_pairing(inputStr)) + } +} diff --git a/packages/evm/src/precompiles/index.ts b/packages/evm/src/precompiles/index.ts index 92d57e2f76..c2ec81b8dc 100644 --- a/packages/evm/src/precompiles/index.ts +++ b/packages/evm/src/precompiles/index.ts @@ -21,6 +21,7 @@ import { precompile11 } from './11-bls12-pairing.js' import { precompile12 } from './12-bls12-map-fp-to-g1.js' import { precompile13 } from './13-bls12-map-fp2-to-g2.js' import { MCLBLS, NobleBLS } from './bls12_381/index.js' +import { NobleBN254, RustBN254 } from './bn254/index.js' import type { PrecompileFunc, PrecompileInput } from './types.js' import type { Common } from '@ethereumjs/common' @@ -266,14 +267,14 @@ type CustomPrecompile = AddPrecompile | DeletePrecompile function getActivePrecompiles( common: Common, - customPrecompiles?: CustomPrecompile[] + customPrecompiles?: CustomPrecompile[], ): Map { const precompileMap = new Map() if (customPrecompiles) { for (const precompile of customPrecompiles) { precompileMap.set( bytesToUnprefixedHex(precompile.address.bytes), - 'function' in precompile ? precompile.function : undefined + 'function' in precompile ? precompile.function : undefined, ) } } @@ -307,9 +308,11 @@ export { getPrecompileName, MCLBLS, NobleBLS, + NobleBN254, precompileEntries, precompiles, ripemdPrecompileAddress, + RustBN254, } export type { AddPrecompile, CustomPrecompile, DeletePrecompile, PrecompileFunc, PrecompileInput } diff --git a/packages/evm/src/precompiles/util.ts b/packages/evm/src/precompiles/util.ts new file mode 100644 index 0000000000..e8dc82d306 --- /dev/null +++ b/packages/evm/src/precompiles/util.ts @@ -0,0 +1,42 @@ +import type { PrecompileInput } from './index.js' + +/** + * Checks that the length of the provided data is equal to `length`. + * + * @param opts + * @param length + * @param pName + * @returns + */ +export const equalityLengthCheck = (opts: PrecompileInput, length: number, pName: string) => { + if (opts.data.length !== length) { + if (opts._debug !== undefined) { + opts._debug( + `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length})`, + ) + } + return false + } + return true +} + +/** + * Checks that the total length of the provided data input can be subdivided into k equal parts + * with `length` (without leaving some remainder bytes). 
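To illustrate the consuming side of the relocated helpers, here is a hedged reconstruction (not the verbatim source) of the guard pattern a precompile such as 0x13 applies before doing any curve work; the error constant follows the BLS error naming used elsewhere in the package and should be treated as an assumption:

```ts
import { EvmErrorResult } from '../evm.js'
import { ERROR, EvmError } from '../exceptions.js'
import { equalityLengthCheck } from './util.js'

import type { PrecompileInput } from './types.js'

function checkMapFp2ToG2Input(opts: PrecompileInput) {
  // BLS12MAPFP2TOG2 (0x13) maps a single Fp2 element, i.e. exactly 2 * 64 = 128 input bytes
  if (!equalityLengthCheck(opts, 128, 'BLS12MAPFP2TOG2 (0x13)')) {
    return EvmErrorResult(new EvmError(ERROR.BLS_12_381_INVALID_INPUT_LENGTH), opts.gasLimit)
  }
  return undefined
}
```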
+ * + * @param opts + * @param length + * @param pName + * @returns + */ +export const moduloLengthCheck = (opts: PrecompileInput, length: number, pName: string) => { + if (opts.data.length % length !== 0) { + if (opts._debug !== undefined) { + opts._debug( + `${pName} failed: Invalid input length length=${opts.data.length} (expected: ${length}*k bytes)`, + ) + } + return false + } + return true +} diff --git a/packages/evm/src/stack.ts b/packages/evm/src/stack.ts index 0d3a7d92ce..d8ff351cb2 100644 --- a/packages/evm/src/stack.ts +++ b/packages/evm/src/stack.ts @@ -38,7 +38,7 @@ export class Stack { // Length is checked above, so pop shouldn't return undefined // First decrease current length, then read the item and return it // Note: this does thus not delete the item from the internal array - // However, the length is decreased, so it is not accessible to external observors + // However, the length is decreased, so it is not accessible to external observers return this._store[--this._len] } @@ -127,6 +127,26 @@ export class Stack { this._store[this._len++] = this._store[i] } + /** + * Swap number 1 with number 2 on the stack + * @param swap1 + * @param swap2 + */ + exchange(swap1: number, swap2: number) { + const headIndex = this._len - 1 + const exchangeIndex1 = headIndex - swap1 + const exchangeIndex2 = headIndex - swap2 + + // Stack underflow is not possible in EOF + if (exchangeIndex1 < 0 || exchangeIndex2 < 0) { + throw new EvmError(ERROR.STACK_UNDERFLOW) + } + + const cache = this._store[exchangeIndex2] + this._store[exchangeIndex2] = this._store[exchangeIndex1] + this._store[exchangeIndex1] = cache + } + /** * Returns a copy of the current stack. This represents the actual state of the stack * (not the internal state of the stack, which might have unreachable elements in it) diff --git a/packages/evm/src/types.ts b/packages/evm/src/types.ts index fa420aa964..b359d1c69f 100644 --- a/packages/evm/src/types.ts +++ b/packages/evm/src/types.ts @@ -1,5 +1,6 @@ import { zeros } from '@ethereumjs/util' +import type { EOFContainer } from './eof/container.js' import type { EvmError } from './exceptions.js' import type { InterpreterStep, RunState } from './interpreter.js' import type { Message } from './message.js' @@ -7,7 +8,12 @@ import type { AsyncDynamicGasHandler, SyncDynamicGasHandler } from './opcodes/ga import type { OpHandler } from './opcodes/index.js' import type { CustomPrecompile } from './precompiles/index.js' import type { PrecompileFunc } from './precompiles/types.js' -import type { AccessWitnessInterface, Common, EVMStateManagerInterface } from '@ethereumjs/common' +import type { + AccessWitnessInterface, + Common, + ParamsDict, + StateManagerInterface, +} from '@ethereumjs/common' import type { Account, Address, AsyncEventEmitter, PrefixedHexString } from '@ethereumjs/util' export type DeleteOpcode = { @@ -140,6 +146,7 @@ export type EVMEvents = { } export interface EVMInterface { + common: Common journal: { commit(): Promise revert(): Promise @@ -155,7 +162,7 @@ export interface EVMInterface { startReportingAccessList(): void startReportingPreimages?(): void } - stateManager: EVMStateManagerInterface + stateManager: StateManagerInterface precompiles: Map runCall(opts: EVMRunCallOpts): Promise runCode(opts: EVMRunCodeOpts): Promise @@ -181,10 +188,9 @@ export interface EVMOpts { * - [EIP-2537](https://eips.ethereum.org/EIPS/eip-2537) - BLS precompiles (removed in v4.0.0, see latest v3 release) * - [EIP-2565](https://eips.ethereum.org/EIPS/eip-2565) - ModExp gas cost * - 
[EIP-2718](https://eips.ethereum.org/EIPS/eip-2565) - Transaction Types - * - [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Save historical block hashes in state (`experimental`) + * - [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) - Serve historical block hashes from state (Prague) * - [EIP-2929](https://eips.ethereum.org/EIPS/eip-2929) - gas cost increases for state access opcodes * - [EIP-2930](https://eips.ethereum.org/EIPS/eip-2930) - Optional access list tx type - * - [EIP-3074](https://eips.ethereum.org/EIPS/eip-3074) - AUTH and AUTHCALL opcodes * - [EIP-3198](https://eips.ethereum.org/EIPS/eip-3198) - Base fee Opcode * - [EIP-3529](https://eips.ethereum.org/EIPS/eip-3529) - Reduction in refunds * - [EIP-3540](https://eips.ethereum.org/EIPS/eip-3541) - EVM Object Format (EOF) v1 (`outdated`) @@ -200,11 +206,17 @@ export interface EVMOpts { * - [EIP-4399](https://eips.ethereum.org/EIPS/eip-4399) - Supplant DIFFICULTY opcode with PREVRANDAO (Merge) * - [EIP-4788](https://eips.ethereum.org/EIPS/eip-4788) - Beacon block root in the EVM (Cancun) * - [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844) - Shard Blob Transactions (Cancun) + * - [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) - EOA code transactions (Prague) (`outdated`) + * - [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709) - Read BLOCKHASH from storage and update cost (Osaka) * - [EIP-4895](https://eips.ethereum.org/EIPS/eip-4895) - Beacon chain push withdrawals as operations (Shanghai) * - [EIP-5133](https://eips.ethereum.org/EIPS/eip-5133) - Delaying Difficulty Bomb to mid-September 2022 (Gray Glacier) * - [EIP-5656](https://eips.ethereum.org/EIPS/eip-5656) - MCOPY - Memory copying instruction (Cancun) + * - [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) - Supply validator deposits on chain (Prague) * - [EIP-6780](https://eips.ethereum.org/EIPS/eip-6780) - SELFDESTRUCT only in same transaction (Cancun) + * - [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) - Execution layer triggerable withdrawals (Prague) + * - [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) - Execution layer triggerable validator consolidations (Prague) * - [EIP-7516](https://eips.ethereum.org/EIPS/eip-7516) - BLOBBASEFEE opcode (Cancun) + * - [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) - General purpose execution layer requests (Prague) * * *Annotations:* * @@ -226,6 +238,24 @@ export interface EVMOpts { */ allowUnlimitedInitCodeSize?: boolean + /** + * EVM parameters sorted by EIP can be found in the exported `paramsEVM` dictionary, + * which is internally passed to the associated `@ethereumjs/common` instance which + * manages parameter selection based on the hardfork and EIP settings. + * + * This option allows providing a custom set of parameters. Note that parameters + * get fully overwritten, so you need to extend the default parameter dict + * to provide the full parameter set. 
+ * + * It is recommended to deep-clone the params object for this to avoid side effects: + * + * ```ts + * const params = JSON.parse(JSON.stringify(paramsEVM)) + * params['1679']['ecAddGas'] = 100 // 150 + * ``` + */ + params?: ParamsDict + /** * Override or add custom opcodes to the EVM instruction set * These custom opcodes are EIP-agnostic and are always statically added @@ -257,9 +287,9 @@ export interface EVMOpts { customPrecompiles?: CustomPrecompile[] /** - * For the EIP-2935 BLS precompiles, the native JS `@noble/curves` + * For the EIP-2537 BLS Precompiles, the native JS `@noble/curves` * https://github.com/paulmillr/noble-curves BLS12-381 curve implementation - * is used (see `noble.ts` file in the `precompiles` folder). + * is used (see `noble.ts` file in the `precompiles/bls12_381/` folder). * * To use an alternative implementation this option can be used by passing * in a wrapper implementation integrating the desired library and adhering @@ -273,11 +303,34 @@ export interface EVMOpts { * import * as mcl from 'mcl-wasm' * * await mcl.init(mcl.BLS12_381) - * const evm = await EVM.create({ bls: new MCLBLS(mcl) }) + * const evm = await createEVM({ bls: new MCLBLS(mcl) }) * ``` */ bls?: EVMBLSInterface + /** + * For the EIP-196/EIP-197 BN254 (alt_BN128) EC precompiles, the native JS `@noble/curves` + * https://github.com/paulmillr/noble-curves BN254 curve implementation + * is used (see `noble.ts` file in the `precompiles/bn254/` folder). + * + * To use an alternative implementation this option can be used by passing + * in a wrapper implementation integrating the desired library and adhering + * to the `EVMBN254Interface` specification. + * + * An interface for a WASM wrapper https://github.com/ethereumjs/rustbn.js around the + * Parity fork of the Zcash bn pairing cryptography library is shipped with this library + * which can be used as follows (with `rustbn.js` being explicitly added to the set of + * dependencies): + * + * ```ts + * import { initRustBN } from 'rustbn-wasm' + * + * const bn254 = await initRustBN() + * const evm = await createEVM({ bn254: new RustBN254(bn254) }) + * ``` + */ + bn254?: EVMBN254Interface + /* * The EVM comes with a basic dependency-minimized `SimpleStateManager` implementation * which serves most code execution use cases and which is included in the @@ -287,7 +340,7 @@ export interface EVMOpts { * implementations for different needs (MPT-tree backed, RPC, experimental verkle) * which can be used by this option as a replacement. */ - stateManager?: EVMStateManagerInterface + stateManager?: StateManagerInterface /** * @@ -298,6 +351,13 @@ export interface EVMOpts { * */ profiler?: EVMProfilerOpts + + /** + * When running the EVM with PoA consensus, the `cliqueSigner` function from the `@ethereumjs/block` class + * must be provided along with a `BlockHeader` so that the coinbase can be correctly retrieved when the + * `Interpreter.getBlockCoinbase` method is called. 
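A minimal sketch of the new option; the fixed zero address stands in for real clique signer recovery (normally an `ecrecover` over the header's extraData seal, e.g. via the clique helpers in `@ethereumjs/block`), and the hardfork choice is illustrative:

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import { createZeroAddress } from '@ethereumjs/util'
import { createEVM } from '@ethereumjs/evm'

// Any `(header) => Address` function satisfies the option; the EVM only calls it
// when the interpreter needs the block coinbase under PoA consensus.
const common = new Common({ chain: Mainnet, hardfork: Hardfork.London })
const evm = await createEVM({
  common,
  cliqueSigner: () => createZeroAddress(),
})
```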
+ */ + cliqueSigner?: (header: Block['header']) => Address } /** @@ -357,6 +417,10 @@ export interface ExecResult { blobGasUsed?: bigint } +/** + * High level wrapper for BLS libraries used + * for the BLS precompiles + */ export type EVMBLSInterface = { init?(): void addG1(input: Uint8Array): Uint8Array @@ -370,6 +434,16 @@ export type EVMBLSInterface = { pairingCheck(input: Uint8Array): Uint8Array } +/** + * High level wrapper for BN254 (alt_BN128) libraries + * used for the BN254 (alt_BN128) EC precompiles + */ +export type EVMBN254Interface = { + add: (input: Uint8Array) => Uint8Array + mul: (input: Uint8Array) => Uint8Array + pairing: (input: Uint8Array) => Uint8Array +} + /** * Log that the contract emits. */ @@ -378,7 +452,6 @@ export type Log = [address: Uint8Array, topics: Uint8Array[], data: Uint8Array] export type Block = { header: { number: bigint - cliqueSigner(): Address coinbase: Address timestamp: bigint difficulty: bigint @@ -421,11 +494,10 @@ export class DefaultBlockchain implements Blockchain { } } -/** - * The BN128 curve package (`rustbn-wasm`) - */ -export interface bn128 { - ec_pairing: (input_str: string) => PrefixedHexString - ec_add: (input_str: string) => PrefixedHexString - ec_mul: (input_hex: string) => PrefixedHexString +// EOF type which holds the execution-related data for EOF +export type EOFEnv = { + container: EOFContainer + eofRunState: { + returnStack: number[] + } } diff --git a/packages/evm/test/asyncEvents.spec.ts b/packages/evm/test/asyncEvents.spec.ts index 88972a567e..521969adf6 100644 --- a/packages/evm/test/asyncEvents.spec.ts +++ b/packages/evm/test/asyncEvents.spec.ts @@ -1,13 +1,14 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { Address, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/index.js' +import { createEVM } from '../src/index.js' + describe('async events', () => { it('should work', async () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Constantinople }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Constantinople }) + const evm = await createEVM({ common, }) evm.events.on('step', async (event, next) => { diff --git a/packages/evm/test/blobVersionedHashes.spec.ts b/packages/evm/test/blobVersionedHashes.spec.ts index 4608c571a6..281cb5d842 100644 --- a/packages/evm/test/blobVersionedHashes.spec.ts +++ b/packages/evm/test/blobVersionedHashes.spec.ts @@ -1,8 +1,15 @@ import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' -import { Account, Address, bytesToHex, hexToBytes, unpadBytes } from '@ethereumjs/util' +import { + Account, + Address, + bytesToHex, + createAddressFromString, + hexToBytes, + unpadBytes, +} from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/index.js' +import { createEVM } from '../src/index.js' import type { EVMRunCallOpts } from '../src/types.js' import type { PrefixedHexString } from '@ethereumjs/util' @@ -15,7 +22,7 @@ describe('BLOBHASH / access blobVersionedHashes in calldata', () => { chain: 'custom', hardfork: Hardfork.Cancun, }) - const evm = await EVM.create({ + const evm = await createEVM({ common, }) @@ -32,7 +39,7 @@ describe('BLOBHASH / access blobVersionedHashes in calldata', () => { assert.equal( 
bytesToHex(unpadBytes(res.execResult.returnValue)), '0xab', - 'retrieved correct versionedHash from runState' + 'retrieved correct versionedHash from runState', ) }) }) @@ -45,13 +52,13 @@ describe(`BLOBHASH: access blobVersionedHashes within contract calls`, () => { chain: 'custom', hardfork: Hardfork.Cancun, }) - const evm = await EVM.create({ + const evm = await createEVM({ common, }) const getBlobHasIndexCode = '0x60004960005260206000F3' const contractAddress = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // contract address - await evm.stateManager.putContractCode(contractAddress, hexToBytes(getBlobHasIndexCode)) // setup the contract code + await evm.stateManager.putCode(contractAddress, hexToBytes(getBlobHasIndexCode)) // setup the contract code const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address await evm.stateManager.putAccount(caller, new Account(BigInt(0), BigInt(0x11111111))) // give the calling account a big balance so we don't run out of funds @@ -81,7 +88,7 @@ describe(`BLOBHASH: access blobVersionedHashes within contract calls`, () => { assert.equal( bytesToHex(unpadBytes(res.execResult.returnValue)), '0xab', - `retrieved correct versionedHash from runState through callCode=${callCode}` + `retrieved correct versionedHash from runState through callCode=${callCode}`, ) } }) @@ -95,7 +102,7 @@ describe(`BLOBHASH: access blobVersionedHashes in a CREATE/CREATE2 frame`, () => chain: 'custom', hardfork: Hardfork.Cancun, }) - const evm = await EVM.create({ + const evm = await createEVM({ common, }) @@ -131,13 +138,13 @@ describe(`BLOBHASH: access blobVersionedHashes in a CREATE/CREATE2 frame`, () => } const res = await evm.runCall(runCallArgs) - const address = Address.fromString(bytesToHex(res.execResult.returnValue.slice(12))) - const code = await evm.stateManager.getContractCode(address) + const address = createAddressFromString(bytesToHex(res.execResult.returnValue.slice(12))) + const code = await evm.stateManager.getCode(address) assert.equal( bytesToHex(code), '0x' + 'ab'.padStart(64, '0'), // have to padStart here, since `BLOBHASH` will push 32 bytes on stack - `retrieved correct versionedHash from runState through createOP=${createOP}` + `retrieved correct versionedHash from runState through createOP=${createOP}`, ) } }) diff --git a/packages/evm/test/customCrypto.spec.ts b/packages/evm/test/customCrypto.spec.ts index 43a3a5a5d1..7ce574ccfb 100644 --- a/packages/evm/test/customCrypto.spec.ts +++ b/packages/evm/test/customCrypto.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Mainnet } from '@ethereumjs/common' import { bytesToHex, concatBytes, @@ -9,8 +9,7 @@ import { } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/evm.js' -import { getActivePrecompiles } from '../src/index.js' +import { createEVM, getActivePrecompiles } from '../src/index.js' describe('custom crypto', () => { it('should use custom sha256 function', async () => { @@ -22,8 +21,8 @@ describe('custom crypto', () => { sha256: customSha256, } const msg = Uint8Array.from([0, 1, 2, 3]) - const common = new Common({ chain: Chain.Mainnet, customCrypto }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, customCrypto }) + const evm = await createEVM({ common }) const addressStr = '0000000000000000000000000000000000000002' const SHA256 = getActivePrecompiles(common).get(addressStr)! 
const result = await SHA256({ @@ -38,15 +37,15 @@ describe('custom crypto', () => { it('should use custom ecrecover function', async () => { const customEcrecover = (_msg: Uint8Array) => { return hexToBytes( - '0x84b2586da9b582d3cb260e8fd136129c734f3c80453f48a68e8217ea0b81e08342520f318d202f27a548ad8d3f814ca76d0ee621de2cc510c29e2db4d4f39418' + '0x84b2586da9b582d3cb260e8fd136129c734f3c80453f48a68e8217ea0b81e08342520f318d202f27a548ad8d3f814ca76d0ee621de2cc510c29e2db4d4f39418', ) } const customCrypto = { ecrecover: customEcrecover, } const msg = concatBytes(randomBytes(32), setLengthLeft(intToBytes(27), 32), randomBytes(32)) - const common = new Common({ chain: Chain.Mainnet, customCrypto }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, customCrypto }) + const evm = await createEVM({ common }) const addressStr = '0000000000000000000000000000000000000001' const ECRECOVER = getActivePrecompiles(common).get(addressStr)! const result = await ECRECOVER({ @@ -58,7 +57,7 @@ describe('custom crypto', () => { assert.equal( bytesToHex(result.returnValue), '0x00000000000000000000000063304c5c6884567b84b18f5bc5774d829a32d25d', - 'used custom ecrecover hashing function' + 'used custom ecrecover hashing function', ) }) }) diff --git a/packages/evm/test/customOpcodes.spec.ts b/packages/evm/test/customOpcodes.spec.ts index 52a5551c4f..753743c78d 100644 --- a/packages/evm/test/customOpcodes.spec.ts +++ b/packages/evm/test/customOpcodes.spec.ts @@ -1,7 +1,7 @@ import { equalsBytes, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/evm.js' +import { createEVM } from '../src/index.js' import type { InterpreterStep, RunState } from '../src/interpreter.js' import type { AddOpcode } from '../src/types.js' @@ -25,7 +25,7 @@ describe('VM: custom opcodes', () => { } it('should add custom opcodes to the EVM', async () => { - const evm = await EVM.create({ customOpcodes: [testOpcode] }) + const evm = await createEVM({ customOpcodes: [testOpcode] }) const gas = 123456 let correctOpcodeName = false evm.events.on('step', (e: InterpreterStep) => { @@ -43,7 +43,7 @@ describe('VM: custom opcodes', () => { }) it('should delete opcodes from the EVM', async () => { - const evm = await EVM.create({ + const evm = await createEVM({ customOpcodes: [{ opcode: 0x20 }], // deletes KECCAK opcode }) const gas = BigInt(123456) @@ -57,7 +57,7 @@ describe('VM: custom opcodes', () => { it('should not override default opcodes', async () => { // This test ensures that always the original opcode map is used // Thus, each time you recreate a EVM, it is in a clean state - const evm = await EVM.create({ + const evm = await createEVM({ customOpcodes: [{ opcode: 0x01 }], // deletes ADD opcode }) const gas = BigInt(123456) @@ -67,7 +67,7 @@ describe('VM: custom opcodes', () => { }) assert.ok(res.executionGasUsed === gas, 'successfully deleted opcode') - const evmDefault = await EVM.create() + const evmDefault = await createEVM() // PUSH 04 // PUSH 01 @@ -86,7 +86,7 @@ describe('VM: custom opcodes', () => { it('should override opcodes in the EVM', async () => { testOpcode.opcode = 0x20 // Overrides KECCAK - const evm = await EVM.create({ customOpcodes: [testOpcode] }) + const evm = await createEVM({ customOpcodes: [testOpcode] }) const gas = 123456 const res = await evm.runCode({ code: hexToBytes('0x20'), @@ -109,7 +109,7 @@ describe('VM: custom opcodes', () => { }, } - const evm = await EVM.create({ customOpcodes: [testOpcode] }) + const evm = await 
createEVM({ customOpcodes: [testOpcode] }) evm.events.on('beforeMessage', () => {}) evm.events.on('beforeMessage', () => {}) const evmCopy = evm.shallowCopy() @@ -117,18 +117,18 @@ describe('VM: custom opcodes', () => { assert.deepEqual( (evmCopy as any)._customOpcodes, (evmCopy as any)._customOpcodes, - 'evm.shallowCopy() successfully copied customOpcodes option' + 'evm.shallowCopy() successfully copied customOpcodes option', ) assert.equal( evm.events.listenerCount('beforeMessage'), 2, - 'original EVM instance should have two listeners' + 'original EVM instance should have two listeners', ) assert.equal( evmCopy!.events!.listenerCount('beforeMessage'), 0, - 'copied EVM instance should have zero listeners' + 'copied EVM instance should have zero listeners', ) }) }) diff --git a/packages/evm/test/customPrecompiles.spec.ts b/packages/evm/test/customPrecompiles.spec.ts index 244e0d6442..e74f13a39a 100644 --- a/packages/evm/test/customPrecompiles.spec.ts +++ b/packages/evm/test/customPrecompiles.spec.ts @@ -1,9 +1,8 @@ -import { Address, hexToBytes, utf8ToBytes } from '@ethereumjs/util' +import { Address, createZeroAddress, hexToBytes, utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/evm.js' +import { type PrecompileInput, createEVM } from '../src/index.js' -import type { PrecompileInput } from '../src/index.js' import type { ExecResult } from '../src/types.js' const sender = new Address(hexToBytes(`0x${'44'.repeat(20)}`)) @@ -28,16 +27,16 @@ function customPrecompileNoInput(): ExecResult { describe('EVM -> custom precompiles', () => { it('should work on precompiles without input arguments', async () => { - const EVMOverride = await EVM.create({ + const EVMOverride = await createEVM({ customPrecompiles: [ { - address: Address.zero(), + address: createZeroAddress(), function: customPrecompileNoInput, }, ], }) const result = await EVMOverride.runCall({ - to: Address.zero(), + to: createZeroAddress(), gasLimit: BigInt(30000), data: utf8ToBytes(''), caller: sender, @@ -47,7 +46,7 @@ describe('EVM -> custom precompiles', () => { assert.equal(result.execResult.executionGasUsed, expectedGas, 'gas used is correct') }) it('should override existing precompiles', async () => { - const EVMOverride = await EVM.create({ + const EVMOverride = await createEVM({ customPrecompiles: [ { address: shaAddress, @@ -67,7 +66,7 @@ describe('EVM -> custom precompiles', () => { }) it('should delete existing precompiles', async () => { - const EVMOverride = await EVM.create({ + const EVMOverride = await createEVM({ customPrecompiles: [ { address: shaAddress, @@ -85,7 +84,7 @@ describe('EVM -> custom precompiles', () => { }) it('should add precompiles', async () => { - const EVMOverride = await EVM.create({ + const EVMOverride = await createEVM({ customPrecompiles: [ { address: newPrecompile, @@ -104,14 +103,14 @@ describe('EVM -> custom precompiles', () => { }) it('should not persist changes to precompiles', async () => { - let EVMSha = await EVM.create() + let EVMSha = await createEVM() const shaResult = await EVMSha.runCall({ to: shaAddress, gasLimit: BigInt(30000), data: hexToBytes('0x'), caller: sender, }) - const EVMOverride = await EVM.create({ + const EVMOverride = await createEVM({ customPrecompiles: [ { address: shaAddress, @@ -128,7 +127,7 @@ describe('EVM -> custom precompiles', () => { // sanity: check we have overridden assert.deepEqual(result.execResult.returnValue, expectedReturn, 'return value is correct') 
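For completeness, a small self-contained sketch of registering and calling a custom precompile (the echo behavior, target address and flat gas charge are illustrative):

```ts
import { Address, hexToBytes, utf8ToBytes } from '@ethereumjs/util'
import { createEVM, type PrecompileInput } from '@ethereumjs/evm'

// Hypothetical precompile that echoes its calldata back for a flat fee
function echoPrecompile(opts: PrecompileInput) {
  return {
    executionGasUsed: BigInt(100),
    returnValue: opts.data,
  }
}

const echoAddress = new Address(hexToBytes(`0x${'00'.repeat(19)}f0`))
const evm = await createEVM({
  customPrecompiles: [{ address: echoAddress, function: echoPrecompile }],
})

const res = await evm.runCall({
  to: echoAddress,
  gasLimit: BigInt(30000),
  data: utf8ToBytes('hello'),
})
// res.execResult.returnValue echoes the calldata, res.execResult.executionGasUsed === 100n
```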
assert.ok(result.execResult.executionGasUsed === expectedGas, 'gas used is correct') - EVMSha = await EVM.create() + EVMSha = await createEVM() const shaResult2 = await EVMSha.runCall({ to: shaAddress, gasLimit: BigInt(30000), @@ -138,16 +137,16 @@ describe('EVM -> custom precompiles', () => { assert.deepEqual( shaResult.execResult.returnValue, shaResult2.execResult.returnValue, - 'restored sha precompile - returndata correct' + 'restored sha precompile - returndata correct', ) assert.equal( shaResult.execResult.executionGasUsed, shaResult2.execResult.executionGasUsed, - 'restored sha precompile - gas correct' + 'restored sha precompile - gas correct', ) }) - it('shold copy custom precompiles', async () => { - const evm = await EVM.create({ + it('should copy custom precompiles', async () => { + const evm = await createEVM({ customPrecompiles: [ { address: shaAddress, @@ -159,7 +158,7 @@ describe('EVM -> custom precompiles', () => { assert.deepEqual( (evm as any)._customPrecompiles, (evmCopy as any)._customPrecompiles, - 'evm.shallowCopy() successfully copied customPrecompiles option' + 'evm.shallowCopy() successfully copied customPrecompiles option', ) }) }) diff --git a/packages/evm/test/eips/eip-3860.spec.ts b/packages/evm/test/eips/eip-3860.spec.ts index e46a3bd6d0..09a38485c1 100644 --- a/packages/evm/test/eips/eip-3860.spec.ts +++ b/packages/evm/test/eips/eip-3860.spec.ts @@ -1,8 +1,15 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address, concatBytes, equalsBytes, hexToBytes, privateToAddress } from '@ethereumjs/util' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { + Address, + concatBytes, + createAddressFromString, + equalsBytes, + hexToBytes, + privateToAddress, +} from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../../src/index.js' +import { createEVM } from '../../src/index.js' const pkey = hexToBytes(`0x${'20'.repeat(32)}`) const sender = new Address(privateToAddress(pkey)) @@ -10,11 +17,11 @@ const sender = new Address(privateToAddress(pkey)) describe('EIP 3860 tests', () => { it('code exceeds max initcode size', async () => { const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [3860], }) - const evm = await EVM.create({ + const evm = await createEVM({ common, }) @@ -29,47 +36,47 @@ describe('EIP 3860 tests', () => { // (since memory which is not allocated/resized to yet is always defaulted to 0) data: concatBytes( hexToBytes( - '0x7F6000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060005260206000F3' + '0x7F6000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060005260206000F3', ), - buffer + buffer, ), } const result = await evm.runCall(runCallArgs) assert.ok( (result.execResult.exceptionError?.error as string) === 'initcode exceeds max initcode size', - 'initcode exceeds max size' + 'initcode exceeds max size', ) }) it('ensure EIP-3860 gas is applied on CREATE calls', async () => { // Transaction/Contract data taken from https://github.com/ethereum/tests/pull/990 const commonWith3860 = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [3860], }) const commonWithout3860 = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [], }) - const caller = Address.fromString('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') - const evm = await EVM.create({ + const caller = 
createAddressFromString('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') + const evm = await createEVM({ common: commonWith3860, }) - const evmWithout3860 = await EVM.create({ + const evmWithout3860 = await createEVM({ common: commonWithout3860, }) - const contractFactory = Address.fromString('0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b') + const contractFactory = createAddressFromString('0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b') const contractAccount = await evm.stateManager.getAccount(contractFactory) await evm.stateManager.putAccount(contractFactory, contractAccount!) await evmWithout3860.stateManager.putAccount(contractFactory, contractAccount!) const factoryCode = hexToBytes( - '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a8160006000f05a8203600a55806000556001600155505050' + '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a8160006000f05a8203600a55806000556001600155505050', ) - await evm.stateManager.putContractCode(contractFactory, factoryCode) - await evmWithout3860.stateManager.putContractCode(contractFactory, factoryCode) + await evm.stateManager.putCode(contractFactory, factoryCode) + await evmWithout3860.stateManager.putCode(contractFactory, factoryCode) const data = hexToBytes('0x000000000000000000000000000000000000000000000000000000000000c000') const runCallArgs = { from: caller, @@ -81,39 +88,39 @@ describe('EIP 3860 tests', () => { const res2 = await evmWithout3860.runCall(runCallArgs) assert.ok( res.execResult.executionGasUsed > res2.execResult.executionGasUsed, - 'execution gas used is higher with EIP 3860 active' + 'execution gas used is higher with EIP 3860 active', ) }) it('ensure EIP-3860 gas is applied on CREATE2 calls', async () => { // Transaction/Contract data taken from https://github.com/ethereum/tests/pull/990 const commonWith3860 = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [3860], }) const commonWithout3860 = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [], }) - const caller = Address.fromString('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') - const evm = await EVM.create({ + const caller = createAddressFromString('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') + const evm = await createEVM({ common: commonWith3860, }) - const evmWithout3860 = await EVM.create({ + const evmWithout3860 = await createEVM({ common: commonWithout3860, }) - const contractFactory = Address.fromString('0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b') + const contractFactory = createAddressFromString('0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b') const contractAccount = await evm.stateManager.getAccount(contractFactory) await evm.stateManager.putAccount(contractFactory, contractAccount!) await evmWithout3860.stateManager.putAccount(contractFactory, contractAccount!) 
const factoryCode = hexToBytes( - '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a60008260006000f55a8203600a55806000556001600155505050' + '0x7f600a80600080396000f3000000000000000000000000000000000000000000006000526000355a60008260006000f55a8203600a55806000556001600155505050', ) - await evm.stateManager.putContractCode(contractFactory, factoryCode) - await evmWithout3860.stateManager.putContractCode(contractFactory, factoryCode) + await evm.stateManager.putCode(contractFactory, factoryCode) + await evmWithout3860.stateManager.putCode(contractFactory, factoryCode) const data = hexToBytes('0x000000000000000000000000000000000000000000000000000000000000c000') const runCallArgs = { from: caller, @@ -125,17 +132,17 @@ describe('EIP 3860 tests', () => { const res2 = await evmWithout3860.runCall(runCallArgs) assert.ok( res.execResult.executionGasUsed > res2.execResult.executionGasUsed, - 'execution gas used is higher with EIP 3860 active' + 'execution gas used is higher with EIP 3860 active', ) }) it('code exceeds max initcode size: allowUnlimitedInitCodeSize active', async () => { const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [3860], }) - const evm = await EVM.create({ + const evm = await createEVM({ common, allowUnlimitedInitCodeSize: true, }) @@ -150,35 +157,35 @@ describe('EIP 3860 tests', () => { // It tries to deploy a contract too large, where the code is all zeros // (since memory which is not allocated/resized to yet is always defaulted to 0) data: concatBytes( - hexToBytes(`0x${'00'.repeat(Number(common.param('vm', 'maxInitCodeSize')) + 1)}`), - bytes + hexToBytes(`0x${'00'.repeat(Number(common.param('maxInitCodeSize')) + 1)}`), + bytes, ), } const result = await evm.runCall(runCallArgs) assert.ok( result.execResult.exceptionError === undefined, - 'succesfully created a contract with data size > MAX_INITCODE_SIZE and allowUnlimitedInitCodeSize active' + 'successfully created a contract with data size > MAX_INITCODE_SIZE and allowUnlimitedInitCodeSize active', ) }) it('CREATE with MAX_INITCODE_SIZE+1, allowUnlimitedContractSize active', async () => { const commonWith3860 = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, eips: [3860], }) - const caller = Address.fromString('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') + const caller = createAddressFromString('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') for (const code of ['F0', 'F5']) { - const evm = await EVM.create({ + const evm = await createEVM({ common: commonWith3860, allowUnlimitedInitCodeSize: true, }) - const evmDisabled = await EVM.create({ + const evmDisabled = await createEVM({ common: commonWith3860, allowUnlimitedInitCodeSize: false, }) - const contractFactory = Address.fromString('0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b') + const contractFactory = createAddressFromString('0xb94f5374fce5edbc8e2a8697c15331677e6ebf0b') const contractAccount = await evm.stateManager.getAccount(contractFactory) await evm.stateManager.putAccount(contractFactory, contractAccount!) await evmDisabled.stateManager.putAccount(contractFactory, contractAccount!) 
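The gas difference asserted in these tests follows directly from the EIP-3860 charging rule; as a back-of-the-envelope check:

```ts
// EIP-3860: initcode is capped at MAX_INITCODE_SIZE = 2 * MAX_CODE_SIZE = 49152 bytes and
// CREATE/CREATE2 charge an extra 2 gas per 32-byte word of initcode, which is why the
// EIP-3860 enabled EVM above reports a higher executionGasUsed than the one without it.
const MAX_INITCODE_SIZE = 49152
const initcodeWordCost = (initcodeLength: number): bigint =>
  BigInt(2) * BigInt(Math.ceil(initcodeLength / 32))

console.log(initcodeWordCost(MAX_INITCODE_SIZE)) // 3072n extra gas for maximum-size initcode
```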
@@ -187,11 +194,11 @@ describe('EIP 3860 tests', () => { // Attempts to create a contract of X size // (the initcode of this contract is just zeros, so STOP opcode // It stores the topmost stack item of this CREATE(2) at slot 0 - // This is either the contract address if it was succesful, or 0 in case of error + // This is either the contract address if it was successful, or 0 in case of error const factoryCode = hexToBytes(`0x600060003560006000${code}600055`) - await evm.stateManager.putContractCode(contractFactory, factoryCode) - await evmDisabled.stateManager.putContractCode(contractFactory, factoryCode) + await evm.stateManager.putCode(contractFactory, factoryCode) + await evmDisabled.stateManager.putCode(contractFactory, factoryCode) const runCallArgs = { from: caller, @@ -204,19 +211,16 @@ describe('EIP 3860 tests', () => { await evmDisabled.runCall(runCallArgs) const key0 = hexToBytes(`0x${'00'.repeat(32)}`) - const storageActive = await evm.stateManager.getContractStorage(contractFactory, key0) - const storageInactive = await evmDisabled.stateManager.getContractStorage( - contractFactory, - key0 - ) + const storageActive = await evm.stateManager.getStorage(contractFactory, key0) + const storageInactive = await evmDisabled.stateManager.getStorage(contractFactory, key0) assert.ok( !equalsBytes(storageActive, new Uint8Array()), - 'created contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=true' + 'created contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=true', ) assert.ok( equalsBytes(storageInactive, new Uint8Array()), - 'did not create contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=false' + 'did not create contract with MAX_INITCODE_SIZE + 1 length, allowUnlimitedInitCodeSize=false', ) // gas check @@ -236,7 +240,7 @@ describe('EIP 3860 tests', () => { assert.ok( res.execResult.executionGasUsed > res2.execResult.executionGasUsed, - 'charged initcode analysis gas cost on both allowUnlimitedCodeSize=true, allowUnlimitedInitCodeSize=false' + 'charged initcode analysis gas cost on both allowUnlimitedCodeSize=true, allowUnlimitedInitCodeSize=false', ) } }) diff --git a/packages/evm/test/eips/eip-4200.spec.ts b/packages/evm/test/eips/eip-4200.spec.ts new file mode 100644 index 0000000000..98072f07ce --- /dev/null +++ b/packages/evm/test/eips/eip-4200.spec.ts @@ -0,0 +1,40 @@ +import { hexToBytes } from '@ethereumjs/util' +import { assert, describe, it } from 'vitest' + +import { default as testData } from '../../../ethereum-tests/EOFTests/EIP4200/validInvalid.json' +import { validateEOF } from '../../src/eof/container.js' +import { createEVM } from '../../src/index.js' + +import { getCommon } from './eof-utils.js' + +async function getEVM() { + const common = getCommon() + const evm = createEVM({ + common, + }) + return evm +} + +describe('EIP 4200 tests', async () => { + const evm = await getEVM() + for (const key in testData.validInvalid.vectors) { + it(`Container validation tests ${key}`, () => { + //@ts-ignore + const input = testData.validInvalid.vectors[key] + const code = hexToBytes(input.code) + + const expected = input.results.Prague.result + const _exception = input.results.Prague.exception + + if (expected === true) { + validateEOF(code, evm) + } else { + //console.log(input.code) + assert.throws(() => { + // TODO verify that the correct error is thrown + validateEOF(code, evm) + }) + } + }) + } +}) diff --git a/packages/evm/test/eips/eip-5450.spec.ts b/packages/evm/test/eips/eip-5450.spec.ts new file mode 100644 
index 0000000000..57c4eac593 --- /dev/null +++ b/packages/evm/test/eips/eip-5450.spec.ts @@ -0,0 +1,39 @@ +import { hexToBytes } from '@ethereumjs/util' +import { assert, describe, it } from 'vitest' + +import { default as testData } from '../../../ethereum-tests/EOFTests/EIP5450/validInvalid.json' +import { validateEOF } from '../../src/eof/container.js' +import { createEVM } from '../../src/index.js' + +import { getCommon } from './eof-utils.js' + +async function getEVM() { + const common = getCommon() + const evm = createEVM({ + common, + }) + return evm +} + +describe('EIP 5450 tests', async () => { + const evm = await getEVM() + for (const key in testData.validInvalid.vectors) { + it(`Container validation tests ${key}`, () => { + //@ts-ignore + const input = testData.validInvalid.vectors[key] + const code = hexToBytes(input.code) + + const expected = input.results.Prague.result + const _exception = input.results.Prague.exception + + if (expected === true) { + validateEOF(code, evm) + } else { + assert.throws(() => { + // TODO verify that the correct error is thrown + validateEOF(code, evm) + }) + } + }) + } +}) diff --git a/packages/evm/test/eips/eip-5656.spec.ts b/packages/evm/test/eips/eip-5656.spec.ts index cccb2639d1..185aec827e 100644 --- a/packages/evm/test/eips/eip-5656.spec.ts +++ b/packages/evm/test/eips/eip-5656.spec.ts @@ -1,8 +1,8 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../../src/index.js' +import { createEVM } from '../../src/index.js' import type { PrefixedHexString } from '@ethereumjs/util' @@ -80,12 +80,12 @@ describe('should test mcopy', () => { bytecode += MCOPY + STOP const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Shanghai, eips: [5656], }) - const evm = await EVM.create({ + const evm = await createEVM({ common, }) diff --git a/packages/evm/test/eips/eof-header-validation.ts b/packages/evm/test/eips/eof-header-validation.ts new file mode 100644 index 0000000000..5b51da3ead --- /dev/null +++ b/packages/evm/test/eips/eof-header-validation.ts @@ -0,0 +1,93 @@ +import { hexToBytes } from '@ethereumjs/util' +import * as dir from 'node-dir' +import path from 'path' +import { assert, describe, it } from 'vitest' + +import { EOFContainerMode, validateEOF } from '../../src/eof/container.js' +import { ContainerSectionType } from '../../src/eof/verify.js' +import { createEVM } from '../../src/index.js' + +import { getCommon } from './eof-utils.js' + +// Rename this test dir to the location of EOF header tests +// To test, use `npx vitest run ./scripts/eof-header-validation.spec.ts +const testDir = path.resolve('../ethereum-tests/EOFStuff/fixtures/eof_tests') + +async function getEVM() { + const common = getCommon() + const evm = createEVM({ + common, + }) + return evm +} + +await new Promise((resolve, reject) => { + const finishedCallback = (err: Error | undefined) => { + if (err) { + reject(err) + return + } + resolve() + } + const fileCallback = async ( + err: Error | undefined, + content: string | Uint8Array, + fileName: string, + next: Function, + ) => { + if (err) { + reject(err) + return + } + const name = path.parse(fileName).name + describe(`EOF Header validation tests - ${name}`, async () => { + const testData = JSON.parse(content) + const evm = await getEVM() + for (const key in testData) { + it(`Test ${key}`, () 
=> { + //@ts-ignore + const input = testData[key] + for (const testKey in input.vectors) { + const test = input.vectors[testKey] + + const code = hexToBytes(test.code) + + const expected = test.results.Prague.result + const _exception = test.results.Prague.exception + + let containerSectionType = ContainerSectionType.RuntimeCode + let eofContainerMode = EOFContainerMode.Default + + if (test.containerKind !== undefined) { + if (test.containerKind === 'INITCODE') { + containerSectionType = ContainerSectionType.InitCode + eofContainerMode = EOFContainerMode.Initmode + } else { + throw new Error('unknown container kind: ' + test.containerKind) + } + } + + if (expected === true) { + validateEOF(code, evm, containerSectionType, eofContainerMode) + } else { + assert.throws(() => { + // TODO verify that the correct error is thrown + validateEOF(code, evm, containerSectionType, eofContainerMode) + }) + } + } + }) + } + }) + + next() + } + dir.readFiles( + testDir, + { + match: /.json$/, + }, + fileCallback, + finishedCallback, + ) +}) diff --git a/packages/evm/test/eips/eof-runner.spec.ts b/packages/evm/test/eips/eof-runner.spec.ts new file mode 100644 index 0000000000..6cfa643585 --- /dev/null +++ b/packages/evm/test/eips/eof-runner.spec.ts @@ -0,0 +1,41 @@ +import { Account, Address, hexToBytes } from '@ethereumjs/util' +import { assert, describe, it } from 'vitest' + +import { createEVM } from '../../src/index.js' + +import { getCommon } from './eof-utils.js' + +async function getEVM() { + const common = getCommon() + const evm = createEVM({ + common, + }) + return evm +} + +// Note: currently 0xE3 (RETF) and 0xE4 (JUMPF) need to be added to the valid opcodes list, otherwise 1 test will fail + +describe('EOF: should run a simple contract', async () => { + it('should run without failing', async () => { + const evm = await getEVM() + const code = hexToBytes('0xef000101000402000100030400010000800001305000ef') + + const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address + const contractAddress = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // contract address + + await evm.stateManager.putCode(contractAddress, code) + await evm.stateManager.putAccount(caller, new Account(BigInt(0), BigInt(0x11111111))) + + const runCallArgs = { + caller, + gasLimit: BigInt(0xffff), + to: contractAddress, + } + + const result = await evm.runCall(runCallArgs) + + // The code which is being ran should run ADDRESS POP STOP + // This costs 4 gas + assert.ok(result.execResult.executionGasUsed === BigInt(4)) + }) +}) diff --git a/packages/evm/test/eips/eof-utils.ts b/packages/evm/test/eips/eof-utils.ts new file mode 100644 index 0000000000..56b01b31ec --- /dev/null +++ b/packages/evm/test/eips/eof-utils.ts @@ -0,0 +1,9 @@ +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' + +export const getCommon = () => { + return new Common({ + hardfork: Hardfork.Prague, + eips: [663, 3540, 3670, 4200, 4750, 5450, 6206, 7069, 7480, 7620, 7692, 7698], + chain: Mainnet, + }) +} diff --git a/packages/evm/test/eof.spec.ts b/packages/evm/test/eof.spec.ts deleted file mode 100644 index 4d1bc33d62..0000000000 --- a/packages/evm/test/eof.spec.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { bytesToUnprefixedHex, hexToBytes } from '@ethereumjs/util' -import { assert, describe, it } from 'vitest' - -import { getEOFCode } from '../src/eof.js' - -import type { PrefixedHexString } from '@ethereumjs/util' - -function generateEOFCode(code: string): PrefixedHexString { - 
const len = (code.length / 2).toString(16).padStart(4, '0') - return `0xEF000101${len}00${code}` -} - -function generateInvalidEOFCode(code: string): PrefixedHexString { - const len = (code.length / 2 + 1).toString(16).padStart(4, '0') // len will be 1 too long - return `0xEF000101${len}00${code}` -} - -describe('getEOFCode()', () => { - it('should work', () => { - const code = '600100' - const validEofCode = generateEOFCode(code) - const invalidEofCode = generateInvalidEOFCode(code) - - assert.equal( - bytesToUnprefixedHex(getEOFCode(hexToBytes(validEofCode))), - code, - 'returned just code section of EOF container' - ) - assert.equal( - bytesToUnprefixedHex(getEOFCode(hexToBytes(invalidEofCode))), - invalidEofCode.toLowerCase().slice(2), - 'returns entire code string for non EOF code' - ) - }) -}) diff --git a/packages/evm/test/evm.spec.ts b/packages/evm/test/evm.spec.ts new file mode 100644 index 0000000000..30197834dc --- /dev/null +++ b/packages/evm/test/evm.spec.ts @@ -0,0 +1,24 @@ +import { assert, describe, it } from 'vitest' + +import { createEVM, paramsEVM } from '../src/index.js' + +// TODO: This whole file was missing for quite some time and now (July 2024) +// has been side introduced along another PR. We should add basic initialization +// tests for options and the like. +describe('initialization', () => { + it('basic initialization', async () => { + const evm = await createEVM() + const msg = 'should use the correct parameter defaults' + assert.isFalse(evm.allowUnlimitedContractSize, msg) + }) + + it('EVM parameter customization', async () => { + let evm = await createEVM() + assert.equal(evm.common.param('ecAddGas'), BigInt(150), 'should use default EVM parameters') + + const params = JSON.parse(JSON.stringify(paramsEVM)) + params['1679']['ecAddGas'] = 100 // 150 + evm = await createEVM({ params }) + assert.equal(evm.common.param('ecAddGas'), BigInt(100), 'should use custom parameters provided') + }) +}) diff --git a/packages/evm/test/memory.spec.ts b/packages/evm/test/memory.spec.ts index 640fe150dd..3bf538966c 100644 --- a/packages/evm/test/memory.spec.ts +++ b/packages/evm/test/memory.spec.ts @@ -38,13 +38,13 @@ describe('Memory', () => { assert.equal( memory._store.length, CONTAINER_SIZE, - 'memory should remain in CONTAINER_SIZE length' + 'memory should remain in CONTAINER_SIZE length', ) memory.write(CONTAINER_SIZE, 1, Uint8Array.from([1])) assert.equal( memory._store.length, 8192 * 2, - 'memory buffer length expanded by CONTAINER_SIZE bytes' + 'memory buffer length expanded by CONTAINER_SIZE bytes', ) }) diff --git a/packages/evm/test/opcodes.spec.ts b/packages/evm/test/opcodes.spec.ts index 3c09aef358..e4bd0c4b4a 100644 --- a/packages/evm/test/opcodes.spec.ts +++ b/packages/evm/test/opcodes.spec.ts @@ -1,74 +1,74 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/index.js' +import { createEVM } from '../src/index.js' describe('EVM -> getActiveOpcodes()', () => { const DIFFICULTY_PREVRANDAO = 0x44 const CHAINID = 0x46 //istanbul opcode it('should not expose opcodes from a follow-up HF (istanbul -> petersburg)', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + const evm = await createEVM({ common }) assert.equal( 
evm.getActiveOpcodes().get(CHAINID), undefined, - 'istanbul opcode not exposed (HF: < istanbul (petersburg)' + 'istanbul opcode not exposed (HF: < istanbul (petersburg)', ) }) it('should expose opcodes when HF is active (>= istanbul)', async () => { - let common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - let evm = await EVM.create({ common }) + let common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + let evm = await createEVM({ common }) assert.equal( evm.getActiveOpcodes().get(CHAINID)!.name, 'CHAINID', - 'istanbul opcode exposed (HF: istanbul)' + 'istanbul opcode exposed (HF: istanbul)', ) - common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.MuirGlacier }) - evm = await EVM.create({ common }) + common = new Common({ chain: Mainnet, hardfork: Hardfork.MuirGlacier }) + evm = await createEVM({ common }) assert.equal( evm.getActiveOpcodes().get(CHAINID)!.name, 'CHAINID', - 'istanbul opcode exposed (HF: > istanbul (muirGlacier)' + 'istanbul opcode exposed (HF: > istanbul (muirGlacier)', ) }) it('should switch DIFFICULTY opcode name to PREVRANDAO when >= Merge HF', async () => { - let common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - let evm = await EVM.create({ common }) + let common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + let evm = await createEVM({ common }) assert.equal( evm.getActiveOpcodes().get(DIFFICULTY_PREVRANDAO)!.name, 'DIFFICULTY', - 'Opcode x44 named DIFFICULTY pre-Merge' + 'Opcode x44 named DIFFICULTY pre-Merge', ) - common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) - evm = await EVM.create({ common }) + common = new Common({ chain: Mainnet, hardfork: Hardfork.Paris }) + evm = await createEVM({ common }) assert.equal( evm.getActiveOpcodes().get(DIFFICULTY_PREVRANDAO)!.name, 'PREVRANDAO', - 'Opcode x44 named PREVRANDAO post-Merge' + 'Opcode x44 named PREVRANDAO post-Merge', ) }) it('should update opcodes on a hardfork change', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const evm = await createEVM({ common }) common.setHardfork(Hardfork.Byzantium) assert.equal( evm.getActiveOpcodes().get(CHAINID), undefined, - 'opcode not exposed after HF change (-> < istanbul)' + 'opcode not exposed after HF change (-> < istanbul)', ) common.setHardfork(Hardfork.Istanbul) assert.equal( evm.getActiveOpcodes().get(CHAINID)!.name, 'CHAINID', - 'opcode exposed after HF change (-> istanbul)' + 'opcode exposed after HF change (-> istanbul)', ) }) }) diff --git a/packages/evm/test/precompiles/01-ecrecover.spec.ts b/packages/evm/test/precompiles/01-ecrecover.spec.ts index 977d205ff1..ed6b8884d3 100644 --- a/packages/evm/test/precompiles/01-ecrecover.spec.ts +++ b/packages/evm/test/precompiles/01-ecrecover.spec.ts @@ -1,9 +1,9 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex, bytesToUnprefixedHex, hexToBytes, utf8ToBytes } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' const prefix = bytesToUnprefixedHex(utf8ToBytes('\x19Ethereum Signed Message:\n32')) const _hash = 
'852daa74cc3c31fe64542bb9b8764cfb91cc30f9acf9389071ffb44a9eefde46' @@ -16,8 +16,8 @@ describe('Precompiles: ECRECOVER', () => { it('ECRECOVER', async () => { // Test reference: https://github.com/ethereum/go-ethereum/issues/3731#issuecomment-293866868 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + const evm = await createEVM({ common, }) const addressStr = '0000000000000000000000000000000000000001' @@ -34,7 +34,7 @@ describe('Precompiles: ECRECOVER', () => { assert.deepEqual( bytesToHex(result.returnValue.slice(-20)), address, - 'should recover expected address' + 'should recover expected address', ) result = await ECRECOVER({ diff --git a/packages/evm/test/precompiles/03-ripemd160.spec.ts b/packages/evm/test/precompiles/03-ripemd160.spec.ts index 96859bd65e..e7c9b20aee 100644 --- a/packages/evm/test/precompiles/03-ripemd160.spec.ts +++ b/packages/evm/test/precompiles/03-ripemd160.spec.ts @@ -1,8 +1,8 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { bytesToHex, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' const input = '38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e000000000000000000000000000000000000000000000000000000000000001b38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e789d1dd423d25f0772d2748d60f7e4b81bb14d086eba8e8e8efb6dcff8a4ae02' @@ -12,8 +12,8 @@ describe('Precompiles: RIPEMD160', () => { it('RIPEMD160', async () => { // Test reference: https://github.com/ethereum/go-ethereum/blob/e206d3f8975bd98cc86d14055dca40f996bacc60/core/vm/contracts_test.go#L217 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + const evm = await createEVM({ common, }) const addressStr = '0000000000000000000000000000000000000003' @@ -29,7 +29,7 @@ describe('Precompiles: RIPEMD160', () => { assert.deepEqual( bytesToHex(result.returnValue), `0x${expected}`, - 'should generate expected value' + 'should generate expected value', ) result = await RIPEMD160({ diff --git a/packages/evm/test/precompiles/05-modexp.spec.ts b/packages/evm/test/precompiles/05-modexp.spec.ts index 2e975064d7..e9dc5edf4f 100644 --- a/packages/evm/test/precompiles/05-modexp.spec.ts +++ b/packages/evm/test/precompiles/05-modexp.spec.ts @@ -1,11 +1,12 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Mainnet } from '@ethereumjs/common' import { bytesToHex, hexToBytes } from '@ethereumjs/util' import { assert, beforeAll, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' import fuzzer from './modexp-testdata.json' +import type { EVM } from '../../src/index.js' import type { PrecompileFunc } from '../../src/precompiles/types.js' import type { PrefixedHexString } from '@ethereumjs/util' @@ -16,8 +17,8 @@ describe('Precompiles: MODEXP', () => { let addressStr: string let MODEXP: PrecompileFunc beforeAll(async () => { - common = new Common({ chain: Chain.Mainnet }) - evm = await EVM.create({ + common = new Common({ chain: 
Mainnet }) + evm = await createEVM({ common, }) addressStr = '0000000000000000000000000000000000000005' @@ -34,8 +35,8 @@ describe('Precompiles: MODEXP', () => { common, _EVM: evm, }) - const oput = bytesToHex(result.returnValue) - assert.equal(oput, expect) + const output = bytesToHex(result.returnValue) + assert.equal(output, expect) }) } diff --git a/packages/evm/test/precompiles/06-ecadd.spec.ts b/packages/evm/test/precompiles/06-ecadd.spec.ts index 0a7b597311..fad052a08a 100644 --- a/packages/evm/test/precompiles/06-ecadd.spec.ts +++ b/packages/evm/test/precompiles/06-ecadd.spec.ts @@ -1,12 +1,12 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' describe('Precompiles: ECADD', () => { it('ECADD', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + const evm = await createEVM({ common, }) const addressStr = '0000000000000000000000000000000000000006' diff --git a/packages/evm/test/precompiles/07-ecmul.spec.ts b/packages/evm/test/precompiles/07-ecmul.spec.ts index 4e4f088f5e..ed9ac1e9a9 100644 --- a/packages/evm/test/precompiles/07-ecmul.spec.ts +++ b/packages/evm/test/precompiles/07-ecmul.spec.ts @@ -1,12 +1,12 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' describe('Precompiles: ECMUL', () => { it('ECMUL', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + const evm = await createEVM({ common, }) const ECMUL = getActivePrecompiles(common).get('0000000000000000000000000000000000000007')! diff --git a/packages/evm/test/precompiles/08-ecpairing.spec.ts b/packages/evm/test/precompiles/08-ecpairing.spec.ts index a5163d0d5f..9888b52395 100644 --- a/packages/evm/test/precompiles/08-ecpairing.spec.ts +++ b/packages/evm/test/precompiles/08-ecpairing.spec.ts @@ -1,20 +1,20 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' describe('Precompiles: ECPAIRING', () => { it('ECPAIRING', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + const evm = await createEVM({ common, }) const addressStr = '0000000000000000000000000000000000000008' const ECPAIRING = getActivePrecompiles(common).get(addressStr)! 
const result = await ECPAIRING({ data: hexToBytes( - '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa' + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c21800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa', ), gasLimit: BigInt(0xffffff), common, @@ -24,7 +24,7 @@ describe('Precompiles: ECPAIRING', () => { assert.deepEqual( result.executionGasUsed, BigInt(260000), - 'should use petersburg gas costs (k ^= 2 pairings)' + 'should use petersburg gas costs (k ^= 2 pairings)', ) }) }) diff --git a/packages/evm/test/precompiles/09-blake2f.spec.ts b/packages/evm/test/precompiles/09-blake2f.spec.ts index e9a39c2edf..5eb045139f 100644 --- a/packages/evm/test/precompiles/09-blake2f.spec.ts +++ b/packages/evm/test/precompiles/09-blake2f.spec.ts @@ -1,9 +1,10 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { bytesToHex, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { assert, beforeAll, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' +import type { EVM } from '../../src/index.js' import type { PrecompileFunc } from '../../src/precompiles/types.js' const validCases = [ @@ -81,11 +82,11 @@ describe('Precompiles: BLAKE2F', () => { let addressStr: string let BLAKE2F: PrecompileFunc beforeAll(async () => { - common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) // Test references: https://github.com/ethereum/go-ethereum/blob/e206d3f8975bd98cc86d14055dca40f996bacc60/core/vm/testdata/precompiles/blake2F.json // https://github.com/ethereum/go-ethereum/blob/e206d3f8975bd98cc86d14055dca40f996bacc60/core/vm/contracts_test.go#L73 - evm = await EVM.create({ + evm = await createEVM({ common, }) addressStr = '0000000000000000000000000000000000000009' @@ -104,7 +105,7 @@ describe('Precompiles: BLAKE2F', () => { assert.equal( bytesToHex(result.returnValue), `0x${t.expected}`, - 
'should generate expected value' + 'should generate expected value', ) assert.deepEqual(result.executionGasUsed, BigInt(t.gas), 'should use expected amount of gas') }) @@ -124,7 +125,7 @@ describe('Precompiles: BLAKE2F', () => { } it('should also work on non-zero aligned inputs', async () => { - const addr = Address.zero() + const addr = createZeroAddress() // Blake2f calldata from https://etherscan.io/tx/0x4f2e13a0a3f14033630ab2b8cdad09d316826375f761ded5b31253bb42e0a476 // (This tx calls into Blake2f multiple times, but one of them is taken) const calldata = @@ -135,7 +136,7 @@ describe('Precompiles: BLAKE2F', () => { // -> Calls Blake2F with this data (so, with the calldata) // -> Returns the data from Blake2F const code = `0x366000602037600080366020600060095AF1593D6000593E3D90F3` - await evm.stateManager.putContractCode(addr, hexToBytes(code)) + await evm.stateManager.putCode(addr, hexToBytes(code)) const res = await evm.runCall({ data: hexToBytes(calldata), diff --git a/packages/evm/test/precompiles/0a-pointevaluation.spec.ts b/packages/evm/test/precompiles/0a-pointevaluation.spec.ts index f9ab7c4825..f5018b03f1 100644 --- a/packages/evm/test/precompiles/0a-pointevaluation.spec.ts +++ b/packages/evm/test/precompiles/0a-pointevaluation.spec.ts @@ -9,12 +9,12 @@ import { import { loadKZG } from 'kzg-wasm' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' import type { PrecompileInput } from '../../src/index.js' const BLS_MODULUS = BigInt( - '52435875175126190479447740508185965837690552500527637822603658699938581184513' + '52435875175126190479447740508185965837690552500527637822603658699938581184513', ) describe('Precompiles: point evaluation', () => { @@ -29,7 +29,7 @@ describe('Precompiles: point evaluation', () => { customCrypto: { kzg }, }) - const evm = await EVM.create({ + const evm = await createEVM({ common, }) const addressStr = '000000000000000000000000000000000000000a' @@ -37,12 +37,12 @@ describe('Precompiles: point evaluation', () => { const testCase = { commitment: hexToBytes( - '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', ), z: hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000002'), y: hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000000'), proof: hexToBytes( - '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + '0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', ), } const versionedHash = computeVersionedHash(testCase.commitment, 1) @@ -58,7 +58,7 @@ describe('Precompiles: point evaluation', () => { assert.equal( bytesToBigInt(unpadBytes(res.returnValue.slice(32))), BLS_MODULUS, - 'point evaluation precompile returned expected output' + 'point evaluation precompile returned expected output', ) const optsWithInvalidCommitment: PrecompileInput = { @@ -67,7 +67,7 @@ describe('Precompiles: point evaluation', () => { testCase.z, testCase.y, testCase.commitment, - testCase.proof + testCase.proof, ), gasLimit: 0xfffffffffn, _EVM: evm, @@ -77,7 +77,7 @@ describe('Precompiles: point evaluation', () => { res = await pointEvaluation(optsWithInvalidCommitment) assert.ok( res.exceptionError?.error.match('kzg commitment does not match versioned hash'), 
- 'precompile throws when commitment does not match versioned hash' + 'precompile throws when commitment does not match versioned hash', ) }) }) diff --git a/packages/evm/test/precompiles/eip-2537-bls.spec.ts b/packages/evm/test/precompiles/eip-2537-bls.spec.ts index b4d0c0c2f9..7f552e74c5 100644 --- a/packages/evm/test/precompiles/eip-2537-bls.spec.ts +++ b/packages/evm/test/precompiles/eip-2537-bls.spec.ts @@ -1,10 +1,10 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { Address, bytesToHex, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { readFileSync, readdirSync } from 'fs' import * as mcl from 'mcl-wasm' import { assert, describe, it } from 'vitest' -import { EVM, MCLBLS, getActivePrecompiles } from '../../src/index.js' +import { MCLBLS, createEVM, getActivePrecompiles } from '../../src/index.js' import type { PrefixedHexString } from '@ethereumjs/util' @@ -34,7 +34,7 @@ const precompileMap: { [key: string]: string } = { 'pairing_check_bls.json': '0000000000000000000000000000000000000011', } -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin, eips: [2537] }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Berlin, eips: [2537] }) // MCL Instantiation await mcl.init(mcl.BLS12_381) @@ -52,7 +52,7 @@ for (const bls of [undefined, mclbls]) { describe(`Precompiles: ${fname}`, () => { for (const data of parsedJSON) { it(`${data.Name}`, async () => { - const evm = await EVM.create({ + const evm = await createEVM({ common, bls, }) @@ -79,7 +79,7 @@ for (const bls of [undefined, mclbls]) { assert.deepEqual( '0x' + data.Expected, bytesToHex(result.returnValue), - 'return value should match testVectorResult' + 'return value should match testVectorResult', ) assert.equal(result.executionGasUsed, BigInt(data.Gas)) } catch (e) { @@ -103,15 +103,15 @@ for (let address = precompileAddressStart; address <= precompileAddressEnd; addr describe('EIP-2537 BLS precompile availability tests', () => { it('BLS precompiles should not be available if EIP not activated', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.MuirGlacier }) - const evm = await EVM.create({ + const common = new Common({ chain: Mainnet, hardfork: Hardfork.MuirGlacier }) + const evm = await createEVM({ common, }) for (const address of precompiles) { const to = new Address(hexToBytes(address)) const result = await evm.runCall({ - caller: Address.zero(), + caller: createZeroAddress(), gasLimit: BigInt(0xffffffffff), to, value: BigInt(0), diff --git a/packages/evm/test/precompiles/hardfork.spec.ts b/packages/evm/test/precompiles/hardfork.spec.ts index c617bf988c..da5495b8f8 100644 --- a/packages/evm/test/precompiles/hardfork.spec.ts +++ b/packages/evm/test/precompiles/hardfork.spec.ts @@ -1,8 +1,8 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address, hexToBytes } from '@ethereumjs/util' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { Address, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM, getActivePrecompiles } from '../../src/index.js' +import { createEVM, getActivePrecompiles } from '../../src/index.js' describe('Precompiles: hardfork availability', () => { it('Test ECPAIRING availability', async () => { @@ -10,7 +10,7 @@ describe('Precompiles: hardfork availability', () 
=> { const ECPAIR_Address = new Address(hexToBytes(`0x${ECPAIR_AddressStr}`)) // ECPAIR was introduced in Byzantium; check if available from Byzantium. - const commonByzantium = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) + const commonByzantium = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) let ECPAIRING = getActivePrecompiles(commonByzantium).get(ECPAIR_AddressStr) @@ -20,11 +20,11 @@ describe('Precompiles: hardfork availability', () => { assert.ok(true, 'ECPAIRING available in petersburg') } - let evm = await EVM.create({ + let evm = await createEVM({ common: commonByzantium, }) let result = await evm.runCall({ - caller: Address.zero(), + caller: createZeroAddress(), gasLimit: BigInt(0xffffffffff), to: ECPAIR_Address, value: BigInt(0), @@ -33,7 +33,7 @@ describe('Precompiles: hardfork availability', () => { assert.equal(result.execResult.executionGasUsed, BigInt(100000)) // check that we are using gas (if address would contain no code we use 0 gas) // Check if ECPAIR is available in future hard forks. - const commonPetersburg = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) + const commonPetersburg = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) ECPAIRING = getActivePrecompiles(commonPetersburg).get(ECPAIR_AddressStr)! if (ECPAIRING === undefined) { assert.fail('ECPAIRING is not available in petersburg while it should be available') @@ -41,11 +41,11 @@ describe('Precompiles: hardfork availability', () => { assert.ok(true, 'ECPAIRING available in petersburg') } - evm = await EVM.create({ + evm = await createEVM({ common: commonPetersburg, }) result = await evm.runCall({ - caller: Address.zero(), + caller: createZeroAddress(), gasLimit: BigInt(0xffffffffff), to: ECPAIR_Address, value: BigInt(0), @@ -54,7 +54,7 @@ describe('Precompiles: hardfork availability', () => { assert.equal(result.execResult.executionGasUsed, BigInt(100000)) // Check if ECPAIR is not available in Homestead. - const commonHomestead = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Homestead }) + const commonHomestead = new Common({ chain: Mainnet, hardfork: Hardfork.Homestead }) ECPAIRING = getActivePrecompiles(commonHomestead).get(ECPAIR_AddressStr)! 
if (ECPAIRING !== undefined) { @@ -63,12 +63,12 @@ describe('Precompiles: hardfork availability', () => { assert.ok(true, 'ECPAIRING not available in homestead') } - evm = await EVM.create({ + evm = await createEVM({ common: commonHomestead, }) result = await evm.runCall({ - caller: Address.zero(), + caller: createZeroAddress(), gasLimit: BigInt(0xffffffffff), to: ECPAIR_Address, value: BigInt(0), diff --git a/packages/evm/test/runCall.spec.ts b/packages/evm/test/runCall.spec.ts index ebdc7513b3..9e3571ba8c 100644 --- a/packages/evm/test/runCall.spec.ts +++ b/packages/evm/test/runCall.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet, createCommonFromGethGenesis } from '@ethereumjs/common' import { Account, Address, @@ -6,6 +6,9 @@ import { bytesToBigInt, bytesToHex, concatBytes, + createAddressFromPrivateKey, + createAddressFromString, + createZeroAddress, hexToBytes, padToEven, unpadBytes, @@ -17,7 +20,7 @@ import { assert, describe, it } from 'vitest' import * as genesisJSON from '../../client/test/testdata/geth-genesis/eip4844.json' import { defaultBlock } from '../src/evm.js' import { ERROR } from '../src/exceptions.js' -import { EVM } from '../src/index.js' +import { createEVM } from '../src/index.js' import type { EVMRunCallOpts } from '../src/types.js' @@ -30,13 +33,13 @@ function create2address(sourceAddress: Address, codeHash: Uint8Array, salt: Uint describe('RunCall tests', () => { it('Create where FROM account nonce is 0', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Constantinople }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Constantinople }) + const evm = await createEVM({ common }) const res = await evm.runCall({ to: undefined }) assert.equal( res.createdAddress?.toString(), '0xbd770416a3345f91e4b34576cb804a576fa48eb1', - 'created valid address when FROM account nonce is 0' + 'created valid address when FROM account nonce is 0', ) }) @@ -53,8 +56,8 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const contractAddress = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // contract address // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Constantinople }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Constantinople }) + const evm = await createEVM({ common }) const code = '0x3460008080F560005260206000F3' /* code: remarks: (top of the stack is at the zero index) @@ -70,7 +73,7 @@ describe('RunCall tests', () => { RETURN [0x00, 0x20] */ - await evm.stateManager.putContractCode(contractAddress, hexToBytes(code)) // setup the contract code + await evm.stateManager.putCode(contractAddress, hexToBytes(code)) // setup the contract code await evm.stateManager.putAccount(caller, new Account(BigInt(0), BigInt(0x11111111))) // give the calling account a big balance so we don't run out of funds const codeHash = keccak256(new Uint8Array()) for (let value = 0; value <= 1000; value += 20) { @@ -107,11 +110,11 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const contractAddress = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // contract address // setup the evm - 
const evmByzantium = await EVM.create({ - common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }), + const evmByzantium = await createEVM({ + common: new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }), }) - const evmConstantinople = await EVM.create({ - common: new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Constantinople }), + const evmConstantinople = await createEVM({ + common: new Common({ chain: Mainnet, hardfork: Hardfork.Constantinople }), }) const code = '0x600160011B00' /* @@ -122,8 +125,8 @@ describe('RunCall tests', () => { STOP */ - await evmByzantium.stateManager.putContractCode(contractAddress, hexToBytes(code)) // setup the contract code - await evmConstantinople.stateManager.putContractCode(contractAddress, hexToBytes(code)) // setup the contract code + await evmByzantium.stateManager.putCode(contractAddress, hexToBytes(code)) // setup the contract code + await evmConstantinople.stateManager.putCode(contractAddress, hexToBytes(code)) // setup the contract code const runCallArgs = { caller, // call address @@ -137,11 +140,11 @@ describe('RunCall tests', () => { assert.ok( byzantiumResult.execResult.exceptionError && byzantiumResult.execResult.exceptionError.error === 'invalid opcode', - 'byzantium cannot accept constantinople opcodes (SHL)' + 'byzantium cannot accept constantinople opcodes (SHL)', ) assert.ok( !constantinopleResult.execResult.exceptionError, - 'constantinople can access the SHL opcode' + 'constantinople can access the SHL opcode', ) }) @@ -150,8 +153,8 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const address = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const evm = await createEVM({ common }) const code = '0x61000260005561000160005500' /* idea: store the original value in the storage slot, except it is now a 1-length Uint8Array instead of a 32-length Uint8Array @@ -174,11 +177,11 @@ describe('RunCall tests', () => { */ - await evm.stateManager.putContractCode(address, hexToBytes(code)) - await evm.stateManager.putContractStorage( + await evm.stateManager.putCode(address, hexToBytes(code)) + await evm.stateManager.putStorage( address, new Uint8Array(32), - hexToBytes(`0x${'00'.repeat(31)}01`) + hexToBytes(`0x${'00'.repeat(31)}01`), ) // setup the call arguments @@ -199,12 +202,12 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const address = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const evm = await createEVM({ common }) // push 1 push 0 sstore stop const code = '0x600160015500' - await evm.stateManager.putContractCode(address, hexToBytes(code)) + await evm.stateManager.putCode(address, hexToBytes(code)) // setup the call arguments const runCallArgs = { @@ -224,12 +227,12 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const address = new 
Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Homestead }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Homestead }) + const evm = await createEVM({ common }) // code to call 0x00..00dd, which does not exist const code = '0x6000600060006000600060DD61FFFF5A03F100' - await evm.stateManager.putContractCode(address, hexToBytes(code)) + await evm.stateManager.putCode(address, hexToBytes(code)) // setup the call arguments const runCallArgs = { @@ -251,13 +254,13 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const address = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Homestead }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Homestead }) + const evm = await createEVM({ common }) // code to call back into the calling account (0x00..00EE), // but using too much memory const code = '0x61FFFF60FF60006000600060EE6000F200' - await evm.stateManager.putContractCode(address, hexToBytes(code)) + await evm.stateManager.putCode(address, hexToBytes(code)) // setup the call arguments const runCallArgs = { @@ -278,14 +281,14 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const address = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.TangerineWhistle }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.TangerineWhistle }) + const evm = await createEVM({ common }) // code to call 0x00..00fe, with the GAS opcode used as gas // this cannot be paid, since we also have to pay for CALL (40 gas) // this should thus go OOG const code = '0x60FEFF' - await evm.stateManager.putContractCode(address, hexToBytes(code)) + await evm.stateManager.putCode(address, hexToBytes(code)) // setup the call arguments const runCallArgs = { @@ -306,15 +309,15 @@ describe('RunCall tests', () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address const address = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const evm = await createEVM({ common }) // code to call 0x00..00fe, with the GAS opcode used as gas // this cannot be paid, since we also have to pay for CALL (40 gas) // this should thus go OOG const code = '0x3460005500' await evm.stateManager.putAccount(caller, new Account()) - await evm.stateManager.putContractCode(address, hexToBytes(code)) + await evm.stateManager.putCode(address, hexToBytes(code)) const account = await evm.stateManager.getAccount(caller) account!.balance = BigInt(100) @@ -373,8 +376,8 @@ describe('RunCall tests', () => { const slot = hexToBytes(`0x${'00'.repeat(32)}`) const emptyBytes = hexToBytes('0x') // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const evm = await EVM.create({ 
common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const evm = await createEVM({ common }) const code = '0x60008080F060005500' /* This simple code tries to create an empty contract and then stores the address of the contract in the zero slot. @@ -388,7 +391,7 @@ describe('RunCall tests', () => { STOP */ - await evm.stateManager.putContractCode(address, hexToBytes(code)) + await evm.stateManager.putCode(address, hexToBytes(code)) const account = await evm.stateManager.getAccount(address) account!.nonce = MAX_UINT64 - BigInt(1) @@ -402,7 +405,7 @@ describe('RunCall tests', () => { } await evm.runCall(runCallArgs) - let storage = await evm.stateManager.getContractStorage(address, slot) + let storage = await evm.stateManager.getStorage(address, slot) // The nonce is MAX_UINT64 - 1, so we are allowed to create a contract (nonce of creating contract is now MAX_UINT64) assert.notDeepEqual(storage, emptyBytes, 'successfully created contract') @@ -410,11 +413,11 @@ describe('RunCall tests', () => { await evm.runCall(runCallArgs) // The nonce is MAX_UINT64, so we are NOT allowed to create a contract (nonce of creating contract is now MAX_UINT64) - storage = await evm.stateManager.getContractStorage(address, slot) + storage = await evm.stateManager.getStorage(address, slot) assert.deepEqual( storage, emptyBytes, - 'failed to create contract; nonce of creating contract is too high (MAX_UINT64)' + 'failed to create contract; nonce of creating contract is too high (MAX_UINT64)', ) }) @@ -425,8 +428,8 @@ describe('RunCall tests', () => { // setup the accounts for this test const caller = new Address(hexToBytes('0x1a02a619e51cc5f8a2a61d2a60f6c80476ee8ead')) // caller address // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + const evm = await createEVM({ common }) const code = '0x3034526020600760203460045afa602034343e604034f3' const account = new Account() @@ -448,18 +451,18 @@ describe('RunCall tests', () => { '0x00000000000000000000000028373a29d17af317e669579d97e7dddc9da6e3e2e7dddc9da6e3e200000000000000000000000000000000000000000000000000' assert.equal(result.createdAddress?.toString(), expectedAddress, 'created address correct') - const deployedCode = await evm.stateManager.getContractCode(result.createdAddress!) + const deployedCode = await evm.stateManager.getCode(result.createdAddress!) 
assert.equal(bytesToHex(deployedCode), expectedCode, 'deployed code correct') }) it('Throws on negative call value', async () => { // setup the vm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const evm = await createEVM({ common }) // setup the call arguments const runCallArgs = { - to: Address.zero(), + to: createZeroAddress(), value: BigInt(-10), } @@ -469,23 +472,23 @@ describe('RunCall tests', () => { } catch (err: any) { assert.ok( err.message.includes('value field cannot be negative'), - 'throws on negative call value' + 'throws on negative call value', ) } }) it('runCall() -> skipBalance behavior', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Berlin }) + const evm = await createEVM({ common }) // runCall against a contract to reach `_reduceSenderBalance` const contractCode = hexToBytes('0x00') // 00: STOP - const contractAddress = Address.fromString('0x000000000000000000000000636F6E7472616374') - await evm.stateManager.putContractCode(contractAddress, contractCode) + const contractAddress = createAddressFromString('0x000000000000000000000000636F6E7472616374') + await evm.stateManager.putCode(contractAddress, contractCode) const senderKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) - const sender = Address.fromPrivateKey(senderKey) + const sender = createAddressFromPrivateKey(senderKey) const runCallArgs = { gasLimit: BigInt(21000), @@ -503,7 +506,7 @@ describe('RunCall tests', () => { assert.equal( senderBalance, balance ?? 
BigInt(0), - 'sender balance should be the same before and after call execution with skipBalance' + 'sender balance should be the same before and after call execution with skipBalance', ) assert.equal(res.execResult.exceptionError, undefined, 'no exceptionError with skipBalance') } @@ -511,7 +514,7 @@ describe('RunCall tests', () => { const res2 = await evm.runCall({ ...runCallArgs, skipBalance: false }) assert.ok( res2.execResult.exceptionError?.error.match('insufficient balance'), - 'runCall reverts when insufficient sender balance and skipBalance is false' + 'runCall reverts when insufficient sender balance and skipBalance is false', ) }) @@ -519,8 +522,8 @@ describe('RunCall tests', () => { // setup the accounts for this test const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) // caller address // setup the evm - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + const evm = await createEVM({ common }) // setup the call arguments const runCallArgs = { @@ -536,7 +539,7 @@ describe('RunCall tests', () => { assert.equal( result.execResult.exceptionError?.error, ERROR.CODESIZE_EXCEEDS_MAXIMUM, - 'reported error is correct' + 'reported error is correct', ) }) it('runCall() => use BLOBHASH opcode from EIP 4844', async () => { @@ -545,7 +548,7 @@ describe('RunCall tests', () => { chain: 'custom', hardfork: Hardfork.Cancun, }) - const evm = await EVM.create({ common }) + const evm = await createEVM({ common }) // setup the call arguments const runCallArgs: EVMRunCallOpts = { @@ -558,7 +561,7 @@ describe('RunCall tests', () => { assert.equal( bytesToHex(unpadBytes(res.execResult.returnValue)), '0xab', - 'retrieved correct versionedHash from runState' + 'retrieved correct versionedHash from runState', ) // setup the call arguments @@ -572,7 +575,7 @@ describe('RunCall tests', () => { assert.equal( bytesToHex(unpadBytes(res2.execResult.returnValue)), '0x', - 'retrieved no versionedHash when specified versionedHash does not exist in runState' + 'retrieved no versionedHash when specified versionedHash does not exist in runState', ) }) @@ -582,13 +585,13 @@ describe('RunCall tests', () => { chain: 'custom', hardfork: Hardfork.Cancun, }) - const evm = await EVM.create({ common }) + const evm = await createEVM({ common }) const BLOBBASEFEE_OPCODE = 0x4a assert.equal( evm.getActiveOpcodes().get(BLOBBASEFEE_OPCODE)!.name, 'BLOBBASEFEE', - 'Opcode 0x4a named BLOBBASEFEE' + 'Opcode 0x4a named BLOBBASEFEE', ) const block = defaultBlock() @@ -605,18 +608,18 @@ describe('RunCall tests', () => { assert.equal( bytesToBigInt(unpadBytes(res.execResult.returnValue)), BigInt(119), - 'retrieved correct gas fee' + 'retrieved correct gas fee', ) assert.equal(res.execResult.executionGasUsed, BigInt(6417), 'correct blob gas fee (2) charged') }) it('step event: ensure EVM memory and not internal memory gets reported', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Berlin }) + const evm = await createEVM({ common }) const contractCode = hexToBytes('0x600060405200') // PUSH 0 PUSH 40 MSTORE STOP - const contractAddress = Address.fromString('0x000000000000000000000000636F6E7472616374') - await evm.stateManager.putContractCode(contractAddress, contractCode) + const contractAddress = 
createAddressFromString('0x000000000000000000000000636F6E7472616374') + await evm.stateManager.putCode(contractAddress, contractCode) const runCallArgs = { gasLimit: BigInt(21000), @@ -636,8 +639,8 @@ describe('RunCall tests', () => { }) it('ensure code deposit errors are logged correctly (>= Homestead)', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Berlin }) + const evm = await createEVM({ common }) // Create a contract which is too large const runCallArgs = { @@ -659,8 +662,8 @@ describe('RunCall tests', () => { }) it('ensure code deposit errors are logged correctly (Frontier)', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) + const evm = await createEVM({ common }) // Create a contract which cannot pay the code deposit fee const runCallArgs = { @@ -683,8 +686,8 @@ describe('RunCall tests', () => { it('ensure call and callcode handle gas stipend correctly', async () => { // See: https://github.com/ethereumjs/ethereumjs-monorepo/issues/3194 - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Shanghai }) - const evm = await EVM.create({ common }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Shanghai }) + const evm = await createEVM({ common }) for (const [opcode, gas, expectedOutput] of [ ['f1', 36600, '0x'], // 36600 is CALL fee @@ -692,7 +695,7 @@ describe('RunCall tests', () => { ['f1', 36600 + 7 * 3, '0x01'], // 36600 is CALL fee + 7 * 3 gas for 7 PUSH opcodes ['f2', 11600 + 7 * 3, '0x01'], // 11600 is CALLCODE fee + 7 * 3 gas for 7 PUSH opcodes ]) { - // Code to either CALL or CALLCODE into AACC empty contract, with value 1 + // Code to either CALL or CALLCODE into AACC empty contract, with value 1 // cspell:disable-line // If enough gas is provided, then since nonzero value is sent, the gas limit // in the call(coded) contract will get the "bonus gas" stipend of 2300 // Previously, we added this gas stipend to the current gas available (which is wrong) @@ -708,13 +711,13 @@ describe('RunCall tests', () => { * PUSH2 0x1a90 // Note: this is the gas available in the new call(code) frame, this value does not matter * CALLCODE/CALL */ - const callCodeAddress = Address.fromString('0x000000000000000000000000000000000000aaaa') + const callCodeAddress = createAddressFromString('0x000000000000000000000000000000000000aaaa') const callCode = hexToBytes(`0x6000600060006000600161AACC611a90${opcode}`) const gasLimit = gas.toString(16).padStart(4, '0') /*** - * Bytecode for AAAB contract (used to call contract AAAA and stores result of call execution) + * Bytecode for AAAB contract (used to call contract AAAA and stores result of call execution) // cspell:disable-line * PUSH1 0x00 * DUP1 * DUP1 @@ -726,14 +729,14 @@ describe('RunCall tests', () => { * PUSH1 0x00 * SSTORE */ - const callerAddress = Address.fromString('0x000000000000000000000000000000000000aaab') + const callerAddress = createAddressFromString('0x000000000000000000000000000000000000aaab') const callerCode = hexToBytes(`0x60008080808061AAAA61${gasLimit}f1600055`) await evm.stateManager.putAccount(callCodeAddress, new Account()) - await evm.stateManager.putContractCode(callCodeAddress, callCode) + await evm.stateManager.putCode(callCodeAddress, callCode) await 
evm.stateManager.putAccount(callerAddress, new Account(undefined, BigInt(1))) - await evm.stateManager.putContractCode(callerAddress, callerCode) + await evm.stateManager.putCode(callerAddress, callerCode) const runCallArgs = { to: callerAddress, @@ -741,9 +744,7 @@ describe('RunCall tests', () => { } await evm.runCall(runCallArgs) - const callResult = bytesToHex( - await evm.stateManager.getContractStorage(callerAddress, zeros(32)) - ) + const callResult = bytesToHex(await evm.stateManager.getStorage(callerAddress, zeros(32))) // Expect slot to have value of either: 0 since CALLCODE and CODE did not have enough gas to execute // Or 1, if CALL(CODE) has enough gas to enter the new call frame assert.equal(callResult, expectedOutput, `should have result ${expectedOutput}`) diff --git a/packages/evm/test/runCode.spec.ts b/packages/evm/test/runCode.spec.ts index 791654dc1c..821626a339 100644 --- a/packages/evm/test/runCode.spec.ts +++ b/packages/evm/test/runCode.spec.ts @@ -1,7 +1,7 @@ -import { Account, Address, hexToBytes } from '@ethereumjs/util' +import { Account, createAddressFromString, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/index.js' +import { createEVM } from '../src/index.js' const PUSH1 = '60' const STOP = '00' @@ -21,7 +21,7 @@ const testCases = [ describe('VM.runCode: initial program counter', () => { it('should work', async () => { - const evm = await EVM.create() + const evm = await createEVM() for (const [i, testData] of testCases.entries()) { const runCodeArgs = { @@ -37,7 +37,7 @@ describe('VM.runCode: initial program counter', () => { assert.equal( result.runState?.programCounter, testData.resultPC, - `should start the execution at the specified pc or 0, testCases[${i}]` + `should start the execution at the specified pc or 0, testCases[${i}]`, ) } } catch (e: any) { @@ -57,7 +57,7 @@ describe('VM.runCode: initial program counter', () => { describe('VM.runCode: interpreter', () => { it('should return a EvmError as an exceptionError on the result', async () => { - const evm = await EVM.create() + const evm = await createEVM() const INVALID_opcode = 'fe' const runCodeArgs = { @@ -76,15 +76,15 @@ describe('VM.runCode: interpreter', () => { }) it('should throw on non-EvmError', async () => { - const evm = await EVM.create() - // NOTE: due to now throwing on `getContractStorage` if account does not exist + const evm = await createEVM() + // NOTE: due to now throwing on `getStorage` if account does not exist // this now means that if `runCode` is called and the address it runs on (default: zero address) // does not exist, then if SSTORE/SLOAD is used, the runCode will immediately fail because StateManager now throws // TODO: is this behavior which we should fix? 
(Either in StateManager OR in runCode where we load the account first, // then re-put the account after (if account === undefined put empty account, such that the account exists)) - const address = Address.fromString(`0x${'00'.repeat(20)}`) + const address = createAddressFromString(`0x${'00'.repeat(20)}`) await evm.stateManager.putAccount(address, new Account()) - evm.stateManager.putContractStorage = (..._args) => { + evm.stateManager.putStorage = (..._args) => { throw new Error('Test') } @@ -105,7 +105,7 @@ describe('VM.runCode: interpreter', () => { describe('VM.runCode: RunCodeOptions', () => { it('should throw on negative value args', async () => { - const evm = await EVM.create() + const evm = await createEVM() const runCodeArgs = { value: BigInt(-10), @@ -118,7 +118,7 @@ describe('VM.runCode: RunCodeOptions', () => { } catch (err: any) { assert.ok( err.message.includes('value field cannot be negative'), - 'throws on negative call value' + 'throws on negative call value', ) } }) diff --git a/packages/evm/test/stack.spec.ts b/packages/evm/test/stack.spec.ts index 1d6b79523d..ed75419f04 100644 --- a/packages/evm/test/stack.spec.ts +++ b/packages/evm/test/stack.spec.ts @@ -1,7 +1,7 @@ import { Account, Address, bigIntToBytes, hexToBytes, setLengthLeft } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EVM } from '../src/index.js' +import { createEVM } from '../src/index.js' import { Stack } from '../src/stack.js' import { createAccount } from './utils.js' @@ -99,7 +99,7 @@ describe('Stack', () => { it('stack items should not change if they are DUPed', async () => { const caller = new Address(hexToBytes('0x00000000000000000000000000000000000000ee')) const addr = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) - const evm = await EVM.create() + const evm = await createEVM() const account = createAccount(BigInt(0), BigInt(0)) const code = '0x60008080808060013382F15060005260206000F3' const expectedReturnValue = setLengthLeft(bigIntToBytes(BigInt(0)), 32) @@ -122,7 +122,7 @@ describe('Stack', () => { RETURN stack: [0, 0x20] (we thus return the stack item which was originally pushed as 0, and then DUPed) */ await evm.stateManager.putAccount(addr, account) - await evm.stateManager.putContractCode(addr, hexToBytes(code)) + await evm.stateManager.putCode(addr, hexToBytes(code)) await evm.stateManager.putAccount(caller, new Account(BigInt(0), BigInt(0x11))) const runCallArgs = { caller, diff --git a/packages/evm/test/transientStorage.spec.ts b/packages/evm/test/transientStorage.spec.ts index 095b4b66b3..acf90275c1 100644 --- a/packages/evm/test/transientStorage.spec.ts +++ b/packages/evm/test/transientStorage.spec.ts @@ -1,4 +1,4 @@ -import { Address } from '@ethereumjs/util' +import { createAddressFromString } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { TransientStorage } from '../src/transientStorage.js' @@ -7,7 +7,7 @@ describe('Transient Storage', () => { it('should set and get storage', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value = new Uint8Array(32).fill(0x99) @@ -19,7 +19,7 @@ describe('Transient Storage', () => { it('should return bytes32(0) if there is no key set', () => { const transientStorage = new TransientStorage() - const address = 
Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value = new Uint8Array(32).fill(0x11) @@ -36,7 +36,7 @@ describe('Transient Storage', () => { it('should revert', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value = new Uint8Array(32).fill(0x99) @@ -58,7 +58,7 @@ describe('Transient Storage', () => { it('should commit', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value = new Uint8Array(32).fill(0x99) @@ -74,7 +74,7 @@ describe('Transient Storage', () => { it('should fail with wrong size key/value', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') assert.throws(() => { transientStorage.put(address, new Uint8Array(10), new Uint8Array(1)) @@ -88,24 +88,24 @@ describe('Transient Storage', () => { it('keys are stringified', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value = new Uint8Array(32).fill(0x99) transientStorage.put(address, key, value) assert.deepEqual( transientStorage.get( - Address.fromString('0xff00000000000000000000000000000000000002'), - new Uint8Array(32).fill(0xff) + createAddressFromString('0xff00000000000000000000000000000000000002'), + new Uint8Array(32).fill(0xff), ), - value + value, ) }) it('revert applies changes in correct order', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value1 = new Uint8Array(32).fill(0x01) const value2 = new Uint8Array(32).fill(0x02) @@ -123,7 +123,7 @@ describe('Transient Storage', () => { it('nested reverts', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value0 = new Uint8Array(32).fill(0x00) const value1 = new Uint8Array(32).fill(0x01) @@ -153,7 +153,7 @@ describe('Transient Storage', () => { it('commit batches changes into next revert', () => { const transientStorage = new TransientStorage() - const address = Address.fromString('0xff00000000000000000000000000000000000002') + const address = createAddressFromString('0xff00000000000000000000000000000000000002') const key = new Uint8Array(32).fill(0xff) const value1 = new Uint8Array(32).fill(0x01) const value2 = new Uint8Array(32).fill(0x02) diff --git a/packages/evm/tsconfig.lint.json b/packages/evm/tsconfig.lint.json new file mode 100644 
index 0000000000..3698f4f0be --- /dev/null +++ b/packages/evm/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/evm/vite.config.bundler.ts b/packages/evm/vite.config.bundler.ts index 198b7cc30a..6efc4109c1 100644 --- a/packages/evm/vite.config.bundler.ts +++ b/packages/evm/vite.config.bundler.ts @@ -11,7 +11,7 @@ export default defineConfig({ treeshake: 'safest', }, lib: { - entry: 'src/index.ts', + entry: '../tx/examples/londonTx.ts', name: '@ethereumjs/evm', fileName: (format) => `ethereumjs-evm-bundle.${format}.js`, // only build for es diff --git a/packages/genesis/.eslintrc.cjs b/packages/genesis/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/genesis/.eslintrc.cjs +++ b/packages/genesis/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/genesis/CHANGELOG.md b/packages/genesis/CHANGELOG.md index cdd07f8f6f..c76dfeebcc 100644 --- a/packages/genesis/CHANGELOG.md +++ b/packages/genesis/CHANGELOG.md @@ -6,7 +6,11 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 0.2.2 - 2024-03-05 +## 0.2.3 - 2024-08-15 + +Maintenance release with downstream dependency updates, see PR [#3527](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3527) + +## 0.2.2 - 2024-03-18 Maintenance release with downstream dependency updates, see PR [#3297](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3297) diff --git a/packages/genesis/examples/simple.ts b/packages/genesis/examples/simple.ts index 2f20316aa7..91033c5bf7 100644 --- a/packages/genesis/examples/simple.ts +++ b/packages/genesis/examples/simple.ts @@ -1,9 +1,9 @@ -import { getGenesis } from '@ethereumjs/genesis' import { Chain } from '@ethereumjs/common' // or directly use chain ID +import { getGenesis } from '@ethereumjs/genesis' const mainnetGenesis = getGenesis(Chain.Mainnet) console.log( `This balance for account 0x000d836201318ec6899a67540690382780743280 in this chain's genesis state is ${parseInt( - mainnetGenesis!['0x000d836201318ec6899a67540690382780743280'] as string - )}` + mainnetGenesis!['0x000d836201318ec6899a67540690382780743280'] as string, + )}`, ) diff --git a/packages/genesis/package.json b/packages/genesis/package.json index 804470346b..e77e8d9a14 100644 --- a/packages/genesis/package.json +++ b/packages/genesis/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/genesis", - "version": "0.2.2", + "version": "0.2.3", "description": "A module to provide genesis states of well known networks", "keywords": [ "ethereum", @@ -59,13 +59,13 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/common": "^4.3.0", - "@ethereumjs/util": "^9.0.3" + "@ethereumjs/common": "^4.4.0", + "@ethereumjs/util": "^9.1.0" }, "engines": { "node": ">=18" }, "devDependencies": { - "@ethereumjs/trie": "^6.2.0" + "@ethereumjs/trie": "^6.2.1" } } diff --git a/packages/genesis/test/index.spec.ts b/packages/genesis/test/index.spec.ts index d3eb6023b4..f321627034 100644 --- a/packages/genesis/test/index.spec.ts +++ b/packages/genesis/test/index.spec.ts @@ 
-17,15 +17,15 @@ describe('genesis test', () => { const genesisState = getGenesis(Number(chainId)) assert.ok( genesisState !== undefined, - `network=${name} chainId=${chainId} genesis should be found` + `network=${name} chainId=${chainId} genesis should be found`, ) const stateRoot = await genGenesisStateRoot(genesisState!) assert.ok( equalsBytes(expectedRoot, stateRoot), `network=${name} chainId=${chainId} stateRoot should match expected=${bytesToHex( - expectedRoot - )} actual=${bytesToHex(stateRoot)}` + expectedRoot, + )} actual=${bytesToHex(stateRoot)}`, ) } }) diff --git a/packages/genesis/tsconfig.lint.json b/packages/genesis/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/genesis/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/rlp/.eslintrc.cjs b/packages/rlp/.eslintrc.cjs index 217a81018c..092291ee94 100644 --- a/packages/rlp/.eslintrc.cjs +++ b/packages/rlp/.eslintrc.cjs @@ -2,7 +2,14 @@ module.exports = { extends: '../../config/eslint.cjs', rules: { '@typescript-eslint/no-use-before-define': 'off', - '@typescript-eslint/no-unused-vars': 'off', - 'no-unused-vars': 'off', }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], } diff --git a/packages/rlp/CHANGELOG.md b/packages/rlp/CHANGELOG.md index 2913d88f09..089651ab8a 100644 --- a/packages/rlp/CHANGELOG.md +++ b/packages/rlp/CHANGELOG.md @@ -10,7 +10,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) ### 10-30x Decode Speedup -The `RLP.decode()` method has been optimized (thanks @wemeetagain for the contribution! ❤️) which results in a reproduceable 10-30x speedup for JS native decoding 🎉, see PR [#3243](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3243). +The `RLP.decode()` method has been optimized (thanks @wemeetagain for the contribution! ❤️) which results in a reproducible 10-30x speedup for JS native decoding 🎉, see PR [#3243](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3243). ### Self-Contained (and Working 🙂) README Examples @@ -110,7 +110,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). 
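The two CHANGELOG notes above (the decode speedup and the removal of default exports) boil down to using the named `RLP` import everywhere. A minimal, illustrative round-trip follows; the values are examples and not the benchmark inputs from PR #3243:

```ts
// Named import – the default export was removed in PR #2018
import { RLP } from '@ethereumjs/rlp'

// Encode a list of strings and decode it back (decode() is the code path
// that received the 10-30x native JS speedup)
const encoded = RLP.encode(['dog', 'god', 'cat'])
const decoded = RLP.decode(encoded) // nested structure of Uint8Arrays

console.log(encoded.length, Array.isArray(decoded)) // 13 true
```
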
diff --git a/packages/rlp/examples/simple.ts b/packages/rlp/examples/simple.ts index dcd6f7795d..2c7d5f2cb8 100644 --- a/packages/rlp/examples/simple.ts +++ b/packages/rlp/examples/simple.ts @@ -1,5 +1,5 @@ -import assert from 'assert' import { RLP } from '@ethereumjs/rlp' +import assert from 'assert' const nestedList = [[], [[]], [[], [[]]]] const encoded = RLP.encode(nestedList) diff --git a/packages/rlp/src/index.ts b/packages/rlp/src/index.ts index 084ed01399..175b6f89b4 100644 --- a/packages/rlp/src/index.ts +++ b/packages/rlp/src/index.ts @@ -98,7 +98,7 @@ export function decode(input: Input, stream = false): Uint8Array | NestedUint8Ar /** Decode an input with RLP */ function _decode(input: Uint8Array): Decoded { - let length: number, llength: number, data: Uint8Array, innerRemainder: Uint8Array, d: Decoded + let length: number, lLength: number, data: Uint8Array, innerRemainder: Uint8Array, d: Decoded const decoded = [] const firstByte = input[0] @@ -131,19 +131,19 @@ function _decode(input: Uint8Array): Decoded { } else if (firstByte <= 0xbf) { // string is greater than 55 bytes long. A single byte with the value (0xb7 plus the length of the length), // followed by the length, followed by the string - llength = firstByte - 0xb6 - if (input.length - 1 < llength) { + lLength = firstByte - 0xb6 + if (input.length - 1 < lLength) { throw new Error('invalid RLP: not enough bytes for string length') } - length = decodeLength(safeSlice(input, 1, llength)) + length = decodeLength(safeSlice(input, 1, lLength)) if (length <= 55) { throw new Error('invalid RLP: expected string length to be greater than 55') } - data = safeSlice(input, llength, length + llength) + data = safeSlice(input, lLength, length + lLength) return { data, - remainder: input.subarray(length + llength), + remainder: input.subarray(length + lLength), } } else if (firstByte <= 0xf7) { // a list between 0-55 bytes long @@ -161,17 +161,17 @@ function _decode(input: Uint8Array): Decoded { } } else { // a list over 55 bytes long - llength = firstByte - 0xf6 - length = decodeLength(safeSlice(input, 1, llength)) + lLength = firstByte - 0xf6 + length = decodeLength(safeSlice(input, 1, lLength)) if (length < 56) { throw new Error('invalid RLP: encoded list too short') } - const totalLength = llength + length + const totalLength = lLength + length if (totalLength > input.length) { throw new Error('invalid RLP: total length is larger than the data') } - innerRemainder = safeSlice(input, llength, totalLength) + innerRemainder = safeSlice(input, lLength, totalLength) while (innerRemainder.length) { d = _decode(innerRemainder) @@ -232,7 +232,6 @@ function concatBytes(...arrays: Uint8Array[]): Uint8Array { // Global symbols in both browsers and Node.js since v11 // See https://github.com/microsoft/TypeScript/issues/31535 declare const TextEncoder: any -declare const TextDecoder: any function utf8ToBytes(utf: string): Uint8Array { return new TextEncoder().encode(utf) diff --git a/packages/rlp/test/cli.spec.ts b/packages/rlp/test/cli.spec.ts index dae67540d5..afd4a85d45 100644 --- a/packages/rlp/test/cli.spec.ts +++ b/packages/rlp/test/cli.spec.ts @@ -5,7 +5,7 @@ import type { ChildProcessWithoutNullStreams } from 'child_process' export function cliRunHelper( cliArgs: string[], - onData: (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => void + onData: (message: string, child: ChildProcessWithoutNullStreams, resolve: Function) => void, ) { const file = require.resolve('../bin/rlp.cjs') const child = 
spawn(process.execPath, [file, ...cliArgs]) @@ -46,7 +46,7 @@ describe('rlp CLI', async () => { const onData = ( message: string, child: ChildProcessWithoutNullStreams, - resolve: Function + resolve: Function, ) => { assert.ok(message.includes('0x05'), 'cli correctly encoded 5') child.kill(9) diff --git a/packages/rlp/test/dataTypes.spec.ts b/packages/rlp/test/dataTypes.spec.ts index 32f8d70da6..20105c0563 100644 --- a/packages/rlp/test/dataTypes.spec.ts +++ b/packages/rlp/test/dataTypes.spec.ts @@ -11,7 +11,7 @@ describe('invalid RLPs', () => { // prettier-ignore { input: Uint8Array.from([239, 191, 189, 239, 191, 189, 239, 191, 189, 239, 191, 189, 239, 191, 189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 239, 191, 189, 29, 239, 191, 189, 77, 239, 191, 189, 239, 191, 189, 239, 191, 189, 93, 122, 239, 191, 189, 239, 191, 189, 239, 191, 189, 103, 239, 191, 189, 239, 191, 189, 239, 191, 189, 26, 239, 191, 189, 18, 69, 27, 239, 191, 189, 239, 191, 189, 116, 19, 239, 191, 189, 239, 191, 189, 66, 239, 191, 189, 64, 212, 147, 71, 239, 191, 189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 239, 191, 189, 11, 222, 155, 122, 54, 42, 194, 169, 239, 191, 189, 70, 239, 191, 189, 72, 239, 191, 189, 239, 191, 189, 54, 53, 239, 191, 189, 100, 73, 239, 191, 189, 55, 239, 191, 189, 239, 191, 189, 59, 1, 239, 191, 189, 109, 239, 191, 189, 239, 191, 189, 93, 239, 191, 189, 208, 128, 239, 191, 189, 239, 191, 189, 0, 239, 191, 189, 239, 191, 189, 239, 191, 189, 15, 66, 64, 239, 191, 189, 239, 191, 189, 239, 191, 189, 239, 191, 189, 4, 239, 191, 189, 79, 103, 239, 191, 189, 85, 239, 191, 189, 239, 191, 189, 239, 191, 189, 74, 239, 191, 189, 239, 191, 189, 239, 191, 189, 239, 191, 189, 54, 239, 191, 189, 239, 191, 189, 239, 191, 189, 239, 191, 189, 239, 191, 189, 83, 239, 191, 189, 14, 239, 191, 189, 239, 191, 189, 239, 191, 189, 4, 63, 239, 191, 189, 63, 239, 191, 189, 41, 239, 191, 189, 239, 191, 189, 239, 191, 189, 67, 28, 239, 191, 189, 239, 191, 189, 11, 239, 191, 189, 31, 239, 191, 189, 239, 191, 189, 104, 96, 100, 239, 191, 189, 239, 191, 189, 12, 239, 191, 189, 239, 191, 189, 206, 152, 239, 191, 189, 239, 191, 189, 31, 112, 111, 239, 191, 189, 239, 191, 189, 65, 239, 191, 189, 41, 239, 191, 189, 239, 191, 189, 53, 84, 11, 239, 191, 189, 239, 191, 189, 12, 102, 24, 12, 42, 105, 109, 239, 191, 189, 58, 239, 191, 189, 4, 239, 191, 189, 104, 82, 9, 239, 191, 189, 6, 66, 91, 43, 38, 102, 117, 239, 191, 189, 105, 239, 191, 189, 239, 191, 189, 239, 191, 189, 89, 127, 239, 191, 189, 114]) }, { - input: hexToBytes('efdebd'), + input: hexToBytes('efdebd'), // cspell:disable-line msg: 'invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds', }, { @@ -19,7 +19,7 @@ describe('invalid RLPs', () => { msg: 'invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds', }, { - input: hexToBytes('efdebdaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'), + input: hexToBytes('efdebdaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'), // cspell:disable-line msg: 'invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds', }, ] @@ -57,7 +57,7 @@ describe('RLP encoding (string)', () => { it('length of string >55 should return 0xb7+len(len(data)) plus len(data) plus data', () => { const encodedLongString = RLP.encode( - 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss' + 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss', // cspell:disable-line ) assert.deepEqual(72, 
encodedLongString.length) assert.deepEqual(encodedLongString[0], 184) @@ -83,7 +83,7 @@ describe('RLP encoding (list)', () => { 'dog', 'god', 'cat', - 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss', + 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss', // cspell:disable-line ] const encodedArrayOfStrings = RLP.encode(data) const str = bytesToUtf8(encodedArrayOfStrings) @@ -92,7 +92,7 @@ describe('RLP encoding (list)', () => { } // Verified with Geth's RLPDump const expected = hexToBytes( - 'f85483646f6783676f6483636174b8467a6f6f3235357a6f6f3235357a7a7a7a7a7a7a7a7a7a7a7a73737373737373737373737373737373737373737373737373737373737373737373737373737373737373737373' + 'f85483646f6783676f6483636174b8467a6f6f3235357a6f6f3235357a7a7a7a7a7a7a7a7a7a7a7a73737373737373737373737373737373737373737373737373737373737373737373737373737373737373737373', // cspell:disable-line ) assert.deepEqual(encodedArrayOfStrings, expected) }) @@ -324,7 +324,7 @@ describe('empty values', () => { describe('bad values', () => { it('wrong encoded a zero', () => { const val = hexToBytes( - 'f9005f030182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3' + 'f9005f030182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3', ) let result try { @@ -337,7 +337,7 @@ describe('bad values', () => { it('invalid length', () => { const a = hexToBytes( - 'f86081000182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3' + 'f86081000182520894b94f5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca098ff921201554726367d2be8c804a7ff89ccf285ebc57dff8ae4c44b9c19ac4aa08887321be575c8095f789dd4c743dfe42c1820f9231f98a962b210e3ac2452a3', ) let result @@ -405,11 +405,8 @@ describe('hex prefix', () => { describe('recursive typings', () => { it('should not throw compilation error', () => { - type IsType = Exclude extends never - ? Exclude extends never - ? true - : false - : false + type IsType = + Exclude extends never ? (Exclude extends never ? 
true : false) : false const assertType = (isTrue: IsType) => { return isTrue } diff --git a/packages/rlp/test/integration.spec.ts b/packages/rlp/test/integration.spec.ts index 107c0dcd47..4cb3a5186d 100644 --- a/packages/rlp/test/integration.spec.ts +++ b/packages/rlp/test/integration.spec.ts @@ -44,7 +44,7 @@ describe.skipIf(isBrowser)('CLI command', () => { assert.deepEqual(encodeResultTrimmed, out.toLowerCase(), `should pass encoding ${testName}`) } }, - { timeout: 10000 } + { timeout: 10000 }, ) }) @@ -54,9 +54,9 @@ describe.skipIf(isBrowser)('Cross-frame', () => { assert.deepEqual( vm.runInNewContext( "Array.from(RLP.encode(['dog', 'god', 'cat'])).map(n => n.toString(16).padStart(2, '0')).join('')", - { RLP } + { RLP }, ), - 'cc83646f6783676f6483636174' + 'cc83646f6783676f6483636174', ) }) }) diff --git a/packages/rlp/test/invalid.spec.ts b/packages/rlp/test/invalid.spec.ts index 06e1a18372..ed107b8026 100644 --- a/packages/rlp/test/invalid.spec.ts +++ b/packages/rlp/test/invalid.spec.ts @@ -19,7 +19,7 @@ describe('invalid tests', () => { }, undefined, undefined, - `should not decode invalid RLPs, input: ${out}` + `should not decode invalid RLPs, input: ${out}`, ) }) } @@ -27,7 +27,7 @@ describe('invalid tests', () => { it('should pass long string sanity check test', function () { // long string invalid test; string length > 55 const longBufferTest = RLP.encode( - 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss' + 'zoo255zoo255zzzzzzzzzzzzssssssssssssssssssssssssssssssssssssssssssssss', // cspell:disable-line ) // sanity checks assert.ok(longBufferTest[0] > 0xb7) @@ -40,14 +40,14 @@ describe('invalid tests', () => { }, undefined, undefined, - 'string longer than 55 bytes: should throw' + 'string longer than 55 bytes: should throw', ) }) }) // The tests below are taken from Geth // https://github.com/ethereum/go-ethereum/blob/99be62a9b16fd7b3d1e2e17f1e571d3bef34f122/rlp/decode_test.go -// Not all tests were taken; some which throw due to type errors in Geth are ran against Geth's RLPdump to +// Not all tests were taken; some which throw due to type errors in Geth are ran against Geth's RLPDump to // see if there is a decode error or not. 
In both cases, the test is converted to either reflect the // expected value, or if the test is invalid, it is added as error test case @@ -63,13 +63,13 @@ const invalidGethCases: string[] = [ 'F90000', 'F90055', 'FA0002FFFF', - 'BFFFFFFFFFFFFFFFFFFF', + 'BFFFFFFFFFFFFFFFFFFF', // cspell:disable-line 'C801', 'CD04040404FFFFFFFFFFFFFFFFFF0303', 'C40102030401', 'C4010203048180', '81', - 'BFFFFFFFFFFFFFFF', + 'BFFFFFFFFFFFFFFF', // cspell:disable-line 'C801', 'c330f9c030f93030ce3030303030303030bd303030303030', '8105', @@ -87,7 +87,7 @@ describe('invalid geth tests', () => { }, undefined, undefined, - `should throw: ${gethCase}` + `should throw: ${gethCase}`, ) }) } diff --git a/packages/rlp/test/official.spec.ts b/packages/rlp/test/official.spec.ts index df03b862e6..9f8aef85ef 100644 --- a/packages/rlp/test/official.spec.ts +++ b/packages/rlp/test/official.spec.ts @@ -171,13 +171,13 @@ describe('geth tests', () => { assert.deepEqual( JSON.stringify(arrayOutput), JSON.stringify(gethCase.value!), - `invalid output: ${gethCase.input}` + `invalid output: ${gethCase.input}`, ) } else { assert.deepEqual( bytesToHex(Uint8Array.from(output as any)), gethCase.value, - `invalid output: ${gethCase.input}` + `invalid output: ${gethCase.input}`, ) } }, `should not throw: ${gethCase.input}`) diff --git a/packages/rlp/test/utils.ts b/packages/rlp/test/utils.ts index 4c96e4f216..161f0bb006 100644 --- a/packages/rlp/test/utils.ts +++ b/packages/rlp/test/utils.ts @@ -4,7 +4,6 @@ const { hexToBytes } = utils // Global symbols in both browsers and Node.js since v11 // See https://github.com/microsoft/TypeScript/issues/31535 -declare const TextEncoder: any declare const TextDecoder: any export function bytesToUtf8(bytes: Uint8Array): string { diff --git a/packages/rlp/tsconfig.lint.json b/packages/rlp/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/rlp/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/statemanager/.eslintrc.cjs b/packages/statemanager/.eslintrc.cjs index 91c78776e6..9c5b0dcd15 100644 --- a/packages/statemanager/.eslintrc.cjs +++ b/packages/statemanager/.eslintrc.cjs @@ -5,4 +5,13 @@ module.exports = { 'no-invalid-this': 'off', 'no-restricted-syntax': 'off', }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], } diff --git a/packages/statemanager/CHANGELOG.md b/packages/statemanager/CHANGELOG.md index 9d0c5069cc..06e8da8576 100644 --- a/packages/statemanager/CHANGELOG.md +++ b/packages/statemanager/CHANGELOG.md @@ -6,7 +6,30 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 2.3.0 - 2024-03-05 +## 2.4.0 - 2024-08-15 + +### Verkle Updates + +- Various fixes for Kaustinen4 support (partial account integration, `getContractCodeSize()`, other), PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Kaustinen5 related fixes, PR [#3343](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3343) +- Kaustinen6 adjustments, `verkle-cryptography-wasm` migration, PRs [#3355](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3355) and [#3356](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3356) +- Missing beaconroot account verkle fix, PR [#3421](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3421) +- Verkle decoupling, PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) + +### Other Features + +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Other Changes + +- Modify RPCStateManager `getAccount()`, PR [#3345](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3345) + +### Bugfixes + +- Fixes an issue where under certain deployment conditions wrong storage values could be provided, PR [#3434](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3434) +- Fixes statemanager empty code bug, PR [#3483](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3483) + +## 2.3.0 - 2024-03-18 ### Full 4844 Browser Readiness @@ -52,7 +75,7 @@ Since this fits well also to be placed here relatively prominently for awareness ## 2.2.2 - 2024-02-08 -- Hotfix release moving the `@ethereumjs/verkle` dependency from a peer dependency to the main dependencis (note that this decision might be temporary) +- Hotfix release moving the `@ethereumjs/verkle` dependency from a peer dependency to the main dependencies (note that this decision might be temporary) ## 2.2.1 - 2024-02-08 @@ -99,7 +122,7 @@ This release introduces a new code cache implementation, see PR [#3022](https:// The new cache is substantially more robust towards various type of revert-based attacks and grows a more-used cache over time, since never-applied values are consecutively sorted out. -### Peformance Option to store Storage Keys with Prefix +### Performance Option to store Storage Keys with Prefix This release introduces a new option `prefixStorageTrieKeys` which triggers the underlying trie to store storage key values with a prefix based on the account address, see PR [#3023](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3023). This significantly increases performance for consecutive storage accesses for the same account on especially larger tries, since trie node accesses get noticeably faster when performed by the underlying key-value store since values are stored close to each other. @@ -139,7 +162,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. 
Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -374,7 +397,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -382,7 +405,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. 
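The concrete before/after snippet the CHANGELOG gives at this point lies outside this hunk. As an illustrative sketch (the "before" form is an assumption about the old default-export API, not quoted from this diff), the update looks roughly like:

```ts
// Before – default export plus named enums (illustrative, pre PR #2018)
// import Common, { Chain, Hardfork } from '@ethereumjs/common'

// After – named exports only
import { Common, Chain, Hardfork } from '@ethereumjs/common'

const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London })
```
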
So Common import and usage is changing from: diff --git a/packages/statemanager/README.md b/packages/statemanager/README.md index 85e194be10..e25fa65294 100644 --- a/packages/statemanager/README.md +++ b/packages/statemanager/README.md @@ -56,7 +56,7 @@ const main = async () => { console.log( `Account at address ${address.toString()} has balance ${ (await stateManager.getAccount(address))?.balance - }` + }`, ) } main() @@ -116,10 +116,10 @@ const main = async () => { const contractAddress = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const byteCode = hexToBytes('0x67ffffffffffffffff600160006000fb') const storageKey1 = hexToBytes( - '0x0000000000000000000000000000000000000000000000000000000000000001' + '0x0000000000000000000000000000000000000000000000000000000000000001', ) const storageKey2 = hexToBytes( - '0x0000000000000000000000000000000000000000000000000000000000000002' + '0x0000000000000000000000000000000000000000000000000000000000000002', ) const storageValue1 = hexToBytes('0x01') const storageValue2 = hexToBytes('0x02') @@ -137,11 +137,11 @@ const main = async () => { console.log(await partialStateManager.getContractCode(contractAddress)) // contract bytecode is not included in proof console.log( await partialStateManager.getContractStorage(contractAddress, storageKey1), - storageValue1 + storageValue1, ) // should match console.log( await partialStateManager.getContractStorage(contractAddress, storageKey2), - storageValue2 + storageValue2, ) // should match const accountFromNewSM = await partialStateManager.getAccount(contractAddress) @@ -188,7 +188,7 @@ main() ##### Instantiating the EVM -In order to have an EVM instance that supports the BLOCKHASH opcode (which requires access to block history), you must instantiate both the `RPCStateManager` and the `RpcBlockChain` and use that when initalizing your EVM instance as below: +In order to have an EVM instance that supports the BLOCKHASH opcode (which requires access to block history), you must instantiate both the `RPCStateManager` and the `RpcBlockChain` and use that when initializing your EVM instance as below: ```ts // ./examples/evm.ts diff --git a/packages/statemanager/examples/basicUsage.ts b/packages/statemanager/examples/basicUsage.ts index d95f08bd73..462e28b8cd 100644 --- a/packages/statemanager/examples/basicUsage.ts +++ b/packages/statemanager/examples/basicUsage.ts @@ -1,6 +1,5 @@ -import { Account, Address } from '@ethereumjs/util' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { hexToBytes } from '@ethereumjs/util' +import { Account, Address, hexToBytes } from '@ethereumjs/util' const main = async () => { const stateManager = new DefaultStateManager() @@ -15,7 +14,7 @@ const main = async () => { console.log( `Account at address ${address.toString()} has balance ${ (await stateManager.getAccount(address))?.balance - }` + }`, ) } -main() +void main() diff --git a/packages/statemanager/examples/evm.ts b/packages/statemanager/examples/evm.ts index e9f98df61a..5e767aa137 100644 --- a/packages/statemanager/examples/evm.ts +++ b/packages/statemanager/examples/evm.ts @@ -1,5 +1,5 @@ -import { RPCStateManager, RPCBlockChain } from '@ethereumjs/statemanager' -import { EVM } from '@ethereumjs/evm' +import { createEVM } from '@ethereumjs/evm' +import { RPCBlockChain, RPCStateManager } from '@ethereumjs/statemanager' const main = async () => { try { @@ -7,9 +7,9 @@ const main = async () => { const blockchain = new RPCBlockChain(provider) const blockTag = 1n const state = new 
RPCStateManager({ provider, blockTag }) - const evm = await EVM.create({ blockchain, stateManager: state }) // note that evm is ready to run BLOCKHASH opcodes (over RPC) + const evm = await createEVM({ blockchain, stateManager: state }) // note that evm is ready to run BLOCKHASH opcodes (over RPC) } catch (e) { console.log(e.message) // fetch would fail because provider url is not real. please replace provider with a valid rpc url string. } } -main() +void main() diff --git a/packages/statemanager/examples/fromProofInstantiation.ts b/packages/statemanager/examples/fromProofInstantiation.ts index df607aef58..2e1117f9c8 100644 --- a/packages/statemanager/examples/fromProofInstantiation.ts +++ b/packages/statemanager/examples/fromProofInstantiation.ts @@ -1,6 +1,5 @@ -import { Address } from '@ethereumjs/util' import { DefaultStateManager } from '@ethereumjs/statemanager' -import { hexToBytes } from '@ethereumjs/util' +import { Address, hexToBytes } from '@ethereumjs/util' const main = async () => { // setup `stateManager` with some existing address @@ -8,17 +7,17 @@ const main = async () => { const contractAddress = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const byteCode = hexToBytes('0x67ffffffffffffffff600160006000fb') const storageKey1 = hexToBytes( - '0x0000000000000000000000000000000000000000000000000000000000000001' + '0x0000000000000000000000000000000000000000000000000000000000000001', ) const storageKey2 = hexToBytes( - '0x0000000000000000000000000000000000000000000000000000000000000002' + '0x0000000000000000000000000000000000000000000000000000000000000002', ) const storageValue1 = hexToBytes('0x01') const storageValue2 = hexToBytes('0x02') - await stateManager.putContractCode(contractAddress, byteCode) - await stateManager.putContractStorage(contractAddress, storageKey1, storageValue1) - await stateManager.putContractStorage(contractAddress, storageKey2, storageValue2) + await stateManager.putCode(contractAddress, byteCode) + await stateManager.putStorage(contractAddress, storageKey1, storageValue1) + await stateManager.putStorage(contractAddress, storageKey2, storageValue2) const proof = await stateManager.getProof(contractAddress) const proofWithStorage = await stateManager.getProof(contractAddress, [storageKey1, storageKey2]) @@ -26,23 +25,17 @@ const main = async () => { // To add more proof data, use `addProofData` await partialStateManager.addProofData(proofWithStorage) - console.log(await partialStateManager.getContractCode(contractAddress)) // contract bytecode is not included in proof - console.log( - await partialStateManager.getContractStorage(contractAddress, storageKey1), - storageValue1 - ) // should match - console.log( - await partialStateManager.getContractStorage(contractAddress, storageKey2), - storageValue2 - ) // should match + console.log(await partialStateManager.getCode(contractAddress)) // contract bytecode is not included in proof + console.log(await partialStateManager.getStorage(contractAddress, storageKey1), storageValue1) // should match + console.log(await partialStateManager.getStorage(contractAddress, storageKey2), storageValue2) // should match const accountFromNewSM = await partialStateManager.getAccount(contractAddress) const accountFromOldSM = await stateManager.getAccount(contractAddress) console.log(accountFromNewSM, accountFromOldSM) // should match - const slot1FromNewSM = await stateManager.getContractStorage(contractAddress, storageKey1) - const slot2FromNewSM = await 
stateManager.getContractStorage(contractAddress, storageKey2) + const slot1FromNewSM = await stateManager.getStorage(contractAddress, storageKey1) + const slot2FromNewSM = await stateManager.getStorage(contractAddress, storageKey2) console.log(slot1FromNewSM, storageValue1) // should match console.log(slot2FromNewSM, storageValue2) // should match } -main() +void main() diff --git a/packages/statemanager/examples/rpcStateManager.ts b/packages/statemanager/examples/rpcStateManager.ts index 9124f25843..24a2a2217c 100644 --- a/packages/statemanager/examples/rpcStateManager.ts +++ b/packages/statemanager/examples/rpcStateManager.ts @@ -1,15 +1,15 @@ -import { Address } from '@ethereumjs/util' import { RPCStateManager } from '@ethereumjs/statemanager' +import { createAddressFromString } from '@ethereumjs/util' const main = async () => { try { const provider = 'https://path.to.my.provider.com' const stateManager = new RPCStateManager({ provider, blockTag: 500000n }) - const vitalikDotEth = Address.fromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') + const vitalikDotEth = createAddressFromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') const account = await stateManager.getAccount(vitalikDotEth) console.log('Vitalik has a current ETH balance of ', account?.balance) } catch (e) { console.log(e.message) // fetch fails because provider url is not real. please replace provider with a valid rpc url string. } } -main() +void main() diff --git a/packages/statemanager/examples/simple.ts b/packages/statemanager/examples/simple.ts index b7618b4127..5686606ba8 100644 --- a/packages/statemanager/examples/simple.ts +++ b/packages/statemanager/examples/simple.ts @@ -1,12 +1,13 @@ +import { Account, createAddressFromPrivateKey, randomBytes } from '@ethereumjs/util' + import { SimpleStateManager } from '../src/index.js' -import { Account, Address, randomBytes } from '@ethereumjs/util' const main = async () => { const sm = new SimpleStateManager() - const address = Address.fromPrivateKey(randomBytes(32)) + const address = createAddressFromPrivateKey(randomBytes(32)) const account = new Account(0n, 0xfffffn) await sm.putAccount(address, account) console.log(await sm.getAccount(address)) } -main() +void main() diff --git a/packages/statemanager/package.json b/packages/statemanager/package.json index 051d8e8239..11f5aa9b53 100644 --- a/packages/statemanager/package.json +++ b/packages/statemanager/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/statemanager", - "version": "2.3.0", + "version": "2.4.0", "description": "An Ethereum statemanager implementation", "keywords": [ "ethereum", @@ -50,18 +50,18 @@ "tsc": "../../config/cli/ts-compile.sh" }, "dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/trie": "^6.2.0", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/trie": "^6.2.1", + "@ethereumjs/util": "^9.1.0", + "@js-sdsl/ordered-map": "^4.4.2", "debug": "^4.3.3", "ethereum-cryptography": "^2.2.1", - "js-sdsl": "^4.1.4", "lru-cache": "10.1.0" }, "devDependencies": { - "@ethereumjs/block": "^5.2.0", - "@ethereumjs/genesis": "^0.2.2", + "@ethereumjs/block": "^5.3.0", + "@ethereumjs/genesis": "^0.2.3", "@types/debug": "^4.1.9", "rustbn-wasm": "^0.4.0", "verkle-cryptography-wasm": "^0.4.5" diff --git a/packages/statemanager/src/accessWitness.ts b/packages/statemanager/src/accessWitness.ts index db20508298..4acd61670f 100644 --- a/packages/statemanager/src/accessWitness.ts +++ b/packages/statemanager/src/accessWitness.ts @@ -73,7 
+73,7 @@ export class AccessWitness implements AccessWitnessInterface { verkleCrypto?: VerkleCrypto stems?: Map chunks?: Map - } = {} + } = {}, ) { if (opts.verkleCrypto === undefined) { throw new Error('verkle crypto required') @@ -161,7 +161,7 @@ export class AccessWitness implements AccessWitnessInterface { touchCodeChunksRangeOnWriteAndChargeGas( contact: Address, startPc: number, - endPc: number + endPc: number, ): bigint { let gas = BIGINT_0 for (let chunkNum = Math.floor(startPc / 31); chunkNum <= Math.floor(endPc / 31); chunkNum++) { @@ -174,7 +174,7 @@ export class AccessWitness implements AccessWitnessInterface { touchAddressOnWriteAndComputeGas( address: Address, treeIndex: number | bigint, - subIndex: number | Uint8Array + subIndex: number | Uint8Array, ): bigint { return this.touchAddressAndChargeGas(address, treeIndex, subIndex, { isWrite: true }) } @@ -182,7 +182,7 @@ export class AccessWitness implements AccessWitnessInterface { touchAddressOnReadAndComputeGas( address: Address, treeIndex: number | bigint, - subIndex: number | Uint8Array + subIndex: number | Uint8Array, ): bigint { return this.touchAddressAndChargeGas(address, treeIndex, subIndex, { isWrite: false }) } @@ -191,7 +191,7 @@ export class AccessWitness implements AccessWitnessInterface { address: Address, treeIndex: number | bigint, subIndex: number | Uint8Array, - { isWrite }: { isWrite?: boolean } + { isWrite }: { isWrite?: boolean }, ): bigint { let gas = BIGINT_0 @@ -199,7 +199,7 @@ export class AccessWitness implements AccessWitnessInterface { address, treeIndex, subIndex, - { isWrite } + { isWrite }, ) if (stemRead === true) { @@ -220,7 +220,7 @@ export class AccessWitness implements AccessWitnessInterface { } debug( - `touchAddressAndChargeGas=${gas} address=${address} treeIndex=${treeIndex} subIndex=${subIndex}` + `touchAddressAndChargeGas=${gas} address=${address} treeIndex=${treeIndex} subIndex=${subIndex}`, ) return gas @@ -230,7 +230,7 @@ export class AccessWitness implements AccessWitnessInterface { address: Address, treeIndex: number | bigint, subIndex: number | Uint8Array, - { isWrite }: { isWrite?: boolean } = {} + { isWrite }: { isWrite?: boolean } = {}, ): AccessEventFlags { let stemRead = false, stemWrite = false, @@ -251,7 +251,7 @@ export class AccessWitness implements AccessWitnessInterface { const accessedChunkKey = getVerkleKey( accessedStemKey, - typeof subIndex === 'number' ? intToBytes(subIndex) : subIndex + typeof subIndex === 'number' ? 
intToBytes(subIndex) : subIndex, ) const accessedChunkKeyHex = bytesToHex(accessedChunkKey) let accessedChunk = this.chunks.get(accessedChunkKeyHex) @@ -276,7 +276,7 @@ export class AccessWitness implements AccessWitnessInterface { } debug( - `${accessedChunkKeyHex}: isWrite=${isWrite} for steamRead=${stemRead} stemWrite=${stemWrite} chunkRead=${chunkRead} chunkWrite=${chunkWrite} chunkFill=${chunkFill}` + `${accessedChunkKeyHex}: isWrite=${isWrite} for steamRead=${stemRead} stemWrite=${stemWrite} chunkRead=${chunkRead} chunkWrite=${chunkWrite} chunkFill=${chunkFill}`, ) return { stemRead, stemWrite, chunkRead, chunkWrite, chunkFill } } @@ -364,7 +364,7 @@ export function decodeAccessedState(treeIndex: number | bigint, chunkIndex: numb return { type: AccessedStateType.Storage, slot } } else { throw Error( - `Invalid treeIndex=${treeIndex} chunkIndex=${chunkIndex} for verkle tree access` + `Invalid treeIndex=${treeIndex} chunkIndex=${chunkIndex} for verkle tree access`, ) } } diff --git a/packages/statemanager/src/cache/account.ts b/packages/statemanager/src/cache/account.ts index c78ca1b41f..48548209d7 100644 --- a/packages/statemanager/src/cache/account.ts +++ b/packages/statemanager/src/cache/account.ts @@ -1,6 +1,6 @@ import { bytesToUnprefixedHex } from '@ethereumjs/util' +import { OrderedMap } from '@js-sdsl/ordered-map' import debugDefault from 'debug' -import { OrderedMap } from 'js-sdsl' import { LRUCache } from 'lru-cache' import { Cache } from './cache.js' @@ -68,14 +68,14 @@ export class AccountCache extends Cache { put( address: Address, account: Account | undefined, - couldBeParitalAccount: boolean = false + couldBePartialAccount: boolean = false, ): void { const addressHex = bytesToUnprefixedHex(address.bytes) this._saveCachePreState(addressHex) const elem = { accountRLP: account !== undefined - ? couldBeParitalAccount + ? couldBePartialAccount ? account.serializeWithPartialInfo() : account.serialize() : undefined, @@ -135,7 +135,7 @@ export class AccountCache extends Cache { }) } - this._stats.dels += 1 + this._stats.deletions += 1 } /** @@ -252,7 +252,7 @@ export class AccountCache extends Cache { reads: 0, hits: 0, writes: 0, - dels: 0, + deletions: 0, } } return stats diff --git a/packages/statemanager/src/cache/cache.ts b/packages/statemanager/src/cache/cache.ts index 3be3a71215..ec98619df1 100644 --- a/packages/statemanager/src/cache/cache.ts +++ b/packages/statemanager/src/cache/cache.ts @@ -12,7 +12,7 @@ export class Cache { reads: 0, hits: 0, writes: 0, - dels: 0, + deletions: 0, } /** @@ -29,7 +29,7 @@ export class Cache { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? 
false) : false this._debug = debugDefault('statemanager:cache') } diff --git a/packages/statemanager/src/cache/caches.ts b/packages/statemanager/src/cache/caches.ts new file mode 100644 index 0000000000..fdfb47c3a1 --- /dev/null +++ b/packages/statemanager/src/cache/caches.ts @@ -0,0 +1,126 @@ +import { AccountCache } from './account.js' +import { CodeCache } from './code.js' +import { StorageCache } from './storage.js' +import { CacheType, type CachesStateManagerOpts } from './types.js' + +import type { CacheOpts } from './types.js' +import type { Address } from '@ethereumjs/util' + +export class Caches { + account?: AccountCache + code?: CodeCache + storage?: StorageCache + + settings: Record<'account' | 'code' | 'storage', CacheOpts> + + constructor(opts: CachesStateManagerOpts = {}) { + const accountSettings = { + type: opts.account?.type ?? CacheType.ORDERED_MAP, + size: opts.account?.size ?? 100000, + } + + const codeSettings = { + type: opts.code?.type ?? CacheType.ORDERED_MAP, + size: opts.code?.size ?? 20000, + } + + const storageSettings = { + type: opts.storage?.type ?? CacheType.ORDERED_MAP, + size: opts.storage?.size ?? 20000, + } + + this.settings = { + account: accountSettings, + code: codeSettings, + storage: storageSettings, + } + + if (this.settings.account.size !== 0) { + this.account = new AccountCache({ + size: this.settings.account.size, + type: this.settings.account.type, + }) + } + + if (this.settings.code.size !== 0) { + this.code = new CodeCache({ + size: this.settings.code.size, + type: this.settings.code.type, + }) + } + + if (this.settings.storage.size !== 0) { + this.storage = new StorageCache({ + size: this.settings.storage.size, + type: this.settings.storage.type, + }) + } + } + + checkpoint() { + this.account?.checkpoint() + this.storage?.checkpoint() + this.code?.checkpoint() + } + + clear() { + this.account?.clear() + this.storage?.clear() + this.code?.clear() + } + + commit() { + this.account?.commit() + this.storage?.commit() + this.code?.commit() + } + + deleteAccount(address: Address) { + this.code?.del(address) + this.account?.del(address) + this.storage?.clearStorage(address) + } + + shallowCopy(downlevelCaches: boolean) { + let cacheOptions: CachesStateManagerOpts | undefined + + // Account cache options + if (this.settings.account.size !== 0) { + cacheOptions = { + account: downlevelCaches + ? { size: this.settings.account.size, type: CacheType.ORDERED_MAP } + : this.settings.account, + } + } + + // Storage cache options + if (this.settings.storage.size !== 0) { + cacheOptions = { + ...cacheOptions, + storage: downlevelCaches + ? { size: this.settings.storage.size, type: CacheType.ORDERED_MAP } + : this.settings.storage, + } + } + + // Code cache options + if (this.settings.code.size !== 0) { + cacheOptions = { + ...cacheOptions, + code: downlevelCaches + ? 
{ size: this.settings.code.size, type: CacheType.ORDERED_MAP } + : this.settings.code, + } + } + + if (cacheOptions !== undefined) { + return new Caches(cacheOptions) + } else return undefined + } + + revert() { + this.account?.revert() + this.storage?.revert() + this.code?.revert() + } +} diff --git a/packages/statemanager/src/cache/code.ts b/packages/statemanager/src/cache/code.ts index 5579da224d..da05629f47 100644 --- a/packages/statemanager/src/cache/code.ts +++ b/packages/statemanager/src/cache/code.ts @@ -1,6 +1,6 @@ import { bytesToUnprefixedHex } from '@ethereumjs/util' +import { OrderedMap } from '@js-sdsl/ordered-map' import debugDefault from 'debug' -import { OrderedMap } from 'js-sdsl' import { LRUCache } from 'lru-cache' import { Cache } from './cache.js' @@ -133,7 +133,7 @@ export class CodeCache extends Cache { }) } - this._stats.dels += 1 + this._stats.deletions += 1 } /** @@ -252,7 +252,7 @@ export class CodeCache extends Cache { reads: 0, hits: 0, writes: 0, - dels: 0, + deletions: 0, } } return stats diff --git a/packages/statemanager/src/cache/index.ts b/packages/statemanager/src/cache/index.ts index e19c3405e7..799b350c1b 100644 --- a/packages/statemanager/src/cache/index.ts +++ b/packages/statemanager/src/cache/index.ts @@ -1,4 +1,5 @@ export * from './account.js' +export * from './caches.js' export * from './code.js' export * from './originalStorageCache.js' export * from './storage.js' diff --git a/packages/statemanager/src/cache/originalStorageCache.ts b/packages/statemanager/src/cache/originalStorageCache.ts index dd837be4e3..b7093ac9bd 100644 --- a/packages/statemanager/src/cache/originalStorageCache.ts +++ b/packages/statemanager/src/cache/originalStorageCache.ts @@ -2,23 +2,23 @@ import { bytesToUnprefixedHex } from '@ethereumjs/util' import type { Address } from '@ethereumjs/util' -type getContractStorage = (address: Address, key: Uint8Array) => Promise +type getStorage = (address: Address, key: Uint8Array) => Promise /** * Helper class to cache original storage values (so values already being present in * the pre-state of a call), mainly for correct gas cost calculation in EVM/VM. * - * TODO: Usage of this class is very implicit through the injected `getContractStorage()` + * TODO: Usage of this class is very implicit through the injected `getStorage()` * method bound to the calling state manager. It should be examined if there are alternative * designs being more transparent and direct along the next breaking release round. 
* */ export class OriginalStorageCache { private map: Map> - private getContractStorage: getContractStorage - constructor(getContractStorage: getContractStorage) { + private getStorage: getStorage + constructor(getStorage: getStorage) { this.map = new Map() - this.getContractStorage = getContractStorage + this.getStorage = getStorage } async get(address: Address, key: Uint8Array): Promise { @@ -31,7 +31,7 @@ export class OriginalStorageCache { return value } } - const value = await this.getContractStorage(address, key) + const value = await this.getStorage(address, key) this.put(address, key, value) return value } diff --git a/packages/statemanager/src/cache/storage.ts b/packages/statemanager/src/cache/storage.ts index eb8d52bdd9..d8dcf1759e 100644 --- a/packages/statemanager/src/cache/storage.ts +++ b/packages/statemanager/src/cache/storage.ts @@ -1,6 +1,6 @@ import { bytesToUnprefixedHex, hexToBytes } from '@ethereumjs/util' +import { OrderedMap } from '@js-sdsl/ordered-map' import debugDefault from 'debug' -import { OrderedMap } from 'js-sdsl' import { LRUCache } from 'lru-cache' import { Cache } from './cache.js' @@ -88,7 +88,7 @@ export class StorageCache extends Cache { this._debug( `Put storage for ${addressHex}: ${keyHex} -> ${ value !== undefined ? bytesToUnprefixedHex(value) : '' - }` + }`, ) } if (this._lruCache) { @@ -165,14 +165,14 @@ export class StorageCache extends Cache { this._orderedMapCache!.setElement(addressHex, storageMap) } - this._stats.dels += 1 + this._stats.deletions += 1 } /** * Deletes all storage slots for address from the cache * @param address */ - clearContractStorage(address: Address): void { + clearStorage(address: Address): void { const addressHex = bytesToUnprefixedHex(address.bytes) if (this._lruCache) { this._lruCache!.set(addressHex, new Map()) @@ -274,7 +274,7 @@ export class StorageCache extends Cache { // Go through diffMap from the pre-commit checkpoint height. // 1. Iterate through all state pre states // 2. If state pre-state is not in the new (lower) height diff map, take pre commit pre state value - // 3. If state is in new map, take this one, since this superseeds subsequent changes + // 3. If state is in new map, take this one, since this supersedes subsequent changes for (const entry of higherHeightDiffMap.entries()) { const addressHex = entry[0] const higherHeightStorageDiff = entry[1] @@ -329,7 +329,7 @@ export class StorageCache extends Cache { reads: 0, hits: 0, writes: 0, - dels: 0, + deletions: 0, } } return stats diff --git a/packages/statemanager/src/cache/types.ts b/packages/statemanager/src/cache/types.ts index 0e5b3d40f6..e0819913e0 100644 --- a/packages/statemanager/src/cache/types.ts +++ b/packages/statemanager/src/cache/types.ts @@ -4,6 +4,33 @@ export enum CacheType { } export interface CacheOpts { + /** + * Size of the cache (only for LRU cache) + * + * Default: 100000 (account cache) / 20000 (storage cache) / 20000 (code cache) + * + * Note: the cache/trie interplay mechanism is designed in a way that + * the theoretical number of max modified accounts between two flush operations + * should be smaller than the cache size, otherwise the cache will "forget" the + * old modifications resulting in an incomplete set of trie-flushed accounts. + */ size: number + /** + * Cache type to use. + * + * Available options: + * + * ORDERED_MAP: Cache with no fixed upper bound and dynamic allocation, + * use for dynamic setups like testing or similar. + * + * LRU: LRU cache with pre-allocation of memory and a fixed size. 
+ * Use for larger and more persistent caches. + */ type: CacheType } + +export interface CachesStateManagerOpts { + account?: Partial + code?: Partial + storage?: Partial +} diff --git a/packages/statemanager/src/index.ts b/packages/statemanager/src/index.ts index d6d3d9c56a..b0059ad440 100644 --- a/packages/statemanager/src/index.ts +++ b/packages/statemanager/src/index.ts @@ -4,3 +4,4 @@ export * from './rpcStateManager.js' export * from './simpleStateManager.js' export * from './statelessVerkleStateManager.js' export * from './stateManager.js' +export * from './types.js' diff --git a/packages/statemanager/src/rpcStateManager.ts b/packages/statemanager/src/rpcStateManager.ts index 2935088ced..58c4b7f8a0 100644 --- a/packages/statemanager/src/rpcStateManager.ts +++ b/packages/statemanager/src/rpcStateManager.ts @@ -1,10 +1,12 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' -import { Trie } from '@ethereumjs/trie' +import { verifyTrieProof } from '@ethereumjs/trie' import { Account, bigIntToHex, bytesToHex, + createAccount, + createAccountFromRLP, equalsBytes, fetchFromProvider, hexToBytes, @@ -14,36 +16,20 @@ import { import debugDefault from 'debug' import { keccak256 } from 'ethereum-cryptography/keccak.js' -import { AccountCache, CacheType, OriginalStorageCache, StorageCache } from './cache/index.js' +import { Caches, OriginalStorageCache } from './cache/index.js' +import { modifyAccountFields } from './util.js' -import type { Proof } from './index.js' -import type { - AccountFields, - EVMStateManagerInterface, - StorageDump, - StorageRange, -} from '@ethereumjs/common' +import type { Proof, RPCStateManagerOpts } from './index.js' +import type { AccountFields, StateManagerInterface, StorageDump } from '@ethereumjs/common' import type { Address, PrefixedHexString } from '@ethereumjs/util' import type { Debugger } from 'debug' -export interface RPCStateManagerOpts { - provider: string - blockTag: bigint | 'earliest' - - /** - * The common to use - */ - common?: Common -} - const KECCAK256_RLP_EMPTY_ACCOUNT = RLP.encode(new Account().serialize()).slice(2) -export class RPCStateManager implements EVMStateManagerInterface { +export class RPCStateManager implements StateManagerInterface { protected _provider: string - protected _contractCache: Map - protected _storageCache: StorageCache + protected _caches: Caches protected _blockTag: string - protected _accountCache: AccountCache originalStorageCache: OriginalStorageCache protected _debug: Debugger protected DEBUG: boolean @@ -54,7 +40,7 @@ export class RPCStateManager implements EVMStateManagerInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this._debug = debugDefault('statemanager:rpcStateManager') if (typeof opts.provider === 'string' && opts.provider.startsWith('http')) { @@ -65,12 +51,10 @@ export class RPCStateManager implements EVMStateManagerInterface { this._blockTag = opts.blockTag === 'earliest' ? 
opts.blockTag : bigIntToHex(opts.blockTag) - this._contractCache = new Map() - this._storageCache = new StorageCache({ size: 100000, type: CacheType.ORDERED_MAP }) - this._accountCache = new AccountCache({ size: 100000, type: CacheType.ORDERED_MAP }) + this._caches = new Caches({ storage: { size: 100000 }, code: { size: 100000 } }) - this.originalStorageCache = new OriginalStorageCache(this.getContractStorage.bind(this)) - this.common = opts.common ?? new Common({ chain: Chain.Mainnet }) + this.originalStorageCache = new OriginalStorageCache(this.getStorage.bind(this)) + this.common = opts.common ?? new Common({ chain: Mainnet }) this.keccakFunction = opts.common?.customCrypto.keccak256 ?? keccak256 } @@ -84,15 +68,8 @@ export class RPCStateManager implements EVMStateManagerInterface { provider: this._provider, blockTag: BigInt(this._blockTag), }) - newState._contractCache = new Map(this._contractCache) - newState._storageCache = new StorageCache({ - size: 100000, - type: CacheType.ORDERED_MAP, - }) - newState._accountCache = new AccountCache({ - size: 100000, - type: CacheType.ORDERED_MAP, - }) + newState._caches = new Caches({ storage: { size: 100000 } }) + return newState } @@ -112,9 +89,7 @@ export class RPCStateManager implements EVMStateManagerInterface { * initially be retrieved from the provider */ clearCaches(): void { - this._contractCache.clear() - this._storageCache.clear() - this._accountCache.clear() + this._caches.clear() } /** @@ -123,20 +98,20 @@ export class RPCStateManager implements EVMStateManagerInterface { * @returns {Promise} - Resolves with the code corresponding to the provided address. * Returns an empty `Uint8Array` if the account has no associated code. */ - async getContractCode(address: Address): Promise { - let codeBytes = this._contractCache.get(address.toString()) + async getCode(address: Address): Promise { + let codeBytes = this._caches.code?.get(address)?.code if (codeBytes !== undefined) return codeBytes const code = await fetchFromProvider(this._provider, { method: 'eth_getCode', params: [address.toString(), this._blockTag], }) codeBytes = toBytes(code) - this._contractCache.set(address.toString(), codeBytes) + this._caches.code?.put(address, codeBytes) return codeBytes } - async getContractCodeSize(address: Address): Promise { - const contractCode = await this.getContractCode(address) + async getCodeSize(address: Address): Promise { + const contractCode = await this.getCode(address) return contractCode.length } @@ -146,9 +121,9 @@ export class RPCStateManager implements EVMStateManagerInterface { * @param address - Address of the `account` to add the `code` for * @param value - The value of the `code` */ - async putContractCode(address: Address, value: Uint8Array): Promise { + async putCode(address: Address, value: Uint8Array): Promise { // Store contract code in the cache - this._contractCache.set(address.toString(), value) + this._caches.code?.put(address, value) } /** @@ -160,13 +135,13 @@ export class RPCStateManager implements EVMStateManagerInterface { * corresponding to the provided address at the provided key. * If this does not exist an empty `Uint8Array` is returned. 
*/ - async getContractStorage(address: Address, key: Uint8Array): Promise { + async getStorage(address: Address, key: Uint8Array): Promise { // Check storage slot in cache if (key.length !== 32) { throw new Error('Storage key must be 32 bytes long') } - let value = this._storageCache!.get(address, key) + let value = this._caches.storage?.get(address, key) if (value !== undefined) { return value } @@ -178,7 +153,7 @@ export class RPCStateManager implements EVMStateManagerInterface { }) value = toBytes(storage) - await this.putContractStorage(address, key, value) + await this.putStorage(address, key, value) return value } @@ -191,16 +166,16 @@ export class RPCStateManager implements EVMStateManagerInterface { * Cannot be more than 32 bytes. Leading zeros are stripped. * If it is empty or filled with zeros, deletes the value. */ - async putContractStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { - this._storageCache.put(address, key, value) + async putStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { + this._caches.storage?.put(address, key, value) } /** * Clears all storage entries for the account corresponding to `address`. * @param address - Address to clear the storage of */ - async clearContractStorage(address: Address): Promise { - this._storageCache.clearContractStorage(address) + async clearStorage(address: Address): Promise { + this._caches.storage?.clearStorage(address) } /** @@ -211,7 +186,7 @@ export class RPCStateManager implements EVMStateManagerInterface { * Both are represented as `0x` prefixed hex strings. */ dumpStorage(address: Address): Promise { - const storageMap = this._storageCache.dump(address) + const storageMap = this._caches.storage?.dump(address) const dump: StorageDump = {} if (storageMap !== undefined) { for (const slot of storageMap) { @@ -221,11 +196,6 @@ export class RPCStateManager implements EVMStateManagerInterface { return Promise.resolve(dump) } - dumpStorageRange(_address: Address, _startKey: bigint, _limit: number): Promise { - // TODO: Implement. - return Promise.reject() - } - /** * Checks if an `account` exists at `address` * @param address - Address of the `account` to check @@ -233,7 +203,7 @@ export class RPCStateManager implements EVMStateManagerInterface { async accountExists(address: Address): Promise { if (this.DEBUG) this._debug?.(`verify if ${address.toString()} exists`) - const localAccount = this._accountCache.get(address) + const localAccount = this._caches.account?.get(address) if (localAccount !== undefined) return true // Get merkle proof for `address` from provider const proof = await fetchFromProvider(this._provider, { @@ -243,7 +213,7 @@ export class RPCStateManager implements EVMStateManagerInterface { const proofBuf = proof.accountProof.map((proofNode: PrefixedHexString) => toBytes(proofNode)) - const verified = await Trie.verifyProof(address.bytes, proofBuf, { + const verified = await verifyTrieProof(address.bytes, proofBuf, { useKeyHashing: true, }) // if not verified (i.e. verifyProof returns null), account does not exist @@ -255,11 +225,9 @@ export class RPCStateManager implements EVMStateManagerInterface { * @param address - Address of the `account` to get */ async getAccount(address: Address): Promise { - const elem = this._accountCache?.get(address) + const elem = this._caches.account?.get(address) if (elem !== undefined) { - return elem.accountRLP !== undefined - ? Account.fromRlpSerializedAccount(elem.accountRLP) - : undefined + return elem.accountRLP !== undefined ? 
createAccountFromRLP(elem.accountRLP) : undefined } const accountFromProvider = await this.getAccountFromProvider(address) @@ -267,9 +235,9 @@ export class RPCStateManager implements EVMStateManagerInterface { equalsBytes(accountFromProvider.codeHash, new Uint8Array(32).fill(0)) || equalsBytes(accountFromProvider.serialize(), KECCAK256_RLP_EMPTY_ACCOUNT) ? undefined - : Account.fromRlpSerializedAccount(accountFromProvider.serialize()) + : createAccountFromRLP(accountFromProvider.serialize()) - this._accountCache?.put(address, account) + this._caches.account?.put(address, account) return account } @@ -285,7 +253,7 @@ export class RPCStateManager implements EVMStateManagerInterface { method: 'eth_getProof', params: [address.toString(), [] as any, this._blockTag], }) - const account = Account.fromAccountData({ + const account = createAccount({ balance: BigInt(accountData.balance), nonce: BigInt(accountData.nonce), codeHash: toBytes(accountData.codeHash), @@ -306,13 +274,13 @@ export class RPCStateManager implements EVMStateManagerInterface { account?.balance } contract=${account && account.isContract() ? 'yes' : 'no'} empty=${ account && account.isEmpty() ? 'yes' : 'no' - }` + }`, ) } if (account !== undefined) { - this._accountCache!.put(address, account) + this._caches.account!.put(address, account) } else { - this._accountCache!.del(address) + this._caches.account!.del(address) } } @@ -333,19 +301,11 @@ export class RPCStateManager implements EVMStateManagerInterface { if (k === 'nonce') return v.toString() return v }, - 2 - ) + 2, + ), ) } - let account = await this.getAccount(address) - if (!account) { - account = new Account() - } - account.nonce = accountFields.nonce ?? account.nonce - account.balance = accountFields.balance ?? account.balance - account.storageRoot = accountFields.storageRoot ?? account.storageRoot - account.codeHash = accountFields.codeHash ?? account.codeHash - await this.putAccount(address, account) + await modifyAccountFields(this, address, accountFields) } /** @@ -356,7 +316,7 @@ export class RPCStateManager implements EVMStateManagerInterface { if (this.DEBUG) { this._debug(`deleting account corresponding to ${address.toString()}`) } - this._accountCache.del(address) + this._caches.account?.del(address) } /** @@ -369,11 +329,7 @@ export class RPCStateManager implements EVMStateManagerInterface { if (this.DEBUG) this._debug(`retrieving proof from provider for ${address.toString()}`) const proof = await fetchFromProvider(this._provider, { method: 'eth_getProof', - params: [ - address.toString(), - [storageSlots.map((slot) => bytesToHex(slot))], - this._blockTag, - ] as any, + params: [address.toString(), storageSlots.map(bytesToHex), this._blockTag], }) return proof @@ -393,12 +349,9 @@ export class RPCStateManager implements EVMStateManagerInterface { * Checkpoints the current state of the StateManager instance. * State changes that follow can then be committed by calling * `commit` or `reverted` by calling rollback. - * - * Partial implementation, called from the subclass. */ async checkpoint(): Promise { - this._accountCache.checkpoint() - this._storageCache.checkpoint() + this._caches.checkpoint() } /** @@ -409,7 +362,7 @@ export class RPCStateManager implements EVMStateManagerInterface { */ async commit(): Promise { // setup cache checkpointing - this._accountCache.commit() + this._caches.account?.commit() } /** @@ -419,13 +372,11 @@ export class RPCStateManager implements EVMStateManagerInterface { * Partial implementation , called from the subclass. 
*/ async revert(): Promise { - this._accountCache.revert() - this._storageCache.revert() - this._contractCache.clear() + this._caches.revert() } async flush(): Promise { - this._accountCache.flush() + this._caches.account?.flush() } /** @@ -446,10 +397,6 @@ export class RPCStateManager implements EVMStateManagerInterface { hasStateRoot = () => { throw new Error('function not implemented') } - - generateCanonicalGenesis(_initState: any): Promise { - return Promise.resolve() - } } export class RPCBlockChain { diff --git a/packages/statemanager/src/simpleStateManager.ts b/packages/statemanager/src/simpleStateManager.ts index f9976f3f71..30bf009641 100644 --- a/packages/statemanager/src/simpleStateManager.ts +++ b/packages/statemanager/src/simpleStateManager.ts @@ -2,27 +2,12 @@ import { Account, bytesToHex } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' import { OriginalStorageCache } from './cache/originalStorageCache.js' +import { modifyAccountFields } from './util.js' -import type { - AccountFields, - Common, - EVMStateManagerInterface, - Proof, - StorageDump, - StorageRange, -} from '@ethereumjs/common' +import type { SimpleStateManagerOpts } from './index.js' +import type { AccountFields, Common, StateManagerInterface } from '@ethereumjs/common' import type { Address, PrefixedHexString } from '@ethereumjs/util' -/** - * Options for constructing a {@link SimpleStateManager}. - */ -export interface SimpleStateManagerOpts { - /** - * The common to use - */ - common?: Common -} - /** * Simple and dependency-free state manager for basic state access use cases * where a merkle-patricia or verkle tree backed state manager is too heavy-weight. @@ -38,7 +23,7 @@ export interface SimpleStateManagerOpts { * For a more full fledged and MPT-backed state manager implementation * have a look at the `@ethereumjs/statemanager` package. */ -export class SimpleStateManager implements EVMStateManagerInterface { +export class SimpleStateManager implements StateManagerInterface { public accountStack: Map[] = [] public codeStack: Map[] = [] public storageStack: Map[] = [] @@ -52,7 +37,7 @@ export class SimpleStateManager implements EVMStateManagerInterface { constructor(opts: SimpleStateManagerOpts = {}) { this.checkpointSync() - this.originalStorageCache = new OriginalStorageCache(this.getContractStorage.bind(this)) + this.originalStorageCache = new OriginalStorageCache(this.getStorage.bind(this)) this.common = opts.common } @@ -94,22 +79,14 @@ export class SimpleStateManager implements EVMStateManagerInterface { } async modifyAccountFields(address: Address, accountFields: AccountFields): Promise { - let account = await this.getAccount(address) - if (!account) { - account = new Account() - } - account.nonce = accountFields.nonce ?? account.nonce - account.balance = accountFields.balance ?? account.balance - account.storageRoot = accountFields.storageRoot ?? account.storageRoot - account.codeHash = accountFields.codeHash ?? account.codeHash - await this.putAccount(address, account) + await modifyAccountFields(this, address, accountFields) } - async getContractCode(address: Address): Promise { + async getCode(address: Address): Promise { return this.topCodeStack().get(address.toString()) ?? 
new Uint8Array(0) } - async putContractCode(address: Address, value: Uint8Array): Promise { + async putCode(address: Address, value: Uint8Array): Promise { this.topCodeStack().set(address.toString(), value) if ((await this.getAccount(address)) === undefined) { await this.putAccount(address, new Account()) @@ -119,21 +96,23 @@ export class SimpleStateManager implements EVMStateManagerInterface { }) } - async getContractCodeSize(address: Address): Promise { - const contractCode = await this.getContractCode(address) + async getCodeSize(address: Address): Promise { + const contractCode = await this.getCode(address) return contractCode.length } - async getContractStorage(address: Address, key: Uint8Array): Promise { + async getStorage(address: Address, key: Uint8Array): Promise { return ( this.topStorageStack().get(`${address.toString()}_${bytesToHex(key)}`) ?? new Uint8Array(0) ) } - async putContractStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { + async putStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { this.topStorageStack().set(`${address.toString()}_${bytesToHex(key)}`, value) } + async clearStorage(): Promise {} + async checkpoint(): Promise { this.checkpointSync() } @@ -152,7 +131,7 @@ export class SimpleStateManager implements EVMStateManagerInterface { async flush(): Promise {} clearCaches(): void {} - shallowCopy(): EVMStateManagerInterface { + shallowCopy(): StateManagerInterface { const copy = new SimpleStateManager({ common: this.common }) for (let i = 0; i < this.accountStack.length; i++) { copy.accountStack.push(new Map(this.accountStack[i])) @@ -172,27 +151,4 @@ export class SimpleStateManager implements EVMStateManagerInterface { hasStateRoot(): Promise { throw new Error('Method not implemented.') } - - // Only goes for long term create situations, skip - async clearContractStorage(): Promise {} - - // Only "core" methods implemented - checkChunkWitnessPresent?(): Promise { - throw new Error('Method not implemented.') - } - dumpStorage(): Promise { - throw new Error('Method not implemented.') - } - dumpStorageRange(): Promise { - throw new Error('Method not implemented.') - } - generateCanonicalGenesis(): Promise { - throw new Error('Method not implemented.') - } - getProof(): Promise { - throw new Error('Method not implemented.') - } - getAppliedKey?(): Uint8Array { - throw new Error('Method not implemented.') - } } diff --git a/packages/statemanager/src/stateManager.ts b/packages/statemanager/src/stateManager.ts index 24769b8320..0fa21094f3 100644 --- a/packages/statemanager/src/stateManager.ts +++ b/packages/statemanager/src/stateManager.ts @@ -1,18 +1,25 @@ -import { Chain, Common } from '@ethereumjs/common' +import { Common, Mainnet } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' -import { Trie } from '@ethereumjs/trie' +import { + Trie, + createMerkleProof, + createTrieFromProof, + updateTrieFromMerkleProof, + verifyTrieProof, +} from '@ethereumjs/trie' import { Account, - Address, KECCAK256_NULL, KECCAK256_NULL_S, KECCAK256_RLP, KECCAK256_RLP_S, bigIntToHex, - bytesToBigInt, bytesToHex, bytesToUnprefixedHex, concatBytes, + createAccount, + createAccountFromRLP, + createAddressFromString, equalsBytes, hexToBytes, setLengthLeft, @@ -20,137 +27,26 @@ import { toBytes, unpadBytes, unprefixedHexToBytes, - utf8ToBytes, } from '@ethereumjs/util' import debugDefault from 'debug' import { keccak256 } from 'ethereum-cryptography/keccak.js' -import { - AccountCache, - CacheType, - CodeCache, - 
OriginalStorageCache, - StorageCache, -} from './cache/index.js' +import { OriginalStorageCache } from './cache/index.js' +import { modifyAccountFields } from './util.js' +import { CODEHASH_PREFIX, type DefaultStateManagerOpts } from './index.js' + +import type { Caches, StorageProof } from './index.js' import type { AccountFields, - EVMStateManagerInterface, + Proof, + StateManagerInterface, StorageDump, StorageRange, } from '@ethereumjs/common' -import type { DB, PrefixedHexString } from '@ethereumjs/util' +import type { Address, DB, PrefixedHexString } from '@ethereumjs/util' import type { Debugger } from 'debug' -export type StorageProof = { - key: PrefixedHexString - proof: PrefixedHexString[] - value: PrefixedHexString -} - -export type Proof = { - address: PrefixedHexString - balance: PrefixedHexString - codeHash: PrefixedHexString - nonce: PrefixedHexString - storageHash: PrefixedHexString - accountProof: PrefixedHexString[] - storageProof: StorageProof[] -} - -type CacheOptions = { - /** - * Allows for cache deactivation - * - * Depending on the use case and underlying datastore (and eventual concurrent cache - * mechanisms there), usage with or without cache can be faster - * - * Default: false - */ - deactivate?: boolean - - /** - * Cache type to use. - * - * Available options: - * - * ORDERED_MAP: Cache with no fixed upper bound and dynamic allocation, - * use for dynamic setups like testing or similar. - * - * LRU: LRU cache with pre-allocation of memory and a fixed size. - * Use for larger and more persistent caches. - */ - type?: CacheType - - /** - * Size of the cache (only for LRU cache) - * - * Default: 100000 (account cache) / 20000 (storage cache) / 20000 (code cache) - * - * Note: the cache/trie interplay mechanism is designed in a way that - * the theoretical number of max modified accounts between two flush operations - * should be smaller than the cache size, otherwise the cache will "forget" the - * old modifications resulting in an incomplete set of trie-flushed accounts. - */ - size?: number -} - -type CacheSettings = { - deactivate: boolean - type: CacheType - size: number -} - -/** - * Prefix to distinguish between a contract deployed with code `0x80` - * and `RLP([])` (also having the value `0x80`). - * - * Otherwise the creation of the code hash for the `0x80` contract - * will be the same as the hash of the empty trie which leads to - * misbehaviour in the underlying trie library. - */ -export const CODEHASH_PREFIX = utf8ToBytes('c') - -/** - * Options for constructing a {@link StateManager}. - */ -export interface DefaultStateManagerOpts { - /** - * A {@link Trie} instance - */ - trie?: Trie - /** - * Option to prefix codehashes in the database. This defaults to `true`. - * If this is disabled, note that it is possible to corrupt the trie, by deploying code - * which code is equal to the preimage of a trie-node. - * E.g. by putting the code `0x80` into the empty trie, will lead to a corrupted trie. - */ - prefixCodeHashes?: boolean - - /** - * Option to prefix the keys for the storage tries with the first 7 bytes from the - * associated account address. Activating this option gives a noticeable performance - * boost for storage DB reads when operating on larger tries. - * - * Note: Activating/deactivating this option causes continued state reads to be - * incompatible with existing databases. 
- * - * Default: false (for backwards compatibility reasons) - */ - prefixStorageTrieKeys?: boolean - - accountCacheOpts?: CacheOptions - - storageCacheOpts?: CacheOptions - - codeCacheOpts?: CacheOptions - - /** - * The common to use - */ - common?: Common -} - /** * Default StateManager implementation for the VM. * @@ -166,11 +62,9 @@ export interface DefaultStateManagerOpts { * package which might be an alternative to this implementation * for many basic use cases. */ -export class DefaultStateManager implements EVMStateManagerInterface { +export class DefaultStateManager implements StateManagerInterface { protected _debug: Debugger - protected _accountCache?: AccountCache - protected _storageCache?: StorageCache - protected _codeCache?: CodeCache + protected _caches?: Caches originalStorageCache: OriginalStorageCache @@ -179,9 +73,6 @@ export class DefaultStateManager implements EVMStateManagerInterface { protected readonly _prefixCodeHashes: boolean protected readonly _prefixStorageTrieKeys: boolean - protected readonly _accountCacheSettings: CacheSettings - protected readonly _storageCacheSettings: CacheSettings - protected readonly _codeCacheSettings: CacheSettings public readonly common: Common @@ -206,11 +97,11 @@ export class DefaultStateManager implements EVMStateManagerInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this._debug = debugDefault('statemanager:statemanager') - this.common = opts.common ?? new Common({ chain: Chain.Mainnet }) + this.common = opts.common ?? new Common({ chain: Mainnet }) this._checkpointCount = 0 @@ -219,51 +110,12 @@ export class DefaultStateManager implements EVMStateManagerInterface { this.keccakFunction = opts.common?.customCrypto.keccak256 ?? keccak256 - this.originalStorageCache = new OriginalStorageCache(this.getContractStorage.bind(this)) + this.originalStorageCache = new OriginalStorageCache(this.getStorage.bind(this)) this._prefixCodeHashes = opts.prefixCodeHashes ?? true this._prefixStorageTrieKeys = opts.prefixStorageTrieKeys ?? false - this._accountCacheSettings = { - deactivate: - (opts.accountCacheOpts?.deactivate === true || opts.accountCacheOpts?.size === 0) ?? false, - type: opts.accountCacheOpts?.type ?? CacheType.ORDERED_MAP, - size: opts.accountCacheOpts?.size ?? 100000, - } - if (!this._accountCacheSettings.deactivate) { - this._accountCache = new AccountCache({ - size: this._accountCacheSettings.size, - type: this._accountCacheSettings.type, - }) - } - - this._storageCacheSettings = { - deactivate: - (opts.storageCacheOpts?.deactivate === true || opts.storageCacheOpts?.size === 0) ?? false, - type: opts.storageCacheOpts?.type ?? CacheType.ORDERED_MAP, - size: opts.storageCacheOpts?.size ?? 20000, - } - - if (!this._storageCacheSettings.deactivate) { - this._storageCache = new StorageCache({ - size: this._storageCacheSettings.size, - type: this._storageCacheSettings.type, - }) - } - - this._codeCacheSettings = { - deactivate: - (opts.codeCacheOpts?.deactivate === true || opts.codeCacheOpts?.size === 0) ?? false, - type: opts.codeCacheOpts?.type ?? CacheType.ORDERED_MAP, - size: opts.codeCacheOpts?.size ?? 
20000, - } - - if (!this._codeCacheSettings.deactivate) { - this._codeCache = new CodeCache({ - size: this._codeCacheSettings.size, - type: this._codeCacheSettings.type, - }) - } + this._caches = opts.caches } /** @@ -271,21 +123,17 @@ export class DefaultStateManager implements EVMStateManagerInterface { * @param address - Address of the `account` to get */ async getAccount(address: Address): Promise { - if (!this._accountCacheSettings.deactivate) { - const elem = this._accountCache!.get(address) - if (elem !== undefined) { - return elem.accountRLP !== undefined - ? Account.fromRlpSerializedAccount(elem.accountRLP) - : undefined - } + const elem = this._caches?.account?.get(address) + if (elem !== undefined) { + return elem.accountRLP !== undefined ? createAccountFromRLP(elem.accountRLP) : undefined } const rlp = await this._trie.get(address.bytes) - const account = rlp !== null ? Account.fromRlpSerializedAccount(rlp) : undefined + const account = rlp !== null ? createAccountFromRLP(rlp) : undefined if (this.DEBUG) { this._debug(`Get account ${address} from DB (${account ? 'exists' : 'non-existent'})`) } - this._accountCache?.put(address, account) + this._caches?.account?.put(address, account) return account } @@ -301,10 +149,10 @@ export class DefaultStateManager implements EVMStateManagerInterface { account?.balance } contract=${account && account.isContract() ? 'yes' : 'no'} empty=${ account && account.isEmpty() ? 'yes' : 'no' - }` + }`, ) } - if (this._accountCacheSettings.deactivate) { + if (this._caches?.account === undefined) { const trie = this._trie if (account !== undefined) { await trie.put(address.bytes, account.serialize()) @@ -313,9 +161,9 @@ export class DefaultStateManager implements EVMStateManagerInterface { } } else { if (account !== undefined) { - this._accountCache!.put(address, account) + this._caches.account?.put(address, account) } else { - this._accountCache!.del(address) + this._caches.account?.del(address) } } } @@ -328,15 +176,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { * @param accountFields - Object containing account fields and values to modify */ async modifyAccountFields(address: Address, accountFields: AccountFields): Promise { - let account = await this.getAccount(address) - if (!account) { - account = new Account() - } - account.nonce = accountFields.nonce ?? account.nonce - account.balance = accountFields.balance ?? account.balance - account.storageRoot = accountFields.storageRoot ?? account.storageRoot - account.codeHash = accountFields.codeHash ?? 
account.codeHash - await this.putAccount(address, account) + await modifyAccountFields(this, address, accountFields) } /** @@ -348,15 +188,10 @@ export class DefaultStateManager implements EVMStateManagerInterface { this._debug(`Delete account ${address}`) } - this._codeCache?.del(address) + this._caches?.deleteAccount(address) - if (this._accountCacheSettings.deactivate) { + if (this._caches?.account === undefined) { await this._trie.del(address.bytes) - } else { - this._accountCache!.del(address) - } - if (!this._storageCacheSettings.deactivate) { - this._storageCache?.clearContractStorage(address) } } @@ -366,8 +201,8 @@ export class DefaultStateManager implements EVMStateManagerInterface { * @param address - Address of the `account` to add the `code` for * @param value - The value of the `code` */ - async putContractCode(address: Address, value: Uint8Array): Promise { - this._codeCache?.put(address, value) + async putCode(address: Address, value: Uint8Array): Promise { + this._caches?.code?.put(address, value) const codeHash = this.keccakFunction(value) if (this.DEBUG) { @@ -386,12 +221,10 @@ export class DefaultStateManager implements EVMStateManagerInterface { * @returns {Promise} - Resolves with the code corresponding to the provided address. * Returns an empty `Uint8Array` if the account has no associated code. */ - async getContractCode(address: Address): Promise { - if (!this._codeCacheSettings.deactivate) { - const elem = this._codeCache?.get(address) - if (elem !== undefined) { - return elem.code ?? new Uint8Array(0) - } + async getCode(address: Address): Promise { + const elem = this._caches?.code?.get(address) + if (elem !== undefined) { + return elem.code ?? new Uint8Array(0) } const account = await this.getAccount(address) if (!account) { @@ -405,14 +238,12 @@ export class DefaultStateManager implements EVMStateManagerInterface { : account.codeHash const code = (await this._trie.database().get(key)) ?? new Uint8Array(0) - if (!this._codeCacheSettings.deactivate) { - this._codeCache!.put(address, code) - } + this._caches?.code?.put(address, code) return code } - async getContractCodeSize(address: Address): Promise { - const contractCode = await this.getContractCode(address) + async getCodeSize(address: Address): Promise { + const contractCode = await this.getCode(address) return contractCode.length } @@ -435,7 +266,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { // TODO PR: have a better interface for hashed address pull? protected _getStorageTrie( addressOrHash: Address | { bytes: Uint8Array } | Uint8Array, - rootAccount?: Account + rootAccount?: Account, ): Trie { // use hashed key for lookup from storage cache const addressBytes: Uint8Array = @@ -483,27 +314,23 @@ export class DefaultStateManager implements EVMStateManagerInterface { * corresponding to the provided address at the provided key. * If this does not exist an empty `Uint8Array` is returned. */ - async getContractStorage(address: Address, key: Uint8Array): Promise { + async getStorage(address: Address, key: Uint8Array): Promise { if (key.length !== 32) { throw new Error('Storage key must be 32 bytes long') } - if (!this._storageCacheSettings.deactivate) { - const value = this._storageCache!.get(address, key) - if (value !== undefined) { - const decoded = RLP.decode(value ?? new Uint8Array(0)) as Uint8Array - return decoded - } + const cachedValue = this._caches?.storage?.get(address, key) + if (cachedValue !== undefined) { + const decoded = RLP.decode(cachedValue ?? 
new Uint8Array(0)) as Uint8Array + return decoded } const account = await this.getAccount(address) if (!account) { - throw new Error('getContractStorage() called on non-existing account') + return new Uint8Array() } const trie = this._getStorageTrie(address, account) const value = await trie.get(key) - if (!this._storageCacheSettings.deactivate) { - this._storageCache?.put(address, key, value ?? hexToBytes('0x80')) - } + this._caches?.storage?.put(address, key, value ?? hexToBytes('0x80')) const decoded = RLP.decode(value ?? new Uint8Array(0)) as Uint8Array return decoded } @@ -517,7 +344,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { protected async _modifyContractStorage( address: Address, account: Account, - modifyTrie: (storageTrie: Trie, done: Function) => void + modifyTrie: (storageTrie: Trie, done: Function) => void, ): Promise { // eslint-disable-next-line no-async-promise-executor return new Promise(async (resolve) => { @@ -540,7 +367,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { address: Address, account: Account, key: Uint8Array, - value: Uint8Array + value: Uint8Array, ) { await this._modifyContractStorage(address, account, async (storageTrie, done) => { if (value instanceof Uint8Array && value.length) { @@ -570,7 +397,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { * Cannot be more than 32 bytes. Leading zeros are stripped. * If it is a empty or filled with zeros, deletes the value. */ - async putContractStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { + async putStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { if (key.length !== 32) { throw new Error('Storage key must be 32 bytes long') } @@ -581,28 +408,24 @@ export class DefaultStateManager implements EVMStateManagerInterface { const account = await this.getAccount(address) if (!account) { - throw new Error('putContractStorage() called on non-existing account') + throw new Error('putStorage() called on non-existing account') } value = unpadBytes(value) - if (!this._storageCacheSettings.deactivate) { - const encodedValue = RLP.encode(value) - this._storageCache!.put(address, key, encodedValue) - } else { - await this._writeContractStorage(address, account, key, value) - } + this._caches?.storage?.put(address, key, RLP.encode(value)) ?? + (await this._writeContractStorage(address, account, key, value)) } /** * Clears all storage entries for the account corresponding to `address`. 
* @param address - Address to clear the storage of */ - async clearContractStorage(address: Address): Promise { + async clearStorage(address: Address): Promise { let account = await this.getAccount(address) if (!account) { account = new Account() } - this._storageCache?.clearContractStorage(address) + this._caches?.storage?.clearStorage(address) await this._modifyContractStorage(address, account, (storageTrie, done) => { storageTrie.root(storageTrie.EMPTY_TRIE_ROOT) done() @@ -616,9 +439,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { */ async checkpoint(): Promise { this._trie.checkpoint() - this._storageCache?.checkpoint() - this._accountCache?.checkpoint() - this._codeCache?.checkpoint() + this._caches?.checkpoint() this._checkpointCount++ } @@ -629,9 +450,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { async commit(): Promise { // setup trie checkpointing await this._trie.commit() - this._storageCache?.commit() - this._accountCache?.commit() - this._codeCache?.commit() + this._caches?.commit() this._checkpointCount-- if (this._checkpointCount === 0) { @@ -651,9 +470,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { async revert(): Promise { // setup trie checkpointing await this._trie.revert() - this._storageCache?.revert() - this._accountCache?.revert() - this._codeCache?.revert() + this._caches?.revert() this._storageTries = {} @@ -669,56 +486,51 @@ export class DefaultStateManager implements EVMStateManagerInterface { * Writes all cache items to the trie */ async flush(): Promise { - if (!this._codeCacheSettings.deactivate) { - const items = this._codeCache!.flush() - for (const item of items) { - const addr = Address.fromString(`0x${item[0]}`) - - const code = item[1].code - if (code === undefined) { - continue - } + const codeItems = this._caches?.code?.flush() ?? [] + for (const item of codeItems) { + const addr = createAddressFromString(`0x${item[0]}`) - // update code in database - const codeHash = this.keccakFunction(code) - const key = this._prefixCodeHashes ? concatBytes(CODEHASH_PREFIX, codeHash) : codeHash - await this._getCodeDB().put(key, code) + const code = item[1].code + if (code === undefined) { + continue + } - // update code root of associated account - if ((await this.getAccount(addr)) === undefined) { - await this.putAccount(addr, new Account()) - } - await this.modifyAccountFields(addr, { codeHash }) + // update code in database + const codeHash = this.keccakFunction(code) + const key = this._prefixCodeHashes ? concatBytes(CODEHASH_PREFIX, codeHash) : codeHash + await this._getCodeDB().put(key, code) + + // update code root of associated account + if ((await this.getAccount(addr)) === undefined) { + await this.putAccount(addr, new Account()) } - } - if (!this._storageCacheSettings.deactivate) { - const items = this._storageCache!.flush() - for (const item of items) { - const address = Address.fromString(`0x${item[0]}`) - const keyHex = item[1] - const keyBytes = unprefixedHexToBytes(keyHex) - const value = item[2] - - const decoded = RLP.decode(value ?? new Uint8Array(0)) as Uint8Array - const account = await this.getAccount(address) - if (account) { - await this._writeContractStorage(address, account, keyBytes, decoded) - } + await this.modifyAccountFields(addr, { codeHash }) + } + const storageItems = this._caches?.storage?.flush() ?? 
[] + for (const item of storageItems) { + const address = createAddressFromString(`0x${item[0]}`) + const keyHex = item[1] + const keyBytes = unprefixedHexToBytes(keyHex) + const value = item[2] + + const decoded = RLP.decode(value ?? new Uint8Array(0)) as Uint8Array + const account = await this.getAccount(address) + if (account) { + await this._writeContractStorage(address, account, keyBytes, decoded) } } - if (!this._accountCacheSettings.deactivate) { - const items = this._accountCache!.flush() - for (const item of items) { - const addressHex = item[0] - const addressBytes = unprefixedHexToBytes(addressHex) - const elem = item[1] - if (elem.accountRLP === undefined) { - const trie = this._trie - await trie.del(addressBytes) - } else { - const trie = this._trie - await trie.put(addressBytes, elem.accountRLP) - } + + const accountItems = this._caches?.account?.flush() ?? [] + for (const item of accountItems) { + const addressHex = item[0] + const addressBytes = unprefixedHexToBytes(addressHex) + const elem = item[1] + if (elem.accountRLP === undefined) { + const trie = this._trie + await trie.del(addressBytes) + } else { + const trie = this._trie + await trie.put(addressBytes, elem.accountRLP) } } } @@ -732,27 +544,28 @@ export class DefaultStateManager implements EVMStateManagerInterface { await this.flush() const account = await this.getAccount(address) if (!account) { - // throw new Error(`getProof() can only be called for an existing account`) const returnValue: Proof = { address: address.toString(), balance: '0x0', codeHash: KECCAK256_NULL_S, nonce: '0x0', storageHash: KECCAK256_RLP_S, - accountProof: (await this._trie.createProof(address.bytes)).map((p) => bytesToHex(p)), + accountProof: (await createMerkleProof(this._trie, address.bytes)).map((p) => + bytesToHex(p), + ), storageProof: [], } return returnValue } - const accountProof: PrefixedHexString[] = (await this._trie.createProof(address.bytes)).map( - (p) => bytesToHex(p) - ) + const accountProof: PrefixedHexString[] = ( + await createMerkleProof(this._trie, address.bytes) + ).map((p) => bytesToHex(p)) const storageProof: StorageProof[] = [] const storageTrie = this._getStorageTrie(address, account) for (const storageKey of storageSlots) { - const proof = (await storageTrie.createProof(storageKey)).map((p) => bytesToHex(p)) - const value = bytesToHex(await this.getContractStorage(address, storageKey)) + const proof = (await createMerkleProof(storageTrie, storageKey)).map((p) => bytesToHex(p)) + const value = bytesToHex(await this.getStorage(address, storageKey)) const proofItem: StorageProof = { key: bytesToHex(storageKey), value: value === '0x' ? '0x0' : value, // Return '0x' values as '0x0' since this is a JSON RPC response @@ -784,7 +597,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { static async fromProof( proof: Proof | Proof[], safe: boolean = false, - opts: DefaultStateManagerOpts = {} + opts: DefaultStateManagerOpts = {}, ): Promise { if (Array.isArray(proof)) { if (proof.length === 0) { @@ -792,12 +605,12 @@ export class DefaultStateManager implements EVMStateManagerInterface { } else { const trie = opts.trie ?? 
- (await Trie.createFromProof( + (await createTrieFromProof( proof[0].accountProof.map((e) => hexToBytes(e)), - { useKeyHashing: true } + { useKeyHashing: true }, )) const sm = new DefaultStateManager({ ...opts, trie }) - const address = Address.fromString(proof[0].address) + const address = createAddressFromString(proof[0].address) await sm.addStorageProof(proof[0].storageProof, proof[0].storageHash, address, safe) for (let i = 1; i < proof.length; i++) { const proofItem = proof[i] @@ -822,14 +635,15 @@ export class DefaultStateManager implements EVMStateManagerInterface { storageProof: StorageProof[], storageHash: PrefixedHexString, address: Address, - safe: boolean = false + safe: boolean = false, ) { const trie = this._getStorageTrie(address) trie.root(hexToBytes(storageHash)) for (let i = 0; i < storageProof.length; i++) { - await trie.updateFromProof( + await updateTrieFromMerkleProof( + trie, storageProof[i].proof.map((e) => hexToBytes(e)), - safe + safe, ) } } @@ -843,15 +657,16 @@ export class DefaultStateManager implements EVMStateManagerInterface { async addProofData(proof: Proof | Proof[], safe: boolean = false) { if (Array.isArray(proof)) { for (let i = 0; i < proof.length; i++) { - await this._trie.updateFromProof( + await updateTrieFromMerkleProof( + this._trie, proof[i].accountProof.map((e) => hexToBytes(e)), - safe + safe, ) await this.addStorageProof( proof[i].storageProof, proof[i].storageHash, - Address.fromString(proof[i].address), - safe + createAddressFromString(proof[i].address), + safe, ) } } else { @@ -866,12 +681,12 @@ export class DefaultStateManager implements EVMStateManagerInterface { async verifyProof(proof: Proof): Promise { const key = hexToBytes(proof.address) const accountProof = proof.accountProof.map((rlpString: PrefixedHexString) => - hexToBytes(rlpString) + hexToBytes(rlpString), ) // This returns the account if the proof is valid. // Verify that it matches the reported account. - const value = await Trie.verifyProof(key, accountProof, { + const value = await verifyTrieProof(key, accountProof, { useKeyHashing: true, }) @@ -896,7 +711,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { throw new Error(`${notEmptyErrorMsg} (codeHash does not equal KECCAK256_NULL)`) } } else { - const account = Account.fromRlpSerializedAccount(value) + const account = createAccountFromRLP(value) const { nonce, balance, storageRoot, codeHash } = account const invalidErrorMsg = 'Invalid proof provided:' if (nonce !== BigInt(proof.nonce)) { @@ -917,18 +732,18 @@ export class DefaultStateManager implements EVMStateManagerInterface { const storageProof = stProof.proof.map((value: PrefixedHexString) => hexToBytes(value)) const storageValue = setLengthLeft(hexToBytes(stProof.value), 32) const storageKey = hexToBytes(stProof.key) - const proofValue = await Trie.verifyProof(storageKey, storageProof, { + const proofValue = await verifyTrieProof(storageKey, storageProof, { useKeyHashing: true, }) const reportedValue = setLengthLeft( RLP.decode(proofValue ?? 
new Uint8Array(0)) as Uint8Array, - 32 + 32, ) if (!equalsBytes(reportedValue, storageValue)) { throw new Error( `Reported trie value does not match storage, key: ${stProof.key}, reported: ${bytesToHex( - reportedValue - )}, actual: ${bytesToHex(storageValue)}` + reportedValue, + )}, actual: ${bytesToHex(storageValue)}`, ) } } @@ -964,14 +779,8 @@ export class DefaultStateManager implements EVMStateManagerInterface { } this._trie.root(stateRoot) - if (this._accountCache !== undefined && clearCache) { - this._accountCache.clear() - } - if (this._storageCache !== undefined && clearCache) { - this._storageCache.clear() - } - if (this._codeCache !== undefined && clearCache) { - this._codeCache!.clear() + if (clearCache) { + this._caches?.clear() } this._storageTries = {} } @@ -991,19 +800,8 @@ export class DefaultStateManager implements EVMStateManagerInterface { } const trie = this._getStorageTrie(address, account) - return new Promise((resolve, reject) => { - const storage: StorageDump = {} - const stream = trie.createReadStream() - - stream.on('data', (val: any) => { - storage[bytesToHex(val.key)] = bytesToHex(val.value) - }) - stream.on('end', () => { - resolve(storage) - }) - stream.on('error', (e) => { - reject(e) - }) + return trie.getValueMap().then((value) => { + return value.values }) } @@ -1026,44 +824,24 @@ export class DefaultStateManager implements EVMStateManagerInterface { if (!account) { throw new Error(`Account does not exist.`) } - const trie = this._getStorageTrie(address, account) - return new Promise((resolve, reject) => { - let inRange = false - let i = 0 - - /** Object conforming to {@link StorageRange.storage}. */ - const storageMap: StorageRange['storage'] = {} - const stream = trie.createReadStream() - - stream.on('data', (val: any) => { - if (!inRange) { - // Check if the key is already in the correct range. 
- if (bytesToBigInt(val.key) >= startKey) { - inRange = true - } else { - return - } - } + const trie = this._getStorageTrie(address, account) - if (i < limit) { - storageMap[bytesToHex(val.key)] = { key: null, value: bytesToHex(val.value) } - i++ - } else if (i === limit) { - resolve({ - storage: storageMap, - nextKey: bytesToHex(val.key), - }) + return trie.getValueMap(startKey, limit).then((value) => { + const values = value.values + const dump = Object.create(null) + for (const key of Object.keys(values)) { + const val = values[key] + dump[key] = { + key: null, + value: val, } - }) + } - stream.on('end', () => { - resolve({ - storage: storageMap, - nextKey: null, - }) - }) - stream.on('error', (e) => reject(e)) + return { + storage: dump, + nextKey: value.nextKey, + } }) } @@ -1081,23 +859,23 @@ export class DefaultStateManager implements EVMStateManagerInterface { } const addresses = Object.keys(initState) for (const address of addresses) { - const addr = Address.fromString(address) + const addr = createAddressFromString(address) const state = initState[address] if (!Array.isArray(state)) { // Prior format: address -> balance - const account = Account.fromAccountData({ balance: state }) + const account = createAccount({ balance: state }) await this.putAccount(addr, account) } else { // New format: address -> [balance, code, storage] const [balance, code, storage, nonce] = state - const account = Account.fromAccountData({ balance, nonce }) + const account = createAccount({ balance, nonce }) await this.putAccount(addr, account) if (code !== undefined) { - await this.putContractCode(addr, toBytes(code)) + await this.putCode(addr, toBytes(code)) } if (storage !== undefined) { for (const [key, value] of storage) { - await this.putContractStorage(addr, toBytes(key), toBytes(value)) + await this.putStorage(addr, toBytes(key), toBytes(value)) } } } @@ -1123,8 +901,8 @@ export class DefaultStateManager implements EVMStateManagerInterface { * This means in particular: * 1. For caches instantiated as an LRU cache type * the copy() method will instantiate with an ORDERED_MAP cache - * instead, since copied instantances are mostly used in - * short-term usage contexts and LRU cache instantation would create + * instead, since copied instances are mostly used in + * short-term usage contexts and LRU cache instantiation would create * a large overhead here. * 2. 
The underlying trie object is initialized with 0 cache size * @@ -1142,27 +920,13 @@ export class DefaultStateManager implements EVMStateManagerInterface { const trie = this._trie.shallowCopy(false, { cacheSize }) const prefixCodeHashes = this._prefixCodeHashes const prefixStorageTrieKeys = this._prefixStorageTrieKeys - let accountCacheOpts = { ...this._accountCacheSettings } - if (downlevelCaches && !this._accountCacheSettings.deactivate) { - accountCacheOpts = { ...accountCacheOpts, type: CacheType.ORDERED_MAP } - } - let storageCacheOpts = { ...this._storageCacheSettings } - if (downlevelCaches && !this._storageCacheSettings.deactivate) { - storageCacheOpts = { ...storageCacheOpts, type: CacheType.ORDERED_MAP } - } - let codeCacheOpts = { ...this._codeCacheSettings } - if (!this._codeCacheSettings.deactivate) { - codeCacheOpts = { ...codeCacheOpts, type: CacheType.ORDERED_MAP } - } return new DefaultStateManager({ common, trie, prefixStorageTrieKeys, prefixCodeHashes, - accountCacheOpts, - storageCacheOpts, - codeCacheOpts, + caches: this._caches?.shallowCopy(downlevelCaches), }) } @@ -1170,9 +934,7 @@ export class DefaultStateManager implements EVMStateManagerInterface { * Clears all underlying caches */ clearCaches() { - this._accountCache?.clear() - this._storageCache?.clear() - this._codeCache?.clear() + this._caches?.clear() } /** diff --git a/packages/statemanager/src/statelessVerkleStateManager.ts b/packages/statemanager/src/statelessVerkleStateManager.ts index 9525d915fe..22b882f2d4 100644 --- a/packages/statemanager/src/statelessVerkleStateManager.ts +++ b/packages/statemanager/src/statelessVerkleStateManager.ts @@ -6,6 +6,8 @@ import { VerkleLeafType, bigIntToBytes, bytesToHex, + createPartialAccount, + createPartialAccountFromRLP, decodeVerkleLeafBasicData, encodeVerkleLeafBasicData, getVerkleKey, @@ -24,24 +26,13 @@ import debugDefault from 'debug' import { keccak256 } from 'ethereum-cryptography/keccak.js' import { AccessWitness, AccessedStateType, decodeValue } from './accessWitness.js' -import { - AccountCache, - CacheType, - CodeCache, - OriginalStorageCache, - StorageCache, -} from './cache/index.js' +import { Caches, OriginalStorageCache } from './cache/index.js' +import { modifyAccountFields } from './util.js' import type { AccessedStateWithAddress } from './accessWitness.js' +import type { StatelessVerkleStateManagerOpts, VerkleState } from './index.js' import type { DefaultStateManager } from './stateManager.js' -import type { - AccountFields, - Common, - EVMStateManagerInterface, - Proof, - StorageDump, - StorageRange, -} from '@ethereumjs/common' +import type { AccountFields, Proof, StateManagerInterface } from '@ethereumjs/common' import type { Address, PrefixedHexString, @@ -52,72 +43,6 @@ import type { const debug = debugDefault('statemanager:verkle') -export interface VerkleState { - [key: PrefixedHexString]: PrefixedHexString | null -} - -export interface EncodedVerkleProof { - [key: PrefixedHexString]: PrefixedHexString -} - -type CacheOptions = { - /** - * Allows for cache deactivation - * - * Depending on the use case and underlying datastore (and eventual concurrent cache - * mechanisms there), usage with or without cache can be faster - * - * Default: false - */ - deactivate?: boolean - - /** - * Cache type to use. - * - * Available options: - * - * ORDERED_MAP: Cache with no fixed upper bound and dynamic allocation, - * use for dynamic setups like testing or similar. - * - * LRU: LRU cache with pre-allocation of memory and a fixed size. 
- * Use for larger and more persistent caches. - */ - type?: CacheType - - /** - * Size of the cache (only for LRU cache) - * - * Default: 100000 (account cache) / 20000 (storage cache) - * - * Note: the cache/trie interplay mechanism is designed in a way that - * the theoretical number of max modified accounts between two flush operations - * should be smaller than the cache size, otherwise the cache will "forget" the - * old modifications resulting in an incomplete set of trie-flushed accounts. - */ - size?: number -} - -type CacheSettings = { - deactivate: boolean - type: CacheType - size: number -} - -/** - * Options dictionary. - */ -export interface StatelessVerkleStateManagerOpts { - /** - * The common to use - */ - common?: Common - accountCacheOpts?: CacheOptions - storageCacheOpts?: CacheOptions - codeCacheOpts?: CacheOptions - accesses?: AccessWitness - verkleCrypto: VerkleCrypto -} - const PUSH_OFFSET = 95 // eslint-disable-next-line @typescript-eslint/no-unused-vars const PUSH1 = PUSH_OFFSET + 1 @@ -137,18 +62,14 @@ const ZEROVALUE = '0x00000000000000000000000000000000000000000000000000000000000 * to fetch data requested by the the VM. * */ -export class StatelessVerkleStateManager implements EVMStateManagerInterface { - _accountCache?: AccountCache - _storageCache?: StorageCache - _codeCache?: CodeCache +export class StatelessVerkleStateManager implements StateManagerInterface { _cachedStateRoot?: Uint8Array originalStorageCache: OriginalStorageCache verkleCrypto: VerkleCrypto - protected readonly _accountCacheSettings: CacheSettings - protected readonly _storageCacheSettings: CacheSettings - protected readonly _codeCacheSettings: CacheSettings + + protected _caches?: Caches /** * StateManager is run in DEBUG mode (default: false) @@ -183,47 +104,9 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * Instantiate the StateManager interface. */ constructor(opts: StatelessVerkleStateManagerOpts) { - this.originalStorageCache = new OriginalStorageCache(this.getContractStorage.bind(this)) + this.originalStorageCache = new OriginalStorageCache(this.getStorage.bind(this)) - this._accountCacheSettings = { - deactivate: opts.accountCacheOpts?.deactivate ?? false, - type: opts.accountCacheOpts?.type ?? CacheType.ORDERED_MAP, - size: opts.accountCacheOpts?.size ?? 100000, - } - - if (!this._accountCacheSettings.deactivate) { - this._accountCache = new AccountCache({ - size: this._accountCacheSettings.size, - type: this._accountCacheSettings.type, - }) - } - - this._storageCacheSettings = { - deactivate: opts.storageCacheOpts?.deactivate ?? false, - type: opts.storageCacheOpts?.type ?? CacheType.ORDERED_MAP, - size: opts.storageCacheOpts?.size ?? 20000, - } - - if (!this._storageCacheSettings.deactivate) { - this._storageCache = new StorageCache({ - size: this._storageCacheSettings.size, - type: this._storageCacheSettings.type, - }) - } - - this._codeCacheSettings = { - deactivate: - (opts.codeCacheOpts?.deactivate === true || opts.codeCacheOpts?.size === 0) ?? false, - type: opts.codeCacheOpts?.type ?? CacheType.ORDERED_MAP, - size: opts.codeCacheOpts?.size ?? 20000, - } - - if (!this._codeCacheSettings.deactivate) { - this._codeCache = new CodeCache({ - size: this._codeCacheSettings.size, - type: this._codeCacheSettings.type, - }) - } + this._caches = opts.caches this.keccakFunction = opts.common?.customCrypto.keccak256 ?? 
keccak256 @@ -235,7 +118,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // Skip DEBUG calls unless 'ethjs' included in environmental DEBUG variables // Additional window check is to prevent vite browser bundling (and potentially other) to break this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false } async getTransitionStateRoot(_: DefaultStateManager, __: Uint8Array): Promise { @@ -245,7 +128,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { public initVerkleExecutionWitness( blockNum: bigint, executionWitness?: VerkleExecutionWitness | null, - accessWitness?: AccessWitness + accessWitness?: AccessWitness, ) { this._blockNum = blockNum if (executionWitness === null || executionWitness === undefined) { @@ -312,7 +195,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { async checkChunkWitnessPresent(address: Address, codeOffset: number) { const chunkId = codeOffset / VERKLE_CODE_CHUNK_SIZE const chunkKey = bytesToHex( - await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto) + await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto), ) return this._state[chunkKey] !== undefined } @@ -322,8 +205,11 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * at the last fully committed point, i.e. as if all current * checkpoints were reverted. */ - shallowCopy(): EVMStateManagerInterface { - const stateManager = new StatelessVerkleStateManager({ verkleCrypto: this.verkleCrypto }) + shallowCopy(): StatelessVerkleStateManager { + const stateManager = new StatelessVerkleStateManager({ + caches: this._caches !== undefined ? new Caches() : undefined, + verkleCrypto: this.verkleCrypto, + }) return stateManager } @@ -333,12 +219,12 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * @param address - Address of the `account` to add the `code` for * @param value - The value of the `code` */ - async putContractCode(address: Address, value: Uint8Array): Promise { + async putCode(address: Address, value: Uint8Array): Promise { if (this.DEBUG) { - debug(`putContractCode address=${address.toString()} value=${short(value)}`) + debug(`putCode address=${address.toString()} value=${short(value)}`) } - this._codeCache?.put(address, value) + this._caches?.code?.put(address, value) const codeHash = keccak256(value) if (KECCAK256_NULL === codeHash) { // If the code hash is the null hash, no code has to be stored @@ -357,16 +243,14 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * @returns {Promise} - Resolves with the code corresponding to the provided address. * Returns an empty `Uint8Array` if the account has no associated code. */ - async getContractCode(address: Address): Promise { + async getCode(address: Address): Promise { if (this.DEBUG) { - debug(`getContractCode address=${address.toString()}`) + debug(`getCode address=${address.toString()}`) } - if (!this._codeCacheSettings.deactivate) { - const elem = this._codeCache?.get(address) - if (elem !== undefined) { - return elem.code ?? new Uint8Array(0) - } + const elem = this._caches?.code?.get(address) + if (elem !== undefined) { + return elem.code ?? 
new Uint8Array(0) } const account = await this.getAccount(address) @@ -385,7 +269,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const chunks = Math.floor(codeSize / VERKLE_CODE_CHUNK_SIZE) + 1 for (let chunkId = 0; chunkId < chunks; chunkId++) { const chunkKey = bytesToHex( - await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto) + await getVerkleTreeKeyForCodeChunk(address, chunkId, this.verkleCrypto), ) const codeChunk = this._state[chunkKey] if (codeChunk === null) { @@ -409,28 +293,22 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // Return accessedCode where only accessed code has been copied const contactCode = accessedCode.slice(0, codeSize) - if (!this._codeCacheSettings.deactivate) { - this._codeCache?.put(address, contactCode) - } + this._caches?.code?.put(address, contactCode) return contactCode } - async getContractCodeSize(address: Address): Promise { - if (!this._accountCacheSettings.deactivate) { - const elem = this._accountCache!.get(address) - if (elem !== undefined) { - const account = - elem.accountRLP !== undefined - ? Account.fromRlpSerializedPartialAccount(elem.accountRLP) - : undefined - if (account === undefined) { - const errorMsg = `account=${account} in cache` - debug(errorMsg) - throw Error(errorMsg) - } - return account.codeSize + async getCodeSize(address: Address): Promise { + const elem = this._caches?.account?.get(address) + if (elem !== undefined) { + const account = + elem.accountRLP !== undefined ? createPartialAccountFromRLP(elem.accountRLP) : undefined + if (account === undefined) { + const errorMsg = `account=${account} in cache` + debug(errorMsg) + throw Error(errorMsg) } + return account.codeSize } // load the account basic fields and codeSize should be in it @@ -450,24 +328,20 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * corresponding to the provided address at the provided key. * If this does not exist an empty `Uint8Array` is returned. */ - async getContractStorage(address: Address, key: Uint8Array): Promise { - if (!this._storageCacheSettings.deactivate) { - const value = this._storageCache!.get(address, key) - if (value !== undefined) { - return value - } + async getStorage(address: Address, key: Uint8Array): Promise { + const value = this._caches?.storage?.get(address, key) + if (value !== undefined) { + return value } const storageKey = await getVerkleTreeKeyForStorageSlot( address, BigInt(bytesToHex(key)), - this.verkleCrypto + this.verkleCrypto, ) const storageValue = toBytes(this._state[bytesToHex(storageKey)]) - if (!this._storageCacheSettings.deactivate) { - this._storageCache?.put(address, key, storageValue ?? hexToBytes('0x80')) - } + this._caches?.storage?.put(address, key, storageValue ?? hexToBytes('0x80')) return storageValue } @@ -479,15 +353,15 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * @param key - Key to set the value at. Must be 32 bytes long. * @param value - Value to set at `key` for account corresponding to `address`. Cannot be more than 32 bytes. Leading zeros are stripped. If it is a empty or filled with zeros, deletes the value. 
*/ - async putContractStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { - if (!this._storageCacheSettings.deactivate) { - this._storageCache!.put(address, key, value) + async putStorage(address: Address, key: Uint8Array, value: Uint8Array): Promise { + if (this._caches?.storage !== undefined) { + this._caches.storage.put(address, key, value) } else { // TODO: Consider refactoring this in a writeContractStorage function? Like in stateManager.ts const storageKey = await getVerkleTreeKeyForStorageSlot( address, BigInt(bytesToHex(key)), - this.verkleCrypto + this.verkleCrypto, ) this._state[bytesToHex(storageKey)] = bytesToHex(setLengthRight(value, 32)) } @@ -499,22 +373,20 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { * Clears all storage entries for the account corresponding to `address`. * @param address - Address to clear the storage of */ - async clearContractStorage(address: Address): Promise { + async clearStorage(address: Address): Promise { const stem = getVerkleStem(this.verkleCrypto, address, 0) const codeHashKey = getVerkleKey(stem, VerkleLeafType.CodeHash) - this._storageCache?.clearContractStorage(address) + this._caches?.storage?.clearStorage(address) // Update codeHash to `c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470` this._state[bytesToHex(codeHashKey)] = KECCAK256_NULL_S } async getAccount(address: Address): Promise { - if (!this._accountCacheSettings.deactivate) { - const elem = this._accountCache!.get(address) - if (elem !== undefined) { - return elem.accountRLP !== undefined - ? Account.fromRlpSerializedPartialAccount(elem.accountRLP) - : undefined - } + const elem = this._caches?.account?.get(address) + if (elem !== undefined) { + return elem.accountRLP !== undefined + ? createPartialAccountFromRLP(elem.accountRLP) + : undefined } const stem = getVerkleStem(this.verkleCrypto, address, 0) @@ -529,7 +401,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // check any of the other key shouldn't have string input available as this account didn't exist if (typeof basicDataRaw === `string` || typeof codeHashRaw === 'string') { const errorMsg = `Invalid witness for a non existing address=${address} stem=${bytesToHex( - stem + stem, )}` debug(errorMsg) throw Error(errorMsg) @@ -541,7 +413,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // check if codehash is correct 32 bytes prefixed hex string if (codeHashRaw !== undefined && codeHashRaw !== null && codeHashRaw.length !== 66) { const errorMsg = `Invalid codeHashRaw=${codeHashRaw} for address=${address} chunkKey=${bytesToHex( - codeHashKey + codeHashKey, )}` debug(errorMsg) throw Error(errorMsg) @@ -554,15 +426,15 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { } const { version, balance, nonce, codeSize } = decodeVerkleLeafBasicData( - hexToBytes(basicDataRaw) + hexToBytes(basicDataRaw), ) - const account = Account.fromPartialAccountData({ + const account = createPartialAccount({ version, balance, nonce, codeHash: typeof codeHashRaw === 'string' ? 
hexToBytes(codeHashRaw) : null, - // if codeSizeRaw is null, it means account didnt exist or it was EOA either way codeSize is 0 + // if codeSizeRaw is null, it means account didn't exist or it was EOA either way codeSize is 0 // if codeSizeRaw is undefined, then we pass in null which in our context of partial account means // not specified codeSize, @@ -573,9 +445,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { debug(`getAccount address=${address.toString()} stem=${short(stem)}`) } - if (!this._accountCacheSettings.deactivate) { - this._accountCache?.put(address, account, true) - } + this._caches?.account?.put(address, account, true) return account } @@ -585,7 +455,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { debug(`putAccount address=${address.toString()}`) } - if (this._accountCacheSettings.deactivate) { + if (this._caches?.account === undefined) { const stem = getVerkleStem(this.verkleCrypto, address, 0) const basicDataKey = getVerkleKey(stem, VerkleLeafType.BasicData) const basicDataBytes = encodeVerkleLeafBasicData({ @@ -598,9 +468,9 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { this._state[bytesToHex(basicDataKey)] = bytesToHex(basicDataBytes) } else { if (account !== undefined) { - this._accountCache!.put(address, account, true) + this._caches?.account?.put(address, account, true) } else { - this._accountCache!.del(address) + this._caches?.account?.del(address) } } } @@ -614,25 +484,11 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { debug(`Delete account ${address}`) } - this._codeCache?.del(address) - this._accountCache!.del(address) - - if (!this._storageCacheSettings.deactivate) { - this._storageCache?.clearContractStorage(address) - } + this._caches?.deleteAccount(address) } async modifyAccountFields(address: Address, accountFields: AccountFields): Promise { - let account = await this.getAccount(address) - if (!account) { - account = new Account() - } - - account._nonce = accountFields.nonce ?? account._nonce - account._balance = accountFields.balance ?? account._balance - account._storageRoot = accountFields.storageRoot ?? account._storageRoot - account._codeHash = accountFields.codeHash ?? account._codeHash - await this.putAccount(address, account) + await modifyAccountFields(this, address, accountFields) } getProof(_: Address, __: Uint8Array[] = []): Promise { @@ -655,12 +511,12 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { // Verifies that the witness post-state matches the computed post-state verifyPostState(): boolean { // track what all chunks were accessed so as to compare in the end if any chunks were missed - // in access while comparising against the provided poststate in the execution witness + // in access while comparing against the provided poststate in the execution witness const accessedChunks = new Map() // switch to false if postVerify fails let postFailures = 0 - for (const accessedState of this.accessWitness!.accesses()) { + for (const accessedState of this.accessWitness?.accesses() ?? []) { const { address, type } = accessedState let extraMeta = '' if (accessedState.type === AccessedStateType.Code) { @@ -674,7 +530,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const computedValue = this.getComputedValue(accessedState) ?? 
this._preState[chunkKey] if (computedValue === undefined) { debug( - `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}` + `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}`, ) postFailures++ continue @@ -684,7 +540,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { if (canonicalValue === undefined) { debug( - `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}` + `Block accesses missing in canonical address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}`, ) postFailures++ continue @@ -717,7 +573,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { : `${canonicalValue} (${decodedCanonicalValue})` debug( - `Block accesses mismatch address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}` + `Block accesses mismatch address=${address} type=${type} ${extraMeta} chunkKey=${chunkKey}`, ) debug(`expected=${displayCanonicalValue}`) debug(`computed=${displayComputedValue}`) @@ -742,7 +598,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const { address, type } = accessedState switch (type) { case AccessedStateType.Version: { - const encodedAccount = this._accountCache?.get(address)?.accountRLP + const encodedAccount = this._caches?.account?.get(address)?.accountRLP if (encodedAccount === undefined) { return null } @@ -751,42 +607,42 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { return ZEROVALUE } case AccessedStateType.Balance: { - const encodedAccount = this._accountCache?.get(address)?.accountRLP + const encodedAccount = this._caches?.account?.get(address)?.accountRLP if (encodedAccount === undefined) { return null } - const balanceBigint = Account.fromRlpSerializedPartialAccount(encodedAccount).balance + const balanceBigint = createPartialAccountFromRLP(encodedAccount).balance return bytesToHex(setLengthRight(bigIntToBytes(balanceBigint, true), 32)) } case AccessedStateType.Nonce: { - const encodedAccount = this._accountCache?.get(address)?.accountRLP + const encodedAccount = this._caches?.account?.get(address)?.accountRLP if (encodedAccount === undefined) { return null } - const nonceBigint = Account.fromRlpSerializedPartialAccount(encodedAccount).nonce + const nonceBigint = createPartialAccountFromRLP(encodedAccount).nonce return bytesToHex(setLengthRight(bigIntToBytes(nonceBigint, true), 32)) } case AccessedStateType.CodeHash: { - const encodedAccount = this._accountCache?.get(address)?.accountRLP + const encodedAccount = this._caches?.account?.get(address)?.accountRLP if (encodedAccount === undefined) { return null } - return bytesToHex(Account.fromRlpSerializedPartialAccount(encodedAccount).codeHash) + return bytesToHex(createPartialAccountFromRLP(encodedAccount).codeHash) } case AccessedStateType.CodeSize: { - const codeSize = this._codeCache?.get(address)?.code?.length + const codeSize = this._caches?.code?.get(address)?.code?.length if (codeSize === undefined) { // it could be an EOA lets check for that - const encodedAccount = this._accountCache?.get(address)?.accountRLP + const encodedAccount = this._caches?.account?.get(address)?.accountRLP if (encodedAccount === undefined) { return null } - const account = Account.fromRlpSerializedPartialAccount(encodedAccount) + const account = createPartialAccountFromRLP(encodedAccount) if (account.isContract()) { const errorMsg = `Code cache 
not found for address=${address.toString()}` debug(errorMsg) @@ -801,7 +657,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { case AccessedStateType.Code: { const { codeOffset } = accessedState - const code = this._codeCache?.get(address)?.code + const code = this._caches?.code?.get(address)?.code if (code === undefined) { return null } @@ -812,8 +668,8 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { return bytesToHex( setLengthRight( code.slice(codeOffset, codeOffset + VERKLE_CODE_CHUNK_SIZE), - VERKLE_CODE_CHUNK_SIZE - ) + VERKLE_CODE_CHUNK_SIZE, + ), ) } @@ -821,7 +677,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { const { slot } = accessedState const key = setLengthLeft(bigIntToBytes(slot), 32) - const storage = this._storageCache?.get(address, key) + const storage = this._caches?.storage?.get(address, key) if (storage === undefined) { return null } @@ -837,9 +693,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { */ async checkpoint(): Promise { this._checkpoints.push(this._state) - this._accountCache?.checkpoint() - this._storageCache?.checkpoint() - this._codeCache?.checkpoint() + this._caches?.checkpoint() } /** @@ -848,9 +702,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { */ async commit(): Promise { this._checkpoints.pop() - this._accountCache!.commit() - this._storageCache?.commit() - this._codeCache?.commit() + this._caches?.commit() } // TODO @@ -865,9 +717,7 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { async revert(): Promise { // setup trie checkpointing this._checkpoints.pop() - this._accountCache?.revert() - this._storageCache?.revert() - this._codeCache?.revert() + this._caches?.revert() } /** @@ -897,35 +747,18 @@ export class StatelessVerkleStateManager implements EVMStateManagerInterface { this._cachedStateRoot = stateRoot } - /** - * Dumps the RLP-encoded storage values for an `account` specified by `address`. - * @param address - The address of the `account` to return storage for - * @returns {Promise} - The state of the account as an `Object` map. - * Keys are are the storage keys, values are the storage values as strings. - * Both are represented as hex strings without the `0x` prefix. 
- */ - async dumpStorage(_: Address): Promise { - throw Error('not implemented') - } - - dumpStorageRange(_: Address, __: bigint, ___: number): Promise { - throw Error('not implemented') - } - /** * Clears all underlying caches */ clearCaches() { - this._accountCache?.clear() - this._codeCache?.clear() - this._storageCache?.clear() + this._caches?.clear() } + // TODO: Removing this causes a Kaustinen6 test in client to fail + // Seems to point to a more general (non-severe) bug and can likely be fixed + // by having the `statelessVerkle` config option more properly set by the + // test for the check in the VM execution to call into this method generateCanonicalGenesis(_initState: any): Promise { return Promise.resolve() } - - getAppliedKey(_: Uint8Array): Uint8Array { - throw Error('not implemented') - } } diff --git a/packages/statemanager/src/types.ts b/packages/statemanager/src/types.ts new file mode 100644 index 0000000000..a9051bc2e0 --- /dev/null +++ b/packages/statemanager/src/types.ts @@ -0,0 +1,110 @@ +import { type PrefixedHexString, utf8ToBytes } from '@ethereumjs/util' + +import type { AccessWitness, Caches } from './index.js' +import type { Common } from '@ethereumjs/common' +import type { Trie } from '@ethereumjs/trie' +import type { VerkleCrypto } from '@ethereumjs/util' + +/** + * Basic state manager options (not to be used directly) + */ +interface BaseStateManagerOpts { + /** + * The common to use + */ + common?: Common +} + +/** + * Options for constructing a {@link SimpleStateManager}. + */ +export interface SimpleStateManagerOpts extends BaseStateManagerOpts { + // Keep this as an alias so that it might be able to extend in the future +} + +export interface RPCStateManagerOpts extends BaseStateManagerOpts { + provider: string + blockTag: bigint | 'earliest' +} + +/** + * Options for constructing a {@link StateManager}. + */ +export interface DefaultStateManagerOpts extends BaseStateManagerOpts { + /** + * A {@link Trie} instance + */ + trie?: Trie + /** + * Option to prefix codehashes in the database. This defaults to `true`. + * If this is disabled, note that it is possible to corrupt the trie by deploying code + * which is equal to the preimage of a trie node. + * E.g. putting the code `0x80` into the empty trie will lead to a corrupted trie. + */ + prefixCodeHashes?: boolean + + /** + * Option to prefix the keys for the storage tries with the first 7 bytes from the + * associated account address. Activating this option gives a noticeable performance + * boost for storage DB reads when operating on larger tries. + * + * Note: Activating/deactivating this option causes continued state reads to be + * incompatible with existing databases. + * + * Default: false (for backwards compatibility reasons) + */ + prefixStorageTrieKeys?: boolean + + /** + * Options to enable and configure the use of account, code and storage caches. + * This can be useful for speeding up reads, especially when the trie is large. + * The cache is only used for reading from the trie and is not used for writing to the trie. + * + * Default: false + */ + caches?: Caches +} + +/** + * Options dictionary. 
+ */ +export interface StatelessVerkleStateManagerOpts extends BaseStateManagerOpts { + accesses?: AccessWitness + verkleCrypto: VerkleCrypto + initialStateRoot?: Uint8Array + caches?: Caches +} + +export interface VerkleState { + [key: PrefixedHexString]: PrefixedHexString | null +} + +export interface EncodedVerkleProof { + [key: PrefixedHexString]: PrefixedHexString +} + +/** + * Prefix to distinguish between a contract deployed with code `0x80` + * and `RLP([])` (also having the value `0x80`). + * + * Otherwise the creation of the code hash for the `0x80` contract + * will be the same as the hash of the empty trie which leads to + * misbehaviour in the underlying trie library. + */ +export const CODEHASH_PREFIX = utf8ToBytes('c') + +export type StorageProof = { + key: PrefixedHexString + proof: PrefixedHexString[] + value: PrefixedHexString +} + +export type Proof = { + address: PrefixedHexString + balance: PrefixedHexString + codeHash: PrefixedHexString + nonce: PrefixedHexString + storageHash: PrefixedHexString + accountProof: PrefixedHexString[] + storageProof: StorageProof[] +} diff --git a/packages/statemanager/src/util.ts b/packages/statemanager/src/util.ts new file mode 100644 index 0000000000..3628ab842d --- /dev/null +++ b/packages/statemanager/src/util.ts @@ -0,0 +1,18 @@ +import { Account } from '@ethereumjs/util' + +import type { AccountFields, StateManagerInterface } from '@ethereumjs/common' +import type { Address } from '@ethereumjs/util' + +export async function modifyAccountFields( + stateManager: StateManagerInterface, + address: Address, + accountFields: AccountFields, +): Promise { + const account = (await stateManager.getAccount(address)) ?? new Account() + + account.nonce = accountFields.nonce ?? account.nonce + account.balance = accountFields.balance ?? account.balance + account.storageRoot = accountFields.storageRoot ?? account.storageRoot + account.codeHash = accountFields.codeHash ?? 
account.codeHash + await stateManager.putAccount(address, account) +} diff --git a/packages/statemanager/test/cache/account.spec.ts b/packages/statemanager/test/cache/account.spec.ts index a4cdffe844..533f62d095 100644 --- a/packages/statemanager/test/cache/account.spec.ts +++ b/packages/statemanager/test/cache/account.spec.ts @@ -2,7 +2,7 @@ import { Account, Address, equalsBytes, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { AccountCache, CacheType } from '../../src/cache/index.js' -import { createAccount } from '../util.js' +import { createAccountWithDefaults } from '../util.js' describe('Account Cache: initialization', () => { for (const type of [CacheType.LRU, CacheType.ORDERED_MAP]) { @@ -19,7 +19,7 @@ describe('Account Cache: put and get account', () => { const cache = new AccountCache({ size: 100, type }) const addr = new Address(hexToBytes(`0x${'10'.repeat(20)}`)) - const acc: Account = createAccount(BigInt(1), BigInt(0xff11)) + const acc: Account = createAccountWithDefaults(BigInt(1), BigInt(0xff11)) const accRLP = acc.serialize() it('should return undefined for CacheElement if account not present in the cache', async () => { @@ -52,10 +52,10 @@ describe('Account Cache: checkpointing', () => { const cache = new AccountCache({ size: 100, type }) const addr = new Address(hexToBytes(`0x${'10'.repeat(20)}`)) - const acc = createAccount(BigInt(1), BigInt(0xff11)) + const acc = createAccountWithDefaults(BigInt(1), BigInt(0xff11)) const accRLP = acc.serialize() - const updatedAcc = createAccount(BigInt(0x00), BigInt(0xff00)) + const updatedAcc = createAccountWithDefaults(BigInt(0x00), BigInt(0xff00)) const updatedAccRLP = updatedAcc.serialize() it(`should revert to correct state`, async () => { @@ -65,7 +65,7 @@ describe('Account Cache: checkpointing', () => { let elem = cache.get(addr) assert.ok( - elem !== undefined && elem.accountRLP && equalsBytes(elem.accountRLP, updatedAccRLP) + elem !== undefined && elem.accountRLP && equalsBytes(elem.accountRLP, updatedAccRLP), ) cache.revert() diff --git a/packages/statemanager/test/checkpointing.account.spec.ts b/packages/statemanager/test/checkpointing.account.spec.ts index 95074d2d59..edc19c25f1 100644 --- a/packages/statemanager/test/checkpointing.account.spec.ts +++ b/packages/statemanager/test/checkpointing.account.spec.ts @@ -1,9 +1,10 @@ -import { Account, Address, hexToBytes } from '@ethereumjs/util' +import { Address, createAccount, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { DefaultStateManager, SimpleStateManager } from '../src/index.js' import type { StateManagerInterface } from '@ethereumjs/common' +import type { Account } from '@ethereumjs/util' /** * Compares account read to none or undefined @@ -11,7 +12,7 @@ import type { StateManagerInterface } from '@ethereumjs/common' const accountEval = async ( sm: StateManagerInterface, address: Address, - compare: bigint | undefined + compare: bigint | undefined, ) => { const account = await sm.getAccount(address) if (compare === undefined) { @@ -33,31 +34,31 @@ describe('StateManager -> Account Checkpointing', () => { const stateManagers = [DefaultStateManager, SimpleStateManager] const accountN1: CompareList = [ - Account.fromAccountData({ + createAccount({ nonce: 1, }), 1n, ] const accountN2: CompareList = [ - Account.fromAccountData({ + createAccount({ nonce: 2, }), 2n, ] const accountN3: CompareList = [ - Account.fromAccountData({ + createAccount({ nonce: 3, }), 3n, ] const accountN4: 
CompareList = [ - Account.fromAccountData({ + createAccount({ nonce: 4, }), 4n, ] const accountN5: CompareList = [ - Account.fromAccountData({ + createAccount({ nonce: 5, }), 5n, diff --git a/packages/statemanager/test/checkpointing.code.spec.ts b/packages/statemanager/test/checkpointing.code.spec.ts index 673451c1c3..e53b39c109 100644 --- a/packages/statemanager/test/checkpointing.code.spec.ts +++ b/packages/statemanager/test/checkpointing.code.spec.ts @@ -2,15 +2,15 @@ import { type StateManagerInterface } from '@ethereumjs/common' import { Account, Address, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { DefaultStateManager, SimpleStateManager } from '../src/index.js' +import { Caches, DefaultStateManager, SimpleStateManager } from '../src/index.js' const codeEval = async ( sm: StateManagerInterface, address: Address, value: Uint8Array, - root: Uint8Array + root: Uint8Array, ) => { - assert.deepEqual(await sm.getContractCode(address), value, 'contract code value should be equal') + assert.deepEqual(await sm.getCode(address), value, 'contract code value should be equal') const accountCMP = await sm.getAccount(address) assert.deepEqual(accountCMP!.codeHash, root, 'account code root should be equal') } @@ -93,25 +93,42 @@ describe('StateManager -> Code Checkpointing', () => { for (const SM of stateManagers) { for (const c of codeSets) { it(`No CP -> C1 -> Flush() (-> C1)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.flush() await codeEval(sm, address, c.c1.value, c.c1.root) sm.clearCaches() - assert.deepEqual(await sm.getContractCode(address), c.c1.value) + assert.deepEqual(await sm.getCode(address), c.c1.value) await codeEval(sm, address, c.c1.value, c.c1.root) }) it(`CP -> C1.1 -> Commit -> Flush() (-> C1.1)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.commit() await sm.flush() await codeEval(sm, address, c.c1.value, c.c1.root) @@ -121,11 +138,17 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`CP -> C1.1 -> Revert -> Flush() (-> Undefined)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.revert() await sm.flush() @@ -137,10 +160,16 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`C1.1 -> CP -> Commit -> Flush() (-> C1.1)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, 
c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() await sm.commit() await sm.flush() @@ -151,10 +180,16 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`C1.1 -> CP -> Revert -> Flush() (-> C1.1)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() await sm.revert() await sm.flush() @@ -165,12 +200,18 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`C1.1 -> CP -> C1.2 -> Commit -> Flush() (-> C1.2)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.commit() await sm.flush() await codeEval(sm, address, c.c2.value, c.c2.root) @@ -180,14 +221,20 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`C1.1 -> CP -> C1.2 -> Commit -> C1.3 -> Flush() (-> C1.3)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.commit() - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c3.value) await sm.flush() await codeEval(sm, address, c.c3.value, c.c3.root) @@ -196,13 +243,19 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`C1.1 -> CP -> C1.2 -> C1.3 -> Commit -> Flush() (-> C1.3)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c2.value) + await sm.putCode(address, c.c3.value) await sm.commit() await sm.flush() await codeEval(sm, address, c.c3.value, c.c3.root) @@ -212,12 +265,18 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`CP -> C1.1 -> C1.2 -> Commit -> Flush() (-> C1.2)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractCode(address, c.c1.value) - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c1.value) + await sm.putCode(address, c.c2.value) await sm.commit() await sm.flush() await codeEval(sm, address, c.c2.value, c.c2.root) @@ -227,13 +286,19 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`CP -> C1.1 -> 
C1.2 -> Revert -> Flush() (-> Undefined)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.revert() await sm.flush() await codeEval(sm, address, valueEmpty, rootEmpty) @@ -243,12 +308,18 @@ describe('StateManager -> Code Checkpointing', () => { }) it(`C1.1 -> CP -> C1.2 -> Revert -> Flush() (-> C1.1)`, async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.revert() await sm.flush() await codeEval(sm, address, c.c1.value, c.c1.root) @@ -258,14 +329,20 @@ describe('StateManager -> Code Checkpointing', () => { }) it('C1.1 -> CP -> C1.2 -> CP -> C1.3 -> Commit -> Commit -> Flush() (-> C1.3)', async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.checkpoint() - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c3.value) await sm.commit() await sm.commit() await sm.flush() @@ -276,14 +353,20 @@ describe('StateManager -> Code Checkpointing', () => { }) it('C1.1 -> CP -> C1.2 -> CP -> C1.3 -> Commit -> Revert -> Flush() (-> C1.1)', async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.checkpoint() - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c3.value) await sm.commit() await sm.revert() await sm.flush() @@ -294,14 +377,20 @@ describe('StateManager -> Code Checkpointing', () => { }) it('C1.1 -> CP -> C1.2 -> CP -> C1.3 -> Revert -> Commit -> Flush() (-> C1.2)', async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.checkpoint() - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c3.value) await sm.revert() await sm.commit() await sm.flush() @@ -312,16 +401,22 @@ describe('StateManager 
-> Code Checkpointing', () => { }) it('C1.1 -> CP -> C1.2 -> CP -> C1.3 -> Revert -> C1.4 -> Commit -> Flush() (-> C1.4)', async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.checkpoint() - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c3.value) await sm.revert() - await sm.putContractCode(address, c.c4.value) + await sm.putCode(address, c.c4.value) await sm.commit() await sm.flush() await codeEval(sm, address, c.c4.value, c.c4.root) @@ -331,18 +426,24 @@ describe('StateManager -> Code Checkpointing', () => { }) it('C1.1 -> CP -> C1.2 -> CP -> C1.3 -> Revert -> C1.4 -> CP -> C1.5 -> Commit -> Commit -> Flush() (-> C1.5)', async () => { - const sm = new SM() + let sm: DefaultStateManager | SimpleStateManager + if (SM === DefaultStateManager) { + sm = new SM({ caches: new Caches() }) + } else { + sm = new SM() + } + await sm.putAccount(address, new Account()) - await sm.putContractCode(address, c.c1.value) + await sm.putCode(address, c.c1.value) await sm.checkpoint() - await sm.putContractCode(address, c.c2.value) + await sm.putCode(address, c.c2.value) await sm.checkpoint() - await sm.putContractCode(address, c.c3.value) + await sm.putCode(address, c.c3.value) await sm.revert() - await sm.putContractCode(address, c.c4.value) + await sm.putCode(address, c.c4.value) await sm.checkpoint() - await sm.putContractCode(address, c.c5.value) + await sm.putCode(address, c.c5.value) await sm.commit() await sm.commit() await sm.flush() diff --git a/packages/statemanager/test/checkpointing.storage.spec.ts b/packages/statemanager/test/checkpointing.storage.spec.ts index 0e6d5a2a1e..cd8fbb999c 100644 --- a/packages/statemanager/test/checkpointing.storage.spec.ts +++ b/packages/statemanager/test/checkpointing.storage.spec.ts @@ -11,13 +11,9 @@ const storageEval = async ( key: Uint8Array, value: Uint8Array, root: Uint8Array, - rootCheck = true + rootCheck = true, ) => { - assert.deepEqual( - await sm.getContractStorage(address, key), - value, - 'storage value should be equal' - ) + assert.deepEqual(await sm.getStorage(address, key), value, 'storage value should be equal') if (rootCheck) { const accountCMP = await sm.getAccount(address) assert.deepEqual(accountCMP!.storageRoot, root, 'account storage root should be equal') @@ -116,12 +112,12 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.flush() await storageEval(sm, address, key, s.s1.value, s.s1.root, SMDict.rootCheck) sm.clearCaches() - assert.deepEqual(await sm.getContractStorage(address, key), s.s1.value) + assert.deepEqual(await sm.getStorage(address, key), s.s1.value) await storageEval(sm, address, key, s.s1.value, s.s1.root, SMDict.rootCheck) }) @@ -130,7 +126,7 @@ describe('StateManager -> Storage Checkpointing', () => { await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.commit() await sm.flush() await 
storageEval(sm, address, key, s.s1.value, s.s1.root, SMDict.rootCheck) @@ -144,7 +140,7 @@ describe('StateManager -> Storage Checkpointing', () => { await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.revert() await sm.flush() @@ -159,7 +155,7 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() await sm.commit() await sm.flush() @@ -173,7 +169,7 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() await sm.revert() await sm.flush() @@ -187,9 +183,9 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.commit() await sm.flush() await storageEval(sm, address, key, s.s2.value, s.s2.root, SMDict.rootCheck) @@ -202,11 +198,11 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.commit() - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s3.value) await sm.flush() await storageEval(sm, address, key, s.s3.value, s.s3.root, SMDict.rootCheck) @@ -218,10 +214,10 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s3.value) await sm.commit() await sm.flush() await storageEval(sm, address, key, s.s3.value, s.s3.root, SMDict.rootCheck) @@ -235,8 +231,8 @@ describe('StateManager -> Storage Checkpointing', () => { await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s1.value) - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s2.value) await sm.commit() await sm.flush() await storageEval(sm, address, key, s.s2.value, s.s2.root, SMDict.rootCheck) @@ -250,9 +246,9 @@ describe('StateManager -> Storage Checkpointing', () => { await sm.putAccount(address, new Account()) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.revert() await sm.flush() await storageEval(sm, address, key, 
valueEmpty, rootEmpty) @@ -265,9 +261,9 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.revert() await sm.flush() await storageEval(sm, address, key, s.s1.value, s.s1.root, SMDict.rootCheck) @@ -280,11 +276,11 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s3.value) await sm.commit() await sm.commit() await sm.flush() @@ -298,11 +294,11 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s3.value) await sm.commit() await sm.revert() await sm.flush() @@ -316,11 +312,11 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s3.value) await sm.revert() await sm.commit() await sm.flush() @@ -334,13 +330,13 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s3.value) await sm.revert() - await sm.putContractStorage(address, key, s.s4.value) + await sm.putStorage(address, key, s.s4.value) await sm.commit() await sm.flush() await storageEval(sm, address, key, s.s4.value, s.s4.root, SMDict.rootCheck) @@ -353,15 +349,15 @@ describe('StateManager -> Storage Checkpointing', () => { const sm = new SMDict.SM() await sm.putAccount(address, new Account()) - await sm.putContractStorage(address, key, s.s1.value) + await sm.putStorage(address, key, s.s1.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s2.value) + await sm.putStorage(address, key, s.s2.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s3.value) + await sm.putStorage(address, key, s.s3.value) await sm.revert() - await sm.putContractStorage(address, key, s.s4.value) + await sm.putStorage(address, key, 
s.s4.value) await sm.checkpoint() - await sm.putContractStorage(address, key, s.s5.value) + await sm.putStorage(address, key, s.s5.value) await sm.commit() await sm.commit() await sm.flush() diff --git a/packages/statemanager/test/proofStateManager.spec.ts b/packages/statemanager/test/proofStateManager.spec.ts index ab83ce1fca..3a1e2bef2e 100644 --- a/packages/statemanager/test/proofStateManager.spec.ts +++ b/packages/statemanager/test/proofStateManager.spec.ts @@ -1,9 +1,12 @@ -import { Trie } from '@ethereumjs/trie' +import { Trie, createTrie } from '@ethereumjs/trie' import { Account, Address, bytesToHex, bytesToUnprefixedHex, + createAddressFromPrivateKey, + createAddressFromString, + createZeroAddress, equalsBytes, hexToBytes, randomBytes, @@ -23,7 +26,7 @@ import type { PrefixedHexString } from '@ethereumjs/util' describe('ProofStateManager', () => { it(`should return quantity-encoded RPC representation`, async () => { - const address = Address.zero() + const address = createZeroAddress() const key = zeros(32) const stateManager = new DefaultStateManager() @@ -33,21 +36,21 @@ describe('ProofStateManager', () => { }) it(`should correctly return the right storage root / account root`, async () => { - const address = Address.zero() + const address = createZeroAddress() const key = zeros(32) const stateManager = new DefaultStateManager() await stateManager.putAccount(address, new Account(BigInt(100), BigInt(200))) const storageRoot = (await stateManager.getAccount(address))!.storageRoot - await stateManager.putContractStorage(address, key, new Uint8Array([10])) + await stateManager.putStorage(address, key, new Uint8Array([10])) const proof = await stateManager.getProof(address, [key]) assert.ok(!equalsBytes(hexToBytes(proof.storageHash), storageRoot)) }) it(`should return quantity-encoded RPC representation for existing accounts`, async () => { - const address = Address.zero() + const address = createZeroAddress() const key = zeros(32) const stateManager = new DefaultStateManager() @@ -75,15 +78,15 @@ describe('ProofStateManager', () => { }) it(`should get and verify EIP 1178 proofs`, async () => { - const address = Address.zero() + const address = createZeroAddress() const key = zeros(32) const value = hexToBytes('0x0000aabb00') const code = hexToBytes('0x6000') const stateManager = new DefaultStateManager() await stateManager.checkpoint() await stateManager.putAccount(address, new Account()) - await stateManager.putContractStorage(address, key, value) - await stateManager.putContractCode(address, code) + await stateManager.putStorage(address, key, value) + await stateManager.putCode(address, code) const account = await stateManager.getAccount(address) account!.balance = BigInt(1) account!.nonce = BigInt(2) @@ -97,11 +100,13 @@ describe('ProofStateManager', () => { const proof = await stateManager.getProof(address, [key]) assert.ok(await stateManager.verifyProof(proof)) - const nonExistenceProof = await stateManager.getProof(Address.fromPrivateKey(randomBytes(32))) + const nonExistenceProof = await stateManager.getProof( + createAddressFromPrivateKey(randomBytes(32)), + ) assert.equal( await stateManager.verifyProof(nonExistenceProof), true, - 'verified proof of non-existence of account' + 'verified proof of non-existence of account', ) }) @@ -110,8 +115,8 @@ describe('ProofStateManager', () => { // Block: 11098094 (hash 0x1d9ea6981b8093a2b63f22f74426ceb6ba1acae3fddd7831442bbeba3fa4f146) // Account: 0xc626553e7c821d0f8308c28d56c60e3c15f8d55a // Storage slots: empty list - const address 
= Address.fromString('0xc626553e7c821d0f8308c28d56c60e3c15f8d55a') - const trie = await Trie.create({ useKeyHashing: true }) + const address = createAddressFromString('0xc626553e7c821d0f8308c28d56c60e3c15f8d55a') + const trie = await createTrie({ useKeyHashing: true }) const stateManager = new DefaultStateManager({ trie }) // Dump all the account proof data in the DB let stateRoot: Uint8Array | undefined @@ -134,7 +139,7 @@ describe('ProofStateManager', () => { // Block: 11098094 (hash 0x1d9ea6981b8093a2b63f22f74426ceb6ba1acae3fddd7831442bbeba3fa4f146) // Account: 0x68268f12253f69f66b188c95b8106b2f847859fc (this account does not exist) // Storage slots: empty list - const address = Address.fromString('0x68268f12253f69f66b188c95b8106b2f847859fc') + const address = createAddressFromString('0x68268f12253f69f66b188c95b8106b2f847859fc') const trie = new Trie({ useKeyHashing: true }) const stateManager = new DefaultStateManager({ trie }) // Dump all the account proof data in the DB @@ -158,7 +163,7 @@ describe('ProofStateManager', () => { // eth.getProof("0x2D80502854FC7304c3E3457084DE549f5016B73f", ["0x1e8bf26b05059b66f11b6e0c5b9fe941f81181d6cc9f2af65ccee86e95cea1ca", "0x1e8bf26b05059b66f11b6e0c5b9fe941f81181d6cc9f2af65ccee86e95cea1cb"], 11098094) // Note: the first slot has a value, but the second slot is empty // Note: block hash 0x1d9ea6981b8093a2b63f22f74426ceb6ba1acae3fddd7831442bbeba3fa4f146 - const address = Address.fromString('0x2D80502854FC7304c3E3457084DE549f5016B73f') + const address = createAddressFromString('0x2D80502854FC7304c3E3457084DE549f5016B73f') const trie = new Trie({ useKeyHashing: true }) const stateManager = new DefaultStateManager({ trie }) // Dump all the account proof data in the DB @@ -196,7 +201,7 @@ describe('ProofStateManager', () => { // eth.getProof("0x2D80502854FC7304c3E3457084DE549f5016B73f", ["0x1e8bf26b05059b66f11b6e0c5b9fe941f81181d6cc9f2af65ccee86e95cea1ca", "0x1e8bf26b05059b66f11b6e0c5b9fe941f81181d6cc9f2af65ccee86e95cea1cb"], 11098094) // Note: the first slot has a value, but the second slot is empty // Note: block hash 0x1d9ea6981b8093a2b63f22f74426ceb6ba1acae3fddd7831442bbeba3fa4f146 - const address = Address.fromString('0x2D80502854FC7304c3E3457084DE549f5016B73f') + const address = createAddressFromString('0x2D80502854FC7304c3E3457084DE549f5016B73f') const trie = new Trie({ useKeyHashing: true }) const stateManager = new DefaultStateManager({ trie }) // Dump all the account proof data in the DB @@ -262,7 +267,7 @@ describe('ProofStateManager', () => { // eth.getProof("0x2D80502854FC7304c3E3457084DE549f5016B73f", ["0x1e8bf26b05059b66f11b6e0c5b9fe941f81181d6cc9f2af65ccee86e95cea1ca", "0x1e8bf26b05059b66f11b6e0c5b9fe941f81181d6cc9f2af65ccee86e95cea1cb"], 11098094) // Note: the first slot has a value, but the second slot is empty // Note: block hash 0x1d9ea6981b8093a2b63f22f74426ceb6ba1acae3fddd7831442bbeba3fa4f146 - const address = Address.fromString('0x68268f12253f69f66b188c95b8106b2f847859fc') + const address = createAddressFromString('0x68268f12253f69f66b188c95b8106b2f847859fc') const trie = new Trie({ useKeyHashing: true }) const stateManager = new DefaultStateManager({ trie }) // Dump all the account proof data in the DB diff --git a/packages/statemanager/test/rpcStateManager.spec.ts b/packages/statemanager/test/rpcStateManager.spec.ts index 843698196f..53b09fee27 100644 --- a/packages/statemanager/test/rpcStateManager.spec.ts +++ b/packages/statemanager/test/rpcStateManager.spec.ts @@ -1,19 +1,20 @@ import { createBlockFromJsonRpcProvider, 
createBlockFromRPC } from '@ethereumjs/block' -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { EVM, type EVMRunCallOpts } from '@ethereumjs/evm' -import { FeeMarketEIP1559Transaction, TransactionFactory } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { type EVMRunCallOpts, createEVM } from '@ethereumjs/evm' +import { createFeeMarket1559Tx, createTxFromRPC } from '@ethereumjs/tx' import { - Account, Address, bigIntToBytes, bytesToHex, bytesToUnprefixedHex, + createAccountFromRLP, + createAddressFromString, equalsBytes, hexToBytes, setLengthLeft, utf8ToBytes, } from '@ethereumjs/util' -import { VM } from '@ethereumjs/vm' +import { VM, runBlock, runTx } from '@ethereumjs/vm' import { assert, describe, expect, it, vi } from 'vitest' import { RPCBlockChain, RPCStateManager } from '../src/rpcStateManager.js' @@ -46,23 +47,15 @@ describe('RPC State Manager initialization tests', async () => { it('should work', () => { let state = new RPCStateManager({ provider, blockTag: 1n }) assert.ok(state instanceof RPCStateManager, 'was able to instantiate state manager') - assert.equal( - (state as any)._blockTag, - '0x1', - 'State manager starts with default block tag of 1' - ) + assert.equal(state['_blockTag'], '0x1', 'State manager starts with default block tag of 1') state = new RPCStateManager({ provider, blockTag: 1n }) - assert.equal( - (state as any)._blockTag, - '0x1', - 'State Manager instantiated with predefined blocktag' - ) + assert.equal(state['_blockTag'], '0x1', 'State Manager instantiated with predefined blocktag') state = new RPCStateManager({ provider: 'https://google.com', blockTag: 1n }) assert.ok( state instanceof RPCStateManager, - 'was able to instantiate state manager with valid url' + 'was able to instantiate state manager with valid url', ) const invalidProvider = 'google.com' @@ -70,7 +63,7 @@ describe('RPC State Manager initialization tests', async () => { () => new RPCStateManager({ provider: invalidProvider as any, blockTag: 1n }), undefined, undefined, - 'cannot instantiate state manager with invalid provider' + 'cannot instantiate state manager with invalid provider', ) }) }) @@ -78,57 +71,59 @@ describe('RPC State Manager initialization tests', async () => { describe('RPC State Manager API tests', () => { it('should work', async () => { const state = new RPCStateManager({ provider, blockTag: 1n }) - const vitalikDotEth = Address.fromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') + const vitalikDotEth = createAddressFromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') const account = await state.getAccount(vitalikDotEth) assert.ok(account!.nonce > 0n, 'Vitalik.eth returned a valid nonce') await state.putAccount(vitalikDotEth, account!) 
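// The lines below read the account back out of the state manager's internal account cache
// (`state['_caches'].account`) and decode the cached RLP with `createAccountFromRLP`,
// confirming that `putAccount` stored the account in the cache.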
- const retrievedVitalikAccount = Account.fromRlpSerializedAccount( - (state as any)._accountCache.get(vitalikDotEth)!.accountRLP + const retrievedVitalikAccount = createAccountFromRLP( + state['_caches'].account?.get(vitalikDotEth)?.accountRLP!, ) assert.ok(retrievedVitalikAccount.nonce > 0n, 'Vitalik.eth is stored in cache') const doesThisAccountExist = await state.accountExists( - Address.fromString('0xccAfdD642118E5536024675e776d32413728DD07') + createAddressFromString('0xccAfdD642118E5536024675e776d32413728DD07'), ) assert.ok(!doesThisAccountExist, 'getAccount returns undefined for non-existent account') assert.ok(state.getAccount(vitalikDotEth) !== undefined, 'vitalik.eth does exist') - const UNIerc20ContractAddress = Address.fromString('0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984') - const UNIContractCode = await state.getContractCode(UNIerc20ContractAddress) + const UniswapERC20ContractAddress = createAddressFromString( + '0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984', + ) + const UNIContractCode = await state.getCode(UniswapERC20ContractAddress) assert.ok(UNIContractCode.length > 0, 'was able to retrieve UNI contract code') - await state.putContractCode(UNIerc20ContractAddress, UNIContractCode) + await state.putCode(UniswapERC20ContractAddress, UNIContractCode) assert.ok( - typeof (state as any)._contractCache.get(UNIerc20ContractAddress.toString()) !== 'undefined', - 'UNI ERC20 contract code was found in cache' + state['_caches'].code?.get(UniswapERC20ContractAddress) !== undefined, + 'UNI ERC20 contract code was found in cache', ) - const storageSlot = await state.getContractStorage( - UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(1n), 32) + const storageSlot = await state.getStorage( + UniswapERC20ContractAddress, + setLengthLeft(bigIntToBytes(1n), 32), ) assert.ok(storageSlot.length > 0, 'was able to retrieve storage slot 1 for the UNI contract') await expect(async () => { - await state.getContractStorage(UNIerc20ContractAddress, setLengthLeft(bigIntToBytes(1n), 31)) + await state.getStorage(UniswapERC20ContractAddress, setLengthLeft(bigIntToBytes(1n), 31)) }).rejects.toThrowError('Storage key must be 32 bytes long') - await state.putContractStorage( - UNIerc20ContractAddress, + await state.putStorage( + UniswapERC20ContractAddress, setLengthLeft(bigIntToBytes(2n), 32), - utf8ToBytes('abcd') + utf8ToBytes('abcd'), ) - const slotValue = await state.getContractStorage( - UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(2n), 32) + const slotValue = await state.getStorage( + UniswapERC20ContractAddress, + setLengthLeft(bigIntToBytes(2n), 32), ) assert.ok(equalsBytes(slotValue, utf8ToBytes('abcd')), 'should retrieve slot 2 value') - const dumpedStorage = await state.dumpStorage(UNIerc20ContractAddress) + const dumpedStorage = await state.dumpStorage(UniswapERC20ContractAddress) assert.deepEqual(dumpedStorage, { [bytesToUnprefixedHex(setLengthLeft(bigIntToBytes(1n), 32))]: '0xabcd', [bytesToUnprefixedHex(setLengthLeft(bigIntToBytes(2n), 32))]: bytesToHex(utf8ToBytes('abcd')), @@ -136,38 +131,38 @@ describe('RPC State Manager API tests', () => { const spy = vi.spyOn(state, 'getAccountFromProvider') spy.mockImplementation(() => { - throw new Error('shouldnt call me') + throw new Error("shouldn't call me") }) await state.checkpoint() - await state.putContractStorage( - UNIerc20ContractAddress, + await state.putStorage( + UniswapERC20ContractAddress, setLengthLeft(bigIntToBytes(2n), 32), - new Uint8Array(0) + new Uint8Array(0), ) await 
state.modifyAccountFields(vitalikDotEth, { nonce: 39n }) assert.equal( (await state.getAccount(vitalikDotEth))?.nonce, 39n, - 'modified account fields successfully' + 'modified account fields successfully', ) assert.doesNotThrow( async () => state.getAccount(vitalikDotEth), - 'does not call getAccountFromProvider' + 'does not call getAccountFromProvider', ) try { - await state.getAccount(Address.fromString('0x9Cef824A8f4b3Dc6B7389933E52e47F010488Fc8')) + await state.getAccount(createAddressFromString('0x9Cef824A8f4b3Dc6B7389933E52e47F010488Fc8')) } catch (err) { assert.ok(true, 'calls getAccountFromProvider for non-cached account') } - const deletedSlot = await state.getContractStorage( - UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(2n), 32) + const deletedSlot = await state.getStorage( + UniswapERC20ContractAddress, + setLengthLeft(bigIntToBytes(2n), 32), ) assert.equal(deletedSlot.length, 0, 'deleted slot from storage cache') @@ -175,34 +170,34 @@ describe('RPC State Manager API tests', () => { await state.deleteAccount(vitalikDotEth) assert.ok( (await state.getAccount(vitalikDotEth)) === undefined, - 'account should not exist after being deleted' + 'account should not exist after being deleted', ) await state.revert() assert.ok( (await state.getAccount(vitalikDotEth)) !== undefined, - 'account deleted since last checkpoint should exist after revert called' + 'account deleted since last checkpoint should exist after revert called', ) - const deletedSlotAfterRevert = await state.getContractStorage( - UNIerc20ContractAddress, - setLengthLeft(bigIntToBytes(2n), 32) + const deletedSlotAfterRevert = await state.getStorage( + UniswapERC20ContractAddress, + setLengthLeft(bigIntToBytes(2n), 32), ) assert.equal( deletedSlotAfterRevert.length, 4, - 'slot deleted since last checkpoint should exist in storage cache after revert' + 'slot deleted since last checkpoint should exist in storage cache after revert', ) - const cacheStorage = await state.dumpStorage(UNIerc20ContractAddress) + const cacheStorage = await state.dumpStorage(UniswapERC20ContractAddress) assert.equal( 2, Object.keys(cacheStorage).length, - 'should have 2 storage slots in cache before clear' + 'should have 2 storage slots in cache before clear', ) - await state.clearContractStorage(UNIerc20ContractAddress) - const clearedStorage = await state.dumpStorage(UNIerc20ContractAddress) + await state.clearStorage(UniswapERC20ContractAddress) + const clearedStorage = await state.dumpStorage(UniswapERC20ContractAddress) assert.deepEqual({}, clearedStorage, 'storage cache should be empty after clear') try { @@ -211,21 +206,21 @@ describe('RPC State Manager API tests', () => { } catch (err: any) { assert.ok( err.message.includes('expected blockTag to be block hash, bigint, hex prefixed string'), - 'threw with correct error when invalid blockTag provided' + 'threw with correct error when invalid blockTag provided', ) } assert.equal( - (state as any)._contractCache.get(UNIerc20ContractAddress), + state['_caches'].account?.get(UniswapERC20ContractAddress), undefined, - 'should not have any code for contract after cache is reverted' + 'should not have any code for contract after cache is reverted', ) - assert.equal((state as any)._blockTag, '0x1', 'blockTag defaults to 1') + assert.equal(state['_blockTag'], '0x1', 'blockTag defaults to 1') state.setBlockTag(5n) - assert.equal((state as any)._blockTag, '0x5', 'blockTag set to 0x5') + assert.equal(state['_blockTag'], '0x5', 'blockTag set to 0x5') state.setBlockTag('earliest') - 
assert.equal((state as any)._blockTag, 'earliest', 'blockTag set to earliest') + assert.equal(state['_blockTag'], 'earliest', 'blockTag set to earliest') await state.checkpoint() }) @@ -233,21 +228,21 @@ describe('RPC State Manager API tests', () => { describe('runTx custom transaction test', () => { it('should work', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const state = new RPCStateManager({ provider, blockTag: 1n }) const vm = await VM.create({ common, stateManager: state }) // TODO fix the type DefaultStateManager back to StateManagerInterface in VM - const vitalikDotEth = Address.fromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') + const vitalikDotEth = createAddressFromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') const privateKey = hexToBytes( - '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109' + '0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', ) - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { to: vitalikDotEth, value: '0x100', gasLimit: 500000n, maxFeePerGas: 7 }, - { common } + { common }, ).sign(privateKey) - const result = await vm.runTx({ + const result = await runTx(vm, { skipBalance: true, skipNonce: true, tx, @@ -259,29 +254,29 @@ describe('runTx custom transaction test', () => { describe('runTx test: replay mainnet transactions', () => { it('should work', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const blockTag = 15496077n common.setHardforkBy({ blockNumber: blockTag }) - const tx = await TransactionFactory.fromRPC(txData as any, { common }) + const tx = await createTxFromRPC(txData as any, { common }) const state = new RPCStateManager({ provider, // Set the state manager to look at the state of the chain before the block has been executed blockTag: blockTag - 1n, }) const vm = await VM.create({ common, stateManager: state }) - const res = await vm.runTx({ tx }) + const res = await runTx(vm, { tx }) assert.equal( res.totalGasSpent, 21000n, - 'calculated correct total gas spent for simple transfer' + 'calculated correct total gas spent for simple transfer', ) }) }) describe('runBlock test', () => { it('should work', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Chainstart }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Chainstart }) const blockTag = 500000n const state = new RPCStateManager({ @@ -295,9 +290,9 @@ describe('runBlock test', () => { common.setHardforkBy({ blockNumber: blockTag - 1n }) const vm = await VM.create({ common, stateManager: state }) - const block = createBlockFromRPC(blockData as JsonRpcBlock, [], { common }) + const block = createBlockFromRPC(blockData.default as JsonRpcBlock, [], { common }) try { - const res = await vm.runBlock({ + const res = await runBlock(vm, { block, generate: true, skipHeaderValidation: true, @@ -305,7 +300,7 @@ describe('runBlock test', () => { assert.equal( res.gasUsed, block.header.gasUsed, - 'should compute correct cumulative gas for block' + 'should compute correct cumulative gas for block', ) } catch (err: any) { assert.fail(`should have successfully ran block; got error ${err.message}`) @@ -318,17 +313,17 @@ describe('blockchain', () => const blockchain = new RPCBlockChain(provider) const blockTag = 1n const state = 
new RPCStateManager({ provider, blockTag }) - const evm = await EVM.create({ blockchain, stateManager: state }) + const evm = await createEVM({ blockchain, stateManager: state }) // Bytecode for returning the blockhash of the block previous to `blockTag` const code = '0x600143034060005260206000F3' const contractAddress = new Address(hexToBytes('0x00000000000000000000000000000000000000ff')) - const caller = Address.fromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') + const caller = createAddressFromString('0xd8da6bf26964af9d7eed9e03e53415d37aa96045') await evm.stateManager.setStateRoot( - hexToBytes('0xf8506f559699a58a4724df4fcf2ad4fd242d20324db541823f128f5974feb6c7') + hexToBytes('0xf8506f559699a58a4724df4fcf2ad4fd242d20324db541823f128f5974feb6c7'), ) const block = await createBlockFromJsonRpcProvider(provider, 500000n, { setHardfork: true }) - await evm.stateManager.putContractCode(contractAddress, hexToBytes(code)) + await evm.stateManager.putCode(contractAddress, hexToBytes(code)) const runCallArgs: Partial = { caller, gasLimit: BigInt(0xffffffffff), @@ -338,7 +333,7 @@ describe('blockchain', () => const res = await evm.runCall(runCallArgs) assert.ok( bytesToHex(res.execResult.returnValue), - '0xd5ba853bc7151fc044b9d273a57e3f9ed35e66e0248ab4a571445650cc4fcaa6' + '0xd5ba853bc7151fc044b9d273a57e3f9ed35e66e0248ab4a571445650cc4fcaa6', ) })) @@ -352,7 +347,7 @@ describe('Should return same value as DefaultStateManager when account does not assert.equal( account0, account1, - 'Should return same value as DefaultStateManager when account does not exist' + 'Should return same value as DefaultStateManager when account does not exist', ) }) }) diff --git a/packages/statemanager/test/stateManager.account.spec.ts b/packages/statemanager/test/stateManager.account.spec.ts index e98256c146..6d0e010253 100644 --- a/packages/statemanager/test/stateManager.account.spec.ts +++ b/packages/statemanager/test/stateManager.account.spec.ts @@ -1,23 +1,21 @@ import { Address, KECCAK256_RLP, bytesToHex, equalsBytes, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { DefaultStateManager } from '../src/index.js' +import { Caches, DefaultStateManager } from '../src/index.js' -import { createAccount } from './util.js' +import { createAccountWithDefaults } from './util.js' describe('StateManager -> General/Account', () => { - for (const accountCacheOpts of [ - { deactivate: false }, - { deactivate: true }, - { deactivate: false, size: 0 }, - ]) { + for (const accountCacheOpts of [{ size: 1000 }, { size: 0 }]) { it(`should set the state root to empty`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) assert.ok(equalsBytes(stateManager['_trie'].root(), KECCAK256_RLP), 'it has default root') // commit some data to the trie const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) - const account = createAccount(BigInt(0), BigInt(1000)) + const account = createAccountWithDefaults(BigInt(0), BigInt(1000)) await stateManager.checkpoint() await stateManager.putAccount(address, account) await stateManager.commit() @@ -32,9 +30,11 @@ describe('StateManager -> General/Account', () => { }) it(`should clear the cache when the state root is set`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: 
accountCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) - const account = createAccount() + const account = createAccountWithDefaults() // test account storage cache const initialStateRoot = await stateManager.getStateRoot() @@ -57,26 +57,28 @@ describe('StateManager -> General/Account', () => { const key = hexToBytes('0x1234567890123456789012345678901234567890123456789012345678901234') const value = hexToBytes('0x1234') await stateManager.putAccount(address, account) - await stateManager.putContractStorage(address, key, value) + await stateManager.putStorage(address, key, value) - const contract0 = await stateManager.getContractStorage(address, key) + const contract0 = await stateManager.getStorage(address, key) assert.ok( equalsBytes(contract0, value), - "contract key's value is set in the _storageTries cache" + "contract key's value is set in the _storageTries cache", ) await stateManager.commit() await stateManager.setStateRoot(initialStateRoot) try { - await stateManager.getContractStorage(address, key) + await stateManager.getStorage(address, key) } catch (e) { - assert.ok(true, 'should throw if getContractStorage() is called on non existing address') + assert.ok(true, 'should throw if getStorage() is called on non existing address') } }) it('should put and get account, and add to the underlying cache if the account is not found', async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) - const account = createAccount() + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) + const account = createAccountWithDefaults() const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) await stateManager.putAccount(address, account) @@ -86,7 +88,7 @@ describe('StateManager -> General/Account', () => { assert.equal(res1!.balance, BigInt(0xfff384)) await stateManager.flush() - stateManager['_accountCache']?.clear() + stateManager['_caches']?.account?.clear() const res2 = await stateManager.getAccount(address) @@ -94,7 +96,9 @@ describe('StateManager -> General/Account', () => { }) it(`should return undefined for a non-existent account`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const res = (await stateManager.getAccount(address)) === undefined @@ -103,8 +107,10 @@ describe('StateManager -> General/Account', () => { }) it(`should return undefined for an existent account`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) - const account = createAccount(BigInt(0x1), BigInt(0x1)) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) + const account = createAccountWithDefaults(BigInt(0x1), BigInt(0x1)) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) await stateManager.putAccount(address, account) @@ -115,8 +121,10 @@ describe('StateManager -> General/Account', () => { }) it(`should modify account fields correctly`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) - const account = createAccount() + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) + const account = createAccountWithDefaults() const 
address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) await stateManager.putAccount(address, account) @@ -135,7 +143,7 @@ describe('StateManager -> General/Account', () => { await stateManager.modifyAccountFields(address, { codeHash: hexToBytes('0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b'), storageRoot: hexToBytes( - '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7' + '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7', ), }) @@ -143,11 +151,11 @@ describe('StateManager -> General/Account', () => { assert.equal( bytesToHex(res3!.codeHash), - '0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b' + '0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b', ) assert.equal( bytesToHex(res3!.storageRoot), - '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7' + '0xcafd881ab193703b83816c49ff6c2bf6ba6f464a1be560c42106128c8dbc35e7', ) }) } diff --git a/packages/statemanager/test/stateManager.code.spec.ts b/packages/statemanager/test/stateManager.code.spec.ts index fd5dca0faf..a31026d662 100644 --- a/packages/statemanager/test/stateManager.code.spec.ts +++ b/packages/statemanager/test/stateManager.code.spec.ts @@ -1,61 +1,67 @@ -import { Account, Address, equalsBytes, hexToBytes } from '@ethereumjs/util' +import { + Address, + createAccount, + createZeroAddress, + equalsBytes, + hexToBytes, +} from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { DefaultStateManager } from '../src/index.js' +import { Caches, DefaultStateManager } from '../src/index.js' -import { createAccount } from './util.js' +import { createAccountWithDefaults } from './util.js' import type { AccountData } from '@ethereumjs/util' describe('StateManager -> Code', () => { - for (const accountCacheOpts of [ - { deactivate: false }, - { deactivate: true }, - { deactivate: false, size: 0 }, - ]) { + for (const accountCacheOpts of [{ size: 1000 }, { size: 0 }]) { it(`should store codehashes using a prefix`, async () => { /* This test is mostly an example of why a code prefix is necessary I an address, we put two storage values. 
The preimage of the (storage trie) root hash is known This preimage is used as codeHash - + NOTE: Currently, the only problem which this code prefix fixes, is putting 0x80 as contract code -> This hashes to the empty trie node hash (0x80 = RLP([])), so keccak256(0x80) = empty trie node hash -> Therefore, each empty state trie now points to 0x80, which is not a valid trie node, which crashes @ethereumjs/trie */ // Setup - const stateManager = new DefaultStateManager({ accountCacheOpts }) - const codeStateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) + const codeStateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) const address1 = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) - const account = createAccount() + const account = createAccountWithDefaults() const key1 = hexToBytes(`0x${'00'.repeat(32)}`) const key2 = hexToBytes(`0x${'00'.repeat(31)}01`) await stateManager.putAccount(address1, account) - await stateManager.putContractStorage(address1, key1, key2) - await stateManager.putContractStorage(address1, key2, key2) + await stateManager.putStorage(address1, key1, key2) + await stateManager.putStorage(address1, key2, key2) const root = await stateManager.getStateRoot() const rawNode = await stateManager['_trie']['_db'].get(root) - await codeStateManager.putContractCode(address1, rawNode!) + await codeStateManager.putCode(address1, rawNode!) - let codeSlot1 = await codeStateManager.getContractStorage(address1, key1) - let codeSlot2 = await codeStateManager.getContractStorage(address1, key2) + let codeSlot1 = await codeStateManager.getStorage(address1, key1) + let codeSlot2 = await codeStateManager.getStorage(address1, key2) assert.ok(codeSlot1.length === 0, 'slot 0 is empty') assert.ok(codeSlot2.length === 0, 'slot 1 is empty') - const code = await codeStateManager.getContractCode(address1) + const code = await codeStateManager.getCode(address1) assert.ok(code.length > 0, 'code deposited correctly') - const slot1 = await stateManager.getContractStorage(address1, key1) - const slot2 = await stateManager.getContractStorage(address1, key2) + const slot1 = await stateManager.getStorage(address1, key1) + const slot2 = await stateManager.getStorage(address1, key2) assert.ok(slot1.length > 0, 'storage key0 deposited correctly') assert.ok(slot2.length > 0, 'storage key1 deposited correctly') - let slotCode = await stateManager.getContractCode(address1) + let slotCode = await stateManager.getCode(address1) assert.ok(slotCode.length === 0, 'code cannot be loaded') // Checks by either setting state root to codeHash, or codeHash to stateRoot @@ -65,7 +71,7 @@ describe('StateManager -> Code', () => { await stateManager.putAccount(address1, account1!) - slotCode = await stateManager.getContractCode(address1) + slotCode = await stateManager.getCode(address1) assert.ok(slotCode.length === 0, 'code cannot be loaded') // This test fails if no code prefix is used account1 = await codeStateManager.getAccount(address1) @@ -73,65 +79,73 @@ describe('StateManager -> Code', () => { await codeStateManager.putAccount(address1, account1!) 
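A minimal illustrative sketch of the collision that the code-prefix comment above describes, assuming the same `keccak256` helper from `ethereum-cryptography` and the `KECCAK256_RLP`/`hexToBytes`/`equalsBytes`/`bytesToHex` utilities from `@ethereumjs/util` that these test files already import:

```ts
import { keccak256 } from 'ethereum-cryptography/keccak.js'
import { KECCAK256_RLP, bytesToHex, equalsBytes, hexToBytes } from '@ethereumjs/util'

// 0x80 is the RLP encoding of an empty byte string, so hashing it yields the
// canonical empty-trie node hash (KECCAK256_RLP). Unprefixed contract code of
// exactly 0x80 would therefore alias the empty trie node in the backing DB.
const rawCode = hexToBytes('0x80')
console.log(bytesToHex(keccak256(rawCode))) // 0x56e81f...
console.log(equalsBytes(keccak256(rawCode), KECCAK256_RLP)) // true
```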
- codeSlot1 = await codeStateManager.getContractStorage(address1, key1) - codeSlot2 = await codeStateManager.getContractStorage(address1, key2) + codeSlot1 = await codeStateManager.getStorage(address1, key1) + codeSlot2 = await codeStateManager.getStorage(address1, key2) assert.ok(codeSlot1.length === 0, 'slot 0 is empty') assert.ok(codeSlot2.length === 0, 'slot 1 is empty') }) it(`should set and get code`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const code = hexToBytes( - '0x73095e7baea6a6c7c4c2dfeb977efac326af552d873173095e7baea6a6c7c4c2dfeb977efac326af552d873157' + '0x73095e7baea6a6c7c4c2dfeb977efac326af552d873173095e7baea6a6c7c4c2dfeb977efac326af552d873157', ) const raw: AccountData = { nonce: '0x0', balance: '0x03e7', codeHash: '0xb30fb32201fe0486606ad451e1a61e2ae1748343cd3d411ed992ffcc0774edd4', } - const account = Account.fromAccountData(raw) + const account = createAccount(raw) await stateManager.putAccount(address, account) - await stateManager.putContractCode(address, code) - const codeRetrieved = await stateManager.getContractCode(address) + await stateManager.putCode(address, code) + const codeRetrieved = await stateManager.getCode(address) assert.ok(equalsBytes(code, codeRetrieved)) }) it(`should not get code if is not contract`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const raw: AccountData = { nonce: '0x0', balance: '0x03e7', } - const account = Account.fromAccountData(raw) + const account = createAccount(raw) await stateManager.putAccount(address, account) - const code = await stateManager.getContractCode(address) + const code = await stateManager.getCode(address) assert.ok(equalsBytes(code, new Uint8Array(0))) }) it(`should set empty code`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const raw: AccountData = { nonce: '0x0', balance: '0x03e7', } - const account = Account.fromAccountData(raw) + const account = createAccount(raw) const code = new Uint8Array(0) await stateManager.putAccount(address, account) - await stateManager.putContractCode(address, code) - const codeRetrieved = await stateManager.getContractCode(address) + await stateManager.putCode(address, code) + const codeRetrieved = await stateManager.getCode(address) assert.ok(equalsBytes(codeRetrieved, new Uint8Array(0))) }) it(`should prefix codehashes by default`, async () => { - const stateManager = new DefaultStateManager({ accountCacheOpts }) + const stateManager = new DefaultStateManager({ + caches: new Caches({ account: accountCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const code = hexToBytes('0x80') - await stateManager.putContractCode(address, code) - const codeRetrieved = await stateManager.getContractCode(address) + await stateManager.putCode(address, code) + const codeRetrieved = await stateManager.getCode(address) assert.ok(equalsBytes(codeRetrieved, 
code)) }) @@ -142,18 +156,18 @@ describe('StateManager -> Code', () => { const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) const code = hexToBytes('0x80') try { - await stateManager.putContractCode(address, code) + await stateManager.putCode(address, code) assert.fail('should throw') } catch (e) { assert.ok(true, 'successfully threw') } }) - it('putContractCode with empty code on existing address should correctly propagate', async () => { + it('putCode with empty code on existing address should correctly propagate', async () => { const stateManager = new DefaultStateManager() - const address = Address.zero() - await stateManager.putContractCode(address, new Uint8Array([1])) - await stateManager.putContractCode(address, new Uint8Array()) + const address = createZeroAddress() + await stateManager.putCode(address, new Uint8Array([1])) + await stateManager.putCode(address, new Uint8Array()) const account = await stateManager.getAccount(address) assert.ok(account !== undefined) assert.ok(account?.isEmpty()) diff --git a/packages/statemanager/test/stateManager.spec.ts b/packages/statemanager/test/stateManager.spec.ts index c39e61f052..f3f466a8da 100644 --- a/packages/statemanager/test/stateManager.spec.ts +++ b/packages/statemanager/test/stateManager.spec.ts @@ -1,18 +1,21 @@ -import { Trie } from '@ethereumjs/trie' +import { Trie, createTrie, createTrieFromProof } from '@ethereumjs/trie' import { Account, - Address, KECCAK256_RLP, bigIntToBytes, + createAddressFromPrivateKey, + createAddressFromString, + createZeroAddress, equalsBytes, hexToBytes, intToBytes, setLengthLeft, utf8ToBytes, + zeros, } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { CacheType, DefaultStateManager } from '../src/index.js' +import { CacheType, Caches, DefaultStateManager } from '../src/index.js' import type { PrefixedHexString } from '@ethereumjs/util' @@ -24,7 +27,7 @@ function verifyAccount( codeHash: Uint8Array nonce: BigInt storageRoot: Uint8Array - } + }, ) { assert.equal(account.balance, state.balance) assert.equal(account.nonce, state.nonce) @@ -41,26 +44,37 @@ describe('StateManager -> General', () => { assert.deepEqual(res, KECCAK256_RLP, 'it has default root') }) + it('should not throw on getContractStorage() on non-existing accounts', async () => { + const sm = new DefaultStateManager() + + try { + const storage = await sm.getStorage(createZeroAddress(), zeros(32)) + assert.ok(equalsBytes(storage, new Uint8Array())) + } catch { + assert.fail('should not throw') + } + }) + it(`should clear contract storage`, async () => { const sm = new DefaultStateManager() - const contractAddress = Address.fromString('0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984') + const contractAddress = createAddressFromString('0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984') const contractCode = Uint8Array.from([0, 1, 2, 3]) const storageKey = setLengthLeft(bigIntToBytes(2n), 32) const storedData = utf8ToBytes('abcd') - await sm.putContractCode(contractAddress, contractCode) - await sm.putContractStorage(contractAddress, storageKey, storedData) + await sm.putCode(contractAddress, contractCode) + await sm.putStorage(contractAddress, storageKey, storedData) - let storage = await sm.getContractStorage(contractAddress, storageKey) + let storage = await sm.getStorage(contractAddress, storageKey) assert.equal(JSON.stringify(storage), JSON.stringify(storedData), 'contract storage updated') - await sm.clearContractStorage(contractAddress) - storage = await 
sm.getContractStorage(contractAddress, storageKey) + await sm.clearStorage(contractAddress) + storage = await sm.getStorage(contractAddress, storageKey) assert.equal( JSON.stringify(storage), JSON.stringify(new Uint8Array()), - 'clears contract storage' + 'clears contract storage', ) }) @@ -75,47 +89,49 @@ describe('StateManager -> General', () => { assert.equal( smCopy['_prefixCodeHashes'], sm['_prefixCodeHashes'], - 'should retain non-default values' + 'should retain non-default values', ) sm = new DefaultStateManager({ trie, - accountCacheOpts: { - type: CacheType.LRU, - }, - storageCacheOpts: { - type: CacheType.LRU, - }, + caches: new Caches({ + account: { + type: CacheType.LRU, + }, + storage: { + type: CacheType.LRU, + }, + }), }) smCopy = sm.shallowCopy() assert.equal( - smCopy['_accountCacheSettings'].type, + smCopy['_caches']?.settings.account.type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP account cache on copy()' + 'should switch to ORDERED_MAP account cache on copy()', ) assert.equal( - smCopy['_storageCacheSettings'].type, + smCopy['_caches']?.settings.storage.type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP storage cache on copy()' + 'should switch to ORDERED_MAP storage cache on copy()', ) assert.equal(smCopy['_trie']['_opts'].cacheSize, 0, 'should set trie cache size to 0') smCopy = sm.shallowCopy(false) assert.equal( - smCopy['_accountCacheSettings'].type, + smCopy['_caches']?.settings.account.type, CacheType.LRU, - 'should retain account cache type when deactivate cache downleveling' + 'should retain account cache type when deactivate cache downleveling', ) assert.equal( - smCopy['_storageCacheSettings'].type, + smCopy['_caches']?.settings.storage.type, CacheType.LRU, - 'should retain storage cache type when deactivate cache downleveling' + 'should retain storage cache type when deactivate cache downleveling', ) assert.equal( smCopy['_trie']['_opts'].cacheSize, 1000, - 'should retain trie cache size when deactivate cache downleveling' + 'should retain trie cache size when deactivate cache downleveling', ) }) @@ -142,9 +158,9 @@ describe('StateManager -> General', () => { const address2Str = '0x2'.padEnd(42, '0') const address3Str = '0x3'.padEnd(42, '0') - const address1 = Address.fromString(address1Str) - const address2 = Address.fromString(address2Str) - const address3 = Address.fromString(address3Str) + const address1 = createAddressFromString(address1Str) + const address2 = createAddressFromString(address2Str) + const address3 = createAddressFromString(address3Str) const key1 = setLengthLeft(new Uint8Array([1]), 32) const key2 = setLengthLeft(new Uint8Array([2]), 32) @@ -184,14 +200,14 @@ describe('StateManager -> General', () => { const stateManager = new DefaultStateManager() for (const [addressStr, entry] of Object.entries(stateSetup)) { - const address = Address.fromString(addressStr) + const address = createAddressFromString(addressStr) const account = new Account(entry.nonce, entry.balance) await stateManager.putAccount(address, account) - await stateManager.putContractCode(address, entry.code) + await stateManager.putCode(address, entry.code) for (let i = 0; i < entry.keys.length; i++) { const key = entry.keys[i] const value = entry.values[i] - await stateManager.putContractStorage(address, key, value) + await stateManager.putStorage(address, key, value) } await stateManager.flush() stateSetup[addressStr].codeHash = (await stateManager.getAccount(address)!)?.codeHash @@ -218,13 +234,13 @@ describe('StateManager -> General', () => { 
const stProof = await stateManager.getProof(address1, [state1.keys[0], state1.keys[1]]) await partialStateManager.addProofData(stProof) - let stSlot1_0 = await partialStateManager.getContractStorage(address1, state1.keys[0]) + let stSlot1_0 = await partialStateManager.getStorage(address1, state1.keys[0]) assert.ok(equalsBytes(stSlot1_0, state1.values[0])) - let stSlot1_1 = await partialStateManager.getContractStorage(address1, state1.keys[1]) + let stSlot1_1 = await partialStateManager.getStorage(address1, state1.keys[1]) assert.ok(equalsBytes(stSlot1_1, state1.values[1])) - let stSlot1_2 = await partialStateManager.getContractStorage(address1, state1.keys[2]) + let stSlot1_2 = await partialStateManager.getStorage(address1, state1.keys[2]) assert.ok(equalsBytes(stSlot1_2, new Uint8Array())) // Check Array support as input @@ -240,13 +256,13 @@ describe('StateManager -> General', () => { account3 = await sm.getAccount(address3) assert.ok(account3 === undefined) - stSlot1_0 = await sm.getContractStorage(address1, state1.keys[0]) + stSlot1_0 = await sm.getStorage(address1, state1.keys[0]) assert.ok(equalsBytes(stSlot1_0, state1.values[0])) - stSlot1_1 = await sm.getContractStorage(address1, state1.keys[1]) + stSlot1_1 = await sm.getStorage(address1, state1.keys[1]) assert.ok(equalsBytes(stSlot1_1, state1.values[1])) - stSlot1_2 = await sm.getContractStorage(address1, state1.keys[2]) + stSlot1_2 = await sm.getStorage(address1, state1.keys[2]) assert.ok(equalsBytes(stSlot1_2, new Uint8Array())) } @@ -267,8 +283,8 @@ describe('StateManager -> General', () => { await postVerify(newPartialStateManager2) const zeroAddressNonce = BigInt(100) - await stateManager.putAccount(Address.zero(), new Account(zeroAddressNonce)) - const zeroAddressProof = await stateManager.getProof(Address.zero()) + await stateManager.putAccount(createZeroAddress(), new Account(zeroAddressNonce)) + const zeroAddressProof = await stateManager.getProof(createZeroAddress()) try { await DefaultStateManager.fromProof([proof1, zeroAddressProof], true) @@ -279,37 +295,37 @@ describe('StateManager -> General', () => { await newPartialStateManager2.addProofData(zeroAddressProof) - let zeroAccount = await newPartialStateManager2.getAccount(Address.zero()) + let zeroAccount = await newPartialStateManager2.getAccount(createZeroAddress()) assert.ok(zeroAccount === undefined) await newPartialStateManager2.setStateRoot(await stateManager.getStateRoot()) - zeroAccount = await newPartialStateManager2.getAccount(Address.zero()) + zeroAccount = await newPartialStateManager2.getAccount(createZeroAddress()) assert.ok(zeroAccount!.nonce === zeroAddressNonce) }) it.skipIf(isBrowser() === true)( 'should create a statemanager fromProof with opts preserved', async () => { - const trie = await Trie.create({ useKeyHashing: false }) + const trie = await createTrie({ useKeyHashing: false }) const sm = new DefaultStateManager({ trie }) const pk = hexToBytes('0x9f12aab647a25a81f821a5a0beec3330cd057b2346af4fb09d7a807e896701ea') const pk2 = hexToBytes('0x8724f27e2ce3714af01af3220478849db68a03c0f84edf1721d73d9a6139ad1c') - const address = Address.fromPrivateKey(pk) - const address2 = Address.fromPrivateKey(pk2) + const address = createAddressFromPrivateKey(pk) + const address2 = createAddressFromPrivateKey(pk2) const account = new Account() const account2 = new Account(undefined, 100n) await sm.putAccount(address, account) await sm.putAccount(address2, account2) - await sm.putContractStorage(address, setLengthLeft(intToBytes(0), 32), intToBytes(32)) + await 
sm.putStorage(address, setLengthLeft(intToBytes(0), 32), intToBytes(32)) const storage = await sm.dumpStorage(address) const keys = Object.keys(storage) as PrefixedHexString[] const proof = await sm.getProof( address, - keys.map((key) => hexToBytes(key)) + keys.map((key) => hexToBytes(key)), ) const proof2 = await sm.getProof(address2) - const newTrie = await Trie.createFromProof( + const newTrie = await createTrieFromProof( proof.accountProof.map((e) => hexToBytes(e)), - { useKeyHashing: false } + { useKeyHashing: false }, ) const partialSM = await DefaultStateManager.fromProof([proof, proof2], true, { trie: newTrie, @@ -317,12 +333,9 @@ describe('StateManager -> General', () => { assert.equal( partialSM['_trie']['_opts'].useKeyHashing, false, - 'trie opts are preserved in new sm' - ) - assert.deepEqual( - intToBytes(32), - await partialSM.getContractStorage(address, hexToBytes(keys[0])) + 'trie opts are preserved in new sm', ) + assert.deepEqual(intToBytes(32), await partialSM.getStorage(address, hexToBytes(keys[0]))) assert.equal((await partialSM.getAccount(address2))?.balance, 100n) const partialSM2 = await DefaultStateManager.fromProof(proof, true, { trie: newTrie, @@ -331,13 +344,10 @@ describe('StateManager -> General', () => { assert.equal( partialSM2['_trie']['_opts'].useKeyHashing, false, - 'trie opts are preserved in new sm' - ) - assert.deepEqual( - intToBytes(32), - await partialSM2.getContractStorage(address, hexToBytes(keys[0])) + 'trie opts are preserved in new sm', ) + assert.deepEqual(intToBytes(32), await partialSM2.getStorage(address, hexToBytes(keys[0]))) assert.equal((await partialSM2.getAccount(address2))?.balance, 100n) - } + }, ) }) diff --git a/packages/statemanager/test/stateManager.storage.spec.ts b/packages/statemanager/test/stateManager.storage.spec.ts index 48212138dd..f8a1fb48cc 100644 --- a/packages/statemanager/test/stateManager.storage.spec.ts +++ b/packages/statemanager/test/stateManager.storage.spec.ts @@ -2,6 +2,7 @@ import { Address, bytesToHex, concatBytes, + createZeroAddress, equalsBytes, hexToBytes, unpadBytes, @@ -10,28 +11,27 @@ import { import { keccak256 } from 'ethereum-cryptography/keccak.js' import { assert, describe, it } from 'vitest' -import { DefaultStateManager } from '../src/index.js' +import { Caches, DefaultStateManager } from '../src/index.js' -import { createAccount } from './util.js' +import { createAccountWithDefaults } from './util.js' const isBrowser = new Function('try {return this===window;}catch(e){ return false;}') describe('StateManager -> Storage', () => { - for (const storageCacheOpts of [ - { deactivate: false }, - { deactivate: true }, - { deactivate: false, size: 0 }, - ]) { + for (const storageCacheOpts of [{ size: 1000 }, { size: 0 }]) { for (const prefixStorageTrieKeys of [false, true]) { it.skipIf(isBrowser() === true)(`should dump storage`, async () => { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) - const account = createAccount() + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) const key = hexToBytes('0x1234567890123456789012345678901234567890123456789012345678901234') const value = hexToBytes('0x0a') // We used this value as its RLP encoding is also 0a - await stateManager.putContractStorage(address, key, 
value) + await stateManager.putStorage(address, key, value) const data = await stateManager.dumpStorage(address) const expect = { [bytesToHex(keccak256(key))]: '0x0a' } @@ -39,13 +39,16 @@ describe('StateManager -> Storage', () => { }) it("should validate the key's length when modifying a contract's storage", async () => { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) - const account = createAccount() + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) try { - await stateManager.putContractStorage(address, new Uint8Array(12), hexToBytes('0x1231')) + await stateManager.putStorage(address, new Uint8Array(12), hexToBytes('0x1231')) } catch (e: any) { assert.equal(e.message, 'Storage key must be 32 bytes long') return @@ -55,13 +58,16 @@ describe('StateManager -> Storage', () => { }) it("should validate the key's length when reading a contract's storage", async () => { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) const address = new Address(hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b')) - const account = createAccount() + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) try { - await stateManager.getContractStorage(address, new Uint8Array(12)) + await stateManager.getStorage(address, new Uint8Array(12)) } catch (e: any) { assert.equal(e.message, 'Storage key must be 32 bytes long') return @@ -71,15 +77,18 @@ describe('StateManager -> Storage', () => { }) it(`should throw on storage values larger than 32 bytes`, async () => { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) - const address = Address.zero() - const account = createAccount() + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) + const address = createZeroAddress() + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) const key = zeros(32) const value = hexToBytes(`0x${'aa'.repeat(33)}`) try { - await stateManager.putContractStorage(address, key, value) + await stateManager.putStorage(address, key, value) assert.fail('did not throw') } catch (e: any) { assert.ok(true, 'threw on trying to set storage values larger than 32 bytes') @@ -87,29 +96,32 @@ describe('StateManager -> Storage', () => { }) it(`should strip zeros of storage values`, async () => { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) - const address = Address.zero() - const account = createAccount() + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) + const address = createZeroAddress() + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) const key0 = zeros(32) const value0 = hexToBytes(`0x00${'aa'.repeat(30)}`) // put a value of 31-bytes length with a leading zero byte const expect0 = unpadBytes(value0) - await stateManager.putContractStorage(address, key0, value0) - const slot0 = await 
stateManager.getContractStorage(address, key0) + await stateManager.putStorage(address, key0, value0) + const slot0 = await stateManager.getStorage(address, key0) assert.ok(equalsBytes(slot0, expect0), 'value of 31 bytes padded correctly') const key1 = concatBytes(zeros(31), hexToBytes('0x01')) const value1 = hexToBytes(`0x0000${'aa'.repeat(1)}`) // put a value of 1-byte length with two leading zero bytes const expect1 = unpadBytes(value1) - await stateManager.putContractStorage(address, key1, value1) - const slot1 = await stateManager.getContractStorage(address, key1) + await stateManager.putStorage(address, key1, value1) + const slot1 = await stateManager.getStorage(address, key1) assert.ok(equalsBytes(slot1, expect1), 'value of 1 byte padded correctly') }) it(`should delete storage values which only consist of zero bytes`, async () => { - const address = Address.zero() + const address = createZeroAddress() const key = zeros(32) const startValue = hexToBytes('0x01') @@ -117,37 +129,43 @@ describe('StateManager -> Storage', () => { const zeroLengths = [0, 1, 31, 32] // checks for arbitrary-length zeros for (const length of zeroLengths) { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) - const account = createAccount() + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) const value = zeros(length) - await stateManager.putContractStorage(address, key, startValue) - const currentValue = await stateManager.getContractStorage(address, key) + await stateManager.putStorage(address, key, startValue) + const currentValue = await stateManager.getStorage(address, key) if (!equalsBytes(currentValue, startValue)) { // sanity check assert.fail('contract value not set correctly') } else { // delete the value - await stateManager.putContractStorage(address, key, value) - const deleted = await stateManager.getContractStorage(address, key) + await stateManager.putStorage(address, key, value) + const deleted = await stateManager.getStorage(address, key) assert.ok(equalsBytes(deleted, zeros(0)), 'the storage key should be deleted') } } }) it(`should not strip trailing zeros`, async () => { - const stateManager = new DefaultStateManager({ prefixStorageTrieKeys, storageCacheOpts }) - const address = Address.zero() - const account = createAccount() + const stateManager = new DefaultStateManager({ + prefixStorageTrieKeys, + caches: new Caches({ storage: storageCacheOpts }), + }) + const address = createZeroAddress() + const account = createAccountWithDefaults() await stateManager.putAccount(address, account) const key = zeros(32) const value = hexToBytes('0x0000aabb00') const expect = hexToBytes('0xaabb00') - await stateManager.putContractStorage(address, key, value) - const contractValue = await stateManager.getContractStorage(address, key) + await stateManager.putStorage(address, key, value) + const contractValue = await stateManager.getStorage(address, key) assert.ok(equalsBytes(contractValue, expect), 'trailing zeros are not stripped') }) } diff --git a/packages/statemanager/test/statelessVerkleStateManager.spec.ts b/packages/statemanager/test/statelessVerkleStateManager.spec.ts index 7e4ca0f915..6fc153efae 100644 --- a/packages/statemanager/test/statelessVerkleStateManager.spec.ts +++ b/packages/statemanager/test/statelessVerkleStateManager.spec.ts @@ -1,12 +1,13 @@ -import { 
createBlockFromBlockData } from '@ethereumjs/block' +import { createBlock } from '@ethereumjs/block' import { createCommonFromGethGenesis } from '@ethereumjs/common' -import { TransactionFactory } from '@ethereumjs/tx' +import { createTxFromSerializedData } from '@ethereumjs/tx' import { - Account, Address, VerkleLeafType, bytesToBigInt, bytesToHex, + createAccount, + createAddressFromString, getVerkleKey, getVerkleStem, hexToBytes, @@ -15,7 +16,7 @@ import { import { loadVerkleCrypto } from 'verkle-cryptography-wasm' import { assert, beforeAll, describe, it, test } from 'vitest' -import { CacheType, StatelessVerkleStateManager } from '../src/index.js' +import { CacheType, Caches, StatelessVerkleStateManager } from '../src/index.js' import * as testnetVerkleKaustinen from './testdata/testnetVerkleKaustinen.json' import * as verkleBlockJSON from './testdata/verkleKaustinen6Block72.json' @@ -28,19 +29,17 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { beforeAll(async () => { verkleCrypto = await loadVerkleCrypto() }) - const common = createCommonFromGethGenesis(testnetVerkleKaustinen, { + const common = createCommonFromGethGenesis(testnetVerkleKaustinen.default, { chain: 'customChain', eips: [2935, 4895, 6800], }) - const decodedTxs = verkleBlockJSON.transactions.map((tx) => - TransactionFactory.fromSerializedData(hexToBytes(tx as PrefixedHexString)) - ) - const block = createBlockFromBlockData( - { ...verkleBlockJSON, transactions: decodedTxs } as BlockData, - { - common, - } + + const decodedTxs = verkleBlockJSON.default.transactions.map((tx) => + createTxFromSerializedData(hexToBytes(tx as PrefixedHexString), { common }), ) + const block = createBlock({ ...verkleBlockJSON, transactions: decodedTxs } as BlockData, { + common, + }) it('initPreState()', async () => { const stateManager = new StatelessVerkleStateManager({ verkleCrypto }) @@ -54,7 +53,7 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { stateManager.initVerkleExecutionWitness(block.header.number, block.executionWitness) const account = await stateManager.getAccount( - Address.fromString('0x6177843db3138ae69679a54b95cf345ed759450d') + createAddressFromString('0x6177843db3138ae69679a54b95cf345ed759450d'), ) assert.equal(account!.balance, 288610978528114322n, 'should have correct balance') @@ -63,12 +62,16 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( bytesToHex(account!.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) it('put/delete/modify account', async () => { - const stateManager = new StatelessVerkleStateManager({ common, verkleCrypto }) + const stateManager = new StatelessVerkleStateManager({ + common, + caches: new Caches(), + verkleCrypto, + }) stateManager.initVerkleExecutionWitness(block.header.number, block.executionWitness) const address = new Address(randomBytes(20)) @@ -80,11 +83,11 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( e.message.slice(0, 25), 'No witness bundled for ad', - 'should throw on getting account that does not exist in cache and witness' + 'should throw on getting account that does not exist in cache and witness', ) } - const account = Account.fromAccountData({ + const account = createAccount({ nonce: BigInt(2), }) @@ -92,7 +95,7 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.deepEqual( await stateManager.getAccount(address), 
account, - 'should return correct account' + 'should return correct account', ) await stateManager.modifyAccountFields(address, { @@ -102,14 +105,14 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.deepEqual( await stateManager.getAccount(address), account, - 'should return correct account' + 'should return correct account', ) await stateManager.deleteAccount(address) assert.isUndefined( await stateManager.getAccount(address), - 'should return undefined for deleted account' + 'should return undefined for deleted account', ) }) @@ -117,7 +120,7 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { const stateManager = new StatelessVerkleStateManager({ common, verkleCrypto }) stateManager.initVerkleExecutionWitness(block.header.number, block.executionWitness) - const address = Address.fromString('0x6177843db3138ae69679a54b95cf345ed759450d') + const address = createAddressFromString('0x6177843db3138ae69679a54b95cf345ed759450d') const stem = getVerkleStem(stateManager.verkleCrypto, address, 0n) const balanceKey = getVerkleKey(stem, VerkleLeafType.Balance) @@ -133,40 +136,42 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { assert.equal( account!.balance, bytesToBigInt(hexToBytes(balanceRaw!), true), - 'should have correct balance' + 'should have correct balance', ) assert.equal( account!.nonce, bytesToBigInt(hexToBytes(nonceRaw!), true), - 'should have correct nonce' + 'should have correct nonce', ) assert.equal(bytesToHex(account!.codeHash), codeHash, 'should have correct codeHash') }) it(`copy()`, async () => { const stateManager = new StatelessVerkleStateManager({ - accountCacheOpts: { - type: CacheType.ORDERED_MAP, - }, - storageCacheOpts: { - type: CacheType.ORDERED_MAP, - }, + caches: new Caches({ + account: { + type: CacheType.ORDERED_MAP, + }, + storage: { + type: CacheType.ORDERED_MAP, + }, + }), common, verkleCrypto, }) stateManager.initVerkleExecutionWitness(block.header.number, block.executionWitness) - const stateManagerCopy = stateManager.shallowCopy() as StatelessVerkleStateManager + const stateManagerCopy = stateManager.shallowCopy() assert.equal( - (stateManagerCopy as any)['_accountCacheSettings'].type, + stateManagerCopy['_caches']?.settings.account.type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP account cache on copy()' + 'should switch to ORDERED_MAP account cache on copy()', ) assert.equal( - (stateManagerCopy as any)['_storageCacheSettings'].type, + stateManagerCopy['_caches']?.settings.storage.type, CacheType.ORDERED_MAP, - 'should switch to ORDERED_MAP storage cache on copy()' + 'should switch to ORDERED_MAP storage cache on copy()', ) }) @@ -175,23 +180,16 @@ describe('StatelessVerkleStateManager: Kaustinen Verkle Block', () => { const stateManager = new StatelessVerkleStateManager({ common, verkleCrypto }) stateManager.initVerkleExecutionWitness(block.header.number, block.executionWitness) - const contractAddress = Address.fromString('0x4242424242424242424242424242424242424242') + const contractAddress = createAddressFromString('0x4242424242424242424242424242424242424242') const storageKey = '0x0000000000000000000000000000000000000000000000000000000000000022' const storageValue = '0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b' - await stateManager.putContractStorage( - contractAddress, - hexToBytes(storageKey), - hexToBytes(storageValue) - ) - let contractStorage = await stateManager.getContractStorage( - contractAddress, - hexToBytes(storageKey) - ) + 
await stateManager.putStorage(contractAddress, hexToBytes(storageKey), hexToBytes(storageValue)) + let contractStorage = await stateManager.getStorage(contractAddress, hexToBytes(storageKey)) assert.equal(bytesToHex(contractStorage), storageValue) - await stateManager.clearContractStorage(contractAddress) - contractStorage = await stateManager.getContractStorage(contractAddress, hexToBytes(storageKey)) + await stateManager.clearStorage(contractAddress) + contractStorage = await stateManager.getStorage(contractAddress, hexToBytes(storageKey)) assert.equal(bytesToHex(contractStorage), bytesToHex(new Uint8Array())) }) diff --git a/packages/statemanager/test/testdata/providerData/mockProvider.ts b/packages/statemanager/test/testdata/providerData/mockProvider.ts index 4e56e4d128..c984b0835e 100644 --- a/packages/statemanager/test/testdata/providerData/mockProvider.ts +++ b/packages/statemanager/test/testdata/providerData/mockProvider.ts @@ -17,7 +17,7 @@ export type JsonReturnType = { export const getValues = async ( method: Method, id: number, - params: any[] + params: any[], ): Promise => { switch (method) { case 'eth_getProof': diff --git a/packages/statemanager/test/util.ts b/packages/statemanager/test/util.ts index 8683e08dc6..708c7e2b81 100644 --- a/packages/statemanager/test/util.ts +++ b/packages/statemanager/test/util.ts @@ -1,5 +1,5 @@ import { Account } from '@ethereumjs/util' -export function createAccount(nonce = BigInt(0), balance = BigInt(0xfff384)) { +export function createAccountWithDefaults(nonce = BigInt(0), balance = BigInt(0xfff384)) { return new Account(nonce, balance) } diff --git a/packages/statemanager/test/vmState.spec.ts b/packages/statemanager/test/vmState.spec.ts index 79b73acca5..698a3196cc 100644 --- a/packages/statemanager/test/vmState.spec.ts +++ b/packages/statemanager/test/vmState.spec.ts @@ -20,7 +20,7 @@ describe('stateManager', () => { return } const expectedStateRoot = hexToBytes( - '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544' + '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544', ) const stateManager = new StateManager({}) @@ -30,7 +30,7 @@ describe('stateManager', () => { assert.deepEqual( stateRoot, expectedStateRoot, - `generateCanonicalGenesis should produce correct state root for mainnet from common` + `generateCanonicalGenesis should produce correct state root for mainnet from common`, ) }) @@ -55,7 +55,7 @@ describe('stateManager', () => { assert.deepEqual( stateRoot, expectedStateRoot, - `generateCanonicalGenesis should produce correct state root for ${Chain[chain]}` + `generateCanonicalGenesis should produce correct state root for ${Chain[chain]}`, ) } }) @@ -73,7 +73,7 @@ describe('Original storage cache', async () => { it(`should initially have empty storage value`, async () => { await stateManager.checkpoint() - const res = await stateManager.getContractStorage(address, key) + const res = await stateManager.getStorage(address, key) assert.deepEqual(res, new Uint8Array(0)) const origRes = await stateManager.originalStorageCache.get(address, key) @@ -83,8 +83,8 @@ describe('Original storage cache', async () => { }) it(`should set original storage value`, async () => { - await stateManager.putContractStorage(address, key, value) - const res = await stateManager.getContractStorage(address, key) + await stateManager.putStorage(address, key, value) + const res = await stateManager.getStorage(address, key) assert.deepEqual(res, value) }) @@ -95,8 +95,8 @@ describe('Original storage cache', async () => { 
it(`should return correct original value after modification`, async () => { const newValue = hexToBytes('0x1235') - await stateManager.putContractStorage(address, key, newValue) - const res = await stateManager.getContractStorage(address, key) + await stateManager.putStorage(address, key, newValue) + const res = await stateManager.getStorage(address, key) assert.deepEqual(res, newValue) const origRes = await stateManager.originalStorageCache.get(address, key) @@ -107,22 +107,22 @@ describe('Original storage cache', async () => { const key2 = hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000012') const value2 = utf8ToBytes('12') const value3 = utf8ToBytes('123') - await stateManager.putContractStorage(address, key2, value2) + await stateManager.putStorage(address, key2, value2) - let res = await stateManager.getContractStorage(address, key2) + let res = await stateManager.getStorage(address, key2) assert.deepEqual(res, value2) let origRes = await stateManager.originalStorageCache.get(address, key2) assert.deepEqual(origRes, value2) - await stateManager.putContractStorage(address, key2, value3) + await stateManager.putStorage(address, key2, value3) - res = await stateManager.getContractStorage(address, key2) + res = await stateManager.getStorage(address, key2) assert.deepEqual(res, value3) origRes = await stateManager.originalStorageCache.get(address, key2) assert.deepEqual(origRes, value2) // Check previous key - res = await stateManager.getContractStorage(address, key) + res = await stateManager.getStorage(address, key) assert.deepEqual(res, hexToBytes('0x1235')) origRes = await stateManager.originalStorageCache.get(address, key) assert.deepEqual(origRes, value) diff --git a/packages/statemanager/tsconfig.lint.json b/packages/statemanager/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/statemanager/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/trie/.eslintrc.cjs b/packages/trie/.eslintrc.cjs index 884b3d6ebe..4a13163a7d 100644 --- a/packages/trie/.eslintrc.cjs +++ b/packages/trie/.eslintrc.cjs @@ -1,11 +1,11 @@ module.exports = { extends: '../../config/eslint.cjs', parserOptions: { - project: ['./tsconfig.json', './tsconfig.benchmarks.json'], + project: ['./tsconfig.lint.json'], }, overrides: [ { - files: ['benchmarks/*.ts'], + files: ['benchmarks/*.ts', 'examples/**/*'], rules: { 'no-console': 'off', }, diff --git a/packages/trie/CHANGELOG.md b/packages/trie/CHANGELOG.md index f6d0ea01c0..322e3f3e0d 100644 --- a/packages/trie/CHANGELOG.md +++ b/packages/trie/CHANGELOG.md @@ -6,7 +6,17 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 6.2.0 - 2024-03-05 +## 6.2.1 - 2024-08-15 + +### Other Features + +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Bugfixes + +- Fixes an issue in the delete operation used for unhashed tries and pruning activated which resulted in a wrong state root (bad!), PR [#3333](https://github.com/ethereumjs/ethereumjs-monorepo/issues/3333) + +## 6.2.0 - 2024-03-18 In the hope that you do not have yet integrated: we needed to remove the new more modern async trie iteration with web streams functionality (new `createAsyncReadStream()` method) introduced with the `v6.1.0` release - see PR [#3231](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3231) for context - since the related Node.js web streams API import caused relatively severe problems for all upstream libraries when being used in the browser. @@ -76,7 +86,7 @@ See [Debugging](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/pa - New parameter `skipKeyTransform` (default: `false`) for Trie `put()`, `del()` and `batch()` method to allow to pass in already hashed keys, PR [#2950](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2950) - New `keyPrefix` option tries to store node keys with a static prefix (used upstream in the `statemanager` package to speed to storage trie reads), PR [#3023](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3023) -- Peformance: `findPath()` optimizations, PR [#3066](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3066) +- Performance: `findPath()` optimizations, PR [#3066](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3066) - Make `null` available as type option for `put()` method value, PR [#3020](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3020) - Allow partial trie options for `shallowCopy()` (e.g. for a more flexible cache configuration for the trie copy), PR [#3063](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3063) - Use `lock` class from `@ethereumjs/util`, PR [#3109](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3109) @@ -121,7 +131,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. 
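A minimal sketch of what this `Buffer` → `Uint8Array` switch looks like for basic trie usage, mirroring the `utf8ToBytes`/`bytesToUtf8` conversions applied to the `examples/level.js` and `examples/basicUsage.ts` files further down in this patch; the commented lines show the old `Buffer`-based calls:

```ts
import { Trie } from '@ethereumjs/trie'
import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util'

async function main() {
  const trie = new Trie()

  // before: await trie.put(Buffer.from('test'), Buffer.from('one'))
  await trie.put(utf8ToBytes('test'), utf8ToBytes('one'))

  // before: const value = await trie.get(Buffer.from('test')); console.log(value.toString())
  const value = await trie.get(utf8ToBytes('test'))
  console.log(value ? bytesToUtf8(value) : 'not found') // 'one'
}

void main()
```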
Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -361,11 +371,11 @@ See our [Upgrade Guide](https://github.com/ethereumjs/ethereumjs-monorepo/blob/m Beta 3 release for the upcoming breaking release round on the [EthereumJS monorepo](https://github.com/ethereumjs/ethereumjs-monorepo) libraries, see the Beta 1 release notes for the main long change set description as well as the Beta 2 release notes for notes on some additional changes ([CHANGELOG](https://github.com/ethereumjs/ethereumjs-monorepo/blob/master/packages/devp2p/CHANGELOG.md)). -### Root Hash Persistance +### Root Hash Persistence The trie library now comes with a new constructor option `useRootPersistence` (note that the option has been called `persistRoot` up to Beta 3) which is disabled by default but allows to persist state root updates along write operations directly in the DB and therefore omits the need to manually set to a new state root, see PR [#2071](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2071) and PR [#2123](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2123), thanks to @faustbrian for the contribution! ❤️ -To activate root hash persistance you can set the `useRootPersistence` option on instantiation: +To activate root hash persistence you can set the `useRootPersistence` option on instantiation: ```ts import { Trie, LevelDB } from '@ethereumjs/trie' @@ -387,13 +397,13 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). Now every import is a named import and we think the long term benefits will very much outweigh the one-time hassle of some import adoptions. -So if you use the Trie library together with other EthereumJS libraries check if the respetive imports need an update. +So if you use the Trie library together with other EthereumJS libraries check if the respective imports need an update. ## Custom Hash Function @@ -486,7 +496,7 @@ Because of the upgrade, any `level` implementation compliant with the `abstract- ### API Changes -Options for the Trie constructor are now also taken in as an options dict like in the other EthereumJS libaries. This makes it easier to add additional options in the future, see PR [#1874](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1874). +Options for the Trie constructor are now also taken in as an options dict like in the other EthereumJS libraries. 
This makes it easier to add additional options in the future, see PR [#1874](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1874). Check your Trie instantiations and see if you use constructor options. In this case you need to update to the new format: diff --git a/packages/trie/benchmarks/suite.ts b/packages/trie/benchmarks/suite.ts index bcb68e9f4a..5b679ce116 100644 --- a/packages/trie/benchmarks/suite.ts +++ b/packages/trie/benchmarks/suite.ts @@ -1,6 +1,6 @@ import { keccak256 } from 'ethereum-cryptography/keccak.js' // @ts-ignore - package has no types... -import { run, mark, logMem } from 'micro-bmark' +import { run, mark, logMem } from 'micro-bmark' // cspell:disable-line import { Trie } from '../dist/cjs/index.js' import { keys } from './keys' diff --git a/packages/trie/examples/basicUsage.ts b/packages/trie/examples/basicUsage.ts index 9042c8b5cb..5a008b3b8b 100644 --- a/packages/trie/examples/basicUsage.ts +++ b/packages/trie/examples/basicUsage.ts @@ -1,11 +1,11 @@ -import { Trie } from '@ethereumjs/trie' -import { bytesToUtf8, MapDB, utf8ToBytes } from '@ethereumjs/util' +import { createTrie } from '@ethereumjs/trie' +import { MapDB, bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' async function test() { - const trie = await Trie.create({ db: new MapDB() }) + const trie = await createTrie({ db: new MapDB() }) await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) const value = await trie.get(utf8ToBytes('test')) console.log(value ? bytesToUtf8(value) : 'not found') // 'one' } -test() +void test() diff --git a/packages/trie/examples/createFromProof.ts b/packages/trie/examples/createFromProof.ts index 7b41410c42..1b8ac8e5fa 100644 --- a/packages/trie/examples/createFromProof.ts +++ b/packages/trie/examples/createFromProof.ts @@ -1,6 +1,10 @@ -import { Trie } from '@ethereumjs/trie' -import { bytesToUtf8 } from '@ethereumjs/util' -import { utf8ToBytes } from '@ethereumjs/util' +import { + Trie, + createMerkleProof, + createTrieFromProof, + updateTrieFromMerkleProof, +} from '@ethereumjs/trie' +import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' async function main() { const k1 = utf8ToBytes('keyOne') @@ -10,12 +14,12 @@ async function main() { await someOtherTrie.put(k1, utf8ToBytes('valueOne')) await someOtherTrie.put(k2, utf8ToBytes('valueTwo')) - const proof = await someOtherTrie.createProof(k1) - const trie = await Trie.createFromProof(proof, { useKeyHashing: true }) - const otherProof = await someOtherTrie.createProof(k2) + const proof = await createMerkleProof(someOtherTrie, k1) + const trie = await createTrieFromProof(proof, { useKeyHashing: true }) + const otherProof = await createMerkleProof(someOtherTrie, k2) - // To add more proofs to the trie, use `updateFromProof` - await trie.updateFromProof(otherProof) + // To add more proofs to the trie, use `updateTrieFromMerkleProof` + await updateTrieFromMerkleProof(trie, otherProof) const value = await trie.get(k1) console.log(bytesToUtf8(value!)) // valueOne @@ -23,4 +27,4 @@ async function main() { console.log(bytesToUtf8(otherValue!)) // valueTwo } -main() +void main() diff --git a/packages/trie/examples/customLevelDB.ts b/packages/trie/examples/customLevelDB.ts index a4ca04b502..0ed657cb71 100644 --- a/packages/trie/examples/customLevelDB.ts +++ b/packages/trie/examples/customLevelDB.ts @@ -1,7 +1,6 @@ import { Trie } from '@ethereumjs/trie' -import { Level } from 'level' - import { KeyEncoding, ValueEncoding } from '@ethereumjs/util' +import { Level } from 'level' import { MemoryLevel } from 
'memory-level' import type { BatchDBOp, DB, DBObject, EncodingOpts } from '@ethereumjs/util' @@ -44,7 +43,7 @@ const getEncodings = (opts: EncodingOpts = {}) => { */ export class LevelDB< TKey extends Uint8Array | string = Uint8Array | string, - TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject + TValue extends Uint8Array | string | DBObject = Uint8Array | string | DBObject, > implements DB { _leveldb: AbstractLevel @@ -55,7 +54,7 @@ export class LevelDB< * @param leveldb - An abstract-leveldown compliant store */ constructor( - leveldb?: AbstractLevel + leveldb?: AbstractLevel, ) { this._leveldb = leveldb ?? new MemoryLevel() } @@ -126,6 +125,6 @@ export class LevelDB< async function main() { const trie = new Trie({ db: new LevelDB(new Level('MY_TRIE_DB_LOCATION') as any) }) - console.log(await trie.database().db) // LevelDB { ... + console.log(trie.database().db) // LevelDB { ... } -main() +void main() diff --git a/packages/trie/examples/level-legacy.js b/packages/trie/examples/level-legacy.js index 5f2dfeffd9..196a93cb4e 100644 --- a/packages/trie/examples/level-legacy.js +++ b/packages/trie/examples/level-legacy.js @@ -1,9 +1,10 @@ // LevelDB from https://github.com/ethereumjs/ethereumjs-monorepo/blob/ac053e1f9a364f8ae489159fecb79a3d0ddd7053/packages/trie/src/db.ts // eslint-disable-next-line implicit-dependencies/no-implicit +const { utf8ToBytes, bytesToUtf8 } = require('ethereum-cryptography/utils') const level = require('level-mem') -const { Trie } = require('../dist') +const { Trie } = require('../../dist/cjs/index.js') const ENCODING_OPTS = { keyEncoding: 'binary', valueEncoding: 'binary' } @@ -19,7 +20,7 @@ class LevelDB { try { value = await this._leveldb.get(key, ENCODING_OPTS) } catch (error) { - if (error.notFound) { + if (error.notFound !== undefined) { // not found, returning null } else { throw error @@ -48,9 +49,9 @@ class LevelDB { const trie = new Trie({ db: new LevelDB(level('MY_TRIE_DB_LOCATION')) }) async function test() { - await trie.put(Buffer.from('test'), Buffer.from('one')) - const value = await trie.get(Buffer.from('test')) - console.log(value.toString()) // 'one' + await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) + const value = await trie.get(utf8ToBytes('test')) + console.log(bytesToUtf8(value)) // 'one' } -test() +void test() diff --git a/packages/trie/examples/level.js b/packages/trie/examples/level.js index 43edb67780..b67f57f491 100644 --- a/packages/trie/examples/level.js +++ b/packages/trie/examples/level.js @@ -1,7 +1,8 @@ +const { utf8ToBytes, bytesToUtf8 } = require('ethereum-cryptography/utils') const { Level } = require('level') const { MemoryLevel } = require('memory-level') -const { Trie } = require('../dist') +const { Trie } = require('../../dist/cjs/index.js') const ENCODING_OPTS = { keyEncoding: 'view', valueEncoding: 'view' } @@ -46,9 +47,9 @@ class LevelDB { const trie = new Trie({ db: new LevelDB(new Level('MY_TRIE_DB_LOCATION')) }) async function test() { - await trie.put(Buffer.from('test'), Buffer.from('one')) - const value = await trie.get(Buffer.from('test')) - console.log(value.toString()) // 'one' + await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) + const value = await trie.get(utf8ToBytes('test')) + console.log(bytesToUtf8(value)) // 'one' } -test() +void test() diff --git a/packages/trie/examples/lmdb.js b/packages/trie/examples/lmdb.js index dd5bee3438..73da2d99ed 100644 --- a/packages/trie/examples/lmdb.js +++ b/packages/trie/examples/lmdb.js @@ -1,6 +1,7 @@ +const { utf8ToBytes, 
bytesToUtf8 } = require('ethereum-cryptography/utils') const { open } = require('lmdb') -const { Trie } = require('../dist') +const { Trie } = require('../../dist/cjs/index.js') class LMDB { constructor(path) { @@ -44,9 +45,9 @@ class LMDB { const trie = new Trie({ db: new LMDB('MY_TRIE_DB_LOCATION') }) async function test() { - await trie.put(Buffer.from('test'), Buffer.from('one')) - const value = await trie.get(Buffer.from('test')) - console.log(value.toString()) // 'one' + await trie.put(utf8ToBytes('test'), utf8ToBytes('one')) + const value = await trie.get(utf8ToBytes('test')) + console.log(bytesToUtf8(value)) // 'one' } -test() +void test() diff --git a/packages/trie/examples/logDemo.ts b/packages/trie/examples/logDemo.ts index f3b77fe7a5..2aa5bdee21 100644 --- a/packages/trie/examples/logDemo.ts +++ b/packages/trie/examples/logDemo.ts @@ -1,12 +1,12 @@ /** - * Run with DEBUG=ethjs,trie:* to see debug log ouput + * Run with DEBUG=ethjs,trie:* to see debug log output */ +import { Trie, createMerkleProof, verifyMerkleProof } from '@ethereumjs/trie' import { utf8ToBytes } from '@ethereumjs/util' -import { Trie } from '@ethereumjs/trie' const trie_entries: [string, string | null][] = [ ['do', 'verb'], - ['ether', 'wookiedoo'], + ['ether', 'wookiedoo'], // cspell:disable-line ['horse', 'stallion'], ['shaman', 'horse'], ['doge', 'coin'], @@ -22,9 +22,9 @@ const main = async () => { for (const [key, value] of trie_entries) { await trie.put(utf8ToBytes(key), value === null ? Uint8Array.from([]) : utf8ToBytes(value)) } - const proof = await trie.createProof(utf8ToBytes('doge')) - const valid = await trie.verifyProof(trie.root(), utf8ToBytes('doge'), proof) + const proof = await createMerkleProof(trie, utf8ToBytes('doge')) + const valid = await verifyMerkleProof(trie, trie.root(), utf8ToBytes('doge'), proof) console.log('valid', valid) } -main() +void main() diff --git a/packages/trie/examples/merkle_patricia_trees/README.md b/packages/trie/examples/merkle_patricia_trees/README.md index b41bf52ba1..ff801ce3d6 100644 --- a/packages/trie/examples/merkle_patricia_trees/README.md +++ b/packages/trie/examples/merkle_patricia_trees/README.md @@ -662,7 +662,7 @@ Now let's suppose that I'm provided with various pieces of information, some tha Now, I'm getting conflicting information from two other shady sources: - Marie claims that: _That branch node contains another branch, representing key-value pair "testKey000A": "testValueA"_ -- John claims that: _That branch node contains another branch, representing key-value pairs "testKey000z": "testValuez"._ +- John claims that: _That branch node contains another branch, representing key-value pairs "testKey000z": "testValues"._ How can I determine who's telling the truth? Simple: by computing the branch node hash for each possibility, and comparing them with our trusted branch node hash! @@ -673,7 +673,7 @@ How can I determine who's telling the truth? 
Simple: by computing the branch nod await trie2.put(utf8ToBytes('testKey'), utf8ToBytes('testValue')) await trie2.put(utf8ToBytes('testKey0001'), utf8ToBytes('testValue1')) - await trie2.put(utf8ToBytes('testKey000z'), utf8ToBytes('testValuez')) + await trie2.put(utf8ToBytes('testKey000z'), utf8ToBytes('testValues')) const temp1 = await trie1.findPath(utf8ToBytes('testKey')) const temp2 = await trie2.findPath(utf8ToBytes('testKey')) diff --git a/packages/trie/examples/merkle_patricia_trees/example1a.js b/packages/trie/examples/merkle_patricia_trees/example1a.js index 4b70ee7160..4ee751e9fc 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1a.js +++ b/packages/trie/examples/merkle_patricia_trees/example1a.js @@ -1,8 +1,9 @@ /* Example 1a - Creating and Updating a Base Trie*/ -const { Trie } = require('../../dist/cjs') // We import the library required to create a basic Merkle Patricia Tree const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') // We import the library required to create a basic Merkle Patricia Tree + const trie = new Trie() // We create an empty Merkle Patricia Tree console.log('Empty trie root (Bytes): ', bytesToHex(trie.root())) // The trie root (32 bytes) @@ -16,7 +17,7 @@ async function test() { console.log('Updated trie root:', bytesToHex(trie.root())) // The new trie root (32 bytes) } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example1b.js b/packages/trie/examples/merkle_patricia_trees/example1b.js index a7c690b832..fb320edc87 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1b.js +++ b/packages/trie/examples/merkle_patricia_trees/example1b.js @@ -1,9 +1,10 @@ /* Example 1b - Manually Creating and Updating a Secure Trie*/ -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') const { keccak256 } = require('ethereum-cryptography/keccak') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() console.log('Empty trie root (Bytes): ', bytesToHex(trie.root())) // The trie root (32 bytes) @@ -15,7 +16,7 @@ async function test() { console.log('Updated trie root:', bytesToHex(trie.root())) // The new trie root (32 bytes) } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example1c.js b/packages/trie/examples/merkle_patricia_trees/example1c.js index 64f2c02e54..01c175cfbc 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1c.js +++ b/packages/trie/examples/merkle_patricia_trees/example1c.js @@ -1,8 +1,8 @@ /* Example 1c - Creating an empty Merkle Patricia Tree and updating it with a single key-value pair */ - -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie({ useKeyHashing: true }) // We create an empty Merkle Patricia Tree with key hashing enabled console.log('Empty trie root (Bytes): ', bytesToHex(trie.root())) // The trie root (32 bytes) @@ -14,7 +14,7 @@ async function test() { console.log('Updated trie root:', bytesToHex(trie.root())) // The new trie root (32 bytes) } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example1d.js b/packages/trie/examples/merkle_patricia_trees/example1d.js index d205a6f183..892aef301e 100644 --- a/packages/trie/examples/merkle_patricia_trees/example1d.js +++ 
b/packages/trie/examples/merkle_patricia_trees/example1d.js @@ -1,8 +1,9 @@ /* Example 1d - Deleting a Key-Value Pair from a Trie*/ -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() console.log('Empty trie root: ', bytesToHex(trie.root())) // The trie root @@ -20,7 +21,7 @@ async function test() { console.log('Trie root after deletion:', bytesToHex(trie.root())) // Our trie root is back to its initial value } -test() +void test() /* Results: diff --git a/packages/trie/examples/merkle_patricia_trees/example2a.js b/packages/trie/examples/merkle_patricia_trees/example2a.js index 5ce8c3f514..d83537fba9 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2a.js +++ b/packages/trie/examples/merkle_patricia_trees/example2a.js @@ -1,8 +1,9 @@ // Example 2a - Creating and looking up a null node -const { Trie } = require('../../dist/cjs') const { utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -10,7 +11,7 @@ async function test() { console.log('Node 1: ', node1.node) // null } -test() +void test() /* Result: diff --git a/packages/trie/examples/merkle_patricia_trees/example2b.js b/packages/trie/examples/merkle_patricia_trees/example2b.js index e607b94e0a..1906d0ab48 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2b.js +++ b/packages/trie/examples/merkle_patricia_trees/example2b.js @@ -1,8 +1,9 @@ // Example 2b - Creating and looking up a branch node -const { Trie } = require('../../dist/cjs') const { bytesToHex, bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -31,13 +32,13 @@ async function test() { 'Node 1 branch 3 (hex): path: ', bytesToHex(node1.node._branches[3][0]), ' | value: ', - bytesToHex(node1.node._branches[3][1]) + bytesToHex(node1.node._branches[3][1]), ) console.log( 'Node 1 branch 4 (hex): path: ', bytesToHex(node1.node._branches[4][0]), ' | value:', - bytesToHex(node1.node._branches[4][1]) + bytesToHex(node1.node._branches[4][1]), ) console.log('Value of branch at index 3: ', bytesToUtf8(node1.node._branches[3][1])) @@ -47,4 +48,4 @@ async function test() { console.log('Node 2: ', node2.node) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example2c.js b/packages/trie/examples/merkle_patricia_trees/example2c.js index 3259817050..cb4f9381f6 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2c.js +++ b/packages/trie/examples/merkle_patricia_trees/example2c.js @@ -1,8 +1,9 @@ // Example 2c - Creating and looking up a leaf node -const { Trie } = require('../../dist/cjs') const { bytesToUtf8, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -14,4 +15,4 @@ async function test() { console.log('Node 1 value: ', bytesToUtf8(node1.node._value)) // The leaf node's value } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example2d.js b/packages/trie/examples/merkle_patricia_trees/example2d.js index ac9933ec3a..6ecaabeec7 100644 --- a/packages/trie/examples/merkle_patricia_trees/example2d.js +++ b/packages/trie/examples/merkle_patricia_trees/example2d.js @@ -1,8 +1,9 @@ // Example 2d - Creating and looking up an extension node 
-const { Trie } = require('../../dist/cjs') const { bytesToHex, utf8ToBytes } = require('@ethereumjs/util') +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -25,4 +26,4 @@ async function test() { console.log(node3) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example3a.js b/packages/trie/examples/merkle_patricia_trees/example3a.js index ea4a8b1747..688fface41 100644 --- a/packages/trie/examples/merkle_patricia_trees/example3a.js +++ b/packages/trie/examples/merkle_patricia_trees/example3a.js @@ -1,9 +1,11 @@ // Example 3a - Generating a hash -const { Trie } = require('../../dist/cjs') const rlp = require('@ethereumjs/rlp') const { bytesToHex, utf8ToBytes } = require('@ethereumjs/util') const { keccak256 } = require('ethereum-cryptography/keccak') + +const { Trie } = require('../../dist/cjs/index.js') + const trie = new Trie() async function test() { @@ -24,7 +26,7 @@ async function test() { 'path: ', bytesToHex(node3._branches[4][0]), ' | value: ', - bytesToHex(node3._branches[4][1]) + bytesToHex(node3._branches[4][1]), ) console.log('Raw node:', bytesToHex(rlp.encode(node2.raw()))) @@ -32,4 +34,4 @@ async function test() { console.log('The extension node hash: ', bytesToHex(node1.node._branches[3])) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example3b.js b/packages/trie/examples/merkle_patricia_trees/example3b.js index bbb2605cd8..6fa833ba58 100644 --- a/packages/trie/examples/merkle_patricia_trees/example3b.js +++ b/packages/trie/examples/merkle_patricia_trees/example3b.js @@ -1,7 +1,9 @@ // Example 3b - Verification using a hash -const { Trie } = require('../../dist/cjs') const { bytesToHex, utf8ToBytes } = require('@ethereumjs/util') + +const { Trie } = require('../../dist/cjs/index.js') + const trie1 = new Trie() const trie2 = new Trie() @@ -27,4 +29,4 @@ async function test() { console.log('Root of trie 2: ', bytesToHex(trie2.root())) } -test() +void test() diff --git a/packages/trie/examples/merkle_patricia_trees/example4a.js b/packages/trie/examples/merkle_patricia_trees/example4a.js index 3d940e470f..c649c661d3 100644 --- a/packages/trie/examples/merkle_patricia_trees/example4a.js +++ b/packages/trie/examples/merkle_patricia_trees/example4a.js @@ -1,8 +1,9 @@ // Example 4a - Retrieving a Transaction from the Ethereum Blockchain -const INFURA_ENDPOINT = require('./infura_endpoint') const https = require('https') +const INFURA_ENDPOINT = require('./infura_endpoint.js') + // Looking up an individual transaction function lookupTransaction(transactionHash) { const data = JSON.stringify({ diff --git a/packages/trie/examples/merkle_patricia_trees/example4b.js b/packages/trie/examples/merkle_patricia_trees/example4b.js index d3fb8574db..a8e0fe3b2a 100644 --- a/packages/trie/examples/merkle_patricia_trees/example4b.js +++ b/packages/trie/examples/merkle_patricia_trees/example4b.js @@ -3,9 +3,10 @@ const rlp = require('@ethereumjs/rlp') const { bytesToHex } = require('@ethereumjs/util') const { keccak256 } = require('ethereum-cryptography/keccak') -const INFURA_ENDPOINT = require('./infura_endpoint') const https = require('https') +const INFURA_ENDPOINT = require('./infura_endpoint.js') + function recomputeTransactionHash(transactionHash) { const data = JSON.stringify({ jsonrpc: '2.0', diff --git a/packages/trie/examples/proofs.ts b/packages/trie/examples/proofs.ts index fffac8b346..73a3efcb88 100644 --- a/packages/trie/examples/proofs.ts +++ 
b/packages/trie/examples/proofs.ts @@ -1,4 +1,4 @@ -import { Trie } from '@ethereumjs/trie' +import { Trie, createMerkleProof, verifyMerkleProof } from '@ethereumjs/trie' import { bytesToUtf8, utf8ToBytes } from '@ethereumjs/util' const trie = new Trie() @@ -11,27 +11,27 @@ async function main() { // proof-of-inclusion await trie.put(k1, v1) - let proof = await trie.createProof(k1) - let value = await trie.verifyProof(trie.root(), k1, proof) + let proof = await createMerkleProof(trie, k1) + let value = await verifyMerkleProof(trie, trie.root(), k1, proof) console.log(value ? bytesToUtf8(value) : 'not found') // 'one' // proof-of-exclusion await trie.put(k1, v1) await trie.put(k2, v2) - proof = await trie.createProof(utf8ToBytes('key3')) - value = await trie.verifyProof(trie.root(), utf8ToBytes('key3'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('key3')) + value = await verifyMerkleProof(trie, trie.root(), utf8ToBytes('key3'), proof) console.log(value ? bytesToUtf8(value) : 'null') // null // invalid proof await trie.put(k1, v1) await trie.put(k2, v2) - proof = await trie.createProof(k2) + proof = await createMerkleProof(trie, k2) proof[0].reverse() try { - const value = await trie.verifyProof(trie.root(), k2, proof) // results in error + const _value = await verifyMerkleProof(trie, trie.root(), k2, proof) // results in error } catch (err) { console.log(err) } } -main() +void main() diff --git a/packages/trie/examples/rootPersistence.ts b/packages/trie/examples/rootPersistence.ts index 56a3c74d43..e22841a585 100644 --- a/packages/trie/examples/rootPersistence.ts +++ b/packages/trie/examples/rootPersistence.ts @@ -1,12 +1,12 @@ -import { Trie } from '@ethereumjs/trie' +import { createTrie } from '@ethereumjs/trie' import { bytesToHex } from '@ethereumjs/util' async function main() { - const trie = await Trie.create({ + const trie = await createTrie({ useRootPersistence: true, }) // this logs the empty root value that has been persisted to the trie db console.log(bytesToHex(trie.root())) // 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421 } -main() +void main() diff --git a/packages/trie/examples/trieWalking.ts b/packages/trie/examples/trieWalking.ts index e99eb93543..a384fa661f 100644 --- a/packages/trie/examples/trieWalking.ts +++ b/packages/trie/examples/trieWalking.ts @@ -1,8 +1,8 @@ -import { Trie } from '@ethereumjs/trie' +import { createTrie } from '@ethereumjs/trie' import { utf8ToBytes } from '@ethereumjs/util' async function main() { - const trie = await Trie.create() + const trie = await createTrie() await trie.put(utf8ToBytes('key'), utf8ToBytes('val')) const walk = trie.walkTrieIterable(trie.root()) @@ -11,4 +11,4 @@ async function main() { console.log({ node, currentKey }) } } -main() +void main() diff --git a/packages/trie/package.json b/packages/trie/package.json index 17251fd4de..ded5cc1a92 100644 --- a/packages/trie/package.json +++ b/packages/trie/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/trie", - "version": "6.2.0", + "version": "6.2.1", "description": "Implementation of the modified merkle patricia tree as specified in Ethereum's yellow paper.", "keywords": [ "merkle", @@ -55,15 +55,14 @@ }, "dependencies": { "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "@types/readable-stream": "^2.3.13", "debug": "^4.3.4", "lru-cache": "10.1.0", - "ethereum-cryptography": "^2.2.1", - "readable-stream": "^3.6.0" + "ethereum-cryptography": "^2.2.1" }, "devDependencies": { - "@ethereumjs/genesis": 
"^0.2.2", + "@ethereumjs/genesis": "^0.2.3", "@types/benchmark": "^1.0.33", "abstract-level": "^1.0.3", "level": "^8.0.0", diff --git a/packages/trie/scripts/view.ts b/packages/trie/scripts/view.ts index 5d2f600027..ee4e48d487 100644 --- a/packages/trie/scripts/view.ts +++ b/packages/trie/scripts/view.ts @@ -9,8 +9,8 @@ import { _walkTrie } from './asyncWalk.js' import type { TrieNode } from '../types.js' import type { Debugger } from 'debug' -const debug = _debug('trieview') -const delimeters = { +const debug = _debug('trieview') // cspell:disable-line +const delimiters = { 0: debug.extend(''), 1: debug.extend('::'), 2: debug.extend('::::'), @@ -18,12 +18,12 @@ const delimeters = { 4: debug.extend('::::::::'), 5: debug.extend('::::::::::'), } as const -type Delimeter = keyof typeof delimeters -const delimiter = (level: Delimeter): void => { - delimeters[level]('-'.repeat(50 - level * 2)) +type Delimiter = keyof typeof delimiters +const delimiter = (level: Delimiter): void => { + delimiters[level]('-'.repeat(50 - level * 2)) } const tNode = ['br', 'lf', 'ex', 'rt', 'nl', 'pf', 'vl'] as const -type TNode = typeof tNode[number] +type TNode = (typeof tNode)[number] const debugN = (type: TNode, d?: Debugger) => { d = d ?? debug const nodeDebuggers = { @@ -43,10 +43,10 @@ function getNodeType(node: TrieNode): TNode { return node instanceof BranchNode ? 'br' : node instanceof ExtensionNode - ? 'ex' - : node instanceof LeafNode - ? 'lf' - : 'nl' + ? 'ex' + : node instanceof LeafNode + ? 'lf' + : 'nl' } function logNode(trie: Trie, node: TrieNode, currentKey: number[]): void { @@ -56,8 +56,8 @@ function logNode(trie: Trie, node: TrieNode, currentKey: number[]): void { debugN('rt').extend(type)( `{ 0x${bytesToHex((trie as any).hash(node.serialize())).slice( 0, - 12 - )}... } ---- \uD83D\uDCA5 \u211B \u2134 \u2134 \u0164 \u0147 \u2134 \u0221 \u2211 \u2737` + 12, + )}... } ---- \uD83D\uDCA5 \u211B \u2134 \u2134 \u0164 \u0147 \u2134 \u0221 \u2211 \u2737`, ) } else { debugN(type)(`{ 0x${bytesToHex((trie as any).hash(node.serialize())).slice(0, 12)}... } ----`) diff --git a/packages/trie/src/constructors.ts b/packages/trie/src/constructors.ts new file mode 100644 index 0000000000..04b727d77e --- /dev/null +++ b/packages/trie/src/constructors.ts @@ -0,0 +1,70 @@ +import { + KeyEncoding, + ValueEncoding, + bytesToUnprefixedHex, + unprefixedHexToBytes, +} from '@ethereumjs/util' +import { keccak256 } from 'ethereum-cryptography/keccak' +import { concatBytes } from 'ethereum-cryptography/utils' + +import { ROOT_DB_KEY, Trie, updateTrieFromMerkleProof } from './index.js' + +import type { Proof, TrieOpts } from './index.js' + +export async function createTrie(opts?: TrieOpts) { + const keccakFunction = + opts?.common?.customCrypto.keccak256 ?? opts?.useKeyHashingFunction ?? keccak256 + let key = ROOT_DB_KEY + + const encoding = + opts?.valueEncoding === ValueEncoding.Bytes ? 
ValueEncoding.Bytes : ValueEncoding.String + + if (opts?.useKeyHashing === true) { + key = keccakFunction.call(undefined, ROOT_DB_KEY) as Uint8Array + } + if (opts?.keyPrefix !== undefined) { + key = concatBytes(opts.keyPrefix, key) + } + + if (opts?.db !== undefined && opts?.useRootPersistence === true) { + if (opts?.root === undefined) { + const root = await opts?.db.get(bytesToUnprefixedHex(key), { + keyEncoding: KeyEncoding.String, + valueEncoding: encoding, + }) + if (typeof root === 'string') { + opts.root = unprefixedHexToBytes(root) + } else { + opts.root = root + } + } else { + await opts?.db.put( + bytesToUnprefixedHex(key), + (encoding === ValueEncoding.Bytes ? opts.root : bytesToUnprefixedHex(opts.root)), + { + keyEncoding: KeyEncoding.String, + valueEncoding: encoding, + }, + ) + } + } + + return new Trie(opts) +} + +/** + * Create a trie from a given (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof. A proof contains the encoded trie nodes + * from the root node to the leaf node storing state data. + * @param proof an EIP-1186 proof to create trie from + * @param shouldVerifyRoot If `true`, verifies that the root key of the proof matches the trie root. Throws if this is not the case. + * @param trieOpts trie opts to be applied to returned trie + * @returns new trie created from given proof + */ +export async function createTrieFromProof(proof: Proof, trieOpts?: TrieOpts) { + const shouldVerifyRoot = trieOpts?.root !== undefined + const trie = new Trie(trieOpts) + const root = await updateTrieFromMerkleProof(trie, proof, shouldVerifyRoot) + trie.root(root) + await trie.persistRoot() + return trie +} diff --git a/packages/trie/src/index.ts b/packages/trie/src/index.ts index 3bde6e4903..ed566d8a3c 100644 --- a/packages/trie/src/index.ts +++ b/packages/trie/src/index.ts @@ -1,3 +1,4 @@ +export * from './constructors.js' export * from './db/index.js' export * from './node/index.js' export * from './proof/index.js' diff --git a/packages/trie/src/node/node.ts b/packages/trie/src/node/node.ts index 76b6888811..c4952532ac 100644 --- a/packages/trie/src/node/node.ts +++ b/packages/trie/src/node/node.ts @@ -1,7 +1,7 @@ import { RLP } from '@ethereumjs/rlp' import { addHexPrefix, removeHexPrefix } from '../util/hex.js' -import { nibblestoBytes } from '../util/nibbles.js' +import { nibblesTypeToPackedBytes } from '../util/nibbles.js' import type { Nibbles } from '../types.js' @@ -45,7 +45,7 @@ export class Node { } raw(): [Uint8Array, Uint8Array] { - return [nibblestoBytes(this.encodedKey()), this._value] + return [nibblesTypeToPackedBytes(this.encodedKey()), this._value] } serialize(): Uint8Array { diff --git a/packages/trie/src/proof/index.ts b/packages/trie/src/proof/index.ts index 6f7de1df89..c3462c9318 100644 --- a/packages/trie/src/proof/index.ts +++ b/packages/trie/src/proof/index.ts @@ -1 +1,172 @@ +import { bytesToHex, concatBytes, equalsBytes } from '@ethereumjs/util' +import { keccak256 } from 'ethereum-cryptography/keccak' + +import { createTrieFromProof } from '../constructors.js' +import { Trie, verifyRangeProof } from '../index.js' +import { bytesToNibbles } from '../util/nibbles.js' + +import type { Proof, TrieOpts } from '../index.js' +import type { PutBatch } from '@ethereumjs/util' + +/** + * An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes + * from the root node to the leaf node storing state data. 
+ * @param rootHash Root hash of the trie that this proof was created from and is being verified for + * @param key Key that is being verified and that the proof is created for + * @param proof An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes from the root node to the leaf node storing state data. + * @param opts optional, the opts may include a custom hashing function to use with the trie for proof verification + * @throws If proof is found to be invalid. + * @returns The value from the key, or null if valid proof of non-existence. + */ +export async function verifyTrieProof( + key: Uint8Array, + proof: Proof, + opts?: TrieOpts, +): Promise<Uint8Array | null> { + try { + const proofTrie = await createTrieFromProof(proof, opts) + const value = await proofTrie.get(key, true) + return value + } catch (err: any) { + throw new Error('Invalid proof provided') + } +} + +// /** +// * A range proof is a proof that includes the encoded trie nodes from the root node to leaf node for one or more branches of a trie, +// * allowing an entire range of leaf nodes to be validated. This is useful in applications such as snap sync where contiguous ranges +// * of state trie data is received and validated for constructing world state, locally. Also see {@link verifyRangeProof}. +// * @param rootHash - root hash of state trie this proof is being verified against. +// * @param firstKey - first key of range being proven. +// * @param lastKey - last key of range being proven. +// * @param keys - key list of leaf data being proven. +// * @param values - value list of leaf data being proven, one-to-one correspondence with keys. +// * @param proof - proof node list, if all-elements-proof where no proof is needed, proof should be null, and both `firstKey` and `lastKey` must be null as well +// * @param opts - optional, the opts may include a custom hashing function to use with the trie for proof verification +// * @returns a flag to indicate whether there exists more trie node in the trie +// */ +export function verifyTrieRangeProof( + rootHash: Uint8Array, + firstKey: Uint8Array | null, + lastKey: Uint8Array | null, + keys: Uint8Array[], + values: Uint8Array[], + proof: Uint8Array[] | null, + opts?: TrieOpts, +): Promise<boolean> { + return verifyRangeProof( + rootHash, + firstKey && bytesToNibbles(firstKey), + lastKey && bytesToNibbles(lastKey), + keys.map((k) => k).map(bytesToNibbles), + values, + proof, + opts?.useKeyHashingFunction ?? keccak256, + ) +} + +/** + * Creates a proof from a trie and key that can be verified using {@link verifyTrieProof}. An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains + * the encoded trie nodes from the root node to the leaf node storing state data. The returned proof will be in the format of an array that contains Uint8Arrays of + * serialized branch, extension, and/or leaf nodes. + * @param key key to create a proof for + */ +export async function createMerkleProof(trie: Trie, key: Uint8Array): Promise<Proof> { + trie['DEBUG'] && trie['debug'](`Creating Proof for Key: ${bytesToHex(key)}`, ['CREATE_PROOF']) + const { stack } = await trie.findPath(trie['appliedKey'](key)) + const p = stack.map((stackElem) => { + return stackElem.serialize() + }) + trie['DEBUG'] && trie['debug'](`Proof created with (${stack.length}) nodes`, ['CREATE_PROOF']) + return p +} + +/** + * Updates a trie from a proof by putting all the nodes in the proof into the trie.
Pass {@param shouldVerifyRoot} as true to check + * that root key of proof matches root of trie and throw if not. + * An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes from the root node to the leaf node storing state data. + * @param proof An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof to update the trie from. + * @param shouldVerifyRoot - defaults to false. If `true`, verifies that the root key of the proof matches the trie root and throws if not (i.e invalid proof). + * @returns The root of the proof + */ +export async function updateTrieFromMerkleProof( + trie: Trie, + proof: Proof, + shouldVerifyRoot: boolean = false, +) { + trie['DEBUG'] && trie['debug'](`Saving (${proof.length}) proof nodes in DB`, ['FROM_PROOF']) + const opStack = proof.map((nodeValue) => { + let key = Uint8Array.from(trie['hash'](nodeValue)) + key = trie['_opts'].keyPrefix ? concatBytes(trie['_opts'].keyPrefix, key) : key + return { + type: 'put', + key, + value: nodeValue, + } as PutBatch + }) + + if (shouldVerifyRoot) { + if (opStack[0] !== undefined && opStack[0] !== null) { + if (!equalsBytes(trie.root(), opStack[0].key)) { + throw new Error('The provided proof does not have the expected trie root') + } + } + } + + await trie['_db'].batch(opStack) + if (opStack[0] !== undefined) { + return opStack[0].key + } +} + +/** + * Verifies a proof by putting all of its nodes into a trie and attempting to get the proven key. An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof + * contains the encoded trie nodes from the root node to the leaf node storing state data. + * @param rootHash Root hash of the trie that this proof was created from and is being verified for + * @param key Key that is being verified and that the proof is created for + * @param proof an EIP-1186 proof to verify the key against + * @throws If proof is found to be invalid. + * @returns The value from the key, or null if valid proof of non-existence. 
+ */ +export async function verifyMerkleProof( + trie: Trie, + rootHash: Uint8Array, + key: Uint8Array, + proof: Proof, +): Promise<Uint8Array | null> { + trie['DEBUG'] && + trie['debug']( + `Verifying Proof:\n|| Key: ${bytesToHex(key)}\n|| Root: ${bytesToHex( + rootHash, + )}\n|| Proof: (${proof.length}) nodes + `, + ['VERIFY_PROOF'], + ) + const proofTrie = new Trie({ + root: rootHash, + useKeyHashingFunction: trie['_opts'].useKeyHashingFunction, + common: trie['_opts'].common, + }) + try { + await updateTrieFromMerkleProof(proofTrie, proof, true) + } catch (e: any) { + throw new Error('Invalid proof nodes given') + } + try { + trie['DEBUG'] && + trie['debug'](`Verifying proof by retrieving key: ${bytesToHex(key)} from proof trie`, [ + 'VERIFY_PROOF', + ]) + const value = await proofTrie.get(trie['appliedKey'](key), true) + trie['DEBUG'] && trie['debug'](`PROOF VERIFIED`, ['VERIFY_PROOF']) + return value + } catch (err: any) { + if (err.message === 'Missing node in DB') { + throw new Error('Invalid proof provided') + } else { + throw err + } + } +} + export * from './range.js' diff --git a/packages/trie/src/proof/range.ts b/packages/trie/src/proof/range.ts index af7938269d..a3699989fc 100644 --- a/packages/trie/src/proof/range.ts +++ b/packages/trie/src/proof/range.ts @@ -1,8 +1,9 @@ import { equalsBytes } from '@ethereumjs/util' +import { createTrieFromProof } from '../index.js' import { BranchNode, ExtensionNode, LeafNode } from '../node/index.js' import { Trie } from '../trie.js' -import { nibblesCompare, nibblestoBytes } from '../util/nibbles.js' +import { nibblesCompare, nibblesTypeToPackedBytes } from '../util/nibbles.js' import type { HashKeysFunction, Nibbles, TrieNode } from '../types.js' @@ -26,7 +27,7 @@ async function unset( key: Nibbles, pos: number, removeLeft: boolean, - stack: TrieNode[] + stack: TrieNode[], ): Promise { if (child instanceof BranchNode) { /** @@ -276,7 +277,7 @@ async function unsetInternal(trie: Trie, left: Nibbles, right: Nibbles): Promise return false } else if (node instanceof BranchNode) { - // Unset all internal nodes in the forkpoint + // Unset all internal nodes in the forkPoint for (let i = left[pos] + 1; i < right[pos]; i++) { node.setBranch(i, null) } @@ -316,13 +317,13 @@ async function unsetInternal(trie: Trie, left: Nibbles, right: Nibbles): Promise * @throws If proof is found to be invalid. * @returns The value from the key, or null if valid proof of non-existence.
*/ -async function verifyProof( +async function verifyMerkleProof( rootHash: Uint8Array, key: Uint8Array, proof: Uint8Array[], - useKeyHashingFunction: HashKeysFunction + useKeyHashingFunction: HashKeysFunction, ): Promise<{ value: Uint8Array | null; trie: Trie }> { - const proofTrie = await Trie.fromProof(proof, { + const proofTrie = await createTrieFromProof(proof, { root: rootHash, useKeyHashingFunction, }) @@ -415,7 +416,7 @@ export async function verifyRangeProof( keys: Nibbles[], values: Uint8Array[], proof: Uint8Array[] | null, - useKeyHashingFunction: HashKeysFunction + useKeyHashingFunction: HashKeysFunction, ): Promise { if (keys.length !== values.length) { throw new Error('invalid keys length or values length') @@ -438,7 +439,7 @@ export async function verifyRangeProof( if (proof === null && firstKey === null && lastKey === null) { const trie = new Trie({ useKeyHashingFunction }) for (let i = 0; i < keys.length; i++) { - await trie.put(nibblestoBytes(keys[i]), values[i]) + await trie.put(nibblesTypeToPackedBytes(keys[i]), values[i]) } if (!equalsBytes(rootHash, trie.root())) { throw new Error('invalid all elements proof: root mismatch') @@ -449,11 +450,11 @@ export async function verifyRangeProof( if (proof !== null && firstKey !== null && lastKey === null) { // Zero element proof if (keys.length === 0) { - const { trie, value } = await verifyProof( + const { trie, value } = await verifyMerkleProof( rootHash, - nibblestoBytes(firstKey), + nibblesTypeToPackedBytes(firstKey), proof, - useKeyHashingFunction + useKeyHashingFunction, ) if (value !== null || (await hasRightElement(trie, firstKey))) { @@ -466,17 +467,17 @@ export async function verifyRangeProof( if (proof === null || firstKey === null || lastKey === null) { throw new Error( - 'invalid all elements proof: proof, firstKey, lastKey must be null at the same time' + 'invalid all elements proof: proof, firstKey, lastKey must be null at the same time', ) } // One element proof if (keys.length === 1 && nibblesCompare(firstKey, lastKey) === 0) { - const { trie, value } = await verifyProof( + const { trie, value } = await verifyMerkleProof( rootHash, - nibblestoBytes(firstKey), + nibblesTypeToPackedBytes(firstKey), proof, - useKeyHashingFunction + useKeyHashingFunction, ) if (nibblesCompare(firstKey, keys[0]) !== 0) { @@ -495,11 +496,11 @@ export async function verifyRangeProof( } if (firstKey.length !== lastKey.length) { throw new Error( - 'invalid two edge elements proof: the length of firstKey should be equal to the length of lastKey' + 'invalid two edge elements proof: the length of firstKey should be equal to the length of lastKey', ) } - const trie = await Trie.fromProof(proof, { + const trie = await createTrieFromProof(proof, { useKeyHashingFunction, root: rootHash, }) @@ -512,7 +513,7 @@ export async function verifyRangeProof( // Put all elements to the trie for (let i = 0; i < keys.length; i++) { - await trie.put(nibblestoBytes(keys[i]), values[i]) + await trie.put(nibblesTypeToPackedBytes(keys[i]), values[i]) } // Compare rootHash diff --git a/packages/trie/src/trie.ts b/packages/trie/src/trie.ts index 582c4f5e06..9914d46af1 100644 --- a/packages/trie/src/trie.ts +++ b/packages/trie/src/trie.ts @@ -3,17 +3,18 @@ import { RLP } from '@ethereumjs/rlp' import { + BIGINT_0, KeyEncoding, Lock, MapDB, RLP_EMPTY_STRING, ValueEncoding, + bytesToBigInt, bytesToHex, bytesToUnprefixedHex, bytesToUtf8, concatBytes, equalsBytes, - unprefixedHexToBytes, } from '@ethereumjs/util' import debug from 'debug' import { keccak256 } from 
'ethereum-cryptography/keccak.js' @@ -27,11 +28,9 @@ import { decodeRawNode, isRawNode, } from './node/index.js' -import { verifyRangeProof } from './proof/range.js' import { ROOT_DB_KEY } from './types.js' import { _walkTrie } from './util/asyncWalk.js' -import { bytesToNibbles, matchingNibbleLength } from './util/nibbles.js' -import { TrieReadStream as ReadStream } from './util/readStream.js' +import { bytesToNibbles, matchingNibbleLength, nibblesTypeToPackedBytes } from './util/nibbles.js' import { WalkController } from './util/walkController.js' import type { @@ -39,14 +38,13 @@ import type { FoundNodeFunction, Nibbles, Path, - Proof, TrieNode, TrieOpts, TrieOptsWithDefaults, TrieShallowCopyOpts, } from './types.js' import type { OnFound } from './util/asyncWalk.js' -import type { BatchDBOp, DB, PutBatch } from '@ethereumjs/util' +import type { BatchDBOp, DB } from '@ethereumjs/util' import type { Debugger } from 'debug' /** @@ -81,7 +79,7 @@ export class Trie { * Creates a new trie. * @param opts Options for instantiating the trie * - * Note: in most cases, the static {@link Trie.create} constructor should be used. It uses the same API but provides sensible defaults + * Note: in most cases, {@link createTrie} constructor should be used. It uses the same API but provides sensible defaults */ constructor(opts?: TrieOpts) { let valueEncoding: ValueEncoding @@ -96,7 +94,7 @@ export class Trie { opts.common?.customCrypto.keccak256 ?? opts.useKeyHashingFunction ?? keccak256 valueEncoding = - opts.db !== undefined ? opts.valueEncoding ?? ValueEncoding.String : ValueEncoding.Bytes + opts.db !== undefined ? (opts.valueEncoding ?? ValueEncoding.String) : ValueEncoding.Bytes } else { // No opts are given, so create a MapDB later on // Use `Bytes` for ValueEncoding @@ -104,7 +102,7 @@ export class Trie { } this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this.debug = this.DEBUG ? (message: string, namespaces: string[] = []) => { let log = this._debug @@ -134,293 +132,6 @@ export class Trie { || ----------------`) } - /** - * Create a trie from a given (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof. A proof contains the encoded trie nodes - * from the root node to the leaf node storing state data. - * @param proof an EIP-1186 proof to create trie from - * @param shouldVerifyRoot If `true`, verifies that the root key of the proof matches the trie root. Throws if this is not the case. - * @param trieOpts trie opts to be applied to returned trie - * @returns new trie created from given proof - */ - static async createFromProof( - proof: Proof, - trieOpts?: TrieOpts, - shouldVerifyRoot: boolean = false - ) { - const trie = new Trie(trieOpts) - const root = await trie.updateFromProof(proof, shouldVerifyRoot) - trie.root(root) - await trie.persistRoot() - return trie - } - - /** - * Static version of verifyProof function with the same behavior. An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes - * from the root node to the leaf node storing state data. - * @param rootHash Root hash of the trie that this proof was created from and is being verified for - * @param key Key that is being verified and that the proof is created for - * @param proof An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes from the root node to the leaf node storing state data. 
- * @param opts optional, the opts may include a custom hashing function to use with the trie for proof verification - * @throws If proof is found to be invalid. - * @returns The value from the key, or null if valid proof of non-existence. - */ - static async verifyProof( - key: Uint8Array, - proof: Proof, - opts?: TrieOpts - ): Promise { - try { - const proofTrie = await Trie.createFromProof(proof, opts) - const value = await proofTrie.get(key, true) - return value - } catch (err: any) { - throw new Error('Invalid proof provided') - } - } - - /** - * A range proof is a proof that includes the encoded trie nodes from the root node to leaf node for one or more branches of a trie, - * allowing an entire range of leaf nodes to be validated. This is useful in applications such as snap sync where contiguous ranges - * of state trie data is received and validated for constructing world state, locally. Also see {@link verifyRangeProof}. A static - * version of this function also exists. - * @param rootHash - root hash of state trie this proof is being verified against. - * @param firstKey - first key of range being proven. - * @param lastKey - last key of range being proven. - * @param keys - key list of leaf data being proven. - * @param values - value list of leaf data being proven, one-to-one correspondence with keys. - * @param proof - proof node list, if all-elements-proof where no proof is needed, proof should be null, and both `firstKey` and `lastKey` must be null as well - * @param opts - optional, the opts may include a custom hashing function to use with the trie for proof verification - * @returns a flag to indicate whether there exists more trie node in the trie - */ - static verifyRangeProof( - rootHash: Uint8Array, - firstKey: Uint8Array | null, - lastKey: Uint8Array | null, - keys: Uint8Array[], - values: Uint8Array[], - proof: Uint8Array[] | null, - opts?: TrieOpts - ): Promise { - return verifyRangeProof( - rootHash, - firstKey && bytesToNibbles(firstKey), - lastKey && bytesToNibbles(lastKey), - keys.map((k) => k).map(bytesToNibbles), - values, - proof, - opts?.useKeyHashingFunction ?? keccak256 - ) - } - - /** - * Static version of fromProof function. If a root is provided in the opts param, the proof will be checked to have the same expected root. An - * (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes from the root node to the leaf node storing state data. - * @param proof An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes from the root node to the leaf node storing state data. - * @deprecated Use `createFromProof` - */ - static async fromProof(proof: Proof, opts?: TrieOpts): Promise { - const trie = await Trie.create(opts) - if (opts?.root && !equalsBytes(opts.root, trie.hash(proof[0]))) { - throw new Error('Invalid proof provided') - } - const root = await trie.updateFromProof(proof) - trie.root(root) - await trie.persistRoot() - return trie - } - - /** - * A range proof is a proof that includes the encoded trie nodes from the root node to leaf node for one or more branches of a trie, - * allowing an entire range of leaf nodes to be validated. This is useful in applications such as snap sync where contiguous ranges - * of state trie data is received and validated for constructing world state, locally. Also see {@link verifyRangeProof}. A static - * version of this function also exists. - * @param rootHash - root hash of state trie this proof is being verified against. 
- * @param firstKey - first key of range being proven. - * @param lastKey - last key of range being proven. - * @param keys - key list of leaf data being proven. - * @param values - value list of leaf data being proven, one-to-one correspondence with keys. - * @param proof - proof node list, if all-elements-proof where no proof is needed, proof should be null, and both `firstKey` and `lastKey` must be null as well - * @returns a flag to indicate whether there exists more trie node in the trie - */ - verifyRangeProof( - rootHash: Uint8Array, - firstKey: Uint8Array | null, - lastKey: Uint8Array | null, - keys: Uint8Array[], - values: Uint8Array[], - proof: Uint8Array[] | null - ): Promise { - return verifyRangeProof( - rootHash, - firstKey && bytesToNibbles(this.appliedKey(firstKey)), - lastKey && bytesToNibbles(this.appliedKey(lastKey)), - keys.map((k) => this.appliedKey(k)).map(bytesToNibbles), - values, - proof, - this._opts.useKeyHashingFunction - ) - } - - /** - * Creates a proof from a trie and key that can be verified using {@link Trie.verifyProof}. An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains - * the encoded trie nodes from the root node to the leaf node storing state data. The returned proof will be in the format of an array that contains Uint8Arrays of - * serialized branch, extension, and/or leaf nodes. - * @param key key to create a proof for - */ - async createProof(key: Uint8Array): Promise { - this.DEBUG && this.debug(`Creating Proof for Key: ${bytesToHex(key)}`, ['CREATE_PROOF']) - const { stack } = await this.findPath(this.appliedKey(key)) - const p = stack.map((stackElem) => { - return stackElem.serialize() - }) - this.DEBUG && this.debug(`Proof created with (${stack.length}) nodes`, ['CREATE_PROOF']) - return p - } - - /** - * Updates a trie from a proof by putting all the nodes in the proof into the trie. If a trie is being updated with multiple proofs, {@param shouldVerifyRoot} can - * be passed as false in order to not immediately throw on an unexpected root, so that root verification can happen after all proofs and their nodes have been added. - * An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof contains the encoded trie nodes from the root node to the leaf node storing state data. - * @param proof An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof to update the trie from. - * @param shouldVerifyRoot If `true`, verifies that the root key of the proof matches the trie root. Throws if this is not the case. - * @returns The root of the proof - */ - async updateFromProof(proof: Proof, shouldVerifyRoot: boolean = false) { - this.DEBUG && this.debug(`Saving (${proof.length}) proof nodes in DB`, ['FROM_PROOF']) - const opStack = proof.map((nodeValue) => { - let key = Uint8Array.from(this.hash(nodeValue)) - key = this._opts.keyPrefix ? concatBytes(this._opts.keyPrefix, key) : key - return { - type: 'put', - key, - value: nodeValue, - } as PutBatch - }) - - if (shouldVerifyRoot) { - if (opStack[0] !== undefined && opStack[0] !== null) { - if (!equalsBytes(this.root(), opStack[0].key)) { - throw new Error('The provided proof does not have the expected trie root') - } - } - } - - await this._db.batch(opStack) - if (opStack[0] !== undefined) { - return opStack[0].key - } - } - - /** - * Verifies a proof by putting all of its nodes into a trie and attempting to get the proven key. An (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof - * contains the encoded trie nodes from the root node to the leaf node storing state data. 
A static version of this function exists with the same name. - * @param rootHash Root hash of the trie that this proof was created from and is being verified for - * @param key Key that is being verified and that the proof is created for - * @param proof an EIP-1186 proof to verify the key against - * @throws If proof is found to be invalid. - * @returns The value from the key, or null if valid proof of non-existence. - */ - async verifyProof( - rootHash: Uint8Array, - key: Uint8Array, - proof: Proof - ): Promise { - this.DEBUG && - this.debug( - `Verifying Proof:\n|| Key: ${bytesToHex(key)}\n|| Root: ${bytesToHex( - rootHash - )}\n|| Proof: (${proof.length}) nodes - `, - ['VERIFY_PROOF'] - ) - const proofTrie = new Trie({ - root: rootHash, - useKeyHashingFunction: this._opts.useKeyHashingFunction, - common: this._opts.common, - }) - try { - await proofTrie.updateFromProof(proof, true) - } catch (e: any) { - throw new Error('Invalid proof nodes given') - } - try { - this.DEBUG && - this.debug(`Verifying proof by retrieving key: ${bytesToHex(key)} from proof trie`, [ - 'VERIFY_PROOF', - ]) - const value = await proofTrie.get(this.appliedKey(key), true) - this.DEBUG && this.debug(`PROOF VERIFIED`, ['VERIFY_PROOF']) - return value - } catch (err: any) { - if (err.message === 'Missing node in DB') { - throw new Error('Invalid proof provided') - } else { - throw err - } - } - } - - /** - * Create a trie from a given (EIP-1186)[https://eips.ethereum.org/EIPS/eip-1186] proof. An EIP-1186 proof contains the encoded trie nodes from the root - * node to the leaf node storing state data. This function does not check if the proof has the same expected root. A static version of this function exists - * with the same name. - * @param proof an EIP-1186 proof to update the trie from - * @deprecated Use `updateFromProof` - */ - async fromProof(proof: Proof): Promise { - await this.updateFromProof(proof, false) - - if (equalsBytes(this.root(), this.EMPTY_TRIE_ROOT) && proof[0] !== undefined) { - let rootKey = Uint8Array.from(this.hash(proof[0])) - // TODO: what if we have keyPrefix and we set root? This should not work, right? (all trie nodes are non-reachable) - rootKey = this._opts.keyPrefix ? concatBytes(this._opts.keyPrefix, rootKey) : rootKey - this.root(rootKey) - await this.persistRoot() - } - return - } - - static async create(opts?: TrieOpts) { - const keccakFunction = - opts?.common?.customCrypto.keccak256 ?? opts?.useKeyHashingFunction ?? keccak256 - let key = ROOT_DB_KEY - - const encoding = - opts?.valueEncoding === ValueEncoding.Bytes ? ValueEncoding.Bytes : ValueEncoding.String - - if (opts?.useKeyHashing === true) { - key = keccakFunction.call(undefined, ROOT_DB_KEY) as Uint8Array - } - if (opts?.keyPrefix !== undefined) { - key = concatBytes(opts.keyPrefix, key) - } - - if (opts?.db !== undefined && opts?.useRootPersistence === true) { - if (opts?.root === undefined) { - const root = await opts?.db.get(bytesToUnprefixedHex(key), { - keyEncoding: KeyEncoding.String, - valueEncoding: encoding, - }) - if (typeof root === 'string') { - opts.root = unprefixedHexToBytes(root) - } else { - opts.root = root - } - } else { - await opts?.db.put( - bytesToUnprefixedHex(key), - (encoding === ValueEncoding.Bytes ? 
opts.root : bytesToUnprefixedHex(opts.root)), - { - keyEncoding: KeyEncoding.String, - valueEncoding: encoding, - } - ) - } - } - - return new Trie(opts) - } - database(db?: DB, valueEncoding?: ValueEncoding) { if (db !== undefined) { if (db instanceof CheckpointDB) { @@ -444,7 +155,7 @@ export class Trie { this.DEBUG && this.debug(`Setting root to ${bytesToHex(value)}`) if (value.length !== this._hashLen) { throw new Error( - `Invalid root length. Roots are ${this._hashLen} bytes, got ${value.length} bytes` + `Invalid root length. Roots are ${this._hashLen} bytes, got ${value.length} bytes`, ) } @@ -496,7 +207,7 @@ export class Trie { async put( key: Uint8Array, value: Uint8Array | null, - skipKeyTransform: boolean = false + skipKeyTransform: boolean = false, ): Promise { this.DEBUG && this.debug(`Key: ${bytesToHex(key)}`, ['PUT']) this.DEBUG && this.debug(`Value: ${value === null ? 'null' : bytesToHex(key)}`, ['PUT']) @@ -525,7 +236,7 @@ export class Trie { if (val === null || equalsBytes(val, value) === false) { // All items of the stack are going to change. // (This is the path from the root node to wherever it needs to insert nodes) - // The items change, because the leaf value is updated, thus all keyhashes in the + // The items change, because the leaf value is updated, thus all keyHashes in the // stack should be updated as well, so that it points to the right key/value pairs of the path const deleteHashes = stack.map((e) => this.hash(e.serialize())) ops = deleteHashes.map((deletedHash) => { @@ -546,7 +257,7 @@ export class Trie { // then update await this._updateNode(appliedKey, value, remaining, stack) if (this._opts.useNodePruning) { - // Only after updating the node we can delete the keyhashes + // Only after updating the node we can delete the keyHashes await this._db.batch(ops) } } @@ -570,7 +281,7 @@ export class Trie { // Only delete if the `key` currently has any value if (this._opts.useNodePruning && node !== null) { const deleteHashes = stack.map((e) => this.hash(e.serialize())) - // Just as with `put`, the stack items all will have their keyhashes updated + // Just as with `put`, the stack items all will have their keyHashes updated // So after deleting the node, one can safely delete these from the DB ops = deleteHashes.map((deletedHash) => { @@ -590,7 +301,7 @@ export class Trie { await this._deleteNode(appliedKey, stack) } if (this._opts.useNodePruning) { - // Only after deleting the node it is possible to delete the keyhashes + // Only after deleting the node it is possible to delete the keyHashes await this._db.batch(ops) } await this.persistRoot() @@ -610,7 +321,7 @@ export class Trie { stack: TrieNode[] } = { stack: [], - } + }, ): Promise { const targetKey = bytesToNibbles(key) const keyLen = targetKey.length @@ -641,9 +352,9 @@ export class Trie { branchNode === null ? 'NULL' : branchNode instanceof Uint8Array - ? `NodeHash: ${bytesToHex(branchNode)}` - : `Raw_Node: ${branchNode.toString()}`, - ['FIND_PATH', 'BranchNode', branchIndex.toString()] + ? 
`NodeHash: ${bytesToHex(branchNode)}` + : `Raw_Node: ${branchNode.toString()}`, + ['FIND_PATH', 'BranchNode', branchIndex.toString()], ) if (!branchNode) { result = { node: null, remaining: targetKey.slice(progress), stack } @@ -671,13 +382,13 @@ export class Trie { this.debug( `Comparing node key to expected\n|| Node_Key: [${node.key()}]\n|| Expected: [${targetKey.slice( progress, - progress + node.key().length + progress + node.key().length, )}]\n|| Matching: [${ targetKey.slice(progress, progress + node.key().length).toString() === node.key().toString() }] `, - ['FIND_PATH', 'ExtensionNode'] + ['FIND_PATH', 'ExtensionNode'], ) const _progress = progress for (const k of node.key()) { @@ -697,9 +408,9 @@ export class Trie { this.DEBUG && this.debug( `Walking trie from ${startingNode === undefined ? 'ROOT' : 'NODE'}: ${bytesToHex( - start as Uint8Array + start as Uint8Array, )}`, - ['FIND_PATH'] + ['FIND_PATH'], ) await this.walkTrie(start, onFound) } catch (error: any) { @@ -716,7 +427,7 @@ export class Trie { result.node !== null ? `Target Node FOUND for ${bytesToNibbles(key)}` : `Target Node NOT FOUND`, - ['FIND_PATH'] + ['FIND_PATH'], ) result.stack = result.stack.filter((e) => e !== undefined) @@ -727,7 +438,7 @@ export class Trie { || Remaining: [${result.remaining}]\n|| Stack: ${result.stack .map((e) => e.constructor.name) .join(', ')}`, - ['FIND_PATH'] + ['FIND_PATH'], ) return result } @@ -767,7 +478,7 @@ export class Trie { undefined, async (node) => { return node instanceof LeafNode || (node instanceof BranchNode && node.value() !== null) - } + }, )) { await onFound(node, currentKey) } @@ -802,7 +513,7 @@ export class Trie { const value = (await this._db.get(key)) ?? null if (value === null) { - // Dev note: this error message text is used for error checking in `checkRoot`, `verifyProof`, and `findPath` + // Dev note: this error message text is used for error checking in `checkRoot`, `verifyMerkleProof`, and `findPath` throw new Error('Missing node in DB') } @@ -823,7 +534,7 @@ export class Trie { k: Uint8Array, value: Uint8Array, keyRemainder: Nibbles, - stack: TrieNode[] + stack: TrieNode[], ): Promise { const toSave: BatchDBOp[] = [] const lastNode = stack.pop() @@ -928,7 +639,7 @@ export class Trie { branchKey: number, branchNode: TrieNode, parentNode: TrieNode, - stack: TrieNode[] + stack: TrieNode[], ) => { // branchNode is the node ON the branch node not THE branch node if (parentNode === null || parentNode === undefined || parentNode instanceof BranchNode) { @@ -1025,7 +736,7 @@ export class Trie { // However, this violates the trie spec; this should be converted in either an ExtensionNode // Or a LeafNode // Since this branch is deleted, one can thus also delete this branch from the DB - // So add this to the `opStack` and mark the keyhash to be deleted + // So add this to the `opStack` and mark the keyHash to be deleted if (this._opts.useNodePruning) { // If the branchNode has length < 32, it will be a RawNode (Uint8Array[]) instead of a Uint8Array // In that case, we need to serialize and hash it into a Uint8Array, otherwise the operation will throw @@ -1102,7 +813,7 @@ export class Trie { node: TrieNode, topLevel: boolean, opStack: BatchDBOp[], - remove: boolean = false + remove: boolean = false, ): Uint8Array | (EmbeddedNode | null)[] { const encoded = node.serialize() @@ -1137,9 +848,9 @@ export class Trie { * @example * const ops = [ * { type: 'del', key: Uint8Array.from('father') } - * , { type: 'put', key: Uint8Array.from('name'), value: Uint8Array.from('Yuri 
Irsenovich Kim') } + * , { type: 'put', key: Uint8Array.from('name'), value: Uint8Array.from('Yuri Irsenovich Kim') } // cspell:disable-line * , { type: 'put', key: Uint8Array.from('dob'), value: Uint8Array.from('16 February 1941') } - * , { type: 'put', key: Uint8Array.from('spouse'), value: Uint8Array.from('Kim Young-sook') } + * , { type: 'put', key: Uint8Array.from('spouse'), value: Uint8Array.from('Kim Young-sook') } // cspell:disable-line * , { type: 'put', key: Uint8Array.from('occupation'), value: Uint8Array.from('Clown') } * ] * await trie.batch(ops) @@ -1189,7 +900,7 @@ export class Trie { if ( item !== null && bytesToUnprefixedHex( - isRawNode(item) ? controller.trie.appliedKey(RLP.encode(item)) : item + isRawNode(item) ? controller.trie.appliedKey(RLP.encode(item)) : item, ) === dbkey ) { found = true @@ -1218,14 +929,6 @@ export class Trie { return true } - /** - * The `data` event is given an `Object` that has two properties; the `key` and the `value`. Both should be Uint8Arrays. - * @return Returns a [stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_class_stream_readable) of the contents of the `trie` - */ - createReadStream(): ReadStream { - return new ReadStream(this) - } - /** * Returns a copy of the underlying trie. * @@ -1261,9 +964,9 @@ export class Trie { this.DEBUG && this.debug( `Persisting root: \n|| RootHash: ${bytesToHex(this.root())}\n|| RootKey: ${bytesToHex( - this.appliedKey(ROOT_DB_KEY) + this.appliedKey(ROOT_DB_KEY), )}`, - ['PERSIST_ROOT'] + ['PERSIST_ROOT'], ) let key = this.appliedKey(ROOT_DB_KEY) key = this._opts.keyPrefix ? concatBytes(this._opts.keyPrefix, key) : key @@ -1364,4 +1067,45 @@ export class Trie { this.debug(`Deleting ${this._db.checkpoints.length} checkpoints.`, ['FLUSH_CHECKPOINTS']) this._db.checkpoints = [] } + + /** + * Returns a list of values stored in the trie + * @param startKey first unhashed key in the range to be returned (defaults to 0) + * @param limit - the number of keys to be returned (undefined means all keys) + * @returns an object with two properties (a map of all key/value pairs in the trie - or in the specified range) and then a `nextKey` reference if a range is specified + */ + async getValueMap( + startKey = BIGINT_0, + limit?: number, + ): Promise<{ values: { [key: string]: string }; nextKey: null | string }> { + // If limit is undefined, all keys are inRange + let inRange = limit !== undefined ? false : true + let i = 0 + const values: { [key: string]: string } = {} + let nextKey: string | null = null + await this.walkAllValueNodes(async (node: TrieNode, currentKey: number[]) => { + if (node instanceof LeafNode) { + const keyBytes = nibblesTypeToPackedBytes(currentKey.concat(node.key())) + if (!inRange) { + // Check if the key is already in the correct range. 
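// Aside (not part of the patch): a minimal usage sketch of the getValueMap API added in this
// hunk. The '@ethereumjs/trie' import path and the example keys/values are assumptions made for
// illustration; the method itself and its { values, nextKey } result shape are as defined above.
import { Trie } from '@ethereumjs/trie'
import { bigIntToBytes, utf8ToBytes } from '@ethereumjs/util'

async function dumpTrie() {
  const trie = new Trie()
  await trie.put(bigIntToBytes(1n), utf8ToBytes('first'))
  await trie.put(bigIntToBytes(2n), utf8ToBytes('second'))
  await trie.put(bigIntToBytes(3n), utf8ToBytes('third'))

  // No arguments: every key/value pair is returned hex encoded and nextKey stays null
  const all = await trie.getValueMap()

  // Range query: start at key 2 and return at most one entry; nextKey then references
  // the first key that did not fit into the limit
  const page = await trie.getValueMap(2n, 1)
  console.log(all.values, page.values, page.nextKey)
}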
+ if (bytesToBigInt(keyBytes) >= startKey) { + inRange = true + } else { + return + } + } + + if (limit === undefined || i < limit) { + values[bytesToHex(keyBytes)] = bytesToHex(node._value) + i++ + } else if (i === limit) { + nextKey = bytesToHex(keyBytes) + } + } + }) + return { + values, + nextKey, + } + } } diff --git a/packages/trie/src/types.ts b/packages/trie/src/types.ts index 5928c2ee8a..f17ecc424c 100644 --- a/packages/trie/src/types.ts +++ b/packages/trie/src/types.ts @@ -30,7 +30,7 @@ export type FoundNodeFunction = ( nodeRef: Uint8Array, node: TrieNode | null, key: Nibbles, - walkController: WalkController + walkController: WalkController, ) => void export type HashKeysFunction = (msg: Uint8Array) => Uint8Array diff --git a/packages/trie/src/util/asyncWalk.ts b/packages/trie/src/util/asyncWalk.ts index 2074302193..28e57586eb 100644 --- a/packages/trie/src/util/asyncWalk.ts +++ b/packages/trie/src/util/asyncWalk.ts @@ -29,7 +29,7 @@ export async function* _walkTrie( currentKey: number[] = [], onFound: OnFound = async (_trieNode: TrieNode, _key: number[]) => {}, filter: NodeFilter = async (_trieNode: TrieNode, _key: number[]) => true, - visited: Set = new Set() + visited: Set = new Set(), ): AsyncIterable<{ node: TrieNode; currentKey: number[] }> { if (equalsBytes(nodeHash, this.EMPTY_TRIE_ROOT)) { return diff --git a/packages/trie/src/util/encoding.ts b/packages/trie/src/util/encoding.ts index 43b1ea10d6..8e6234b80d 100644 --- a/packages/trie/src/util/encoding.ts +++ b/packages/trie/src/util/encoding.ts @@ -1,4 +1,6 @@ -import { hexToBytes, toBytes, unprefixedHexToBytes } from '@ethereumjs/util' +import { concatBytes, hexToBytes, toBytes, unprefixedHexToBytes } from '@ethereumjs/util' + +import { nibblesTypeToPackedBytes } from './nibbles.js' import type { Nibbles } from '../types.js' @@ -32,10 +34,13 @@ export const hasTerminator = (nibbles: Uint8Array) => { return nibbles.length > 0 && nibbles[nibbles.length - 1] === 16 } -export const nibblesToBytes = (nibbles: Uint8Array, bytes: Uint8Array) => { +export const nibblesToBytes = (nibbles: Uint8Array) => { + const bytes = new Uint8Array(nibbles.length / 2) for (let bi = 0, ni = 0; ni < nibbles.length; bi += 1, ni += 2) { bytes[bi] = (nibbles[ni] << 4) | nibbles[ni + 1] } + + return bytes } export const hexToKeybytes = (hex: Uint8Array) => { @@ -45,10 +50,8 @@ export const hexToKeybytes = (hex: Uint8Array) => { if (hex.length % 2 === 1) { throw Error("Can't convert hex key of odd length") } - const key = new Uint8Array(hex.length / 2) - nibblesToBytes(hex, key) - return key + return nibblesToBytes(hex) } // hex to compact @@ -69,9 +72,9 @@ export const nibblesToCompactBytes = (nibbles: Uint8Array) => { buf[0] |= nibbles[0] nibbles = nibbles.subarray(1) } + // create bytes out of the rest even nibbles - nibblesToBytes(nibbles, buf.subarray(1)) - return buf + return concatBytes(buf.subarray(0, 1), nibblesToBytes(nibbles)) } export const bytesToNibbles = (str: Uint8Array) => { @@ -103,22 +106,6 @@ export const compactBytesToNibbles = (compact: Uint8Array) => { return base.subarray(chop) } -/** - * Packs every two nibbles into a single byte - * - * @param arr Nibble typed nibble array - * @returns Uint8Array typed byte array - */ -export const nibbleTypeToPackedBytes = (arr: Nibbles): Uint8Array => { - const buf = new Uint8Array(arr.length / 2) - for (let i = 0; i < buf.length; i++) { - let q = i * 2 - buf[i] = (arr[q] << 4) + arr[++q] - } - - return buf -} - /** * Converts each nibble into a single byte * @@ -142,12 +129,12 @@ 
export const nibbleTypeToByteType = (arr: Nibbles): Uint8Array => { * @returns Nibble typed nibble array */ export const byteTypeToNibbleType = (key: Uint8Array): Nibbles => { - const bkey = toBytes(key) + const bKey = toBytes(key) const nibbles = [] as Nibbles - for (let i = 0; i < bkey.length; i++) { + for (let i = 0; i < bKey.length; i++) { const q = i - nibbles[q] = bkey[i] % 16 + nibbles[q] = bKey[i] % 16 } return nibbles @@ -167,7 +154,7 @@ export const pathToHexKey = (path: string, extension: Nibbles, retType: string): if (retType === 'hex') { return nibbleTypeToByteType(n.concat(extension)) } else if (retType === 'keybyte') { - return nibbleTypeToPackedBytes(n.concat(extension)) + return nibblesTypeToPackedBytes(n.concat(extension)) } throw Error('retType must be either "keybyte" or "hex"') } @@ -177,13 +164,13 @@ export const mergeAndFormatKeyPaths = (pathStrings: string[]) => { let paths: string[] = [] let i = 0 while (i < pathStrings.length) { - const outterPathString = pathStrings[i]!.split('/') - const outterAccountPath = outterPathString[0] - const outterStoragePath = outterPathString[1] + const outerPathString = pathStrings[i]!.split('/') + const outerAccountPath = outerPathString[0] + const outerStoragePath = outerPathString[1] - paths.push(outterAccountPath) - if (outterStoragePath !== undefined) { - paths.push(outterStoragePath) + paths.push(outerAccountPath) + if (outerStoragePath !== undefined) { + paths.push(outerStoragePath) } let j = ++i @@ -192,7 +179,7 @@ export const mergeAndFormatKeyPaths = (pathStrings: string[]) => { const innerAccountPath = innerPathString[0] const innerStoragePath = innerPathString[1] - if (innerAccountPath === outterAccountPath) { + if (innerAccountPath === outerAccountPath) { paths.push(innerStoragePath) } else { ret.push(paths) @@ -218,6 +205,6 @@ export const mergeAndFormatKeyPaths = (pathStrings: string[]) => { // full path is keybyte encoded return hexToKeybytes(unprefixedHexToBytes(s)) } - }) + }), ) } diff --git a/packages/trie/src/util/genesisState.ts b/packages/trie/src/util/genesisState.ts index ed8adce049..d6171d8088 100644 --- a/packages/trie/src/util/genesisState.ts +++ b/packages/trie/src/util/genesisState.ts @@ -36,7 +36,7 @@ export async function genesisStateRoot(genesisState: GenesisState) { for (const [k, val] of storage) { const storageKey = isHexString(k) ? hexToBytes(k) : unprefixedHexToBytes(k) const storageVal = RLP.encode( - unpadBytes(isHexString(val) ? hexToBytes(val) : unprefixedHexToBytes(val)) + unpadBytes(isHexString(val) ? 
hexToBytes(val) : unprefixedHexToBytes(val)), ) await storageTrie.put(storageKey, storageVal) } diff --git a/packages/trie/src/util/index.ts b/packages/trie/src/util/index.ts index 1297a1dbca..14bcecbdab 100644 --- a/packages/trie/src/util/index.ts +++ b/packages/trie/src/util/index.ts @@ -1,5 +1,3 @@ export * from './encoding.js' export * from './genesisState.js' -export * from './readStream.js' -export * from './tasks.js' export * from './walkController.js' diff --git a/packages/trie/src/util/nibbles.ts b/packages/trie/src/util/nibbles.ts index 3c5d9c231f..15ec98bf7e 100644 --- a/packages/trie/src/util/nibbles.ts +++ b/packages/trie/src/util/nibbles.ts @@ -8,14 +8,14 @@ import type { Nibbles } from '../types.js' * @param key */ export function bytesToNibbles(key: Uint8Array): Nibbles { - const bkey = toBytes(key) + const bKey = toBytes(key) const nibbles = [] as Nibbles - for (let i = 0; i < bkey.length; i++) { + for (let i = 0; i < bKey.length; i++) { let q = i * 2 - nibbles[q] = bkey[i] >> 4 + nibbles[q] = bKey[i] >> 4 ++q - nibbles[q] = bkey[i] % 16 + nibbles[q] = bKey[i] % 16 } return nibbles @@ -26,7 +26,7 @@ export function bytesToNibbles(key: Uint8Array): Nibbles { * @private * @param arr - Nibble array */ -export function nibblestoBytes(arr: Nibbles): Uint8Array { +export function nibblesTypeToPackedBytes(arr: Nibbles): Uint8Array { const buf = new Uint8Array(arr.length / 2) for (let i = 0; i < buf.length; i++) { let q = i * 2 diff --git a/packages/trie/src/util/readStream.ts b/packages/trie/src/util/readStream.ts deleted file mode 100644 index dfc002d29c..0000000000 --- a/packages/trie/src/util/readStream.ts +++ /dev/null @@ -1,66 +0,0 @@ -// eslint-disable-next-line implicit-dependencies/no-implicit -import { Readable } from 'readable-stream' - -import { BranchNode, LeafNode } from '../node/index.js' - -import { nibblestoBytes } from './nibbles.js' - -import type { Trie } from '../trie.js' -import type { FoundNodeFunction } from '../types.js' - -const _findValueNodes = async (trie: Trie, onFound: FoundNodeFunction): Promise => { - const outerOnFound: FoundNodeFunction = async (nodeRef, node, key, walkController) => { - let fullKey = key - if (node instanceof LeafNode) { - fullKey = key.concat(node.key()) - // found leaf node! 
- onFound(nodeRef, node, fullKey, walkController) - } else if (node instanceof BranchNode && node.value()) { - // found branch with value - onFound(nodeRef, node, fullKey, walkController) - } else { - // keep looking for value nodes - if (node !== null) { - walkController.allChildren(node, key) - } - } - } - await trie.walkTrie(trie.root(), outerOnFound) -} - -export class TrieReadStream extends Readable { - private trie: Trie - private _started: boolean - - constructor(trie: Trie) { - super({ objectMode: true }) - - this.trie = trie - this._started = false - } - - async _read() { - if (this._started) { - return - } - this._started = true - try { - await _findValueNodes(this.trie, async (_, node, key, walkController) => { - if (node !== null) { - this.push({ - key: nibblestoBytes(key), - value: node.value(), - }) - walkController.allChildren(node, key) - } - }) - } catch (error: any) { - if (error.message === 'Missing node in DB') { - // pass - } else { - throw error - } - } - this.push(null) - } -} diff --git a/packages/trie/src/util/walkController.ts b/packages/trie/src/util/walkController.ts index 593bf45683..0339f3fafa 100644 --- a/packages/trie/src/util/walkController.ts +++ b/packages/trie/src/util/walkController.ts @@ -1,6 +1,6 @@ -import { BranchNode, ExtensionNode, LeafNode } from '../node/index.js' +import { PrioritizedTaskExecutor } from '@ethereumjs/util' -import { PrioritizedTaskExecutor } from './tasks.js' +import { BranchNode, ExtensionNode, LeafNode } from '../node/index.js' import type { Trie } from '../trie.js' import type { FoundNodeFunction, Nibbles, TrieNode } from '../types.js' @@ -40,7 +40,7 @@ export class WalkController { onNode: FoundNodeFunction, trie: Trie, root: Uint8Array, - poolSize?: number + poolSize?: number, ): Promise { const strategy = new WalkController(onNode, trie, poolSize ?? 500) await strategy.startWalk(root) @@ -106,7 +106,7 @@ export class WalkController { } taskFinishedCallback() // this marks the current task as finished. If there are any tasks left in the queue, this will immediately execute the first task. 
this.processNode(nodeRef as Uint8Array, childNode as TrieNode, key) - } + }, ) } diff --git a/packages/trie/test/encoding.spec.ts b/packages/trie/test/encoding.spec.ts index d02d80b368..dc4ecace68 100644 --- a/packages/trie/test/encoding.spec.ts +++ b/packages/trie/test/encoding.spec.ts @@ -51,7 +51,7 @@ describe('support for Uint8Array', () => { for (const value of db._database.values()) { assert.ok( typeof value === 'string', - 'if a database is provided, string values will be used internally' + 'if a database is provided, string values will be used internally', ) } }) diff --git a/packages/trie/test/index.spec.ts b/packages/trie/test/index.spec.ts index 1d901575e9..276c51cd0b 100644 --- a/packages/trie/test/index.spec.ts +++ b/packages/trie/test/index.spec.ts @@ -22,7 +22,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { describe('simple save and retrieve', () => { it('should not crash if given a non-existent root', async () => { const root = hexToBytes( - '0x3f4399b08efe68945c1cf90ffe85bbe3ce978959da753f9e649f034015b8817d' + '0x3f4399b08efe68945c1cf90ffe85bbe3ce978959da753f9e649f034015b8817d', ) const trie = new Trie({ root, keyPrefix }) @@ -67,7 +67,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('doge'), utf8ToBytes('coin')) assert.equal( '0xde8a34a8c1d558682eae1528b47523a483dd8685d6db14b291451a66066bf0fc', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) @@ -114,7 +114,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('do'), utf8ToBytes('verb')) assert.equal( '0xf803dfcb7e8f1afd45e88eedb4699a7138d6c07b71243d9ae9bff720c99925f9', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) @@ -122,7 +122,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('done'), utf8ToBytes('finished')) assert.equal( '0x409cff4d820b394ed3fb1cd4497bdd19ffa68d30ae34157337a7043c94a3e8cb', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) }) @@ -142,7 +142,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trie.put(utf8ToBytes('done'), utf8ToBytes('finished')) assert.equal( '0x409cff4d820b394ed3fb1cd4497bdd19ffa68d30ae34157337a7043c94a3e8cb', - bytesToHex(trie.root()) + bytesToHex(trie.root()), ) }) }) @@ -158,11 +158,11 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) await trieSetup.trie.del(new Uint8Array([12, 22, 22])) @@ -174,15 +174,15 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 33, 33]), - utf8ToBytes('create the middle branch') + utf8ToBytes('create the middle branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) await trieSetup.trie.del(new Uint8Array([12, 22, 22])) @@ -194,15 +194,15 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new 
Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 33, 33]), - utf8ToBytes('create the middle branch') + utf8ToBytes('create the middle branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) // delete the middle branch @@ -215,15 +215,15 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { await trieSetup.trie.put(new Uint8Array([11, 11, 11]), utf8ToBytes('first')) await trieSetup.trie.put( new Uint8Array([12, 22, 22]), - utf8ToBytes('create the first branch') + utf8ToBytes('create the first branch'), ) await trieSetup.trie.put( new Uint8Array([12, 33, 33]), - utf8ToBytes('create the middle branch') + utf8ToBytes('create the middle branch'), ) await trieSetup.trie.put( new Uint8Array([12, 34, 44]), - utf8ToBytes('create the last branch') + utf8ToBytes('create the last branch'), ) // delete the middle branch await trieSetup.trie.del(new Uint8Array([11, 11, 11])) @@ -259,7 +259,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { assert.ok(path.node === null, 'findPath should not return a node now') assert.ok( path.stack.length === 1, - 'findPath should find the first extension node which is still in the DB' + 'findPath should find the first extension node which is still in the DB', ) }) }) @@ -310,7 +310,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { const v2 = utf8ToBytes('this-is-some-longer-value-to-test-the-delete-operation-value2') const rootAfterK1 = hexToBytes( - '0x809e75931f394603657e113eb7244794f35b8d326cff99407111d600722e9425' + '0x809e75931f394603657e113eb7244794f35b8d326cff99407111d600722e9425', ) const trieSetup = { @@ -325,7 +325,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { assert.equal( await trieSetup.trie.get(k1), null, - 'should return null on latest state root independently from deleteFromDB setting' + 'should return null on latest state root independently from deleteFromDB setting', ) trieSetup.trie.root(rootAfterK1) @@ -341,7 +341,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { return concatBytes( utf8ToBytes('hash_'), new Uint8Array(hashLen - msg.length).fill(0), - msg + msg, ) } else { return concatBytes(utf8ToBytes('hash_'), msg.slice(0, hashLen - 5)) @@ -364,7 +364,7 @@ for (const keyPrefix of [undefined, hexToBytes('0x1234')]) { assert.equal( bytesToHex(trie.root()), - '0xe118db4e01512253df38daafa16fc1d69e03e755595b5847d275d7404ebdc74a' + '0xe118db4e01512253df38daafa16fc1d69e03e755595b5847d275d7404ebdc74a', ) }) }) diff --git a/packages/trie/test/official.spec.ts b/packages/trie/test/official.spec.ts index d70f921067..a8581d4108 100644 --- a/packages/trie/test/official.spec.ts +++ b/packages/trie/test/official.spec.ts @@ -3,7 +3,7 @@ import { assert, describe, it } from 'vitest' import { Trie } from '../src/index.js' -import trieAnyOrderTests from './fixtures/trieanyorder.json' +import trieAnyOrderTests from './fixtures/trieanyorder.json' // cspell:disable-line import trieTests from './fixtures/trietest.json' describe('official tests', () => { diff --git a/packages/trie/test/proof.spec.ts b/packages/trie/test/proof.spec.ts index 3c30e2f992..99455e0569 100644 --- a/packages/trie/test/proof.spec.ts +++ b/packages/trie/test/proof.spec.ts @@ -2,58 +2,64 @@ import { RLP } from '@ethereumjs/rlp' import { 
bytesToUtf8, equalsBytes, setLengthLeft, utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Trie } from '../src/index.js' +import { + Trie, + createMerkleProof, + createTrieFromProof, + updateTrieFromMerkleProof, + verifyTrieProof, +} from '../src/index.js' describe('simple merkle proofs generation and verification', () => { it('create a merkle proof and verify it', async () => { const trie = new Trie() await trie.put(utf8ToBytes('key1aa'), utf8ToBytes('0123456789012345678901234567890123456789xx')) - await trie.put(utf8ToBytes('key2bb'), utf8ToBytes('aval2')) - await trie.put(utf8ToBytes('key3cc'), utf8ToBytes('aval3')) + await trie.put(utf8ToBytes('key2bb'), utf8ToBytes('aVal2')) + await trie.put(utf8ToBytes('key3cc'), utf8ToBytes('aVal3')) - let proof = await trie.createProof(utf8ToBytes('key2bb')) - let val = await Trie.verifyProof(utf8ToBytes('key2bb'), proof) - assert.equal(bytesToUtf8(val!), 'aval2') + let proof = await createMerkleProof(trie, utf8ToBytes('key2bb')) + let val = await verifyTrieProof(utf8ToBytes('key2bb'), proof) + assert.equal(bytesToUtf8(val!), 'aVal2') - proof = await trie.createProof(utf8ToBytes('key1aa')) - val = await Trie.verifyProof(utf8ToBytes('key1aa'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('key1aa')) + val = await verifyTrieProof(utf8ToBytes('key1aa'), proof) assert.equal(bytesToUtf8(val!), '0123456789012345678901234567890123456789xx') - proof = await trie.createProof(utf8ToBytes('key2bb')) - val = await Trie.verifyProof(utf8ToBytes('key2'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('key2bb')) + val = await verifyTrieProof(utf8ToBytes('key2'), proof) // In this case, the proof _happens_ to contain enough nodes to prove `key2` because // traversing into `key22` would touch all the same nodes as traversing into `key2` assert.equal(val, null, 'Expected value at a random key to be null') - let myKey = utf8ToBytes('anyrandomkey') - proof = await trie.createProof(myKey) - val = await Trie.verifyProof(myKey, proof) + let myKey = utf8ToBytes('anyRandomKey') + proof = await createMerkleProof(trie, myKey) + val = await verifyTrieProof(myKey, proof) assert.equal(val, null, 'Expected value to be null') - myKey = utf8ToBytes('anothergarbagekey') // should generate a valid proof of null - proof = await trie.createProof(myKey) + myKey = utf8ToBytes('anotherGarbageKey') // should generate a valid proof of null + proof = await createMerkleProof(trie, myKey) proof.push(utf8ToBytes('123456')) // extra nodes are just ignored - val = await Trie.verifyProof(myKey, proof) + val = await verifyTrieProof(myKey, proof) assert.equal(val, null, 'Expected value to be null') - await trie.put(utf8ToBytes('another'), utf8ToBytes('3498h4riuhgwe')) + await trie.put(utf8ToBytes('another'), utf8ToBytes('3498h4riuhgwe')) // cspell:disable-line // to fail our proof we can request a proof for one key - proof = await trie.createProof(utf8ToBytes('another')) + proof = await createMerkleProof(trie, utf8ToBytes('another')) // and try to use that proof on another key try { - await Trie.verifyProof(utf8ToBytes('key1aa'), proof) + await verifyTrieProof(utf8ToBytes('key1aa'), proof) assert.fail('expected error: Invalid proof provided') } catch (e: any) { assert.equal(e.message, 'Invalid proof provided') } // we can also corrupt a valid proof - proof = await trie.createProof(utf8ToBytes('key2bb')) + proof = await createMerkleProof(trie, utf8ToBytes('key2bb')) proof[0].reverse() try { - await 
Trie.verifyProof(utf8ToBytes('key2bb'), proof) + await verifyTrieProof(utf8ToBytes('key2bb'), proof) assert.fail('expected error: Invalid proof provided') } catch (e: any) { assert.equal(e.message, 'Invalid proof provided') @@ -61,17 +67,17 @@ describe('simple merkle proofs generation and verification', () => { // test an invalid exclusion proof by creating // a valid exclusion proof then making it non-null - myKey = utf8ToBytes('anyrandomkey') - proof = await trie.createProof(myKey) - val = await Trie.verifyProof(myKey, proof) + myKey = utf8ToBytes('anyRandomKey') + proof = await createMerkleProof(trie, myKey) + val = await verifyTrieProof(myKey, proof) assert.equal(val, null, 'Expected value to be null') // now make the key non-null so the exclusion proof becomes invalid - await trie.put(myKey, utf8ToBytes('thisisavalue')) + await trie.put(myKey, utf8ToBytes('thisIsaValue')) try { - await Trie.fromProof(proof, { root: trie.root() }) - assert.fail(`expected error: Invalid proof provided`) + await createTrieFromProof(proof, { root: trie.root() }) + assert.fail(`expected error: 'The provided proof does not have the expected trie root'`) } catch (e: any) { - assert.equal(e.message, 'Invalid proof provided') + assert.equal(e.message, 'The provided proof does not have the expected trie root') } }) @@ -80,8 +86,8 @@ describe('simple merkle proofs generation and verification', () => { await trie.put(utf8ToBytes('key1aa'), utf8ToBytes('0123456789012345678901234567890123456789xx')) - const proof = await trie.createProof(utf8ToBytes('key1aa')) - const val = await Trie.verifyProof(utf8ToBytes('key1aa'), proof) + const proof = await createMerkleProof(trie, utf8ToBytes('key1aa')) + const val = await verifyTrieProof(utf8ToBytes('key1aa'), proof) assert.equal(bytesToUtf8(val!), '0123456789012345678901234567890123456789xx') }) @@ -90,8 +96,8 @@ describe('simple merkle proofs generation and verification', () => { await trie.put(utf8ToBytes('key1aa'), utf8ToBytes('01234')) - const proof = await trie.createProof(utf8ToBytes('key1aa')) - const val = await Trie.verifyProof(utf8ToBytes('key1aa'), proof) + const proof = await createMerkleProof(trie, utf8ToBytes('key1aa')) + const val = await verifyTrieProof(utf8ToBytes('key1aa'), proof) assert.equal(bytesToUtf8(val!), '01234') }) @@ -100,27 +106,27 @@ describe('simple merkle proofs generation and verification', () => { await trie.put( utf8ToBytes('key1aa'), - utf8ToBytes('0123456789012345678901234567890123456789xxx') + utf8ToBytes('0123456789012345678901234567890123456789xxx'), ) await trie.put( utf8ToBytes('key1'), - utf8ToBytes('0123456789012345678901234567890123456789Very_Long') + utf8ToBytes('0123456789012345678901234567890123456789Very_Long'), ) - await trie.put(utf8ToBytes('key2bb'), utf8ToBytes('aval3')) + await trie.put(utf8ToBytes('key2bb'), utf8ToBytes('aVal3')) await trie.put(utf8ToBytes('key2'), utf8ToBytes('short')) - await trie.put(utf8ToBytes('key3cc'), utf8ToBytes('aval3')) + await trie.put(utf8ToBytes('key3cc'), utf8ToBytes('aVal3')) await trie.put(utf8ToBytes('key3'), utf8ToBytes('1234567890123456789012345678901')) - let proof = await trie.createProof(utf8ToBytes('key1')) - let val = await Trie.verifyProof(utf8ToBytes('key1'), proof) + let proof = await createMerkleProof(trie, utf8ToBytes('key1')) + let val = await verifyTrieProof(utf8ToBytes('key1'), proof) assert.equal(bytesToUtf8(val!), '0123456789012345678901234567890123456789Very_Long') - proof = await trie.createProof(utf8ToBytes('key2')) - val = await 
Trie.verifyProof(utf8ToBytes('key2'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('key2')) + val = await verifyTrieProof(utf8ToBytes('key2'), proof) assert.equal(bytesToUtf8(val!), 'short') - proof = await trie.createProof(utf8ToBytes('key3')) - val = await Trie.verifyProof(utf8ToBytes('key3'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('key3')) + val = await verifyTrieProof(utf8ToBytes('key3'), proof) assert.equal(bytesToUtf8(val!), '1234567890123456789012345678901') }) @@ -131,16 +137,16 @@ describe('simple merkle proofs generation and verification', () => { await trie.put(utf8ToBytes('b'), utf8ToBytes('b')) await trie.put(utf8ToBytes('c'), utf8ToBytes('c')) - let proof = await trie.createProof(utf8ToBytes('a')) - let val = await Trie.verifyProof(utf8ToBytes('a'), proof) + let proof = await createMerkleProof(trie, utf8ToBytes('a')) + let val = await verifyTrieProof(utf8ToBytes('a'), proof) assert.equal(bytesToUtf8(val!), 'a') - proof = await trie.createProof(utf8ToBytes('b')) - val = await Trie.verifyProof(utf8ToBytes('b'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('b')) + val = await verifyTrieProof(utf8ToBytes('b'), proof) assert.equal(bytesToUtf8(val!), 'b') - proof = await trie.createProof(utf8ToBytes('c')) - val = await Trie.verifyProof(utf8ToBytes('c'), proof) + proof = await createMerkleProof(trie, utf8ToBytes('c')) + val = await verifyTrieProof(utf8ToBytes('c'), proof) assert.equal(bytesToUtf8(val!), 'c') }) @@ -159,19 +165,19 @@ describe('simple merkle proofs generation and verification', () => { await trie.put(key, encodedValue) await trie.put(key2, encodedValue2) await trie.put(key3, encodedValue3) - const proof = await trie.createProof(key) + const proof = await createMerkleProof(trie, key) - const newTrie = await Trie.createFromProof(proof, { useKeyHashing: true }) + const newTrie = await createTrieFromProof(proof, { useKeyHashing: true }) const trieValue = await newTrie.get(key) - assert.ok(equalsBytes(trieValue!, encodedValue), 'trie value sucessfully copied') + assert.ok(equalsBytes(trieValue!, encodedValue), 'trie value successfully copied') assert.ok(equalsBytes(trie.root(), newTrie.root()), 'root set correctly') - const proof2 = await trie.createProof(key2) - await newTrie.updateFromProof(proof2) + const proof2 = await createMerkleProof(trie, key2) + await updateTrieFromMerkleProof(newTrie, proof2) const trieValue2 = await newTrie.get(key2) - assert.ok(equalsBytes(trieValue2!, encodedValue2), 'trie value succesfully updated') + assert.ok(equalsBytes(trieValue2!, encodedValue2), 'trie value successfully updated') assert.ok(equalsBytes(trie.root(), newTrie.root()), 'root set correctly') const trieValue3 = await newTrie.get(key3) @@ -182,16 +188,16 @@ describe('simple merkle proofs generation and verification', () => { const safeValue = RLP.encode(new Uint8Array([1337])) await safeTrie.put(safeKey, safeValue) - const safeProof = await safeTrie.createProof(safeKey) + const safeProof = await createMerkleProof(safeTrie, safeKey) try { - await newTrie.updateFromProof(safeProof, true) + await updateTrieFromMerkleProof(newTrie, safeProof, true) assert.fail('cannot reach this') } catch (e) { assert.ok(true, 'throws on unmatching proof') } - await newTrie.updateFromProof(safeProof) + await updateTrieFromMerkleProof(newTrie, safeProof) assert.ok(equalsBytes(trie.root(), newTrie.root()), 'root set correctly') const newSafeValue = await newTrie.get(safeKey) @@ -203,7 +209,7 @@ describe('simple merkle proofs generation and 
verification', () => { const updatedNewSafeValue = await newTrie.get(safeKey) assert.ok( equalsBytes(updatedNewSafeValue!, safeValue), - 'succesfully set the trie to the new root and got the correct value' + 'successfully set the trie to the new root and got the correct value', ) }) }) diff --git a/packages/trie/test/proof/range.spec.ts b/packages/trie/test/proof/range.spec.ts index 8bde035b5f..ba6fde5d9c 100644 --- a/packages/trie/test/proof/range.spec.ts +++ b/packages/trie/test/proof/range.spec.ts @@ -9,7 +9,7 @@ import { } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Trie } from '../../src/index.js' +import { Trie, createMerkleProof, verifyTrieRangeProof } from '../../src/index.js' import type { DB } from '@ethereumjs/util' @@ -83,18 +83,18 @@ async function verify( startKey?: Uint8Array, endKey?: Uint8Array, keys?: Uint8Array[], - vals?: Uint8Array[] + values?: Uint8Array[], ) { startKey = startKey ?? entries[start][0] endKey = endKey ?? entries[end][0] const targetRange = entries.slice(start, end + 1) - return trie.verifyRangeProof( + return verifyTrieRangeProof( trie.root(), startKey, endKey, keys ?? targetRange.map(([key]) => key), - vals ?? targetRange.map(([, val]) => val), - [...(await trie.createProof(startKey)), ...(await trie.createProof(endKey))] + values ?? targetRange.map(([, val]) => val), + [...(await createMerkleProof(trie, startKey)), ...(await createMerkleProof(trie, endKey))], ) } @@ -136,7 +136,7 @@ describe('simple merkle range proofs generation and verification', () => { assert.equal( await verify(trie, entries, start, end, startKey, endKey), - end !== entries.length - 1 + end !== entries.length - 1, ) } @@ -195,7 +195,7 @@ describe('simple merkle range proofs generation and verification', () => { // One element with two non-existent edge proofs assert.equal( await verify(trie, entries, start, start, decreasedStartKey, increasedEndKey), - true + true, ) // Test the mini trie with only a single element. @@ -211,15 +211,15 @@ describe('simple merkle range proofs generation and verification', () => { const { trie, entries } = await randomTrie(new MapDB()) assert.equal( - await trie.verifyRangeProof( + await verifyTrieRangeProof( trie.root(), null, null, entries.map(([key]) => key), entries.map(([, val]) => val), - null + null, ), - false + false, ) // With edge proofs, it should still work. 
@@ -233,9 +233,9 @@ describe('simple merkle range proofs generation and verification', () => { 0, entries.length - 1, hexToBytes(`0x${'00'.repeat(32)}`), - hexToBytes(`0x${'ff'.repeat(32)}`) + hexToBytes(`0x${'ff'.repeat(32)}`), ), - false + false, ) }) @@ -261,7 +261,7 @@ describe('simple merkle range proofs generation and verification', () => { it('create a bad range proof and verify it', async () => { const runTest = async ( - cb: (trie: Trie, entries: [Uint8Array, Uint8Array][]) => Promise + cb: (trie: Trie, entries: [Uint8Array, Uint8Array][]) => Promise, ) => { const { trie, entries } = await randomTrie(new MapDB(), false) @@ -349,7 +349,7 @@ describe('simple merkle range proofs generation and verification', () => { undefined, undefined, targetRange.map(([key]) => key), - targetRange.map(([, val]) => val) + targetRange.map(([, val]) => val), ) result = true } catch (err) { @@ -474,7 +474,7 @@ describe('simple merkle range proofs generation and verification', () => { let bloatedProof: Uint8Array[] = [] for (let i = 0; i < TRIE_SIZE; i++) { - bloatedProof = bloatedProof.concat(await trie.createProof(entries[i][0])) + bloatedProof = bloatedProof.concat(await createMerkleProof(trie, entries[i][0])) } assert.equal(await verify(trie, entries, 0, entries.length - 1), false) diff --git a/packages/trie/test/stream.spec.ts b/packages/trie/test/stream.spec.ts deleted file mode 100644 index 5c4d05ff43..0000000000 --- a/packages/trie/test/stream.spec.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { utf8ToBytes } from '@ethereumjs/util' -import { assert, describe, it } from 'vitest' - -import { Trie } from '../src/index.js' - -import type { BatchDBOp } from '@ethereumjs/util' - -describe('kv stream test', () => { - const trie = new Trie() - const ops = [ - { - type: 'del', - key: utf8ToBytes('father'), - }, - { - type: 'put', - key: utf8ToBytes('name'), - value: utf8ToBytes('Yuri Irsenovich Kim'), - }, - { - type: 'put', - key: utf8ToBytes('dob'), - value: utf8ToBytes('16 February 1941'), - }, - { - type: 'put', - key: utf8ToBytes('spouse'), - value: utf8ToBytes('Kim Young-sook'), - }, - { - type: 'put', - key: utf8ToBytes('occupation'), - value: utf8ToBytes('Clown'), - }, - { - type: 'put', - key: utf8ToBytes('nameads'), - value: utf8ToBytes('Yuri Irsenovich Kim'), - }, - { - type: 'put', - key: utf8ToBytes('namfde'), - value: utf8ToBytes('Yuri Irsenovich Kim'), - }, - { - type: 'put', - key: utf8ToBytes('namsse'), - value: utf8ToBytes('Yuri Irsenovich Kim'), - }, - { - type: 'put', - key: utf8ToBytes('dofab'), - value: utf8ToBytes('16 February 1941'), - }, - { - type: 'put', - key: utf8ToBytes('spoudse'), - value: utf8ToBytes('Kim Young-sook'), - }, - { - type: 'put', - key: utf8ToBytes('occupdsation'), - value: utf8ToBytes('Clown'), - }, - { - type: 'put', - key: utf8ToBytes('dozzzb'), - value: utf8ToBytes('16 February 1941'), - }, - { - type: 'put', - key: utf8ToBytes('spouszze'), - value: utf8ToBytes('Kim Young-sook'), - }, - { - type: 'put', - key: utf8ToBytes('occupatdfion'), - value: utf8ToBytes('Clown'), - }, - { - type: 'put', - key: utf8ToBytes('dssob'), - value: utf8ToBytes('16 February 1941'), - }, - { - type: 'put', - key: utf8ToBytes('spossuse'), - value: utf8ToBytes('Kim Young-sook'), - }, - { - type: 'put', - key: utf8ToBytes('occupssation'), - value: utf8ToBytes('Clown'), - }, - ] as BatchDBOp[] - - const valObj1 = {} as any - const valObj2 = {} as any - for (const op of ops) { - if (op.type === 'put') { - valObj1[op.key.toString()] = op.value.toString() - valObj2[op.key.toString()] 
= op.value.toString() - } - } - - it('should populate trie', async () => { - await trie.batch(ops) - }) - - it('should fetch all of the nodes', () => { - const stream = trie.createReadStream() - stream.on('data', (d: any) => { - const key = d.key.toString() - const value = d.value.toString() - assert.equal(valObj1[key], value) - delete valObj1[key] - }) - stream.on('end', () => { - const keys = Object.keys(valObj1) - assert.equal(keys.length, 0) - }) - }) -}) - -describe('db stream test', () => { - const trie = new Trie() - const ops = [ - { - type: 'put', - key: utf8ToBytes('color'), - value: utf8ToBytes('purple'), - }, - { - type: 'put', - key: utf8ToBytes('food'), - value: utf8ToBytes('sushi'), - }, - { - type: 'put', - key: utf8ToBytes('fight'), - value: utf8ToBytes('fire'), - }, - { - type: 'put', - key: utf8ToBytes('colo'), - value: utf8ToBytes('trolo'), - }, - { - type: 'put', - key: utf8ToBytes('color'), - value: utf8ToBytes('blue'), - }, - { - type: 'put', - key: utf8ToBytes('color'), - value: utf8ToBytes('pink'), - }, - ] as BatchDBOp[] - - it('should populate trie', async () => { - trie.checkpoint() - await trie.batch(ops) - }) -}) diff --git a/packages/trie/test/trie/checkpoint.spec.ts b/packages/trie/test/trie/checkpoint.spec.ts index 3e0d320b7e..85ee293b10 100644 --- a/packages/trie/test/trie/checkpoint.spec.ts +++ b/packages/trie/test/trie/checkpoint.spec.ts @@ -10,7 +10,7 @@ import { keccak256 } from 'ethereum-cryptography/keccak.js' import { sha256 } from 'ethereum-cryptography/sha256.js' import { assert, describe, it } from 'vitest' -import { ROOT_DB_KEY, Trie } from '../../src/index.js' +import { ROOT_DB_KEY, Trie, createTrie } from '../../src/index.js' import type { BatchDBOp } from '@ethereumjs/util' @@ -208,8 +208,8 @@ describe('testing checkpoints', () => { const KEY = utf8ToBytes('last_block_height') const KEY_ROOT = keccak256(ROOT_DB_KEY) - // Initialise State - const CommittedState = await Trie.create({ + // Initialize State + const CommittedState = await createTrie({ useKeyHashing: true, useNodePruning: true, useRootPersistence: true, @@ -234,11 +234,11 @@ describe('testing checkpoints', () => { assert.equal(bytesToUtf8((await CommittedState.get(KEY))!), '1') assert.equal( bytesToHex((await CommittedState['_db'].get(KEY_ROOT))!), - '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c' + '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c', ) assert.equal( bytesToHex(CommittedState.root()), - '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c' + '0x77ddd505d2a5b76a2a6ee34b827a0d35ca19f8d358bee3d74a84eab59794487c', ) // From MemoryState, now take the final checkpoint @@ -264,7 +264,7 @@ describe('testing checkpoints', () => { [ hexToBytes('0xd7eba6ee0f011acb031b79554d57001c42fbfabb150eb9fdd3b6d434f7b791eb'), hexToBytes('0xe3a1202418cf7414b1e6c2c8d92b4673eecdb4aac88f7f58623e3be903aefb2fd4655c32'), - ] + ], ) // Verify that the key is updated assert.equal(bytesToUtf8((await CommittedState.get(KEY))!), '2') diff --git a/packages/trie/test/trie/prune.spec.ts b/packages/trie/test/trie/prune.spec.ts index bee6d082bb..dc332a5420 100644 --- a/packages/trie/test/trie/prune.spec.ts +++ b/packages/trie/test/trie/prune.spec.ts @@ -1,7 +1,7 @@ import { KECCAK256_RLP, equalsBytes, hexToBytes, randomBytes, utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { Trie, isRawNode } from '../../src/index.js' +import { Trie, createTrie, isRawNode } from '../../src/index.js' import type { BranchNode } 
from '../../src/index.js' @@ -207,7 +207,7 @@ describe('Pruned trie tests', () => { it('should prune when keys are updated or deleted (with `useRootPersistence` enabled)', async () => { for (let testID = 0; testID < 1; testID++) { - const trie = await Trie.create({ useNodePruning: true, useRootPersistence: true }) + const trie = await createTrie({ useNodePruning: true, useRootPersistence: true }) const keys: Uint8Array[] = [] for (let i = 0; i < 100; i++) { keys.push(randomBytes(32)) diff --git a/packages/trie/test/trie/secure.spec.ts b/packages/trie/test/trie/secure.spec.ts index 7700b596ac..228f4d7b87 100644 --- a/packages/trie/test/trie/secure.spec.ts +++ b/packages/trie/test/trie/secure.spec.ts @@ -10,7 +10,7 @@ import { keccak256 } from 'ethereum-cryptography/keccak.js' import { sha256 } from 'ethereum-cryptography/sha256.js' import { assert, describe, it } from 'vitest' -import { ROOT_DB_KEY, Trie } from '../../src/index.js' +import { ROOT_DB_KEY, Trie, createMerkleProof, verifyTrieProof } from '../../src/index.js' import secureTrieTests from '../fixtures/trietest_secureTrie.json' describe('SecureTrie', () => { @@ -52,8 +52,8 @@ describe('SecureTrie proof', () => { const trie = new Trie({ useKeyHashing: true, db: new MapDB() }) await trie.put(utf8ToBytes('key1aa'), utf8ToBytes('01234')) - const proof = await trie.createProof(utf8ToBytes('key1aa')) - const val = await Trie.verifyProof(utf8ToBytes('key1aa'), proof, { + const proof = await createMerkleProof(trie, utf8ToBytes('key1aa')) + const val = await verifyTrieProof(utf8ToBytes('key1aa'), proof, { useKeyHashing: true, }) assert.deepEqual(val, utf8ToBytes('01234')) @@ -61,7 +61,7 @@ describe('SecureTrie proof', () => { it('read back data written with hashed key', async () => { const trie = new Trie({ useKeyHashing: true, db: new MapDB() }) - // skip key transformation if the key is already hashed like data recieved in snapsync + // skip key transformation if the key is already hashed like data received in snapsync await trie.put(keccak256(utf8ToBytes('key1aa')), utf8ToBytes('01234'), true) const val = await trie.get(utf8ToBytes('key1aa')) @@ -128,35 +128,35 @@ describe('secure tests', () => { const trie = new Trie({ useKeyHashing: true, db: new MapDB() }) const a = hexToBytes( - '0xf8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf' + '0xf8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf', ) const ak = hexToBytes('0x095e7baea6a6c7c4c2dfeb977efac326af552d87') const b = hexToBytes( - '0xf844802ea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab' + '0xf844802ea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab', ) const bk = hexToBytes('0x945304eb96065b2a98b57a48a06ae28d285a71b5') const c = hexToBytes( - '0xf84c80880de0b6b3a7640000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84c80880de0b6b3a7640000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) const ck = hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') // checkpoint // checkpoint // commit const d = hexToBytes( - 
'0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf' + '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf', ) const dk = hexToBytes('0x095e7baea6a6c7c4c2dfeb977efac326af552d87') const e = hexToBytes( - '0xf8478083010851a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab' + '0xf8478083010851a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0db94dc4aab9b6a1a11956906ea34f3252f394576aece12199b23b269bb2738ab', ) const ek = hexToBytes('0x945304eb96065b2a98b57a48a06ae28d285a71b5') const f = hexToBytes( - '0xf84c01880de0b6b3540df72ca056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84c01880de0b6b3540df72ca056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) const fk = hexToBytes('0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b') // commit const g = hexToBytes( - '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf' + '0xf8488084535500b1a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0a155280bc3c09fd31b0adebbdd4ef3d5128172c0d2008be964dc9e10e0f0fedf', ) const gk = hexToBytes('0x095e7baea6a6c7c4c2dfeb977efac326af552d87') diff --git a/packages/trie/test/trie/trie.spec.ts b/packages/trie/test/trie/trie.spec.ts index 879fd0fb65..b74d13d717 100644 --- a/packages/trie/test/trie/trie.spec.ts +++ b/packages/trie/test/trie/trie.spec.ts @@ -1,6 +1,7 @@ import { KECCAK256_RLP, MapDB, + bigIntToBytes, bytesToHex, concatBytes, equalsBytes, @@ -11,7 +12,7 @@ import { import { keccak256 } from 'ethereum-cryptography/keccak.js' import { assert, describe, it } from 'vitest' -import { ROOT_DB_KEY as BASE_DB_KEY, Trie } from '../../src/index.js' +import { ROOT_DB_KEY as BASE_DB_KEY, Trie, createTrie } from '../../src/index.js' for (const { constructor, defaults, title } of [ { @@ -43,48 +44,48 @@ for (const { constructor, defaults, title } of [ } describe(`${title} (Persistence)`, () => { - it('creates an instance via the static constructor `create` function and defaults to `false` with a database', async () => { + it('creates an instance via createTrie and defaults to `false` with a database', async () => { // TODO: check this test assert.isUndefined( - ((await constructor.create({ ...defaults, db: new MapDB() })) as any)._useRootPersistence + ((await createTrie({ ...defaults, db: new MapDB() })) as any)._useRootPersistence, ) }) - it('creates an instance via the static constructor `create` function and respects the `useRootPersistence` option with a database', async () => { + it('creates an instance via createTrie and respects the `useRootPersistence` option with a database', async () => { // TODO: check this test assert.isUndefined( ( - (await constructor.create({ + (await createTrie({ ...defaults, db: new MapDB(), useRootPersistence: false, })) as any - )._useRootPersistence + )._useRootPersistence, ) }) - it('creates an instance via the static constructor `create` function and respects the `useRootPersistence` option with a database', async () => { + it('creates an instance via createTrie and respects the `useRootPersistence` option with a 
database', async () => { // TODO: check this test assert.isUndefined( ( - (await constructor.create({ + (await createTrie({ ...defaults, db: new MapDB(), useRootPersistence: false, })) as any - )._useRootPersistence + )._useRootPersistence, ) }) - it('creates an instance via the static constructor `create` function and defaults to `false` without a database', async () => { + it('creates an instance via createTrie and defaults to `false` without a database', async () => { // TODO: check this test assert.isUndefined( - ((await constructor.create({ ...defaults, db: new MapDB() })) as any)._useRootPersistence + ((await createTrie({ ...defaults, db: new MapDB() })) as any)._useRootPersistence, ) }) it('persist the root if the `useRootPersistence` option is `true`', async () => { - const trie = await constructor.create({ + const trie = await createTrie({ ...defaults, db: new MapDB(), useRootPersistence: true, @@ -98,7 +99,7 @@ for (const { constructor, defaults, title } of [ }) it('persist the root if the `root` option is given', async () => { - const trie = await constructor.create({ + const trie = await createTrie({ ...defaults, db: new MapDB(), root: KECCAK256_RLP, @@ -113,7 +114,7 @@ for (const { constructor, defaults, title } of [ }) it('does not persist the root if the `useRootPersistence` option is `false`', async () => { - const trie = await constructor.create({ + const trie = await createTrie({ ...defaults, db: new MapDB(), useRootPersistence: false, @@ -127,7 +128,7 @@ for (const { constructor, defaults, title } of [ }) it('persists the root if the `db` option is not provided', async () => { - const trie = await constructor.create({ ...defaults, useRootPersistence: true }) + const trie = await createTrie({ ...defaults, useRootPersistence: true }) assert.equal(await trie['_db'].get(ROOT_DB_KEY), undefined) @@ -139,17 +140,17 @@ for (const { constructor, defaults, title } of [ it('persist and restore the root', async () => { const db = new MapDB() - const trie = await constructor.create({ ...defaults, db, useRootPersistence: true }) + const trie = await createTrie({ ...defaults, db, useRootPersistence: true }) assert.equal(await trie['_db'].get(ROOT_DB_KEY), undefined) await trie.put(utf8ToBytes('foo'), utf8ToBytes('bar')) assert.equal(bytesToHex((await trie['_db'].get(ROOT_DB_KEY))!), EXPECTED_ROOTS) // Using the same database as `trie` so we should have restored the root - const copy = await constructor.create({ ...defaults, db, useRootPersistence: true }) + const copy = await createTrie({ ...defaults, db, useRootPersistence: true }) assert.equal(bytesToHex((await copy['_db'].get(ROOT_DB_KEY))!), EXPECTED_ROOTS) // New trie with a new database so we shouldn't find a root to restore - const empty = await constructor.create({ + const empty = await createTrie({ ...defaults, db: new MapDB(), useRootPersistence: true, @@ -175,7 +176,7 @@ for (const { constructor, defaults, title } of [ const value = randomBytes(10) return { key, value } }) - const trie = await constructor.create({ + const trie = await createTrie({ ...defaults, db: new MapDB(), }) @@ -188,7 +189,7 @@ for (const { constructor, defaults, title } of [ for (const root of roots) { assert.isTrue( await trie.checkRoot(unprefixedHexToBytes(root)), - 'Should return true for all nodes in trie' + 'Should return true for all nodes in trie', ) } }) @@ -199,28 +200,28 @@ for (const { constructor, defaults, title } of [ } }) it('should return false for all keys if trie is empty', async () => { - const emptyTrie = await constructor.create({ 
+ const emptyTrie = await createTrie({ ...defaults, db: new MapDB(), }) assert.deepEqual(emptyTrie.EMPTY_TRIE_ROOT, emptyTrie.root(), 'Should return empty trie root') assert.isTrue( await emptyTrie.checkRoot(emptyTrie.EMPTY_TRIE_ROOT), - 'Should return true for empty root' + 'Should return true for empty root', ) assert.isFalse( await emptyTrie.checkRoot(emptyTrie['appliedKey'](ROOT_DB_KEY)), - 'Should return false for persistence key' + 'Should return false for persistence key', ) for (const root of roots) { assert.isFalse( await emptyTrie.checkRoot(unprefixedHexToBytes(root)), - 'Should always return false' + 'Should always return false', ) } }) it('Should throw on unrelated errors', async () => { - const emptyTrie = await constructor.create({ + const emptyTrie = await createTrie({ ...defaults, db: new MapDB(), useRootPersistence: true, @@ -234,7 +235,7 @@ for (const { constructor, defaults, title } of [ assert.notEqual( 'Missing node in DB', e.message, - 'Should throw when error is unrelated to checkroot' + 'Should throw when error is unrelated to checkroot', ) } }) @@ -252,13 +253,13 @@ describe('keyHashingFunction', async () => { }, } - const trieWithHashFunction = await Trie.create({ useKeyHashingFunction: keyHashingFunction }) - const trieWithCommon = await Trie.create({ common: c }) + const trieWithHashFunction = await createTrie({ useKeyHashingFunction: keyHashingFunction }) + const trieWithCommon = await createTrie({ common: c }) assert.equal( bytesToHex(trieWithHashFunction.root()), '0x8001', - 'used hash function from customKeyHashingFunction' + 'used hash function from customKeyHashingFunction', ) assert.equal(bytesToHex(trieWithCommon.root()), '0x80', 'used hash function from common') }) @@ -273,16 +274,32 @@ describe('keyHashingFunction', async () => { }, } - const trieWithHashFunction = await Trie.create({ useKeyHashingFunction: keyHashingFunction }) + const trieWithHashFunction = await createTrie({ useKeyHashingFunction: keyHashingFunction }) const trieWithHashFunctionCopy = trieWithHashFunction.shallowCopy() - const trieWithCommon = await Trie.create({ common: c }) + const trieWithCommon = await createTrie({ common: c }) const trieWithCommonCopy = trieWithCommon.shallowCopy() assert.equal( bytesToHex(trieWithHashFunctionCopy.root()), '0x8001', - 'used hash function from customKeyHashingFunction' + 'used hash function from customKeyHashingFunction', ) assert.equal(bytesToHex(trieWithCommonCopy.root()), '0x80', 'used hash function from common') }) }) + +describe('getValueMap', () => { + it('should return a map of all hashed keys and values', async () => { + const trie = await createTrie({}) + const entries: [Uint8Array, string][] = [ + [bigIntToBytes(1n), '0x' + '0a'.repeat(32)], + [bigIntToBytes(2n), '0x' + '0b'.repeat(32)], + [bigIntToBytes(3n), '0x' + '0c'.repeat(32)], + ] + for (const entry of entries) { + await trie.put(entry[0], utf8ToBytes(entry[1])) + } + const dump = await trie.getValueMap() + assert.equal(Object.entries(dump.values).length, 3) + }) +}) diff --git a/packages/trie/test/util/asyncWalk.spec.ts b/packages/trie/test/util/asyncWalk.spec.ts index e2bf4b9088..7b875625bf 100644 --- a/packages/trie/test/util/asyncWalk.spec.ts +++ b/packages/trie/test/util/asyncWalk.spec.ts @@ -1,7 +1,13 @@ import { bytesToHex, equalsBytes, hexToBytes, utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { LeafNode, Trie } from '../../src/index.js' +import { + LeafNode, + Trie, + createMerkleProof, + createTrieFromProof, + 
verifyTrieProof, +} from '../../src/index.js' import { _walkTrie } from '../../src/util/asyncWalk.js' import { bytesToNibbles } from '../../src/util/nibbles.js' import trieTests from '../fixtures/trietest.json' @@ -78,11 +84,11 @@ describe('walk a sparse trie', async () => { }) // Generate a proof for inputs[0] const proofKey = inputs[0][0] - const proof = await trie.createProof(proofKey) - assert.ok(await Trie.verifyProof(proofKey, proof)) + const proof = await createMerkleProof(trie, proofKey) + assert.ok(await verifyTrieProof(proofKey, proof)) // Build a sparse trie from the proof - const fromProof = await Trie.fromProof(proof, { root: trie.root() }) + const fromProof = await createTrieFromProof(proof, { root: trie.root() }) // Walk the sparse trie const walker = fromProof.walkTrieIterable(fromProof.root()) diff --git a/packages/trie/test/util/encodingUtils.spec.ts b/packages/trie/test/util/encodingUtils.spec.ts index 08152f8406..ba30a5120e 100644 --- a/packages/trie/test/util/encodingUtils.spec.ts +++ b/packages/trie/test/util/encodingUtils.spec.ts @@ -7,10 +7,10 @@ import { compactBytesToNibbles, mergeAndFormatKeyPaths, nibbleTypeToByteType, - nibbleTypeToPackedBytes, nibblesToCompactBytes, pathToHexKey, } from '../../src/util/encoding.js' +import { nibblesTypeToPackedBytes } from '../../src/util/nibbles.js' import type { Nibbles } from '../../src/types.js' @@ -69,7 +69,7 @@ describe('encoding', () => { }) it('should return the correct hex-encoded key in "hex" encoding', () => { - const path = 'aabbcc' + const path = 'aabbcc' // cspell:disable-line const extension: Nibbles = [10, 11, 12] const result = pathToHexKey(path, extension, 'hex') @@ -81,7 +81,7 @@ describe('encoding', () => { assert.deepEqual( result, expected, - 'Returned hex-encoded key does not match the expected result' + 'Returned hex-encoded key does not match the expected result', ) }) @@ -93,12 +93,12 @@ describe('encoding', () => { // Calculate the expected result manually based on the functions used in the pathToHexKey function const b = hexToBytes(`0x${path}`) const n = byteTypeToNibbleType(b) - const expected = nibbleTypeToPackedBytes(n.concat(extension)) + const expected = nibblesTypeToPackedBytes(n.concat(extension)) assert.deepEqual( result, expected, - 'Returned hex-encoded key in "keybyte" encoding does not match the expected result' + 'Returned hex-encoded key in "keybyte" encoding does not match the expected result', ) }) @@ -110,7 +110,7 @@ describe('encoding', () => { assert.throws( () => pathToHexKey(path, extension, 'invalid'), Error, - 'retType must be either "keybyte" or "hex"' + 'retType must be either "keybyte" or "hex"', ) }) @@ -122,17 +122,17 @@ describe('encoding', () => { assert.equal( paths.reduce((count, subArray) => count + subArray.length, 0), pathStrings.length, - 'should have correct number of paths' + 'should have correct number of paths', ) assert.deepEqual( paths[0], [Uint8Array.of(26), Uint8Array.of(27), Uint8Array.of(28), Uint8Array.of(29)], - 'should merge paths correctly' + 'should merge paths correctly', ) assert.deepEqual( paths[1], [Uint8Array.of(30), Uint8Array.of(26)], - 'should merge paths correctly' + 'should merge paths correctly', ) assert.deepEqual(paths[2], [Uint8Array.of(31)], 'should merge paths correctly') }) diff --git a/packages/trie/test/util/genesisState.spec.ts b/packages/trie/test/util/genesisState.spec.ts index 77fc886905..9a672da74f 100644 --- a/packages/trie/test/util/genesisState.spec.ts +++ b/packages/trie/test/util/genesisState.spec.ts @@ -15,7 +15,7 @@ 
describe('[Util/genesisStateRoot]', () => { assert.equal( bytesToHex(stateRoot), '0x52e628c7f35996ba5a0402d02b34535993c89ff7fc4c430b2763ada8554bee62', - 'kiln stateRoot matches' + 'kiln stateRoot matches', ) }) }) @@ -25,6 +25,6 @@ it('should correctly derive mainnet stateRoot from ethereumjs genesis', async () assert.equal( bytesToHex(stateRoot), '0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544', - 'mainnet stateRoot matches' + 'mainnet stateRoot matches', ) }) diff --git a/packages/trie/test/util/log.spec.ts b/packages/trie/test/util/log.spec.ts index bb58340d2b..0131defbcd 100644 --- a/packages/trie/test/util/log.spec.ts +++ b/packages/trie/test/util/log.spec.ts @@ -1,12 +1,13 @@ import { utf8ToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' +import { createMerkleProof, createTrieFromProof, verifyMerkleProof } from '../../src/index.js' import { Trie } from '../../src/trie.js' describe('Run Trie script with DEBUG enabled', async () => { const trie_entries: [string, string | null][] = [ ['do', 'verb'], - ['ether', 'wookiedoo'], + ['ether', 'wookiedoo'], // cspell:disable-line ['doge', 'coin'], ['ether', null], ['dog', 'puppy'], @@ -19,8 +20,8 @@ describe('Run Trie script with DEBUG enabled', async () => { await trie.put(utf8ToBytes(key), value === null ? Uint8Array.from([]) : utf8ToBytes(value)) } - const proof = await trie.createProof(utf8ToBytes('doge')) - const valid = await trie.verifyProof(trie.root(), utf8ToBytes('doge'), proof) + const proof = await createMerkleProof(trie, utf8ToBytes('doge')) + const valid = await verifyMerkleProof(trie, trie.root(), utf8ToBytes('doge'), proof) it('should be valid', async () => { assert.deepEqual(valid, utf8ToBytes('coin')) @@ -32,9 +33,8 @@ describe('Run Trie script with DEBUG enabled', async () => { trie.checkpoint() await trie.revert() process.env.DEBUG = '' - const trie2 = new Trie({}) + const trie2 = await createTrieFromProof(proof) trie2['DEBUG'] = true - await trie2.fromProof(proof) it('tries should share root', async () => { assert.deepEqual(trie.root(), trie2.root()) }) diff --git a/packages/trie/tsconfig.lint.json b/packages/trie/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/trie/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/tx/.eslintrc.cjs b/packages/tx/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/tx/.eslintrc.cjs +++ b/packages/tx/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/tx/CHANGELOG.md b/packages/tx/CHANGELOG.md index de7c0520f6..c6003555d1 100644 --- a/packages/tx/CHANGELOG.md +++ b/packages/tx/CHANGELOG.md @@ -6,7 +6,62 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 5.3.0 - 2024-03-05 +## 5.4.0 - 2024-08-15 + +#### EOA Code Transaction (EIP-7702) (outdated) + +This release introduces support for a non-final version of [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) EOA code transactions, see PR [#3470](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3470). This tx type allows code to be run in the context of an EOA, extending the functionality which can be "reached" from, or integrated into, the scope of an otherwise limited EOA account. + +The following is a simple example of how to use an `EOACodeEIP7702Transaction` with one authorization list item: + +```ts +// ./examples/EOACodeTx.ts + +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { EOACodeEIP7702Transaction } from '@ethereumjs/tx' +import type { PrefixedHexString } from '@ethereumjs/util' + +const ones32 = `0x${'01'.repeat(32)}` as PrefixedHexString + +const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Cancun, eips: [7702] }) +const tx = EOACodeEIP7702Transaction.fromTxData( + { + authorizationList: [ + { + chainId: '0x1', + address: `0x${'20'.repeat(20)}`, + nonce: ['0x1'], + yParity: '0x1', + r: ones32, + s: ones32, + }, + ], + }, + { common }, +) + +console.log( + `EIP-7702 EOA code tx created with ${tx.authorizationList.length} authorization list item(s).`, +) +``` + +Note: Things move fast with `EIP-7702` and the released implementation is based on [this](https://github.com/ethereum/EIPs/blob/14400434e1199c57d912082127b1d22643788d11/EIPS/eip-7702.md) commit and is therefore already outdated. An up-to-date version will be released along our breaking release round planned for early September 2024. + +### Verkle Updates + +- Update `kzg-wasm` to `0.4.0`, PR [#3358](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3358) +- Shift Verkle to `osaka` hardfork, PR [#3371](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3371) + +### Other Features + +- Extend `BlobEIP4844Transaction.networkWrapperToJson()` to also include the 4844 fields, PR [#3365](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3365) +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Bugfixes + +- Fix bug in generic error message regarding chain ID reporting, PR [#3386](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3386) + +## 5.3.0 - 2024-03-18 ### Full 4844 Browser Readiness @@ -150,7 +205,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects.
Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -241,7 +296,7 @@ const simpleBlobTx = BlobEIP4844Transaction.fromTxData( gasLimit: 0xffffffn, to: 0x1122334455667788991011121314151617181920, }, - { common } + { common }, ) ``` @@ -525,7 +580,7 @@ const tx = LegacyTransaction.fromTxData( { // Provide your tx data here or use default values }, - { common } + { common }, ) ``` @@ -535,7 +590,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -543,7 +598,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -597,7 +652,7 @@ const tx = LegacyTransaction.fromTxData( { // Provide your tx data here or use default values }, - { common } + { common }, ) ``` @@ -724,7 +779,7 @@ Invalid Signature: s-values greater than secp256k1n/2 are considered invalid (tx The extended errors give substantial more object and chain context and should ease debugging. -**Potentially breaking**: Attention! If you do react on errors in your code and do exact errror matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisions and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). +**Potentially breaking**: Attention! If you do react on errors in your code and do exact error matching (`error.message === 'invalid transaction trie'`) things will break. 
Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisons and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). ## Other Changes @@ -806,7 +861,7 @@ if (tx.supports(Capability.EIP2930AccessLists)) { The following capabilities are currently supported: ```ts -enum Capabilitiy { +enum Capability { EIP155ReplayProtection: 155, // Only for legacy txs EIP1559FeeMarket: 1559, EIP2718TypedTransaction: 2718, // Use for a typed-tx-or-not switch @@ -1086,9 +1141,9 @@ Learn more about the full API in the [docs](./docs/README.md). #### Immutability -The returned transaction is now frozen and immutable. To work with a maliable transaction, copy it with `const fakeTx = Object.create(tx)`. For security reasons it is highly recommended to stay in a freezed `Transaction` context on usage. +The returned transaction is now frozen and immutable. To work with a mutable transaction, copy it with `const fakeTx = Object.create(tx)`. For security reasons it is highly recommended to stay in a freezed `Transaction` context on usage. -If you need `Transaction` mutability - e.g. because you want to subclass `Transaction` and modifiy its behavior - there is a `freeze` option to prevent the `Object.freeze()` call on initialization, see PR [#941](https://github.com/ethereumjs/ethereumjs-monorepo/pull/941). +If you need `Transaction` mutability - e.g. because you want to subclass `Transaction` and modify its behavior - there is a `freeze` option to prevent the `Object.freeze()` call on initialization, see PR [#941](https://github.com/ethereumjs/ethereumjs-monorepo/pull/941). #### from @@ -1100,19 +1155,19 @@ Getting a message to sign has been changed from calling `tx.hash(false)` to `tx. #### Fake Transaction -The `FakeTransaction` class was removed since its functionality can now be implemented with less code. To create a fake tansaction for use in e.g. `VM.runTx()` overwrite `getSenderAddress` with your own `Address`. See a full example in the section in the [README](./README.md#fake-transaction). +The `FakeTransaction` class was removed since its functionality can now be implemented with less code. To create a fake transaction for use in e.g. `VM.runTx()` overwrite `getSenderAddress` with your own `Address`. See a full example in the section in the [README](./README.md#fake-transaction). ### New Default Hardfork **Breaking:** The default HF on the library has been updated from `petersburg` to `istanbul`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). -The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863). +The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863). ### Dual ES5 and ES2017 Builds We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. 
-Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1199,7 +1254,7 @@ Learn more about the full API in the [docs](./docs/README.md). #### Immutability -The returned transaction is now frozen and immutable. To work with a maliable transaction, copy it with `const fakeTx = Object.create(tx)`. +The returned transaction is now frozen and immutable. To work with a mutable transaction, copy it with `const fakeTx = Object.create(tx)`. #### from @@ -1211,12 +1266,12 @@ Getting a message to sign has been changed from calling `tx.hash(false)` to `tx. #### Fake Transaction -The `FakeTransaction` class was removed since its functionality can now be implemented with less code. To create a fake tansaction for use in e.g. `VM.runTx()` overwrite `getSenderAddress` with your own `Address`. See a full example in the section in the [README](./README.md#fake-transaction). +The `FakeTransaction` class was removed since its functionality can now be implemented with less code. To create a fake transaction for use in e.g. `VM.runTx()` overwrite `getSenderAddress` with your own `Address`. See a full example in the section in the [README](./README.md#fake-transaction). ### New Default Hardfork **Breaking:** The default HF on the library has been updated from `petersburg` to `istanbul`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). -The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, +The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863). ### Dual ES5 and ES2017 Builds @@ -1228,7 +1283,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1324,7 +1379,7 @@ see PRs [#153](https://github.com/ethereumjs/ethereumjs-tx/pull/153), [#147](https://github.com/ethereumjs/ethereumjs-tx/pull/147) and [#143](https://github.com/ethereumjs/ethereumjs-tx/pull/143). 
-This comes with some changes in how different `v` values passed on instantation +This comes with some changes in how different `v` values passed on instantiation or changed on runtime are handled: - The constructor throws if the `v` value is present, indicates that `EIP-155` @@ -1347,7 +1402,7 @@ pre-`Spurious Dragon` hardfork option. ## [1.3.6] - 2018-07-02 -- Fixes issue [#108](https://github.com/ethereumjs/ethereumjs-tx/issues/108) with the `FakeTransaction.hash()` function by reverting the introduced signature handling changes in Fake transaction hash creation from PR [#94](https://github.com/ethereumjs/ethereumjs-tx/pull/94) introduced in `v1.3.5`. The signature is now again only created and added to the hash when `from` address is set and `from` is not defaulting to the zero adress any more, see PR [#110](https://github.com/ethereumjs/ethereumjs-tx/pull/110) +- Fixes issue [#108](https://github.com/ethereumjs/ethereumjs-tx/issues/108) with the `FakeTransaction.hash()` function by reverting the introduced signature handling changes in Fake transaction hash creation from PR [#94](https://github.com/ethereumjs/ethereumjs-tx/pull/94) introduced in `v1.3.5`. The signature is now again only created and added to the hash when `from` address is set and `from` is not defaulting to the zero address any more, see PR [#110](https://github.com/ethereumjs/ethereumjs-tx/pull/110) - Added additional tests to cover issue described above [1.3.6]: https://github.com/ethereumjs/ethereumjs-monorepo/compare/%40ethereumjs%2Ftx%401.3.5...%40ethereumjs%2Ftx%401.3.6 diff --git a/packages/tx/README.md b/packages/tx/README.md index f205171237..d4c96f2f07 100644 --- a/packages/tx/README.md +++ b/packages/tx/README.md @@ -102,6 +102,7 @@ This library supports the following transaction types ([EIP-2718](https://eips.e - `BlobEIP4844Transaction` ([EIP-4844](https://eips.ethereum.org/EIPS/eip-4844), proto-danksharding) - `FeeMarketEIP1559Transaction` ([EIP-1559](https://eips.ethereum.org/EIPS/eip-1559), gas fee market) +- `EOACodeEIP7702Transaction` (experimental) ([EIP-7702](https://eips.ethereum.org/EIPS/eip-7702), EOA code delegation) - `AccessListEIP2930Transaction` ([EIP-2930](https://eips.ethereum.org/EIPS/eip-2930), optional access lists) - `BlobEIP4844Transaction` ([EIP-4844](https://eips.ethereum.org/EIPS/eip-4844), blob transactions) - `LegacyTransaction`, the Ethereum standard tx up to `berlin`, now referred to as legacy txs with the introduction of tx types @@ -207,6 +208,49 @@ const tx = FeeMarketEIP1559Transaction.fromTxData(txData, { common }) console.log(bytesToHex(tx.hash())) // 0x6f9ef69ccb1de1aea64e511efd6542541008ced321887937c95b03779358ec8a +#### EOA Code Transaction (EIP-7702) (outdated) + +- Class: `EOACodeEIP7702Transaction` +- Activation: `prague` (or per EIP setting) +- Type: `4` + +This library supports a non-final version of [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) starting with `v5.4.0`. This tx type allows code to be run in the context of an EOA, extending the functionality which can be "reached" from, or integrated into, the scope of an otherwise limited EOA account.
+ +The following is a simple example of how to use an `EOACodeEIP7702Transaction` with one authorization list item: + +```ts +// ./examples/EOACodeTx.ts + +import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { EOACodeEIP7702Transaction } from '@ethereumjs/tx' +import type { PrefixedHexString } from '@ethereumjs/util' + +const ones32 = `0x${'01'.repeat(32)}` as PrefixedHexString + +const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Cancun, eips: [7702] }) +const tx = EOACodeEIP7702Transaction.fromTxData( + { + authorizationList: [ + { + chainId: '0x1', + address: `0x${'20'.repeat(20)}`, + nonce: ['0x1'], + yParity: '0x1', + r: ones32, + s: ones32, + }, + ], + }, + { common }, +) + +console.log( + `EIP-7702 EOA code tx created with ${tx.authorizationList.length} authorization list item(s).`, +) +``` + +Note: Things move fast with `EIP-7702` and the currently released implementation is based on [this](https://github.com/ethereum/EIPs/blob/14400434e1199c57d912082127b1d22643788d11/EIPS/eip-7702.md) commit and is therefore already outdated. An up-to-date version will be released along our breaking release round planned for early September 2024. + #### Access List Transactions (EIP-2930) - Class: `AccessListEIP2930Transaction` @@ -284,7 +328,7 @@ const tx = LegacyTransaction.fromTxData(txParams, { common }) const privateKey = Buffer.from( 'e331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', - 'hex' + 'hex', ) const signedTx = tx.sign(privateKey) @@ -305,12 +349,12 @@ import { Capability, EIP1559CompatibleTx, TransactionFactory } from '@ethereumjs const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) -const txData = { type: 2, maxFeePerGas: BigInt(20) } // Creates an EIP-1559 compatible transac +const txData = { type: 2, maxFeePerGas: BigInt(20) } // Creates an EIP-1559 compatible transaction const tx = TransactionFactory.fromTxData(txData, { common }) if (tx.supports(Capability.EIP1559FeeMarket)) { console.log( - `The max fee per gas for this transaction is ${(tx as EIP1559CompatibleTx).maxFeePerGas}` + `The max fee per gas for this transaction is ${(tx as EIP1559CompatibleTx).maxFeePerGas}`, ) } ``` diff --git a/packages/tx/examples/EOACodeTx.ts b/packages/tx/examples/EOACodeTx.ts new file mode 100644 index 0000000000..482600669b --- /dev/null +++ b/packages/tx/examples/EOACodeTx.ts @@ -0,0 +1,27 @@ +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createEOACode7702Tx } from '@ethereumjs/tx' + +import type { PrefixedHexString } from '@ethereumjs/util' + +const ones32 = `0x${'01'.repeat(32)}` as PrefixedHexString + +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Cancun, eips: [7702] }) +const tx = createEOACode7702Tx( + { + authorizationList: [ + { + chainId: '0x2', + address: `0x${'20'.repeat(20)}`, + nonce: ['0x1'], + yParity: '0x1', + r: ones32, + s: ones32, + }, + ], + }, + { common }, +) + +console.log( + `EIP-7702 EOA code tx created with ${tx.authorizationList.length} authorization list item(s).`, +) diff --git a/packages/tx/examples/accessListTx.ts b/packages/tx/examples/accessListTx.ts index 1c7346b3c2..8bd91d2af4 100644 --- a/packages/tx/examples/accessListTx.ts +++ b/packages/tx/examples/accessListTx.ts @@ -1,8 +1,8 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { AccessListEIP2930Transaction } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createAccessList2930Tx } from '@ethereumjs/tx' import {
bytesToHex } from '@ethereumjs/util' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Berlin }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Berlin }) const txData = { data: '0x1a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', @@ -27,5 +27,5 @@ const txData = { type: '0x01', } -const tx = AccessListEIP2930Transaction.fromTxData(txData, { common }) +const tx = createAccessList2930Tx(txData, { common }) console.log(bytesToHex(tx.hash())) // 0x9150cdebad74e88b038e6c6b964d99af705f9c0883d7f0bbc0f3e072358f5b1d diff --git a/packages/tx/examples/blobTx.ts b/packages/tx/examples/blobTx.ts index 9600e22186..4671502367 100644 --- a/packages/tx/examples/blobTx.ts +++ b/packages/tx/examples/blobTx.ts @@ -1,5 +1,5 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { BlobEIP4844Transaction } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createBlob4844Tx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' import { loadKZG } from 'kzg-wasm' @@ -7,7 +7,7 @@ const main = async () => { const kzg = await loadKZG() const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Shanghai, eips: [4844], customCrypto: { kzg }, @@ -31,9 +31,9 @@ const main = async () => { blobsData: ['abcd'], } - const tx = BlobEIP4844Transaction.fromTxData(txData, { common }) + const tx = createBlob4844Tx(txData, { common }) console.log(bytesToHex(tx.hash())) //0x3c3e7c5e09c250d2200bcc3530f4a9088d7e3fb4ea3f4fccfd09f535a3539e84 } -main() +void main() diff --git a/packages/tx/examples/custom-chain-id-tx.ts b/packages/tx/examples/custom-chain-id-tx.ts index 4d9b911a89..50a65adabb 100644 --- a/packages/tx/examples/custom-chain-id-tx.ts +++ b/packages/tx/examples/custom-chain-id-tx.ts @@ -1,14 +1,14 @@ -import { LegacyTransaction } from '../dist/cjs' +import { Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createLegacyTxFromRLP } from '@ethereumjs/tx' import { toBytes } from '@ethereumjs/util' -import { Common, createCustomCommon, Hardfork } from '@ethereumjs/common' const txData = toBytes( - '0xf9010b82930284d09dc30083419ce0942d18de92e0f9aee1a29770c3b15c6cf8ac5498e580b8a42f43f4fb0000000000000000000000000000000000000000000000000000016b78998da900000000000000000000000000000000000000000000000000000000000cb1b70000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000001363e4f00000000000000000000000000000000000000000000000000000000000186a029a0fac36e66d329af0e831b2e61179b3ec8d7c7a8a2179e303cfed3364aff2bc3e4a07cb73d56e561ccbd838818dd3dea5fa0b5158577ffc61c0e6ec1f0ed55716891' + '0xf9010b82930284d09dc30083419ce0942d18de92e0f9aee1a29770c3b15c6cf8ac5498e580b8a42f43f4fb0000000000000000000000000000000000000000000000000000016b78998da900000000000000000000000000000000000000000000000000000000000cb1b70000000000000000000000000000000000000000000000000000000000000fa00000000000000000000000000000000000000000000000000000000001363e4f00000000000000000000000000000000000000000000000000000000000186a029a0fac36e66d329af0e831b2e61179b3ec8d7c7a8a2179e303cfed3364aff2bc3e4a07cb73d56e561ccbd838818dd3dea5fa0b5158577ffc61c0e6ec1f0ed55716891', ) -const common = createCustomCommon({ chainId: 3 }) +const common = createCustomCommon({ chainId: 3 }, Mainnet) common.setHardfork(Hardfork.Petersburg) -const tx = LegacyTransaction.fromSerializedTx(txData, 
{ common }) +const tx = createLegacyTxFromRLP(txData, { common }) if ( tx.isValid() && diff --git a/packages/tx/examples/custom-chain-tx.ts b/packages/tx/examples/custom-chain-tx.ts index b4f4c64a7c..4abb1703bf 100644 --- a/packages/tx/examples/custom-chain-tx.ts +++ b/packages/tx/examples/custom-chain-tx.ts @@ -1,35 +1,32 @@ -import { Address } from '@ethereumjs/util' -import { Common, createCustomCommon } from '@ethereumjs/common' -import { LegacyTransaction } from '../dist/cjs/index' -import { hexToBytes } from '@ethereumjs/util' +import { Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createLegacyTx } from '@ethereumjs/tx' +import { createAddressFromPrivateKey, hexToBytes } from '@ethereumjs/util' // In this example we create a transaction for a custom network. // This custom network has the same params as mainnet, -// except for name, chainId, and networkId, -// so we use the `Common.custom` method. +// except for name, chainId, so we use the `Common.custom` method. const customCommon = createCustomCommon( { name: 'my-network', - networkId: 123, chainId: 2134, }, + Mainnet, { - baseChain: 'mainnet', hardfork: 'petersburg', - } + }, ) // We pass our custom Common object whenever we create a transaction const opts = { common: customCommon } -const tx = LegacyTransaction.fromTxData( +const tx = createLegacyTx( { nonce: 0, gasPrice: 100, gasLimit: 1000000000, value: 100000, }, - opts + opts, ) // Once we created the transaction using the custom Common object, we can use it as a normal tx. @@ -38,7 +35,7 @@ const tx = LegacyTransaction.fromTxData( const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109') const signedTx = tx.sign(privateKey) -const address = Address.fromPrivateKey(privateKey) +const address = createAddressFromPrivateKey(privateKey) if (signedTx.isValid() && signedTx.getSenderAddress().equals(address)) { console.log('Valid signature') diff --git a/packages/tx/examples/initKzg.ts b/packages/tx/examples/initKzg.ts index de6276d97f..6bf7a98e04 100644 --- a/packages/tx/examples/initKzg.ts +++ b/packages/tx/examples/initKzg.ts @@ -1,12 +1,12 @@ +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { loadKZG } from 'kzg-wasm' -import { Chain, Common, Hardfork } from '@ethereumjs/common' const main = async () => { const kzg = await loadKZG() // Instantiate `common` const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Cancun, customCrypto: { kzg }, }) @@ -14,4 +14,4 @@ const main = async () => { console.log(common.customCrypto.kzg) // should output the KZG API as an object } -main() +void main() diff --git a/packages/tx/examples/l2tx.ts b/packages/tx/examples/l2tx.ts index 427f2dc425..b015ef46e6 100644 --- a/packages/tx/examples/l2tx.ts +++ b/packages/tx/examples/l2tx.ts @@ -1,10 +1,11 @@ -import { Common, createCustomCommon, CustomChain } from '@ethereumjs/common' -import { LegacyTransaction } from '@ethereumjs/tx' -import { Address, bytesToHex, hexToBytes } from '@ethereumjs/util' +import { Mainnet, createCustomCommon } from '@ethereumjs/common' +import { createLegacyTx } from '@ethereumjs/tx' +import { bytesToHex, createAddressFromString, hexToBytes } from '@ethereumjs/util' const pk = hexToBytes('0x076247989df60a82f6e86e58104368676096f84e60972282ee00d4673a2bc9b9') -const to = Address.fromString('0x256e8f0ba532ad83a0debde7501669511a41a1f3') -const common = createCustomCommon(CustomChain.xDaiChain) +// xDai chain ID +const common = createCustomCommon({ chainId: 100 }, 
Mainnet) +const to = createAddressFromString('0x256e8f0ba532ad83a0debde7501669511a41a1f3') const txData = { nonce: 0, @@ -14,6 +15,6 @@ const txData = { value: 1, } -const tx = LegacyTransaction.fromTxData(txData, { common }) +const tx = createLegacyTx(txData, { common }) const signedTx = tx.sign(pk) console.log(bytesToHex(signedTx.hash())) // 0xbf98f6f8700812ed6f2314275070256e11945fa48afd80fb301265f6a41a2dc2 diff --git a/packages/tx/examples/legacyTx.ts b/packages/tx/examples/legacyTx.ts index 1755327c9d..f1f9fa8249 100644 --- a/packages/tx/examples/legacyTx.ts +++ b/packages/tx/examples/legacyTx.ts @@ -1,9 +1,10 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { LegacyTransaction } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createLegacyTx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' +import { hexToBytes } from 'ethereum-cryptography/utils' const txParams = { - nonce: '0x00', + nonce: '0x0', gasPrice: '0x09184e72a000', gasLimit: '0x2710', to: '0x0000000000000000000000000000000000000000', @@ -11,15 +12,12 @@ const txParams = { data: '0x7f7465737432000000000000000000000000000000000000000000000000000000600057', } -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) -const tx = LegacyTransaction.fromTxData(txParams, { common }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) +const tx = createLegacyTx(txParams, { common }) -const privateKey = Buffer.from( - 'e331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', - 'hex' -) +const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109') const signedTx = tx.sign(privateKey) -const serializedTx = signedTx.serialize() +const _serializedTx = signedTx.serialize() console.log(bytesToHex(signedTx.hash())) // 0x894b72d87f8333fccd29d1b3aca39af69d97a6bc281e7e7a3a60640690a3cd2b diff --git a/packages/tx/examples/londonTx.ts b/packages/tx/examples/londonTx.ts index 50877045a8..70cefbbc10 100644 --- a/packages/tx/examples/londonTx.ts +++ b/packages/tx/examples/londonTx.ts @@ -1,8 +1,8 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { FeeMarketEIP1559Transaction } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createFeeMarket1559Tx } from '@ethereumjs/tx' import { bytesToHex } from '@ethereumjs/util' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const txData = { data: '0x1a8451e600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', @@ -20,5 +20,5 @@ const txData = { type: '0x02', } -const tx = FeeMarketEIP1559Transaction.fromTxData(txData, { common }) +const tx = createFeeMarket1559Tx(txData, { common }) console.log(bytesToHex(tx.hash())) // 0x6f9ef69ccb1de1aea64e511efd6542541008ced321887937c95b03779358ec8a diff --git a/packages/tx/examples/transactions.ts b/packages/tx/examples/transactions.ts index df864181d3..34d3a4e0fa 100644 --- a/packages/tx/examples/transactions.ts +++ b/packages/tx/examples/transactions.ts @@ -2,13 +2,13 @@ // You can run them with tsx, as this project is developed in TypeScript. 
// Install the dependencies and run `npx tsx examples/transactions.ts` -import { LegacyTransaction } from '../dist/cjs' -import { bytesToHex, toBytes, hexToBytes } from '@ethereumjs/util' +import { createLegacyTx, createLegacyTxFromBytesArray } from '@ethereumjs/tx' +import { bytesToHex, hexToBytes, toBytes } from '@ethereumjs/util' // We create an unsigned transaction. // Notice we don't set the `to` field because we are creating a new contract. // This transaction's chain is set to mainnet. -const tx = LegacyTransaction.fromTxData({ +const tx = createLegacyTx({ nonce: 0, gasPrice: 100, gasLimit: 1000000000, @@ -22,7 +22,7 @@ const privateKey = hexToBytes('0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3 const signedTx = tx.sign(privateKey) // We have a signed transaction. -// Now for it to be fully fundable the account that we signed it with needs to have a certain amount of wei in to. +// In order to send the transaction, the account that we signed it with needs to have a certain amount of wei in to. // To see how much this account needs we can use the getUpfrontCost() method. const feeCost = signedTx.getUpfrontCost() console.log('Total Amount of wei needed:' + feeCost.toString()) @@ -50,7 +50,7 @@ const rawTx = [ '0x5bd428537f05f9830e93792f90ea6a3e2d1ee84952dd96edbae9f658f831ab13', ] -const tx2 = LegacyTransaction.fromValuesArray(rawTx.map(toBytes)) // This is also a mainnet transaction +const tx2 = createLegacyTxFromBytesArray(rawTx.map(toBytes)) // This is also a mainnet transaction // So assuming that you were able to parse the transaction, we will now get the sender's address. diff --git a/packages/tx/examples/txFactory.ts b/packages/tx/examples/txFactory.ts index 5e99b25fe7..9929712363 100644 --- a/packages/tx/examples/txFactory.ts +++ b/packages/tx/examples/txFactory.ts @@ -1,13 +1,15 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Capability, EIP1559CompatibleTx, TransactionFactory } from '@ethereumjs/tx' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { Capability, createTxFromTxData } from '@ethereumjs/tx' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) +import type { EIP1559CompatibleTx } from '@ethereumjs/tx' -const txData = { type: 2, maxFeePerGas: BigInt(20) } // Creates an EIP-1559 compatible transac -const tx = TransactionFactory.fromTxData(txData, { common }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + +const txData = { type: 2, maxFeePerGas: BigInt(20) } // Creates an EIP-1559 compatible transaction +const tx = createTxFromTxData(txData, { common }) if (tx.supports(Capability.EIP1559FeeMarket)) { console.log( - `The max fee per gas for this transaction is ${(tx as EIP1559CompatibleTx).maxFeePerGas}` + `The max fee per gas for this transaction is ${(tx as EIP1559CompatibleTx).maxFeePerGas}`, ) } diff --git a/packages/tx/package.json b/packages/tx/package.json index 2ca85c6749..66c02acdd2 100644 --- a/packages/tx/package.json +++ b/packages/tx/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/tx", - "version": "5.3.0", + "version": "5.4.0", "description": "Implementation of the various Ethereum Transaction Types", "keywords": [ "ethereum", @@ -20,11 +20,7 @@ { "name": "Alex Beregszaszi", "email": "alex@rtfs.hu", - "url": "https://github.com/axic", - "additions": 27562, - "contributions": 22, - "deletions": 42613, - "hireable": true + "url": "https://github.com/axic" } ], "type": "module", @@ -58,9 +54,9 @@ "tsc": "../../config/cli/ts-compile.sh" }, 
"dependencies": { - "@ethereumjs/common": "^4.3.0", + "@ethereumjs/common": "^4.4.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3", + "@ethereumjs/util": "^9.1.0", "ethereum-cryptography": "^2.2.1" }, "devDependencies": { diff --git a/packages/tx/src/1559/constructors.ts b/packages/tx/src/1559/constructors.ts new file mode 100644 index 0000000000..623659832e --- /dev/null +++ b/packages/tx/src/1559/constructors.ts @@ -0,0 +1,100 @@ +import { RLP } from '@ethereumjs/rlp' +import { bytesToBigInt, bytesToHex, equalsBytes, validateNoLeadingZeroes } from '@ethereumjs/util' + +import { TransactionType } from '../types.js' +import { txTypeBytes, validateNotArray } from '../util.js' + +import { FeeMarket1559Tx } from './tx.js' + +import type { TxOptions } from '../types.js' +import type { TxData, TxValuesArray } from './tx.js' + +/** + * Instantiate a transaction from a data dictionary. + * + * Format: { chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, v, r, s } + * + * Notes: + * - `chainId` will be set automatically if not provided + * - All parameters are optional and have some basic default values + */ +export function createFeeMarket1559Tx(txData: TxData, opts: TxOptions = {}) { + return new FeeMarket1559Tx(txData, opts) +} + +/** + * Create a transaction from an array of byte encoded values ordered according to the devp2p network encoding - format noted below. + * + * Format: `[chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, signatureYParity, signatureR, signatureS]` + */ +export function create1559FeeMarketTxFromBytesArray(values: TxValuesArray, opts: TxOptions = {}) { + if (values.length !== 9 && values.length !== 12) { + throw new Error( + 'Invalid EIP-1559 transaction. Only expecting 9 values (for unsigned tx) or 12 values (for signed tx).', + ) + } + + const [ + chainId, + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + to, + value, + data, + accessList, + v, + r, + s, + ] = values + + validateNotArray({ chainId, v }) + validateNoLeadingZeroes({ nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, value, v, r, s }) + + return new FeeMarket1559Tx( + { + chainId: bytesToBigInt(chainId), + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + to, + value, + data, + accessList: accessList ?? [], + v: v !== undefined ? bytesToBigInt(v) : undefined, // EIP2930 supports v's with value 0 (empty Uint8Array) + r, + s, + }, + opts, + ) +} + +/** + * Instantiate a transaction from an RLP serialized tx. 
+ * + * Format: `0x02 || rlp([chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, signatureYParity, signatureR, signatureS])` + */ +export function createFeeMarket1559TxFromRLP(serialized: Uint8Array, opts: TxOptions = {}) { + if ( + equalsBytes(serialized.subarray(0, 1), txTypeBytes(TransactionType.FeeMarketEIP1559)) === false + ) { + throw new Error( + `Invalid serialized tx input: not an EIP-1559 transaction (wrong tx type, expected: ${ + TransactionType.FeeMarketEIP1559 + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, + ) + } + + const values = RLP.decode(serialized.subarray(1)) + + if (!Array.isArray(values)) { + throw new Error('Invalid serialized tx input: must be array') + } + + return create1559FeeMarketTxFromBytesArray(values as TxValuesArray, opts) +} diff --git a/packages/tx/src/1559/index.ts b/packages/tx/src/1559/index.ts new file mode 100644 index 0000000000..2ce2f4365a --- /dev/null +++ b/packages/tx/src/1559/index.ts @@ -0,0 +1,2 @@ +export * from './constructors.js' +export { FeeMarket1559Tx } from './tx.js' diff --git a/packages/tx/src/eip1559Transaction.ts b/packages/tx/src/1559/tx.ts similarity index 66% rename from packages/tx/src/eip1559Transaction.ts rename to packages/tx/src/1559/tx.ts index b33d12e1c5..6324a2b984 100644 --- a/packages/tx/src/eip1559Transaction.ts +++ b/packages/tx/src/1559/tx.ts @@ -1,4 +1,4 @@ -import { RLP } from '@ethereumjs/rlp' +import { Common } from '@ethereumjs/common' import { BIGINT_0, BIGINT_27, @@ -6,19 +6,19 @@ import { bigIntToHex, bigIntToUnpaddedBytes, bytesToBigInt, - bytesToHex, - equalsBytes, toBytes, - validateNoLeadingZeroes, } from '@ethereumjs/util' -import { BaseTransaction } from './baseTransaction.js' -import * as EIP1559 from './capabilities/eip1559.js' -import * as EIP2718 from './capabilities/eip2718.js' -import * as EIP2930 from './capabilities/eip2930.js' -import * as Legacy from './capabilities/legacy.js' -import { TransactionType } from './types.js' -import { AccessLists, txTypeBytes } from './util.js' +import { BaseTransaction } from '../baseTransaction.js' +import * as EIP1559 from '../capabilities/eip1559.js' +import * as EIP2718 from '../capabilities/eip2718.js' +import * as EIP2930 from '../capabilities/eip2930.js' +import * as Legacy from '../capabilities/legacy.js' +import { paramsTx } from '../params.js' +import { TransactionType } from '../types.js' +import { AccessLists, validateNotArray } from '../util.js' + +import { createFeeMarket1559Tx } from './constructors.js' import type { AccessList, @@ -27,11 +27,10 @@ import type { TxValuesArray as AllTypesTxValuesArray, JsonTx, TxOptions, -} from './types.js' -import type { Common } from '@ethereumjs/common' +} from '../types.js' -type TxData = AllTypesTxData[TransactionType.FeeMarketEIP1559] -type TxValuesArray = AllTypesTxValuesArray[TransactionType.FeeMarketEIP1559] +export type TxData = AllTypesTxData[TransactionType.FeeMarketEIP1559] +export type TxValuesArray = AllTypesTxValuesArray[TransactionType.FeeMarketEIP1559] /** * Typed transaction with a new gas fee market mechanism @@ -39,7 +38,7 @@ type TxValuesArray = AllTypesTxValuesArray[TransactionType.FeeMarketEIP1559] * - TransactionType: 2 * - EIP: [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) */ -export class FeeMarketEIP1559Transaction extends BaseTransaction { +export class FeeMarket1559Tx extends BaseTransaction { // implements EIP1559CompatibleTx public readonly chainId: bigint public readonly accessList: AccessListBytes @@ -49,97 +48,6 @@ 
export class FeeMarketEIP1559Transaction extends BaseTransaction MAX_INTEGER) { const msg = this._errorMsg('gasLimit * maxFeePerGas cannot exceed MAX_INTEGER (2^256-1)') @@ -183,7 +97,7 @@ export class FeeMarketEIP1559Transaction extends BaseTransaction { +export class AccessList2930Transaction extends BaseTransaction { public readonly chainId: bigint public readonly accessList: AccessListBytes public readonly AccessListJSON: AccessList @@ -45,85 +44,6 @@ export class AccessListEIP2930Transaction extends BaseTransaction MAX_INTEGER) { const msg = this._errorMsg('gasLimit * gasPrice cannot exceed MAX_INTEGER') @@ -180,8 +106,8 @@ export class AccessListEIP2930Transaction extends BaseTransaction { + if (!(blobVersionedHashes.length === blobs.length && blobs.length === commitments.length)) { + throw new Error('Number of blobVersionedHashes, blobs, and commitments not all equal') + } + if (blobVersionedHashes.length === 0) { + throw new Error('Invalid transaction with empty blobs') + } + + let isValid + try { + isValid = kzg.verifyBlobKzgProofBatch(blobs, commitments, kzgProofs) + } catch (error) { + throw new Error(`KZG verification of blobs fail with error=${error}`) + } + if (!isValid) { + throw new Error('KZG proof cannot be verified from blobs/commitments') + } + + for (let x = 0; x < blobVersionedHashes.length; x++) { + const computedVersionedHash = computeVersionedHash(commitments[x], version) + if (!equalsBytes(computedVersionedHash, blobVersionedHashes[x])) { + throw new Error(`commitment for blob at index ${x} does not match versionedHash`) + } + } +} + +/** + * Instantiate a transaction from a data dictionary. + * + * Format: { chainId, nonce, gasPrice, gasLimit, to, value, data, accessList, + * v, r, s, blobs, kzgCommitments, blobVersionedHashes, kzgProofs } + * + * Notes: + * - `chainId` will be set automatically if not provided + * - All parameters are optional and have some basic default values + * - `blobs` cannot be supplied as well as `kzgCommitments`, `blobVersionedHashes`, `kzgProofs` + * - If `blobs` is passed in, `kzgCommitments`, `blobVersionedHashes`, `kzgProofs` will be derived by the constructor + */ +export function createBlob4844Tx(txData: TxData, opts?: TxOptions) { + if (opts?.common?.customCrypto?.kzg === undefined) { + throw new Error( + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', + ) + } + const kzg = opts!.common!.customCrypto!.kzg! 
+ if (txData.blobsData !== undefined) { + if (txData.blobs !== undefined) { + throw new Error('cannot have both raw blobs data and encoded blobs in constructor') + } + if (txData.kzgCommitments !== undefined) { + throw new Error('cannot have both raw blobs data and KZG commitments in constructor') + } + if (txData.blobVersionedHashes !== undefined) { + throw new Error('cannot have both raw blobs data and versioned hashes in constructor') + } + if (txData.kzgProofs !== undefined) { + throw new Error('cannot have both raw blobs data and KZG proofs in constructor') + } + txData.blobs = getBlobs(txData.blobsData.reduce((acc, cur) => acc + cur)) + txData.kzgCommitments = blobsToCommitments(kzg, txData.blobs as Uint8Array[]) + txData.blobVersionedHashes = commitmentsToVersionedHashes(txData.kzgCommitments as Uint8Array[]) + txData.kzgProofs = blobsToProofs( + kzg, + txData.blobs as Uint8Array[], + txData.kzgCommitments as Uint8Array[], + ) + } + + return new Blob4844Tx(txData, opts) +} + +/** + * Create a transaction from an array of byte encoded values ordered according to the devp2p network encoding - format noted below. + * + * Format: `[chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, signatureYParity, signatureR, signatureS]` + */ +export function createBlob4844TxFromBytesArray(values: TxValuesArray, opts: TxOptions = {}) { + if (opts.common?.customCrypto?.kzg === undefined) { + throw new Error( + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', + ) + } + + if (values.length !== 11 && values.length !== 14) { + throw new Error( + 'Invalid EIP-4844 transaction. Only expecting 11 values (for unsigned tx) or 14 values (for signed tx).', + ) + } + + const [ + chainId, + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + to, + value, + data, + accessList, + maxFeePerBlobGas, + blobVersionedHashes, + v, + r, + s, + ] = values + + validateNotArray({ chainId, v }) + validateNoLeadingZeroes({ + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + value, + maxFeePerBlobGas, + v, + r, + s, + }) + + return new Blob4844Tx( + { + chainId: bytesToBigInt(chainId), + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + to, + value, + data, + accessList: accessList ?? [], + maxFeePerBlobGas, + blobVersionedHashes, + v: v !== undefined ? bytesToBigInt(v) : undefined, // EIP2930 supports v's with value 0 (empty Uint8Array) + r, + s, + }, + opts, + ) +} + +/** + * Instantiate a transaction from a RLP serialized tx. 
+ * + * Format: `0x03 || rlp([chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, to, value, data, + * access_list, max_fee_per_data_gas, blob_versioned_hashes, y_parity, r, s])` + */ +export function createBlob4844TxFromRLP(serialized: Uint8Array, opts: TxOptions = {}) { + if (opts.common?.customCrypto?.kzg === undefined) { + throw new Error( + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', + ) + } + + if (equalsBytes(serialized.subarray(0, 1), txTypeBytes(TransactionType.BlobEIP4844)) === false) { + throw new Error( + `Invalid serialized tx input: not an EIP-4844 transaction (wrong tx type, expected: ${ + TransactionType.BlobEIP4844 + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, + ) + } + + const values = RLP.decode(serialized.subarray(1)) + + if (!Array.isArray(values)) { + throw new Error('Invalid serialized tx input: must be array') + } + + return createBlob4844TxFromBytesArray(values as TxValuesArray, opts) +} + +/** + * Creates a transaction from the network encoding of a blob transaction (with blobs/commitments/proof) + * @param serialized a buffer representing a serialized BlobTransactionNetworkWrapper + * @param opts any TxOptions defined + * @returns a Blob4844Tx + */ +export function createBlob4844TxFromSerializedNetworkWrapper( + serialized: Uint8Array, + opts?: TxOptions, +): Blob4844Tx { + if (!opts || !opts.common) { + throw new Error('common instance required to validate versioned hashes') + } + + if (opts.common?.customCrypto?.kzg === undefined) { + throw new Error( + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', + ) + } + + if (equalsBytes(serialized.subarray(0, 1), txTypeBytes(TransactionType.BlobEIP4844)) === false) { + throw new Error( + `Invalid serialized tx input: not an EIP-4844 transaction (wrong tx type, expected: ${ + TransactionType.BlobEIP4844 + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, + ) + } + + // Validate network wrapper + const networkTxValues = RLP.decode(serialized.subarray(1)) + if (networkTxValues.length !== 4) { + throw Error(`Expected 4 values in the deserialized network transaction`) + } + const [txValues, blobs, kzgCommitments, kzgProofs] = + networkTxValues as BlobEIP4844NetworkValuesArray + + // Construct the tx but don't freeze yet, we will assign blobs etc once validated + const decodedTx = createBlob4844TxFromBytesArray(txValues, { ...opts, freeze: false }) + if (decodedTx.to === undefined) { + throw Error('Blob4844Tx can not be send without a valid `to`') + } + + const commonCopy = opts.common.copy() + commonCopy.updateParams(opts.params ?? paramsTx) + + const version = Number(commonCopy.param('blobCommitmentVersionKzg')) + validateBlobTransactionNetworkWrapper( + decodedTx.blobVersionedHashes, + blobs, + kzgCommitments, + kzgProofs, + version, + opts.common.customCrypto.kzg, + ) + + // set the network blob data on the tx + decodedTx.blobs = blobs + decodedTx.kzgCommitments = kzgCommitments + decodedTx.kzgProofs = kzgProofs + + // freeze the tx + const freeze = opts?.freeze ?? true + if (freeze) { + Object.freeze(decodedTx) + } + + return decodedTx +} + +/** + * Creates the minimal representation of a blob transaction from the network wrapper version. 
+ * The minimal representation is used when adding transactions to an execution payload/block + * @param txData a {@link Blob4844Tx} containing optional blobs/kzg commitments + * @param opts - dictionary of {@link TxOptions} + * @returns the "minimal" representation of a Blob4844Tx (i.e. transaction object minus blobs and kzg commitments) + */ +export function createMinimal4844TxFromNetworkWrapper( + txData: Blob4844Tx, + opts?: TxOptions, +): Blob4844Tx { + if (opts?.common?.customCrypto?.kzg === undefined) { + throw new Error( + 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx', + ) + } + + const tx = createBlob4844Tx( + { + ...txData, + ...{ blobs: undefined, kzgCommitments: undefined, kzgProofs: undefined }, + }, + opts, + ) + return tx +} + +/** + * Returns the EIP 4844 transaction network wrapper in JSON format similar to toJSON, including + * blobs, commitments, and proofs fields + * @param serialized a buffer representing a serialized BlobTransactionNetworkWrapper + * @param opts any TxOptions defined + * @returns JsonBlobTxNetworkWrapper with blobs, KZG commitments, and KZG proofs fields + */ +export function blobTxNetworkWrapperToJSON( + serialized: Uint8Array, + opts?: TxOptions, +): JsonBlobTxNetworkWrapper { + const tx = createBlob4844TxFromSerializedNetworkWrapper(serialized, opts) + + const accessListJSON = AccessLists.getAccessListJSON(tx.accessList) + const baseJson = tx.toJSON() + + return { + ...baseJson, + chainId: bigIntToHex(tx.chainId), + maxPriorityFeePerGas: bigIntToHex(tx.maxPriorityFeePerGas), + maxFeePerGas: bigIntToHex(tx.maxFeePerGas), + accessList: accessListJSON, + maxFeePerBlobGas: bigIntToHex(tx.maxFeePerBlobGas), + blobVersionedHashes: tx.blobVersionedHashes.map((hash) => bytesToHex(hash)), + blobs: tx.blobs!.map((bytes) => bytesToHex(bytes)), + kzgCommitments: tx.kzgCommitments!.map((bytes) => bytesToHex(bytes)), + kzgProofs: tx.kzgProofs!.map((bytes) => bytesToHex(bytes)), + } +} diff --git a/packages/tx/src/4844/index.ts b/packages/tx/src/4844/index.ts new file mode 100644 index 0000000000..124d02e7b6 --- /dev/null +++ b/packages/tx/src/4844/index.ts @@ -0,0 +1,2 @@ +export * from './constructors.js' +export { Blob4844Tx } from './tx.js' diff --git a/packages/tx/src/4844/tx.ts b/packages/tx/src/4844/tx.ts new file mode 100644 index 0000000000..f19a031df4 --- /dev/null +++ b/packages/tx/src/4844/tx.ts @@ -0,0 +1,362 @@ +import { Common } from '@ethereumjs/common' +import { + BIGINT_0, + BIGINT_27, + MAX_INTEGER, + bigIntToHex, + bigIntToUnpaddedBytes, + bytesToBigInt, + bytesToHex, + toBytes, +} from '@ethereumjs/util' + +import { BaseTransaction } from '../baseTransaction.js' +import * as EIP1559 from '../capabilities/eip1559.js' +import * as EIP2718 from '../capabilities/eip2718.js' +import * as EIP2930 from '../capabilities/eip2930.js' +import * as Legacy from '../capabilities/legacy.js' +import { LIMIT_BLOBS_PER_TX } from '../constants.js' +import { paramsTx } from '../index.js' +import { TransactionType } from '../types.js' +import { AccessLists, validateNotArray } from '../util.js' + +import { createBlob4844Tx } from './constructors.js' + +import type { + AccessList, + AccessListBytes, + TxData as AllTypesTxData, + TxValuesArray as AllTypesTxValuesArray, + JsonTx, + TxOptions, +} from '../types.js' + +export type TxData = AllTypesTxData[TransactionType.BlobEIP4844] +export type TxValuesArray = AllTypesTxValuesArray[TransactionType.BlobEIP4844] + +/** + * Typed transaction with a new gas fee market mechanism 
for transactions that include "blobs" of data + * + * - TransactionType: 3 + * - EIP: [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844) + */ +export class Blob4844Tx extends BaseTransaction { + public readonly chainId: bigint + public readonly accessList: AccessListBytes + public readonly AccessListJSON: AccessList + public readonly maxPriorityFeePerGas: bigint + public readonly maxFeePerGas: bigint + public readonly maxFeePerBlobGas: bigint + + public readonly common: Common + public blobVersionedHashes: Uint8Array[] + blobs?: Uint8Array[] // This property should only be populated when the transaction is in the "Network Wrapper" format + kzgCommitments?: Uint8Array[] // This property should only be populated when the transaction is in the "Network Wrapper" format + kzgProofs?: Uint8Array[] // This property should only be populated when the transaction is in the "Network Wrapper" format + + /** + * This constructor takes the values, validates them, assigns them and freezes the object. + * + * It is not recommended to use this constructor directly. Instead use + * the static constructors or factory methods to assist in creating a Transaction object from + * varying data types. + */ + constructor(txData: TxData, opts: TxOptions = {}) { + super({ ...txData, type: TransactionType.BlobEIP4844 }, opts) + const { chainId, accessList, maxFeePerGas, maxPriorityFeePerGas, maxFeePerBlobGas } = txData + + this.common = opts.common?.copy() ?? new Common({ chain: this.DEFAULT_CHAIN }) + if (chainId !== undefined && bytesToBigInt(toBytes(chainId)) !== this.common.chainId()) { + throw new Error( + `Common chain ID ${this.common.chainId} not matching the derived chain ID ${chainId}`, + ) + } + this.common.updateParams(opts.params ?? paramsTx) + this.chainId = this.common.chainId() + + if (!this.common.isActivatedEIP(1559)) { + throw new Error('EIP-1559 not enabled on Common') + } + + if (!this.common.isActivatedEIP(4844)) { + throw new Error('EIP-4844 not enabled on Common') + } + this.activeCapabilities = this.activeCapabilities.concat([1559, 2718, 2930]) + + // Populate the access list fields + const accessListData = AccessLists.getAccessListData(accessList ?? []) + this.accessList = accessListData.accessList + this.AccessListJSON = accessListData.AccessListJSON + // Verify the access list format. + AccessLists.verifyAccessList(this.accessList) + + this.maxFeePerGas = bytesToBigInt(toBytes(maxFeePerGas)) + this.maxPriorityFeePerGas = bytesToBigInt(toBytes(maxPriorityFeePerGas)) + + this._validateCannotExceedMaxInteger({ + maxFeePerGas: this.maxFeePerGas, + maxPriorityFeePerGas: this.maxPriorityFeePerGas, + }) + + validateNotArray(txData) + + if (this.gasLimit * this.maxFeePerGas > MAX_INTEGER) { + const msg = this._errorMsg('gasLimit * maxFeePerGas cannot exceed MAX_INTEGER (2^256-1)') + throw new Error(msg) + } + + if (this.maxFeePerGas < this.maxPriorityFeePerGas) { + const msg = this._errorMsg( + 'maxFeePerGas cannot be less than maxPriorityFeePerGas (The total must be the larger of the two)', + ) + throw new Error(msg) + } + + this.maxFeePerBlobGas = bytesToBigInt( + toBytes((maxFeePerBlobGas ?? '') === '' ? '0x' : maxFeePerBlobGas), + ) + + this.blobVersionedHashes = (txData.blobVersionedHashes ?? 
[]).map((vh) => toBytes(vh)) + EIP2718.validateYParity(this) + Legacy.validateHighS(this) + + for (const hash of this.blobVersionedHashes) { + if (hash.length !== 32) { + const msg = this._errorMsg('versioned hash is invalid length') + throw new Error(msg) + } + if (BigInt(hash[0]) !== this.common.param('blobCommitmentVersionKzg')) { + const msg = this._errorMsg('versioned hash does not start with KZG commitment version') + throw new Error(msg) + } + } + if (this.blobVersionedHashes.length > LIMIT_BLOBS_PER_TX) { + const msg = this._errorMsg(`tx can contain at most ${LIMIT_BLOBS_PER_TX} blobs`) + throw new Error(msg) + } else if (this.blobVersionedHashes.length === 0) { + const msg = this._errorMsg(`tx should contain at least one blob`) + throw new Error(msg) + } + if (this.to === undefined) { + const msg = this._errorMsg( + `tx should have a "to" field and cannot be used to create contracts`, + ) + throw new Error(msg) + } + + this.blobs = txData.blobs?.map((blob) => toBytes(blob)) + this.kzgCommitments = txData.kzgCommitments?.map((commitment) => toBytes(commitment)) + this.kzgProofs = txData.kzgProofs?.map((proof) => toBytes(proof)) + const freeze = opts?.freeze ?? true + if (freeze) { + Object.freeze(this) + } + } + + /** + * Returns the minimum of calculated priority fee (from maxFeePerGas and baseFee) and maxPriorityFeePerGas + * @param baseFee Base fee retrieved from block + */ + getEffectivePriorityFee(baseFee: bigint): bigint { + return EIP1559.getEffectivePriorityFee(this, baseFee) + } + + /** + * The amount of gas paid for the data in this tx + */ + getDataGas(): bigint { + return EIP2930.getDataGas(this) + } + + /** + * The up front amount that an account must have for this transaction to be valid + * @param baseFee The base fee of the block (will be set to 0 if not provided) + */ + getUpfrontCost(baseFee: bigint = BIGINT_0): bigint { + return EIP1559.getUpfrontCost(this, baseFee) + } + + /** + * Returns a Uint8Array Array of the raw Bytes of the EIP-4844 transaction, in order. + * + * Format: [chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, to, value, data, + * access_list, max_fee_per_data_gas, blob_versioned_hashes, y_parity, r, s]`. + * + * Use {@link Blob4844Tx.serialize} to add a transaction to a block + * with {@link createBlockFromBytesArray}. + * + * For an unsigned tx this method uses the empty Bytes values for the + * signature parameters `v`, `r` and `s` for encoding. For an EIP-155 compliant + * representation for external signing use {@link Blob4844Tx.getMessageToSign}. + */ + raw(): TxValuesArray { + return [ + bigIntToUnpaddedBytes(this.chainId), + bigIntToUnpaddedBytes(this.nonce), + bigIntToUnpaddedBytes(this.maxPriorityFeePerGas), + bigIntToUnpaddedBytes(this.maxFeePerGas), + bigIntToUnpaddedBytes(this.gasLimit), + this.to !== undefined ? this.to.bytes : new Uint8Array(0), + bigIntToUnpaddedBytes(this.value), + this.data, + this.accessList, + bigIntToUnpaddedBytes(this.maxFeePerBlobGas), + this.blobVersionedHashes, + this.v !== undefined ? bigIntToUnpaddedBytes(this.v) : new Uint8Array(0), + this.r !== undefined ? bigIntToUnpaddedBytes(this.r) : new Uint8Array(0), + this.s !== undefined ? bigIntToUnpaddedBytes(this.s) : new Uint8Array(0), + ] + } + + /** + * Returns the serialized encoding of the EIP-4844 transaction. + * + * Format: `0x03 || rlp([chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * access_list, max_fee_per_data_gas, blob_versioned_hashes, y_parity, r, s])`. 
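+ *
+ * A minimal usage sketch (values are illustrative; a KZG implementation is assumed to be
+ * wired in via `customCrypto`, here named `kzg`):
+ *
+ * ```javascript
+ * const common = new Common({ chain: Mainnet, hardfork: Hardfork.Cancun, customCrypto: { kzg } })
+ * const tx = createBlob4844Tx(
+ *   {
+ *     to: '0x0000000000000000000000000000000000000000',
+ *     blobsData: ['arbitrary data'],
+ *     maxFeePerBlobGas: 1n,
+ *   },
+ *   { common },
+ * )
+ * const serialized = tx.serialize() // 0x03 || rlp([...]) as described above
+ * ```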
+ * + * Note that in contrast to the legacy tx serialization format this is not + * valid RLP any more due to the raw tx type preceding and concatenated to + * the RLP encoding of the values. + */ + serialize(): Uint8Array { + return EIP2718.serialize(this) + } + + /** + * @returns the serialized form of a blob transaction in the network wrapper format (used for gossipping mempool transactions over devp2p) + */ + serializeNetworkWrapper(): Uint8Array { + if ( + this.blobs === undefined || + this.kzgCommitments === undefined || + this.kzgProofs === undefined + ) { + throw new Error( + 'cannot serialize network wrapper without blobs, KZG commitments and KZG proofs provided', + ) + } + + return EIP2718.serialize(this, [this.raw(), this.blobs, this.kzgCommitments, this.kzgProofs]) + } + + /** + * Returns the raw serialized unsigned tx, which can be used + * to sign the transaction (e.g. for sending to a hardware wallet). + * + * Note: in contrast to the legacy tx the raw message format is already + * serialized and doesn't need to be RLP encoded any more. + * + * ```javascript + * const serializedMessage = tx.getMessageToSign() // use this for the HW wallet input + * ``` + */ + getMessageToSign(): Uint8Array { + return EIP2718.serialize(this, this.raw().slice(0, 11)) + } + + /** + * Returns the hashed serialized unsigned tx, which can be used + * to sign the transaction (e.g. for sending to a hardware wallet). + * + * Note: in contrast to the legacy tx the raw message format is already + * serialized and doesn't need to be RLP encoded any more. + */ + getHashedMessageToSign(): Uint8Array { + return EIP2718.getHashedMessageToSign(this) + } + + /** + * Computes a sha3-256 hash of the serialized tx. + * + * This method can only be used for signed txs (it throws otherwise). + * Use {@link Blob4844Tx.getMessageToSign} to get a tx hash for the purpose of signing. + */ + public hash(): Uint8Array { + return Legacy.hash(this) + } + + getMessageToVerifySignature(): Uint8Array { + return this.getHashedMessageToSign() + } + + /** + * Returns the public key of the sender + */ + public getSenderPublicKey(): Uint8Array { + return Legacy.getSenderPublicKey(this) + } + + toJSON(): JsonTx { + const accessListJSON = AccessLists.getAccessListJSON(this.accessList) + const baseJson = super.toJSON() + + return { + ...baseJson, + chainId: bigIntToHex(this.chainId), + maxPriorityFeePerGas: bigIntToHex(this.maxPriorityFeePerGas), + maxFeePerGas: bigIntToHex(this.maxFeePerGas), + accessList: accessListJSON, + maxFeePerBlobGas: bigIntToHex(this.maxFeePerBlobGas), + blobVersionedHashes: this.blobVersionedHashes.map((hash) => bytesToHex(hash)), + } + } + + addSignature( + v: bigint, + r: Uint8Array | bigint, + s: Uint8Array | bigint, + convertV: boolean = false, + ): Blob4844Tx { + r = toBytes(r) + s = toBytes(s) + const opts = { ...this.txOptions, common: this.common } + + return createBlob4844Tx( + { + chainId: this.chainId, + nonce: this.nonce, + maxPriorityFeePerGas: this.maxPriorityFeePerGas, + maxFeePerGas: this.maxFeePerGas, + gasLimit: this.gasLimit, + to: this.to, + value: this.value, + data: this.data, + accessList: this.accessList, + v: convertV ? v - BIGINT_27 : v, // This looks extremely hacky: @ethereumjs/util actually adds 27 to the value, the recovery bit is either 0 or 1. 
+ r: bytesToBigInt(r), + s: bytesToBigInt(s), + maxFeePerBlobGas: this.maxFeePerBlobGas, + blobVersionedHashes: this.blobVersionedHashes, + blobs: this.blobs, + kzgCommitments: this.kzgCommitments, + kzgProofs: this.kzgProofs, + }, + opts, + ) + } + /** + * Return a compact error string representation of the object + */ + public errorStr() { + let errorStr = this._getSharedErrorPostfix() + errorStr += ` maxFeePerGas=${this.maxFeePerGas} maxPriorityFeePerGas=${this.maxPriorityFeePerGas}` + return errorStr + } + + /** + * Internal helper function to create an annotated error message + * + * @param msg Base error message + * @hidden + */ + protected _errorMsg(msg: string) { + return Legacy.errorMsg(this, msg) + } + + /** + * @returns the number of blobs included with this transaction + */ + public numBlobs(): number { + return this.blobVersionedHashes.length + } +} diff --git a/packages/tx/src/7702/constructors.ts b/packages/tx/src/7702/constructors.ts new file mode 100644 index 0000000000..5c8d0ab9ab --- /dev/null +++ b/packages/tx/src/7702/constructors.ts @@ -0,0 +1,102 @@ +import { RLP } from '@ethereumjs/rlp' +import { bytesToBigInt, bytesToHex, equalsBytes, validateNoLeadingZeroes } from '@ethereumjs/util' + +import { TransactionType } from '../types.js' +import { txTypeBytes, validateNotArray } from '../util.js' + +import { EOACode7702Transaction } from './tx.js' + +import type { TxOptions } from '../types.js' +import type { TxData, TxValuesArray } from './tx.js' + +/** + * Instantiate a transaction from a data dictionary. + * + * Format: { chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, v, r, s } + * + * Notes: + * - `chainId` will be set automatically if not provided + * - All parameters are optional and have some basic default values + */ +export function createEOACode7702Tx(txData: TxData, opts: TxOptions = {}) { + return new EOACode7702Transaction(txData, opts) +} + +/** + * Create a transaction from an array of byte encoded values ordered according to the devp2p network encoding - format noted below. + * + * Format: `[chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, signatureYParity, signatureR, signatureS]` + */ +export function createEOACode7702TxFromBytesArray(values: TxValuesArray, opts: TxOptions = {}) { + if (values.length !== 10 && values.length !== 13) { + throw new Error( + 'Invalid EIP-7702 transaction. Only expecting 10 values (for unsigned tx) or 13 values (for signed tx).', + ) + } + + const [ + chainId, + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + to, + value, + data, + accessList, + authorityList, + v, + r, + s, + ] = values + + validateNotArray({ chainId, v }) + validateNoLeadingZeroes({ nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, value, v, r, s }) + + return new EOACode7702Transaction( + { + chainId: bytesToBigInt(chainId), + nonce, + maxPriorityFeePerGas, + maxFeePerGas, + gasLimit, + to, + value, + data, + accessList: accessList ?? [], + authorizationList: authorityList ?? [], + v: v !== undefined ? bytesToBigInt(v) : undefined, // EIP2930 supports v's with value 0 (empty Uint8Array) + r, + s, + }, + opts, + ) +} + +/** + * Instantiate a transaction from a RLP serialized tx. 
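+ *
+ * A minimal usage sketch (assuming `serialized` holds a valid 0x04-prefixed tx payload and
+ * `common` has EIP-7702 activated):
+ *
+ * ```javascript
+ * const tx = createEOACode7702TxFromRLP(serialized, { common })
+ * console.log(tx.authorizationList.length) // number of authorization tuples carried by the tx
+ * ```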
+ * + * Format: `0x04 || rlp([chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, + * accessList, signatureYParity, signatureR, signatureS])` + */ +export function createEOACode7702TxFromRLP(serialized: Uint8Array, opts: TxOptions = {}) { + if ( + equalsBytes(serialized.subarray(0, 1), txTypeBytes(TransactionType.EOACodeEIP7702)) === false + ) { + throw new Error( + `Invalid serialized tx input: not an EIP-7702 transaction (wrong tx type, expected: ${ + TransactionType.EOACodeEIP7702 + }, received: ${bytesToHex(serialized.subarray(0, 1))}`, + ) + } + + const values = RLP.decode(serialized.subarray(1)) + + if (!Array.isArray(values)) { + throw new Error('Invalid serialized tx input: must be array') + } + + return createEOACode7702TxFromBytesArray(values as TxValuesArray, opts) +} diff --git a/packages/tx/src/7702/index.ts b/packages/tx/src/7702/index.ts new file mode 100644 index 0000000000..4130393815 --- /dev/null +++ b/packages/tx/src/7702/index.ts @@ -0,0 +1,2 @@ +export * from './constructors.js' +export { EOACode7702Transaction } from './tx.js' diff --git a/packages/tx/src/eip7702Transaction.ts b/packages/tx/src/7702/tx.ts similarity index 67% rename from packages/tx/src/eip7702Transaction.ts rename to packages/tx/src/7702/tx.ts index bb20761771..c22449fa0e 100644 --- a/packages/tx/src/eip7702Transaction.ts +++ b/packages/tx/src/7702/tx.ts @@ -1,4 +1,4 @@ -import { RLP } from '@ethereumjs/rlp' +import { Common } from '@ethereumjs/common' import { BIGINT_0, BIGINT_27, @@ -6,19 +6,19 @@ import { bigIntToHex, bigIntToUnpaddedBytes, bytesToBigInt, - bytesToHex, - equalsBytes, toBytes, - validateNoLeadingZeroes, } from '@ethereumjs/util' -import { BaseTransaction } from './baseTransaction.js' -import * as EIP1559 from './capabilities/eip1559.js' -import * as EIP2718 from './capabilities/eip2718.js' -import * as EIP7702 from './capabilities/eip7702.js' -import * as Legacy from './capabilities/legacy.js' -import { TransactionType } from './types.js' -import { AccessLists, AuthorizationLists, txTypeBytes } from './util.js' +import { BaseTransaction } from '../baseTransaction.js' +import * as EIP1559 from '../capabilities/eip1559.js' +import * as EIP2718 from '../capabilities/eip2718.js' +import * as EIP7702 from '../capabilities/eip7702.js' +import * as Legacy from '../capabilities/legacy.js' +import { paramsTx } from '../index.js' +import { TransactionType } from '../types.js' +import { AccessLists, AuthorizationLists, validateNotArray } from '../util.js' + +import { createEOACode7702Tx } from './constructors.js' import type { AccessList, @@ -29,11 +29,10 @@ import type { AuthorizationListBytes, JsonTx, TxOptions, -} from './types.js' -import type { Common } from '@ethereumjs/common' +} from '../types.js' -type TxData = AllTypesTxData[TransactionType.EOACodeEIP7702] -type TxValuesArray = AllTypesTxValuesArray[TransactionType.EOACodeEIP7702] +export type TxData = AllTypesTxData[TransactionType.EOACodeEIP7702] +export type TxValuesArray = AllTypesTxValuesArray[TransactionType.EOACodeEIP7702] /** * Typed transaction with the ability to set codes on EOA accounts @@ -41,7 +40,7 @@ type TxValuesArray = AllTypesTxValuesArray[TransactionType.EOACodeEIP7702] * - TransactionType: 4 * - EIP: [EIP-7702](https://github.com/ethereum/EIPs/blob/62419ca3f45375db00b04a368ea37c0bfb05386a/EIPS/eip-7702.md) */ -export class EOACodeEIP7702Transaction extends BaseTransaction { +export class EOACode7702Transaction extends BaseTransaction { public readonly chainId: bigint public readonly 
accessList: AccessListBytes public readonly AccessListJSON: AccessList @@ -52,98 +51,6 @@ export class EOACodeEIP7702Transaction extends BaseTransaction MAX_INTEGER) { const msg = this._errorMsg('gasLimit * maxFeePerGas cannot exceed MAX_INTEGER (2^256-1)') @@ -196,7 +109,7 @@ export class EOACodeEIP7702Transaction extends BaseTransaction /** * List of tx type defining EIPs, * e.g. 1559 (fee market) and 2930 (access lists) - * for FeeMarketEIP1559Transaction objects + * for FeeMarket1559Tx objects */ protected activeCapabilities: number[] = [] @@ -74,9 +74,11 @@ export abstract class BaseTransaction * * @hidden */ - protected DEFAULT_CHAIN = Chain.Mainnet + protected DEFAULT_CHAIN = Mainnet constructor(txData: TxData[T], opts: TxOptions) { + this.common = opts.common?.copy() ?? new Common({ chain: this.DEFAULT_CHAIN }) + const { nonce, gasLimit, to, value, data, v, r, s, type } = txData this._type = Number(bytesToBigInt(toBytes(type))) @@ -107,9 +109,14 @@ export abstract class BaseTransaction const createContract = this.to === undefined || this.to === null const allowUnlimitedInitCodeSize = opts.allowUnlimitedInitCodeSize ?? false - const common = opts.common ?? this._getCommon() - if (createContract && common.isActivatedEIP(3860) && allowUnlimitedInitCodeSize === false) { - checkMaxInitCodeSize(common, this.data.length) + + this.common.updateParams(opts.params ?? paramsTx) + if ( + createContract && + this.common.isActivatedEIP(3860) && + allowUnlimitedInitCodeSize === false + ) { + checkMaxInitCodeSize(this.common, this.data.length) } } @@ -153,8 +160,10 @@ export abstract class BaseTransaction errors.push('Invalid Signature') } - if (this.getBaseFee() > this.gasLimit) { - errors.push(`gasLimit is too low. given ${this.gasLimit}, need at least ${this.getBaseFee()}`) + if (this.getIntrinsicGas() > this.gasLimit) { + errors.push( + `gasLimit is too low. given ${this.gasLimit}, need at least ${this.getIntrinsicGas()}`, + ) } return errors @@ -171,25 +180,28 @@ export abstract class BaseTransaction } /** - * The minimum amount of gas the tx must have (DataFee + TxFee + Creation Fee) + * The minimum gas limit which the tx to have to be valid. + * This covers costs as the standard fee (21000 gas), the data fee (paid for each calldata byte), + * the optional creation fee (if the transaction creates a contract), and if relevant the gas + * to be paid for access lists (EIP-2930) and authority lists (EIP-7702). 
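+ *
+ * A rough example (assuming an Istanbul-or-later Common, where non-zero calldata bytes cost 16 gas;
+ * values are illustrative):
+ *
+ * ```javascript
+ * const tx = createLegacyTx({ to: '0x0000000000000000000000000000000000000000', data: '0x01' }, { common })
+ * tx.getIntrinsicGas() // 21000n (txGas) + 16n (one non-zero calldata byte) = 21016n
+ * ```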
*/ - getBaseFee(): bigint { - const txFee = this.common.param('gasPrices', 'tx') - let fee = this.getDataFee() + getIntrinsicGas(): bigint { + const txFee = this.common.param('txGas') + let fee = this.getDataGas() if (txFee) fee += txFee if (this.common.gteHardfork('homestead') && this.toCreationAddress()) { - const txCreationFee = this.common.param('gasPrices', 'txCreation') + const txCreationFee = this.common.param('txCreationGas') if (txCreationFee) fee += txCreationFee } return fee } /** - * The amount of gas paid for the data in this tx + * The amount of gas paid for the calldata in this tx */ - getDataFee(): bigint { - const txDataZero = this.common.param('gasPrices', 'txDataZero') - const txDataNonZero = this.common.param('gasPrices', 'txDataNonZero') + getDataGas(): bigint { + const txDataZero = this.common.param('txDataZeroGas') + const txDataNonZero = this.common.param('txDataNonZeroGas') let cost = BIGINT_0 for (let i = 0; i < this.data.length; i++) { @@ -198,7 +210,7 @@ export abstract class BaseTransaction if ((this.to === undefined || this.to === null) && this.common.isActivatedEIP(3860)) { const dataLength = BigInt(Math.ceil(this.data.length / 32)) - const initCodeCost = this.common.param('gasPrices', 'initCodeWordCost') * dataLength + const initCodeCost = this.common.param('initCodeWordGas') * dataLength cost += initCodeCost } @@ -213,7 +225,7 @@ export abstract class BaseTransaction abstract getEffectivePriorityFee(baseFee: bigint | undefined): bigint /** - * The up front amount that an account must have for this transaction to be valid + * The upfront amount of wei to be paid in order for this tx to be valid and included in a block */ abstract getUpfrontCost(): bigint @@ -228,7 +240,7 @@ export abstract class BaseTransaction * Returns a Uint8Array Array of the raw Bytes of this transaction, in order. * * Use {@link BaseTransaction.serialize} to add a transaction to a block - * with {@link createBlockFromValuesArray}. + * with {@link createBlockFromBytesArray}. * * For an unsigned tx this method uses the empty Bytes values for the * signature parameters `v`, `r` and `s` for encoding. For an EIP-155 compliant @@ -362,56 +374,9 @@ export abstract class BaseTransaction v: bigint, r: Uint8Array | bigint, s: Uint8Array | bigint, - convertV?: boolean + convertV?: boolean, ): Transaction[T] - /** - * Does chain ID checks on common and returns a common - * to be used on instantiation - * @hidden - * - * @param common - {@link Common} instance from tx options - * @param chainId - Chain ID from tx options (typed txs) or signature (legacy tx) - */ - protected _getCommon(common?: Common, chainId?: BigIntLike) { - // Chain ID provided - if (chainId !== undefined) { - const chainIdBigInt = bytesToBigInt(toBytes(chainId)) - if (common) { - if (common.chainId() !== chainIdBigInt) { - const msg = this._errorMsg( - `The chain ID does not match the chain ID of Common. 
Got: ${chainIdBigInt}, expected: ${common.chainId()}` - ) - throw new Error(msg) - } - // Common provided, chain ID does match - // -> Return provided Common - return common.copy() - } else { - if (isSupportedChainId(chainIdBigInt)) { - // No Common, chain ID supported by Common - // -> Instantiate Common with chain ID - return new Common({ chain: chainIdBigInt }) - } else { - // No Common, chain ID not supported by Common - // -> Instantiate custom Common derived from DEFAULT_CHAIN - return createCustomCommon( - { - name: 'custom-chain', - networkId: chainIdBigInt, - chainId: chainIdBigInt, - }, - { baseChain: this.DEFAULT_CHAIN } - ) - } - } - } else { - // No chain ID provided - // -> return Common provided or create new default Common - return common?.copy() ?? new Common({ chain: this.DEFAULT_CHAIN }) - } - } - /** * Validates that an object with BigInt values cannot exceed the specified bit limit. * @param values Object containing string keys and BigInt values @@ -421,7 +386,7 @@ export abstract class BaseTransaction protected _validateCannotExceedMaxInteger( values: { [key: string]: bigint | undefined }, bits = 256, - cannotEqual = false + cannotEqual = false, ) { for (const [key, value] of Object.entries(values)) { switch (bits) { @@ -429,7 +394,7 @@ export abstract class BaseTransaction if (cannotEqual) { if (value !== undefined && value >= MAX_UINT64) { const msg = this._errorMsg( - `${key} cannot equal or exceed MAX_UINT64 (2^64-1), given ${value}` + `${key} cannot equal or exceed MAX_UINT64 (2^64-1), given ${value}`, ) throw new Error(msg) } @@ -444,14 +409,14 @@ export abstract class BaseTransaction if (cannotEqual) { if (value !== undefined && value >= MAX_INTEGER) { const msg = this._errorMsg( - `${key} cannot equal or exceed MAX_INTEGER (2^256-1), given ${value}` + `${key} cannot equal or exceed MAX_INTEGER (2^256-1), given ${value}`, ) throw new Error(msg) } } else { if (value !== undefined && value > MAX_INTEGER) { const msg = this._errorMsg( - `${key} cannot exceed MAX_INTEGER (2^256-1), given ${value}` + `${key} cannot exceed MAX_INTEGER (2^256-1), given ${value}`, ) throw new Error(msg) } @@ -465,31 +430,6 @@ export abstract class BaseTransaction } } - protected static _validateNotArray(values: { [key: string]: any }) { - const txDataKeys = [ - 'nonce', - 'gasPrice', - 'gasLimit', - 'to', - 'value', - 'data', - 'v', - 'r', - 's', - 'type', - 'baseFee', - 'maxFeePerGas', - 'chainId', - ] - for (const [key, value] of Object.entries(values)) { - if (txDataKeys.includes(key)) { - if (Array.isArray(value)) { - throw new Error(`${key} cannot be an array`) - } - } - } - } - /** * Return a compact error string representation of the object */ diff --git a/packages/tx/src/capabilities/eip1559.ts b/packages/tx/src/capabilities/eip1559.ts index d76c744f39..7c9047ff9a 100644 --- a/packages/tx/src/capabilities/eip1559.ts +++ b/packages/tx/src/capabilities/eip1559.ts @@ -10,7 +10,7 @@ export function getUpfrontCost(tx: EIP1559CompatibleTx, baseFee: bigint): bigint export function getEffectivePriorityFee( tx: EIP1559CompatibleTx, - baseFee: bigint | undefined + baseFee: bigint | undefined, ): bigint { if (baseFee === undefined || baseFee > tx.maxFeePerGas) { throw new Error('Tx cannot pay baseFee') diff --git a/packages/tx/src/capabilities/eip2930.ts b/packages/tx/src/capabilities/eip2930.ts index 1d9daa12b5..38043b782e 100644 --- a/packages/tx/src/capabilities/eip2930.ts +++ b/packages/tx/src/capabilities/eip2930.ts @@ -7,6 +7,6 @@ import type { EIP2930CompatibleTx } from 
'../types.js' /** * The amount of gas paid for the data in this tx */ -export function getDataFee(tx: EIP2930CompatibleTx): bigint { - return Legacy.getDataFee(tx, BigInt(AccessLists.getDataFeeEIP2930(tx.accessList, tx.common))) +export function getDataGas(tx: EIP2930CompatibleTx): bigint { + return Legacy.getDataGas(tx, BigInt(AccessLists.getDataGasEIP2930(tx.accessList, tx.common))) } diff --git a/packages/tx/src/capabilities/eip7702.ts b/packages/tx/src/capabilities/eip7702.ts index 06813dc996..3839485a0b 100644 --- a/packages/tx/src/capabilities/eip7702.ts +++ b/packages/tx/src/capabilities/eip7702.ts @@ -7,10 +7,10 @@ import type { EIP7702CompatibleTx } from '../types.js' /** * The amount of gas paid for the data in this tx */ -export function getDataFee(tx: EIP7702CompatibleTx): bigint { - const eip2930Cost = BigInt(AccessLists.getDataFeeEIP2930(tx.accessList, tx.common)) +export function getDataGas(tx: EIP7702CompatibleTx): bigint { + const eip2930Cost = BigInt(AccessLists.getDataGasEIP2930(tx.accessList, tx.common)) const eip7702Cost = BigInt( - tx.authorizationList.length * Number(tx.common.param('gasPrices', 'perAuthBaseCost')) + tx.authorizationList.length * Number(tx.common.param('perAuthBaseGas')), ) - return Legacy.getDataFee(tx, eip2930Cost + eip7702Cost) + return Legacy.getDataGas(tx, eip2930Cost + eip7702Cost) } diff --git a/packages/tx/src/capabilities/legacy.ts b/packages/tx/src/capabilities/legacy.ts index e2eda09199..34ebd741dc 100644 --- a/packages/tx/src/capabilities/legacy.ts +++ b/packages/tx/src/capabilities/legacy.ts @@ -22,12 +22,12 @@ export function isSigned(tx: LegacyTxInterface): boolean { /** * The amount of gas paid for the data in this tx */ -export function getDataFee(tx: LegacyTxInterface, extraCost?: bigint): bigint { +export function getDataGas(tx: LegacyTxInterface, extraCost?: bigint): bigint { if (tx.cache.dataFee && tx.cache.dataFee.hardfork === tx.common.hardfork()) { return tx.cache.dataFee.value } - const cost = BaseTransaction.prototype.getDataFee.bind(tx)() + (extraCost ?? 0n) + const cost = BaseTransaction.prototype.getDataGas.bind(tx)() + (extraCost ?? 0n) if (Object.isFrozen(tx)) { tx.cache.dataFee = { @@ -66,7 +66,7 @@ export function validateHighS(tx: LegacyTxInterface): void { if (tx.common.gteHardfork('homestead') && s !== undefined && s > SECP256K1_ORDER_DIV_2) { const msg = errorMsg( tx, - 'Invalid Signature: s-values greater than secp256k1n/2 are considered invalid' + 'Invalid Signature: s-values greater than secp256k1n/2 are considered invalid', ) throw new Error(msg) } @@ -90,7 +90,7 @@ export function getSenderPublicKey(tx: LegacyTxInterface): Uint8Array { v!, bigIntToUnpaddedBytes(r!), bigIntToUnpaddedBytes(s!), - tx.supports(Capability.EIP155ReplayProtection) ? tx.common.chainId() : undefined + tx.supports(Capability.EIP155ReplayProtection) ? 
tx.common.chainId() : undefined, ) if (Object.isFrozen(tx)) { tx.cache.senderPubKey = sender diff --git a/packages/tx/src/eip4844Transaction.ts b/packages/tx/src/eip4844Transaction.ts deleted file mode 100644 index 31f0726583..0000000000 --- a/packages/tx/src/eip4844Transaction.ts +++ /dev/null @@ -1,659 +0,0 @@ -import { RLP } from '@ethereumjs/rlp' -import { - BIGINT_0, - BIGINT_27, - MAX_INTEGER, - bigIntToHex, - bigIntToUnpaddedBytes, - blobsToCommitments, - blobsToProofs, - bytesToBigInt, - bytesToHex, - commitmentsToVersionedHashes, - computeVersionedHash, - equalsBytes, - getBlobs, - toBytes, - validateNoLeadingZeroes, -} from '@ethereumjs/util' - -import { BaseTransaction } from './baseTransaction.js' -import * as EIP1559 from './capabilities/eip1559.js' -import * as EIP2718 from './capabilities/eip2718.js' -import * as EIP2930 from './capabilities/eip2930.js' -import * as Legacy from './capabilities/legacy.js' -import { LIMIT_BLOBS_PER_TX } from './constants.js' -import { TransactionType } from './types.js' -import { AccessLists, txTypeBytes } from './util.js' - -import type { - AccessList, - AccessListBytes, - TxData as AllTypesTxData, - TxValuesArray as AllTypesTxValuesArray, - BlobEIP4844NetworkValuesArray, - JsonBlobTxNetworkWrapper, - JsonTx, - TxOptions, -} from './types.js' -import type { Common } from '@ethereumjs/common' -import type { Kzg } from '@ethereumjs/util' - -type TxData = AllTypesTxData[TransactionType.BlobEIP4844] -type TxValuesArray = AllTypesTxValuesArray[TransactionType.BlobEIP4844] - -const validateBlobTransactionNetworkWrapper = ( - blobVersionedHashes: Uint8Array[], - blobs: Uint8Array[], - commitments: Uint8Array[], - kzgProofs: Uint8Array[], - version: number, - kzg: Kzg -) => { - if (!(blobVersionedHashes.length === blobs.length && blobs.length === commitments.length)) { - throw new Error('Number of blobVersionedHashes, blobs, and commitments not all equal') - } - if (blobVersionedHashes.length === 0) { - throw new Error('Invalid transaction with empty blobs') - } - - let isValid - try { - isValid = kzg.verifyBlobKzgProofBatch(blobs, commitments, kzgProofs) - } catch (error) { - throw new Error(`KZG verification of blobs fail with error=${error}`) - } - if (!isValid) { - throw new Error('KZG proof cannot be verified from blobs/commitments') - } - - for (let x = 0; x < blobVersionedHashes.length; x++) { - const computedVersionedHash = computeVersionedHash(commitments[x], version) - if (!equalsBytes(computedVersionedHash, blobVersionedHashes[x])) { - throw new Error(`commitment for blob at index ${x} does not match versionedHash`) - } - } -} - -/** - * Typed transaction with a new gas fee market mechanism for transactions that include "blobs" of data - * - * - TransactionType: 3 - * - EIP: [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844) - */ -export class BlobEIP4844Transaction extends BaseTransaction { - public readonly chainId: bigint - public readonly accessList: AccessListBytes - public readonly AccessListJSON: AccessList - public readonly maxPriorityFeePerGas: bigint - public readonly maxFeePerGas: bigint - public readonly maxFeePerBlobGas: bigint - - public readonly common: Common - public blobVersionedHashes: Uint8Array[] - blobs?: Uint8Array[] // This property should only be populated when the transaction is in the "Network Wrapper" format - kzgCommitments?: Uint8Array[] // This property should only be populated when the transaction is in the "Network Wrapper" format - kzgProofs?: Uint8Array[] // This property should only be populated when 
the transaction is in the "Network Wrapper" format - - /** - * This constructor takes the values, validates them, assigns them and freezes the object. - * - * It is not recommended to use this constructor directly. Instead use - * the static constructors or factory methods to assist in creating a Transaction object from - * varying data types. - */ - constructor(txData: TxData, opts: TxOptions = {}) { - super({ ...txData, type: TransactionType.BlobEIP4844 }, opts) - const { chainId, accessList, maxFeePerGas, maxPriorityFeePerGas, maxFeePerBlobGas } = txData - - this.common = this._getCommon(opts.common, chainId) - this.chainId = this.common.chainId() - - if (!this.common.isActivatedEIP(1559)) { - throw new Error('EIP-1559 not enabled on Common') - } - - if (!this.common.isActivatedEIP(4844)) { - throw new Error('EIP-4844 not enabled on Common') - } - this.activeCapabilities = this.activeCapabilities.concat([1559, 2718, 2930]) - - // Populate the access list fields - const accessListData = AccessLists.getAccessListData(accessList ?? []) - this.accessList = accessListData.accessList - this.AccessListJSON = accessListData.AccessListJSON - // Verify the access list format. - AccessLists.verifyAccessList(this.accessList) - - this.maxFeePerGas = bytesToBigInt(toBytes(maxFeePerGas)) - this.maxPriorityFeePerGas = bytesToBigInt(toBytes(maxPriorityFeePerGas)) - - this._validateCannotExceedMaxInteger({ - maxFeePerGas: this.maxFeePerGas, - maxPriorityFeePerGas: this.maxPriorityFeePerGas, - }) - - BaseTransaction._validateNotArray(txData) - - if (this.gasLimit * this.maxFeePerGas > MAX_INTEGER) { - const msg = this._errorMsg('gasLimit * maxFeePerGas cannot exceed MAX_INTEGER (2^256-1)') - throw new Error(msg) - } - - if (this.maxFeePerGas < this.maxPriorityFeePerGas) { - const msg = this._errorMsg( - 'maxFeePerGas cannot be less than maxPriorityFeePerGas (The total must be the larger of the two)' - ) - throw new Error(msg) - } - - this.maxFeePerBlobGas = bytesToBigInt( - toBytes((maxFeePerBlobGas ?? '') === '' ? '0x' : maxFeePerBlobGas) - ) - - this.blobVersionedHashes = (txData.blobVersionedHashes ?? []).map((vh) => toBytes(vh)) - EIP2718.validateYParity(this) - Legacy.validateHighS(this) - - for (const hash of this.blobVersionedHashes) { - if (hash.length !== 32) { - const msg = this._errorMsg('versioned hash is invalid length') - throw new Error(msg) - } - if (BigInt(hash[0]) !== this.common.param('sharding', 'blobCommitmentVersionKzg')) { - const msg = this._errorMsg('versioned hash does not start with KZG commitment version') - throw new Error(msg) - } - } - if (this.blobVersionedHashes.length > LIMIT_BLOBS_PER_TX) { - const msg = this._errorMsg(`tx can contain at most ${LIMIT_BLOBS_PER_TX} blobs`) - throw new Error(msg) - } else if (this.blobVersionedHashes.length === 0) { - const msg = this._errorMsg(`tx should contain at least one blob`) - throw new Error(msg) - } - if (this.to === undefined) { - const msg = this._errorMsg( - `tx should have a "to" field and cannot be used to create contracts` - ) - throw new Error(msg) - } - - this.blobs = txData.blobs?.map((blob) => toBytes(blob)) - this.kzgCommitments = txData.kzgCommitments?.map((commitment) => toBytes(commitment)) - this.kzgProofs = txData.kzgProofs?.map((proof) => toBytes(proof)) - const freeze = opts?.freeze ?? 
true - if (freeze) { - Object.freeze(this) - } - } - - public static fromTxData(txData: TxData, opts?: TxOptions) { - if (opts?.common?.customCrypto?.kzg === undefined) { - throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' - ) - } - const kzg = opts!.common!.customCrypto!.kzg! - if (txData.blobsData !== undefined) { - if (txData.blobs !== undefined) { - throw new Error('cannot have both raw blobs data and encoded blobs in constructor') - } - if (txData.kzgCommitments !== undefined) { - throw new Error('cannot have both raw blobs data and KZG commitments in constructor') - } - if (txData.blobVersionedHashes !== undefined) { - throw new Error('cannot have both raw blobs data and versioned hashes in constructor') - } - if (txData.kzgProofs !== undefined) { - throw new Error('cannot have both raw blobs data and KZG proofs in constructor') - } - txData.blobs = getBlobs(txData.blobsData.reduce((acc, cur) => acc + cur)) - txData.kzgCommitments = blobsToCommitments(kzg, txData.blobs as Uint8Array[]) - txData.blobVersionedHashes = commitmentsToVersionedHashes( - txData.kzgCommitments as Uint8Array[] - ) - txData.kzgProofs = blobsToProofs( - kzg, - txData.blobs as Uint8Array[], - txData.kzgCommitments as Uint8Array[] - ) - } - - return new BlobEIP4844Transaction(txData, opts) - } - - /** - * Returns the minimum of calculated priority fee (from maxFeePerGas and baseFee) and maxPriorityFeePerGas - * @param baseFee Base fee retrieved from block - */ - getEffectivePriorityFee(baseFee: bigint): bigint { - return EIP1559.getEffectivePriorityFee(this, baseFee) - } - - /** - * Creates the minimal representation of a blob transaction from the network wrapper version. - * The minimal representation is used when adding transactions to an execution payload/block - * @param txData a {@link BlobEIP4844Transaction} containing optional blobs/kzg commitments - * @param opts - dictionary of {@link TxOptions} - * @returns the "minimal" representation of a BlobEIP4844Transaction (i.e. transaction object minus blobs and kzg commitments) - */ - public static minimalFromNetworkWrapper( - txData: BlobEIP4844Transaction, - opts?: TxOptions - ): BlobEIP4844Transaction { - if (opts?.common?.customCrypto?.kzg === undefined) { - throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' - ) - } - - const tx = BlobEIP4844Transaction.fromTxData( - { - ...txData, - ...{ blobs: undefined, kzgCommitments: undefined, kzgProofs: undefined }, - }, - opts - ) - return tx - } - - /** - * Instantiate a transaction from the serialized tx. 
- * - * Format: `0x03 || rlp([chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, to, value, data, - * access_list, max_fee_per_data_gas, blob_versioned_hashes, y_parity, r, s])` - */ - public static fromSerializedTx(serialized: Uint8Array, opts: TxOptions = {}) { - if (opts.common?.customCrypto?.kzg === undefined) { - throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' - ) - } - - if ( - equalsBytes(serialized.subarray(0, 1), txTypeBytes(TransactionType.BlobEIP4844)) === false - ) { - throw new Error( - `Invalid serialized tx input: not an EIP-4844 transaction (wrong tx type, expected: ${ - TransactionType.BlobEIP4844 - }, received: ${bytesToHex(serialized.subarray(0, 1))}` - ) - } - - const values = RLP.decode(serialized.subarray(1)) - - if (!Array.isArray(values)) { - throw new Error('Invalid serialized tx input: must be array') - } - - return BlobEIP4844Transaction.fromValuesArray(values as TxValuesArray, opts) - } - - /** - * Create a transaction from a values array. - * - * Format: `[chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, - * accessList, signatureYParity, signatureR, signatureS]` - */ - public static fromValuesArray(values: TxValuesArray, opts: TxOptions = {}) { - if (opts.common?.customCrypto?.kzg === undefined) { - throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' - ) - } - - if (values.length !== 11 && values.length !== 14) { - throw new Error( - 'Invalid EIP-4844 transaction. Only expecting 11 values (for unsigned tx) or 14 values (for signed tx).' - ) - } - - const [ - chainId, - nonce, - maxPriorityFeePerGas, - maxFeePerGas, - gasLimit, - to, - value, - data, - accessList, - maxFeePerBlobGas, - blobVersionedHashes, - v, - r, - s, - ] = values - - this._validateNotArray({ chainId, v }) - validateNoLeadingZeroes({ - nonce, - maxPriorityFeePerGas, - maxFeePerGas, - gasLimit, - value, - maxFeePerBlobGas, - v, - r, - s, - }) - - return new BlobEIP4844Transaction( - { - chainId: bytesToBigInt(chainId), - nonce, - maxPriorityFeePerGas, - maxFeePerGas, - gasLimit, - to, - value, - data, - accessList: accessList ?? [], - maxFeePerBlobGas, - blobVersionedHashes, - v: v !== undefined ? 
bytesToBigInt(v) : undefined, // EIP2930 supports v's with value 0 (empty Uint8Array) - r, - s, - }, - opts - ) - } - - /** - * Creates a transaction from the network encoding of a blob transaction (with blobs/commitments/proof) - * @param serialized a buffer representing a serialized BlobTransactionNetworkWrapper - * @param opts any TxOptions defined - * @returns a BlobEIP4844Transaction - */ - public static fromSerializedBlobTxNetworkWrapper( - serialized: Uint8Array, - opts?: TxOptions - ): BlobEIP4844Transaction { - if (!opts || !opts.common) { - throw new Error('common instance required to validate versioned hashes') - } - - if (opts.common?.customCrypto?.kzg === undefined) { - throw new Error( - 'A common object with customCrypto.kzg initialized required to instantiate a 4844 blob tx' - ) - } - - if ( - equalsBytes(serialized.subarray(0, 1), txTypeBytes(TransactionType.BlobEIP4844)) === false - ) { - throw new Error( - `Invalid serialized tx input: not an EIP-4844 transaction (wrong tx type, expected: ${ - TransactionType.BlobEIP4844 - }, received: ${bytesToHex(serialized.subarray(0, 1))}` - ) - } - - // Validate network wrapper - const networkTxValues = RLP.decode(serialized.subarray(1)) - if (networkTxValues.length !== 4) { - throw Error(`Expected 4 values in the deserialized network transaction`) - } - const [txValues, blobs, kzgCommitments, kzgProofs] = - networkTxValues as BlobEIP4844NetworkValuesArray - - // Construct the tx but don't freeze yet, we will assign blobs etc once validated - const decodedTx = BlobEIP4844Transaction.fromValuesArray(txValues, { ...opts, freeze: false }) - if (decodedTx.to === undefined) { - throw Error('BlobEIP4844Transaction can not be send without a valid `to`') - } - - const version = Number(opts.common.param('sharding', 'blobCommitmentVersionKzg')) - validateBlobTransactionNetworkWrapper( - decodedTx.blobVersionedHashes, - blobs, - kzgCommitments, - kzgProofs, - version, - opts.common.customCrypto.kzg - ) - - // set the network blob data on the tx - decodedTx.blobs = blobs - decodedTx.kzgCommitments = kzgCommitments - decodedTx.kzgProofs = kzgProofs - - // freeze the tx - const freeze = opts?.freeze ?? true - if (freeze) { - Object.freeze(decodedTx) - } - - return decodedTx - } - - /** - * The amount of gas paid for the data in this tx - */ - getDataFee(): bigint { - return EIP2930.getDataFee(this) - } - - /** - * The up front amount that an account must have for this transaction to be valid - * @param baseFee The base fee of the block (will be set to 0 if not provided) - */ - getUpfrontCost(baseFee: bigint = BIGINT_0): bigint { - return EIP1559.getUpfrontCost(this, baseFee) - } - - /** - * Returns a Uint8Array Array of the raw Bytes of the EIP-4844 transaction, in order. - * - * Format: [chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, to, value, data, - * access_list, max_fee_per_data_gas, blob_versioned_hashes, y_parity, r, s]`. - * - * Use {@link BlobEIP4844Transaction.serialize} to add a transaction to a block - * with {@link createBlockFromValuesArray}. - * - * For an unsigned tx this method uses the empty Bytes values for the - * signature parameters `v`, `r` and `s` for encoding. For an EIP-155 compliant - * representation for external signing use {@link BlobEIP4844Transaction.getMessageToSign}. 
- */ - raw(): TxValuesArray { - return [ - bigIntToUnpaddedBytes(this.chainId), - bigIntToUnpaddedBytes(this.nonce), - bigIntToUnpaddedBytes(this.maxPriorityFeePerGas), - bigIntToUnpaddedBytes(this.maxFeePerGas), - bigIntToUnpaddedBytes(this.gasLimit), - this.to !== undefined ? this.to.bytes : new Uint8Array(0), - bigIntToUnpaddedBytes(this.value), - this.data, - this.accessList, - bigIntToUnpaddedBytes(this.maxFeePerBlobGas), - this.blobVersionedHashes, - this.v !== undefined ? bigIntToUnpaddedBytes(this.v) : new Uint8Array(0), - this.r !== undefined ? bigIntToUnpaddedBytes(this.r) : new Uint8Array(0), - this.s !== undefined ? bigIntToUnpaddedBytes(this.s) : new Uint8Array(0), - ] - } - - /** - * Returns the serialized encoding of the EIP-4844 transaction. - * - * Format: `0x03 || rlp([chainId, nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, data, - * access_list, max_fee_per_data_gas, blob_versioned_hashes, y_parity, r, s])`. - * - * Note that in contrast to the legacy tx serialization format this is not - * valid RLP any more due to the raw tx type preceding and concatenated to - * the RLP encoding of the values. - */ - serialize(): Uint8Array { - return EIP2718.serialize(this) - } - - /** - * @returns the serialized form of a blob transaction in the network wrapper format (used for gossipping mempool transactions over devp2p) - */ - serializeNetworkWrapper(): Uint8Array { - if ( - this.blobs === undefined || - this.kzgCommitments === undefined || - this.kzgProofs === undefined - ) { - throw new Error( - 'cannot serialize network wrapper without blobs, KZG commitments and KZG proofs provided' - ) - } - - return EIP2718.serialize(this, [this.raw(), this.blobs, this.kzgCommitments, this.kzgProofs]) - } - - /** - * Returns the raw serialized unsigned tx, which can be used - * to sign the transaction (e.g. for sending to a hardware wallet). - * - * Note: in contrast to the legacy tx the raw message format is already - * serialized and doesn't need to be RLP encoded any more. - * - * ```javascript - * const serializedMessage = tx.getMessageToSign() // use this for the HW wallet input - * ``` - */ - getMessageToSign(): Uint8Array { - return EIP2718.serialize(this, this.raw().slice(0, 11)) - } - - /** - * Returns the hashed serialized unsigned tx, which can be used - * to sign the transaction (e.g. for sending to a hardware wallet). - * - * Note: in contrast to the legacy tx the raw message format is already - * serialized and doesn't need to be RLP encoded any more. - */ - getHashedMessageToSign(): Uint8Array { - return EIP2718.getHashedMessageToSign(this) - } - - /** - * Computes a sha3-256 hash of the serialized tx. - * - * This method can only be used for signed txs (it throws otherwise). - * Use {@link BlobEIP4844Transaction.getMessageToSign} to get a tx hash for the purpose of signing. 
- */ - public hash(): Uint8Array { - return Legacy.hash(this) - } - - getMessageToVerifySignature(): Uint8Array { - return this.getHashedMessageToSign() - } - - /** - * Returns the public key of the sender - */ - public getSenderPublicKey(): Uint8Array { - return Legacy.getSenderPublicKey(this) - } - - /** - * Returns the EIP 4844 transaction network wrapper in JSON format similar to toJSON, including - * blobs, commitments, and proofs fields - * @param serialized a buffer representing a serialized BlobTransactionNetworkWrapper - * @param opts any TxOptions defined - * @returns JsonBlobTxNetworkWrapper with blobs, KZG commitments, and KZG proofs fields - */ - public static networkWrapperToJson( - serialized: Uint8Array, - opts?: TxOptions - ): JsonBlobTxNetworkWrapper { - const tx = this.fromSerializedBlobTxNetworkWrapper(serialized, opts) - - const accessListJSON = AccessLists.getAccessListJSON(tx.accessList) - const baseJson = tx.toJSON() - - return { - ...baseJson, - chainId: bigIntToHex(tx.chainId), - maxPriorityFeePerGas: bigIntToHex(tx.maxPriorityFeePerGas), - maxFeePerGas: bigIntToHex(tx.maxFeePerGas), - accessList: accessListJSON, - maxFeePerBlobGas: bigIntToHex(tx.maxFeePerBlobGas), - blobVersionedHashes: tx.blobVersionedHashes.map((hash) => bytesToHex(hash)), - blobs: tx.blobs!.map((bytes) => bytesToHex(bytes)), - kzgCommitments: tx.kzgCommitments!.map((bytes) => bytesToHex(bytes)), - kzgProofs: tx.kzgProofs!.map((bytes) => bytesToHex(bytes)), - } - } - - toJSON(): JsonTx { - const accessListJSON = AccessLists.getAccessListJSON(this.accessList) - const baseJson = super.toJSON() - - return { - ...baseJson, - chainId: bigIntToHex(this.chainId), - maxPriorityFeePerGas: bigIntToHex(this.maxPriorityFeePerGas), - maxFeePerGas: bigIntToHex(this.maxFeePerGas), - accessList: accessListJSON, - maxFeePerBlobGas: bigIntToHex(this.maxFeePerBlobGas), - blobVersionedHashes: this.blobVersionedHashes.map((hash) => bytesToHex(hash)), - } - } - - addSignature( - v: bigint, - r: Uint8Array | bigint, - s: Uint8Array | bigint, - convertV: boolean = false - ): BlobEIP4844Transaction { - r = toBytes(r) - s = toBytes(s) - const opts = { ...this.txOptions, common: this.common } - - return BlobEIP4844Transaction.fromTxData( - { - chainId: this.chainId, - nonce: this.nonce, - maxPriorityFeePerGas: this.maxPriorityFeePerGas, - maxFeePerGas: this.maxFeePerGas, - gasLimit: this.gasLimit, - to: this.to, - value: this.value, - data: this.data, - accessList: this.accessList, - v: convertV ? v - BIGINT_27 : v, // This looks extremely hacky: @ethereumjs/util actually adds 27 to the value, the recovery bit is either 0 or 1. 
- r: bytesToBigInt(r), - s: bytesToBigInt(s), - maxFeePerBlobGas: this.maxFeePerBlobGas, - blobVersionedHashes: this.blobVersionedHashes, - blobs: this.blobs, - kzgCommitments: this.kzgCommitments, - kzgProofs: this.kzgProofs, - }, - opts - ) - } - /** - * Return a compact error string representation of the object - */ - public errorStr() { - let errorStr = this._getSharedErrorPostfix() - errorStr += ` maxFeePerGas=${this.maxFeePerGas} maxPriorityFeePerGas=${this.maxPriorityFeePerGas}` - return errorStr - } - - /** - * Internal helper function to create an annotated error message - * - * @param msg Base error message - * @hidden - */ - protected _errorMsg(msg: string) { - return Legacy.errorMsg(this, msg) - } - - /** - * @returns the number of blobs included with this transaction - */ - public numBlobs(): number { - return this.blobVersionedHashes.length - } -} diff --git a/packages/tx/src/fromRpc.ts b/packages/tx/src/fromRpc.ts deleted file mode 100644 index 6b69bf9957..0000000000 --- a/packages/tx/src/fromRpc.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { TypeOutput, setLengthLeft, toBytes, toType } from '@ethereumjs/util' - -import type { TypedTxData } from './types.js' - -export const normalizeTxParams = (_txParams: any): TypedTxData => { - const txParams = Object.assign({}, _txParams) - - txParams.gasLimit = toType(txParams.gasLimit ?? txParams.gas, TypeOutput.BigInt) - txParams.data = txParams.data === undefined ? txParams.input : txParams.data - - // check and convert gasPrice and value params - txParams.gasPrice = txParams.gasPrice !== undefined ? BigInt(txParams.gasPrice) : undefined - txParams.value = txParams.value !== undefined ? BigInt(txParams.value) : undefined - - // strict byte length checking - txParams.to = - txParams.to !== null && txParams.to !== undefined - ? setLengthLeft(toBytes(txParams.to), 20) - : null - - // Normalize the v/r/s values. If RPC returns '0x0', ensure v/r/s are set to `undefined` in the tx. - // If this is not done, then the transaction creation will throw, because `v` is `0`. - // Note: this still means that `isSigned` will return `false`. - // v/r/s values are `0x0` on networks like Optimism, where the tx is a system tx. - // For instance: https://optimistic.etherscan.io/tx/0xf4304cb09b3f58a8e5d20fec5f393c96ccffe0269aaf632cb2be7a8a0f0c91cc - - txParams.v = txParams.v === '0x0' ? '0x' : txParams.v - txParams.r = txParams.r === '0x0' ? '0x' : txParams.r - txParams.s = txParams.s === '0x0' ? 
'0x' : txParams.s - - if (txParams.v !== '0x' || txParams.r !== '0x' || txParams.s !== '0x') { - txParams.v = toType(txParams.v, TypeOutput.BigInt) - } - - return txParams -} diff --git a/packages/tx/src/index.ts b/packages/tx/src/index.ts index ff79af716d..efadb29f92 100644 --- a/packages/tx/src/index.ts +++ b/packages/tx/src/index.ts @@ -1,7 +1,23 @@ -export { FeeMarketEIP1559Transaction } from './eip1559Transaction.js' -export { AccessListEIP2930Transaction } from './eip2930Transaction.js' -export { BlobEIP4844Transaction } from './eip4844Transaction.js' -export { EOACodeEIP7702Transaction } from './eip7702Transaction.js' -export { LegacyTransaction } from './legacyTransaction.js' -export { TransactionFactory } from './transactionFactory.js' +// Tx constructors +export * from './1559/index.js' +export * from './2930/index.js' +export * from './4844/index.js' +export * from './7702/index.js' +export * from './legacy/index.js' +// Parameters +export * from './params.js' + +// Transaction factory +export { + createTxFromBlockBodyData, + createTxFromJsonRpcProvider, + createTxFromRPC, + createTxFromSerializedData, + createTxFromTxData, +} from './transactionFactory.js' + +// Types export * from './types.js' + +// Utils +export * from './util.js' diff --git a/packages/tx/src/legacy/constructors.ts b/packages/tx/src/legacy/constructors.ts new file mode 100644 index 0000000000..f69acedb25 --- /dev/null +++ b/packages/tx/src/legacy/constructors.ts @@ -0,0 +1,69 @@ +import { RLP } from '@ethereumjs/rlp' +import { validateNoLeadingZeroes } from '@ethereumjs/util' + +import { LegacyTx } from './tx.js' + +import type { TxOptions } from '../types.js' +import type { TxData, TxValuesArray } from './tx.js' + +/** + * Instantiate a transaction from a data dictionary. + * + * Format: { nonce, gasPrice, gasLimit, to, value, data, v, r, s } + * + * Notes: + * - All parameters are optional and have some basic default values + */ +export function createLegacyTx(txData: TxData, opts: TxOptions = {}) { + return new LegacyTx(txData, opts) +} + +/** + * Create a transaction from an array of byte encoded values ordered according to the devp2p network encoding - format noted below. + * + * Format: `[nonce, gasPrice, gasLimit, to, value, data, v, r, s]` + */ +export function createLegacyTxFromBytesArray(values: TxValuesArray, opts: TxOptions = {}) { + // If length is not 6, it has length 9. If v/r/s are empty Uint8Arrays, it is still an unsigned transaction + // This happens if you get the RLP data from `raw()` + if (values.length !== 6 && values.length !== 9) { + throw new Error( + 'Invalid transaction. Only expecting 6 values (for unsigned tx) or 9 values (for signed tx).', + ) + } + + const [nonce, gasPrice, gasLimit, to, value, data, v, r, s] = values + + validateNoLeadingZeroes({ nonce, gasPrice, gasLimit, value, v, r, s }) + + return new LegacyTx( + { + nonce, + gasPrice, + gasLimit, + to, + value, + data, + v, + r, + s, + }, + opts, + ) +} + +/** + * Instantiate a transaction from a RLP serialized tx. + * + * Format: `rlp([nonce, gasPrice, gasLimit, to, value, data, + * signatureV, signatureR, signatureS])` + */ +export function createLegacyTxFromRLP(serialized: Uint8Array, opts: TxOptions = {}) { + const values = RLP.decode(serialized) + + if (!Array.isArray(values)) { + throw new Error('Invalid serialized tx input. 
Must be array') + } + + return createLegacyTxFromBytesArray(values as TxValuesArray, opts) +} diff --git a/packages/tx/src/legacy/index.ts b/packages/tx/src/legacy/index.ts new file mode 100644 index 0000000000..bec97c0e53 --- /dev/null +++ b/packages/tx/src/legacy/index.ts @@ -0,0 +1,2 @@ +export * from './constructors.js' +export { LegacyTx } from './tx.js' diff --git a/packages/tx/src/legacyTransaction.ts b/packages/tx/src/legacy/tx.ts similarity index 71% rename from packages/tx/src/legacyTransaction.ts rename to packages/tx/src/legacy/tx.ts index 74b9c7bdea..1dedfc519e 100644 --- a/packages/tx/src/legacyTransaction.ts +++ b/packages/tx/src/legacy/tx.ts @@ -1,3 +1,4 @@ +import { Common } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { BIGINT_2, @@ -8,24 +9,26 @@ import { bytesToBigInt, toBytes, unpadBytes, - validateNoLeadingZeroes, } from '@ethereumjs/util' import { keccak256 } from 'ethereum-cryptography/keccak.js' -import { BaseTransaction } from './baseTransaction.js' -import * as Legacy from './capabilities/legacy.js' -import { Capability, TransactionType } from './types.js' +import { BaseTransaction } from '../baseTransaction.js' +import * as Legacy from '../capabilities/legacy.js' +import { paramsTx } from '../index.js' +import { Capability, TransactionType } from '../types.js' +import { validateNotArray } from '../util.js' + +import { createLegacyTx } from './constructors.js' import type { TxData as AllTypesTxData, TxValuesArray as AllTypesTxValuesArray, JsonTx, TxOptions, -} from './types.js' -import type { Common } from '@ethereumjs/common' +} from '../types.js' -type TxData = AllTypesTxData[TransactionType.Legacy] -type TxValuesArray = AllTypesTxValuesArray[TransactionType.Legacy] +export type TxData = AllTypesTxData[TransactionType.Legacy] +export type TxValuesArray = AllTypesTxValuesArray[TransactionType.Legacy] function meetsEIP155(_v: bigint, chainId: bigint) { const v = Number(_v) @@ -36,73 +39,12 @@ function meetsEIP155(_v: bigint, chainId: bigint) { /** * An Ethereum non-typed (legacy) transaction */ -export class LegacyTransaction extends BaseTransaction { +export class LegacyTx extends BaseTransaction { public readonly gasPrice: bigint public readonly common: Common private keccakFunction: (msg: Uint8Array) => Uint8Array - /** - * Instantiate a transaction from a data dictionary. - * - * Format: { nonce, gasPrice, gasLimit, to, value, data, v, r, s } - * - * Notes: - * - All parameters are optional and have some basic default values - */ - public static fromTxData(txData: TxData, opts: TxOptions = {}) { - return new LegacyTransaction(txData, opts) - } - - /** - * Instantiate a transaction from the serialized tx. - * - * Format: `rlp([nonce, gasPrice, gasLimit, to, value, data, v, r, s])` - */ - public static fromSerializedTx(serialized: Uint8Array, opts: TxOptions = {}) { - const values = RLP.decode(serialized) - - if (!Array.isArray(values)) { - throw new Error('Invalid serialized tx input. Must be array') - } - - return this.fromValuesArray(values as TxValuesArray, opts) - } - - /** - * Create a transaction from a values array. - * - * Format: `[nonce, gasPrice, gasLimit, to, value, data, v, r, s]` - */ - public static fromValuesArray(values: TxValuesArray, opts: TxOptions = {}) { - // If length is not 6, it has length 9. If v/r/s are empty Uint8Arrays, it is still an unsigned transaction - // This happens if you get the RLP data from `raw()` - if (values.length !== 6 && values.length !== 9) { - throw new Error( - 'Invalid transaction. 
Only expecting 6 values (for unsigned tx) or 9 values (for signed tx).' - ) - } - - const [nonce, gasPrice, gasLimit, to, value, data, v, r, s] = values - - validateNoLeadingZeroes({ nonce, gasPrice, gasLimit, value, v, r, s }) - - return new LegacyTransaction( - { - nonce, - gasPrice, - gasLimit, - to, - value, - data, - v, - r, - s, - }, - opts - ) - } - /** * This constructor takes the values, validates them, assigns them and freezes the object. * @@ -113,7 +55,15 @@ export class LegacyTransaction extends BaseTransaction { public constructor(txData: TxData, opts: TxOptions = {}) { super({ ...txData, type: TransactionType.Legacy }, opts) - this.common = this._validateTxV(this.v, opts.common) + this.common = opts.common?.copy() ?? new Common({ chain: this.DEFAULT_CHAIN }) + const chainId = this._validateTxV(this.common, this.v) + if (chainId !== undefined && chainId !== this.common.chainId()) { + throw new Error( + `Common chain ID ${this.common.chainId} not matching the derived chain ID ${chainId}`, + ) + } + + this.common.updateParams(opts.params ?? paramsTx) this.keccakFunction = this.common.customCrypto.keccak256 ?? keccak256 this.gasPrice = bytesToBigInt(toBytes(txData.gasPrice)) @@ -123,7 +73,7 @@ export class LegacyTransaction extends BaseTransaction { } this._validateCannotExceedMaxInteger({ gasPrice: this.gasPrice }) - BaseTransaction._validateNotArray(txData) + validateNotArray(txData) if (this.common.gteHardfork('spuriousDragon')) { if (!this.isSigned()) { @@ -157,7 +107,7 @@ export class LegacyTransaction extends BaseTransaction { * Format: `[nonce, gasPrice, gasLimit, to, value, data, v, r, s]` * * For legacy txs this is also the correct format to add transactions - * to a block with {@link createBlockFromValuesArray} (use the `serialize()` method + * to a block with {@link createBlockFromBytesArray} (use the `serialize()` method * for typed txs). * * For an unsigned tx this method returns the empty Bytes values @@ -235,8 +185,8 @@ export class LegacyTransaction extends BaseTransaction { /** * The amount of gas paid for the data in this tx */ - getDataFee(): bigint { - return Legacy.getDataFee(this) + getDataGas(): bigint { + return Legacy.getDataGas(this) } /** @@ -278,8 +228,8 @@ export class LegacyTransaction extends BaseTransaction { v: bigint, r: Uint8Array | bigint, s: Uint8Array | bigint, - convertV: boolean = false - ): LegacyTransaction { + convertV: boolean = false, + ): LegacyTx { r = toBytes(r) s = toBytes(s) if (convertV && this.supports(Capability.EIP155ReplayProtection)) { @@ -288,7 +238,7 @@ export class LegacyTransaction extends BaseTransaction { const opts = { ...this.txOptions, common: this.common } - return LegacyTransaction.fromTxData( + return createLegacyTx( { nonce: this.nonce, gasPrice: this.gasPrice, @@ -300,7 +250,7 @@ export class LegacyTransaction extends BaseTransaction { r: bytesToBigInt(r), s: bytesToBigInt(s), }, - opts + opts, ) } @@ -318,7 +268,7 @@ export class LegacyTransaction extends BaseTransaction { /** * Validates tx's `v` value */ - protected _validateTxV(_v?: bigint, common?: Common): Common { + protected _validateTxV(common: Common, _v?: bigint): BigInt | undefined { let chainIdBigInt const v = _v !== undefined ? Number(_v) : undefined // Check for valid v values in the scope of a signed legacy tx @@ -327,7 +277,7 @@ export class LegacyTransaction extends BaseTransaction { // v is 2. 
not matching the classic v=27 or v=28 case if (v < 37 && v !== 27 && v !== 28) { throw new Error( - `Legacy txs need either v = 27/28 or v >= 37 (EIP-155 replay protection), got v = ${v}` + `Legacy txs need either v = 27/28 or v >= 37 (EIP-155 replay protection), got v = ${v}`, ) } } @@ -336,29 +286,26 @@ export class LegacyTransaction extends BaseTransaction { if ( v !== undefined && v !== 0 && - (!common || common.gteHardfork('spuriousDragon')) && + common.gteHardfork('spuriousDragon') && v !== 27 && v !== 28 ) { - if (common) { - if (!meetsEIP155(BigInt(v), common.chainId())) { - throw new Error( - `Incompatible EIP155-based V ${v} and chain id ${common.chainId()}. See the Common parameter of the Transaction constructor to set the chain id.` - ) - } + if (!meetsEIP155(BigInt(v), common.chainId())) { + throw new Error( + `Incompatible EIP155-based V ${v} and chain id ${common.chainId()}. See the Common parameter of the Transaction constructor to set the chain id.`, + ) + } + // Derive the original chain ID + let numSub + if ((v - 35) % 2 === 0) { + numSub = 35 } else { - // Derive the original chain ID - let numSub - if ((v - 35) % 2 === 0) { - numSub = 35 - } else { - numSub = 36 - } - // Use derived chain ID to create a proper Common - chainIdBigInt = BigInt(v - numSub) / BIGINT_2 + numSub = 36 } + // Use derived chain ID to create a proper Common + chainIdBigInt = BigInt(v - numSub) / BIGINT_2 } - return this._getCommon(common, chainIdBigInt) + return chainIdBigInt } /** diff --git a/packages/tx/src/params.ts b/packages/tx/src/params.ts new file mode 100644 index 0000000000..02f487b583 --- /dev/null +++ b/packages/tx/src/params.ts @@ -0,0 +1,46 @@ +import type { ParamsDict } from '@ethereumjs/common' + +export const paramsTx: ParamsDict = { + /** + * Frontier/Chainstart + */ + 1: { + // gasPrices + txGas: 21000, // Per transaction. NOTE: Not payable on data of calls between transactions + txCreationGas: 32000, // The cost of creating a contract via tx + txDataZeroGas: 4, // Per byte of data attached to a transaction that equals zero. NOTE: Not payable on data of calls between transactions + txDataNonZeroGas: 68, // Per byte of data attached to a transaction that is not equal to zero. NOTE: Not payable on data of calls between transactions + accessListStorageKeyGas: 0, + accessListAddressGas: 0, + }, + /** +. * Istanbul HF Meta EIP +. */ + 1679: { + // gasPrices + txDataNonZeroGas: 16, // Per byte of data attached to a transaction that is not equal to zero. NOTE: Not payable on data of calls between transactions + }, + /** +. * Optional access lists +. */ + 2930: { + // gasPrices + accessListStorageKeyGas: 1900, // Gas cost per storage key in an Access List transaction + accessListAddressGas: 2400, // Gas cost per storage key in an Access List transaction + }, + /** +. * Limit and meter initcode +. */ + 3860: { + // gasPrices + initCodeWordGas: 2, // Gas to pay for each word (32 bytes) of initcode when creating a contract + // format + maxInitCodeSize: 49152, // Maximum length of initialization code when creating a contract + }, + /** +. * Shard Blob Transactions +. 
*/ + 4844: { + blobCommitmentVersionKzg: 1, // The number indicated a versioned hash is a KZG commitment + }, +} diff --git a/packages/tx/src/transactionFactory.ts b/packages/tx/src/transactionFactory.ts index 4a91fe2e9c..68d04a2a68 100644 --- a/packages/tx/src/transactionFactory.ts +++ b/packages/tx/src/transactionFactory.ts @@ -1,140 +1,141 @@ import { fetchFromProvider, getProvider } from '@ethereumjs/util' -import { FeeMarketEIP1559Transaction } from './eip1559Transaction.js' -import { AccessListEIP2930Transaction } from './eip2930Transaction.js' -import { BlobEIP4844Transaction } from './eip4844Transaction.js' -import { EOACodeEIP7702Transaction } from './eip7702Transaction.js' -import { normalizeTxParams } from './fromRpc.js' -import { LegacyTransaction } from './legacyTransaction.js' +import { createFeeMarket1559Tx, createFeeMarket1559TxFromRLP } from './1559/constructors.js' +import { createAccessList2930Tx, createAccessList2930TxFromRLP } from './2930/constructors.js' +import { createBlob4844Tx, createBlob4844TxFromRLP } from './4844/constructors.js' +import { createEOACode7702Tx, createEOACode7702TxFromRLP } from './7702/constructors.js' +import { + createLegacyTx, + createLegacyTxFromBytesArray, + createLegacyTxFromRLP, +} from './legacy/constructors.js' import { TransactionType, - isAccessListEIP2930TxData, - isBlobEIP4844TxData, - isEOACodeEIP7702TxData, - isFeeMarketEIP1559TxData, + isAccessList2930TxData, + isBlob4844TxData, + isEOACode7702TxData, + isFeeMarket1559TxData, isLegacyTxData, } from './types.js' +import { normalizeTxParams } from './util.js' import type { Transaction, TxData, TxOptions, TypedTxData } from './types.js' import type { EthersProvider } from '@ethereumjs/util' - -export class TransactionFactory { - // It is not possible to instantiate a TransactionFactory object. - private constructor() {} - - /** - * Create a transaction from a `txData` object - * - * @param txData - The transaction data. The `type` field will determine which transaction type is returned (if undefined, creates a legacy transaction) - * @param txOptions - Options to pass on to the constructor of the transaction - */ - public static fromTxData( - txData: TypedTxData, - txOptions: TxOptions = {} - ): Transaction[T] { - if (!('type' in txData) || txData.type === undefined) { - // Assume legacy transaction - return LegacyTransaction.fromTxData(txData, txOptions) as Transaction[T] +/** + * Create a transaction from a `txData` object + * + * @param txData - The transaction data. 
The `type` field will determine which transaction type is returned (if undefined, creates a legacy transaction) + * @param txOptions - Options to pass on to the constructor of the transaction + */ +export function createTxFromTxData( + txData: TypedTxData, + txOptions: TxOptions = {}, +): Transaction[T] { + if (!('type' in txData) || txData.type === undefined) { + // Assume legacy transaction + return createLegacyTx(txData, txOptions) as Transaction[T] + } else { + if (isLegacyTxData(txData)) { + return createLegacyTx(txData, txOptions) as Transaction[T] + } else if (isAccessList2930TxData(txData)) { + return createAccessList2930Tx(txData, txOptions) as Transaction[T] + } else if (isFeeMarket1559TxData(txData)) { + return createFeeMarket1559Tx(txData, txOptions) as Transaction[T] + } else if (isBlob4844TxData(txData)) { + return createBlob4844Tx(txData, txOptions) as Transaction[T] + } else if (isEOACode7702TxData(txData)) { + return createEOACode7702Tx(txData, txOptions) as Transaction[T] } else { - if (isLegacyTxData(txData)) { - return LegacyTransaction.fromTxData(txData, txOptions) as Transaction[T] - } else if (isAccessListEIP2930TxData(txData)) { - return AccessListEIP2930Transaction.fromTxData(txData, txOptions) as Transaction[T] - } else if (isFeeMarketEIP1559TxData(txData)) { - return FeeMarketEIP1559Transaction.fromTxData(txData, txOptions) as Transaction[T] - } else if (isBlobEIP4844TxData(txData)) { - return BlobEIP4844Transaction.fromTxData(txData, txOptions) as Transaction[T] - } else if (isEOACodeEIP7702TxData(txData)) { - return EOACodeEIP7702Transaction.fromTxData(txData, txOptions) as Transaction[T] - } else { - throw new Error(`Tx instantiation with type ${(txData as TypedTxData)?.type} not supported`) - } + throw new Error(`Tx instantiation with type ${(txData as TypedTxData)?.type} not supported`) } } +} - /** - * This method tries to decode serialized data. - * - * @param data - The data Uint8Array - * @param txOptions - The transaction options - */ - public static fromSerializedData( - data: Uint8Array, - txOptions: TxOptions = {} - ): Transaction[T] { - if (data[0] <= 0x7f) { - // Determine the type. - switch (data[0]) { - case TransactionType.AccessListEIP2930: - return AccessListEIP2930Transaction.fromSerializedTx(data, txOptions) as Transaction[T] - case TransactionType.FeeMarketEIP1559: - return FeeMarketEIP1559Transaction.fromSerializedTx(data, txOptions) as Transaction[T] - case TransactionType.BlobEIP4844: - return BlobEIP4844Transaction.fromSerializedTx(data, txOptions) as Transaction[T] - case TransactionType.EOACodeEIP7702: - return EOACodeEIP7702Transaction.fromSerializedTx(data, txOptions) as Transaction[T] - default: - throw new Error(`TypedTransaction with ID ${data[0]} unknown`) - } - } else { - return LegacyTransaction.fromSerializedTx(data, txOptions) as Transaction[T] +/** + * This method tries to decode serialized data. + * + * @param data - The data Uint8Array + * @param txOptions - The transaction options + */ +export function createTxFromSerializedData( + data: Uint8Array, + txOptions: TxOptions = {}, +): Transaction[T] { + if (data[0] <= 0x7f) { + // Determine the type. 
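> Editor's note: at this point the new factory dispatches on the leading byte of the serialized data (a byte `<= 0x7f` selects the typed-tx branch, anything else is treated as an RLP-encoded legacy tx). A minimal caller-side sketch of the standalone functions that replace the `TransactionFactory` statics — package-level import paths and the example values are assumptions, not part of this diff:

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import { createLegacyTx, createTxFromSerializedData } from '@ethereumjs/tx'

const common = new Common({ chain: Mainnet, hardfork: Hardfork.London })

// Any serialized tx can be decoded without knowing its type up front:
// a leading byte <= 0x7f selects the typed-tx switch below, everything
// else is decoded as an RLP-encoded legacy tx
const serialized = createLegacyTx({ gasPrice: 100, gasLimit: 21000 }, { common }).serialize()
const tx = createTxFromSerializedData(serialized, { common })
console.log(tx.type) // 0 (legacy)
```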
+ switch (data[0]) { + case TransactionType.AccessListEIP2930: + return createAccessList2930TxFromRLP(data, txOptions) as Transaction[T] + case TransactionType.FeeMarketEIP1559: + return createFeeMarket1559TxFromRLP(data, txOptions) as Transaction[T] + case TransactionType.BlobEIP4844: + return createBlob4844TxFromRLP(data, txOptions) as Transaction[T] + case TransactionType.EOACodeEIP7702: + return createEOACode7702TxFromRLP(data, txOptions) as Transaction[T] + default: + throw new Error(`TypedTransaction with ID ${data[0]} unknown`) } + } else { + return createLegacyTxFromRLP(data, txOptions) as Transaction[T] } +} - /** - * When decoding a BlockBody, in the transactions field, a field is either: - * A Uint8Array (a TypedTransaction - encoded as TransactionType || rlp(TransactionPayload)) - * A Uint8Array[] (Legacy Transaction) - * This method returns the right transaction. - * - * @param data - A Uint8Array or Uint8Array[] - * @param txOptions - The transaction options - */ - public static fromBlockBodyData(data: Uint8Array | Uint8Array[], txOptions: TxOptions = {}) { - if (data instanceof Uint8Array) { - return this.fromSerializedData(data, txOptions) - } else if (Array.isArray(data)) { - // It is a legacy transaction - return LegacyTransaction.fromValuesArray(data, txOptions) - } else { - throw new Error('Cannot decode transaction: unknown type input') - } +/** + * When decoding a BlockBody, in the transactions field, a field is either: + * A Uint8Array (a TypedTransaction - encoded as TransactionType || rlp(TransactionPayload)) + * A Uint8Array[] (Legacy Transaction) + * This method returns the right transaction. + * + * @param data - A Uint8Array or Uint8Array[] + * @param txOptions - The transaction options + */ +export function createTxFromBlockBodyData( + data: Uint8Array | Uint8Array[], + txOptions: TxOptions = {}, +) { + if (data instanceof Uint8Array) { + return createTxFromSerializedData(data, txOptions) + } else if (Array.isArray(data)) { + // It is a legacy transaction + return createLegacyTxFromBytesArray(data, txOptions) + } else { + throw new Error('Cannot decode transaction: unknown type input') } +} - /** - * Method to retrieve a transaction from the provider - * @param provider - a url string for a JSON-RPC provider or an Ethers JsonRPCProvider object - * @param txHash - Transaction hash - * @param txOptions - The transaction options - * @returns the transaction specified by `txHash` - */ - public static async fromJsonRpcProvider( - provider: string | EthersProvider, - txHash: string, - txOptions?: TxOptions - ) { - const prov = getProvider(provider) - const txData = await fetchFromProvider(prov, { - method: 'eth_getTransactionByHash', - params: [txHash], - }) - if (txData === null) { - throw new Error('No data returned from provider') - } - return TransactionFactory.fromRPC(txData, txOptions) - } +/** + * Method to decode data retrieved from RPC, such as `eth_getTransactionByHash` + * Note that this normalizes some of the parameters + * @param txData The RPC-encoded data + * @param txOptions The transaction options + * @returns + */ +export async function createTxFromRPC( + txData: TxData[T], + txOptions: TxOptions = {}, +): Promise { + return createTxFromTxData(normalizeTxParams(txData), txOptions) +} - /** - * Method to decode data retrieved from RPC, such as `eth_getTransactionByHash` - * Note that this normalizes some of the parameters - * @param txData The RPC-encoded data - * @param txOptions The transaction options - * @returns - */ - public static async 
fromRPC( - txData: TxData[T], - txOptions: TxOptions = {} - ): Promise { - return TransactionFactory.fromTxData(normalizeTxParams(txData), txOptions) +/** + * Method to retrieve a transaction from the provider + * @param provider - a url string for a JSON-RPC provider or an Ethers JsonRPCProvider object + * @param txHash - Transaction hash + * @param txOptions - The transaction options + * @returns the transaction specified by `txHash` + */ +export async function createTxFromJsonRpcProvider( + provider: string | EthersProvider, + txHash: string, + txOptions?: TxOptions, +) { + const prov = getProvider(provider) + const txData = await fetchFromProvider(prov, { + method: 'eth_getTransactionByHash', + params: [txHash], + }) + if (txData === null) { + throw new Error('No data returned from provider') } + return createTxFromRPC(txData, txOptions) } diff --git a/packages/tx/src/types.ts b/packages/tx/src/types.ts index f37ee76b9a..b27d56873e 100644 --- a/packages/tx/src/types.ts +++ b/packages/tx/src/types.ts @@ -1,18 +1,11 @@ import { bytesToBigInt, toBytes } from '@ethereumjs/util' -import type { FeeMarketEIP1559Transaction } from './eip1559Transaction.js' -import type { AccessListEIP2930Transaction } from './eip2930Transaction.js' -import type { BlobEIP4844Transaction } from './eip4844Transaction.js' -import type { EOACodeEIP7702Transaction } from './eip7702Transaction.js' -import type { LegacyTransaction } from './legacyTransaction.js' -import type { - AccessList, - AccessListBytes, - AuthorizationList, - AuthorizationListBytes, - Common, - Hardfork, -} from '@ethereumjs/common' +import type { FeeMarket1559Tx } from './1559/tx.js' +import type { AccessList2930Transaction } from './2930/tx.js' +import type { Blob4844Tx } from './4844/tx.js' +import type { EOACode7702Transaction } from './7702/tx.js' +import type { LegacyTx } from './legacy/tx.js' +import type { Common, Hardfork, ParamsDict } from '@ethereumjs/common' import type { Address, AddressLike, @@ -20,17 +13,6 @@ import type { BytesLike, PrefixedHexString, } from '@ethereumjs/util' -export type { - AccessList, - AccessListBytes, - AccessListBytesItem, - AccessListItem, - AuthorizationList, - AuthorizationListBytes, - AuthorizationListBytesItem, - AuthorizationListItem, -} from '@ethereumjs/common' - /** * Can be used in conjunction with {@link Transaction[TransactionType].supports} * to query on tx capabilities @@ -82,6 +64,23 @@ export interface TxOptions { * Current default hardfork: `istanbul` */ common?: Common + /** + * Tx parameters sorted by EIP can be found in the exported `paramsTx` dictionary, + * which is internally passed to the associated `@ethereumjs/common` instance which + * manages parameter selection based on the hardfork and EIP settings. + * + * This option allows providing a custom set of parameters. Note that parameters + * get fully overwritten, so you need to extend the default parameter dict + * to provide the full parameter set. + * + * It is recommended to deep-clone the params object for this to avoid side effects: + * + * ```ts + * const params = JSON.parse(JSON.stringify(paramsTx)) + * params['1']['txGas'] = 30000 // 21000 + * ``` + */ + params?: ParamsDict /** * A transaction object by default gets frozen along initialization. This gives you * strong additional security guarantees on the consistency of the tx parameters. 
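> Editor's note: the new `params` option documented above can be exercised roughly as follows — a minimal sketch mirroring the doc comment and the `base.spec.ts` test further down in this diff; the package-level import paths are an assumption:

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import { createLegacyTx, paramsTx } from '@ethereumjs/tx'

// Deep-clone the default parameter dictionary so the shared object is not mutated
const params = JSON.parse(JSON.stringify(paramsTx))
params['1']['txGas'] = 30000 // default: 21000

const common = new Common({ chain: Mainnet, hardfork: Hardfork.London })
const tx = createLegacyTx({}, { common, params })

console.log(tx.common.param('txGas')) // 30000n
```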
@@ -118,7 +117,7 @@ export function isAccessList(input: AccessListBytes | AccessList): input is Acce } export function isAuthorizationListBytes( - input: AuthorizationListBytes | AuthorizationList + input: AuthorizationListBytes | AuthorizationList, ): input is AuthorizationListBytes { if (input.length === 0) { return true @@ -131,7 +130,7 @@ export function isAuthorizationListBytes( } export function isAuthorizationList( - input: AuthorizationListBytes | AuthorizationList + input: AuthorizationListBytes | AuthorizationList, ): input is AuthorizationList { return !isAuthorizationListBytes(input) // This is exactly the same method, except the output is negated. } @@ -157,32 +156,32 @@ export enum TransactionType { } export interface Transaction { - [TransactionType.Legacy]: LegacyTransaction - [TransactionType.FeeMarketEIP1559]: FeeMarketEIP1559Transaction - [TransactionType.AccessListEIP2930]: AccessListEIP2930Transaction - [TransactionType.BlobEIP4844]: BlobEIP4844Transaction - [TransactionType.EOACodeEIP7702]: EOACodeEIP7702Transaction + [TransactionType.Legacy]: LegacyTx + [TransactionType.FeeMarketEIP1559]: FeeMarket1559Tx + [TransactionType.AccessListEIP2930]: AccessList2930Transaction + [TransactionType.BlobEIP4844]: Blob4844Tx + [TransactionType.EOACodeEIP7702]: EOACode7702Transaction } export type TypedTransaction = Transaction[TransactionType] -export function isLegacyTx(tx: TypedTransaction): tx is LegacyTransaction { +export function isLegacyTx(tx: TypedTransaction): tx is LegacyTx { return tx.type === TransactionType.Legacy } -export function isAccessListEIP2930Tx(tx: TypedTransaction): tx is AccessListEIP2930Transaction { +export function isAccessList2930Tx(tx: TypedTransaction): tx is AccessList2930Transaction { return tx.type === TransactionType.AccessListEIP2930 } -export function isFeeMarketEIP1559Tx(tx: TypedTransaction): tx is FeeMarketEIP1559Transaction { +export function isFeeMarket1559Tx(tx: TypedTransaction): tx is FeeMarket1559Tx { return tx.type === TransactionType.FeeMarketEIP1559 } -export function isBlobEIP4844Tx(tx: TypedTransaction): tx is BlobEIP4844Transaction { +export function isBlob4844Tx(tx: TypedTransaction): tx is Blob4844Tx { return tx.type === TransactionType.BlobEIP4844 } -export function isEOACodeEIP7702Tx(tx: TypedTransaction): tx is EOACodeEIP7702Transaction { +export function isEOACode7702Tx(tx: TypedTransaction): tx is EOACode7702Transaction { return tx.type === TransactionType.EOACodeEIP7702 } @@ -199,8 +198,8 @@ export interface TransactionInterface maxInitCodeSize) { throw new Error( `the initcode size of this transaction is too large: it is ${length} while the max is ${common.param( - 'vm', - 'maxInitCodeSize' - )}` + 'maxInitCodeSize', + )}`, ) } } @@ -81,7 +85,7 @@ export class AccessLists { const storageSlots = accessListItem[1] if ((accessListItem)[2] !== undefined) { throw new Error( - 'Access list item cannot have 3 elements. It can only have an address, and an array of storage slots.' + 'Access list item cannot have 3 elements. 
It can only have an address, and an array of storage slots.', ) } if (address.length !== 20) { @@ -113,9 +117,9 @@ export class AccessLists { return accessListJSON } - public static getDataFeeEIP2930(accessList: AccessListBytes, common: Common): number { - const accessListStorageKeyCost = common.param('gasPrices', 'accessListStorageKeyCost') - const accessListAddressCost = common.param('gasPrices', 'accessListAddressCost') + public static getDataGasEIP2930(accessList: AccessListBytes, common: Common): number { + const accessListStorageKeyCost = common.param('accessListStorageKeyGas') + const accessListAddressCost = common.param('accessListAddressGas') let slots = 0 for (let index = 0; index < accessList.length; index++) { @@ -131,7 +135,7 @@ export class AccessLists { export class AuthorizationLists { public static getAuthorizationListData( - authorizationList: AuthorizationListBytes | AuthorizationList + authorizationList: AuthorizationListBytes | AuthorizationList, ) { let AuthorizationListJSON let bufferAuthorizationList @@ -215,8 +219,8 @@ export class AuthorizationLists { } } - public static getDataFeeEIP7702(authorityList: AuthorizationListBytes, common: Common): number { - const perAuthBaseCost = common.param('gasPrices', 'perAuthBaseCost') + public static getDataGasEIP7702(authorityList: AuthorizationListBytes, common: Common): number { + const perAuthBaseCost = common.param('perAuthBaseGas') return authorityList.length * Number(perAuthBaseCost) } } @@ -224,3 +228,67 @@ export class AuthorizationLists { export function txTypeBytes(txType: TransactionType): Uint8Array { return hexToBytes(`0x${txType.toString(16).padStart(2, '0')}`) } + +export function validateNotArray(values: { [key: string]: any }) { + const txDataKeys = [ + 'nonce', + 'gasPrice', + 'gasLimit', + 'to', + 'value', + 'data', + 'v', + 'r', + 's', + 'type', + 'baseFee', + 'maxFeePerGas', + 'chainId', + ] + for (const [key, value] of Object.entries(values)) { + if (txDataKeys.includes(key)) { + if (Array.isArray(value)) { + throw new Error(`${key} cannot be an array`) + } + } + } +} + +/** + * Normalizes values for transactions that are received from an RPC provider to be properly usable within + * the ethereumjs context + * @param txParamsFromRPC a transaction in the standard JSON-RPC format + * @returns a normalized {@link TypedTxData} object with valid values + */ +export const normalizeTxParams = (txParamsFromRPC: any): TypedTxData => { + const txParams = Object.assign({}, txParamsFromRPC) + + txParams.gasLimit = toType(txParams.gasLimit ?? txParams.gas, TypeOutput.BigInt) + txParams.data = txParams.data === undefined ? txParams.input : txParams.data + + // check and convert gasPrice and value params + txParams.gasPrice = txParams.gasPrice !== undefined ? BigInt(txParams.gasPrice) : undefined + txParams.value = txParams.value !== undefined ? BigInt(txParams.value) : undefined + + // strict byte length checking + txParams.to = + txParams.to !== null && txParams.to !== undefined + ? setLengthLeft(toBytes(txParams.to), 20) + : null + + // Normalize the v/r/s values. If RPC returns '0x0', ensure v/r/s are set to `undefined` in the tx. + // If this is not done, then the transaction creation will throw, because `v` is `0`. + // Note: this still means that `isSigned` will return `false`. + // v/r/s values are `0x0` on networks like Optimism, where the tx is a system tx. 
+ // For instance: https://optimistic.etherscan.io/tx/0xf4304cb09b3f58a8e5d20fec5f393c96ccffe0269aaf632cb2be7a8a0f0c91cc + + txParams.v = txParams.v === '0x0' ? '0x' : txParams.v + txParams.r = txParams.r === '0x0' ? '0x' : txParams.r + txParams.s = txParams.s === '0x0' ? '0x' : txParams.s + + if (txParams.v !== '0x' || txParams.r !== '0x' || txParams.s !== '0x') { + txParams.v = toType(txParams.v, TypeOutput.BigInt) + } + + return txParams +} diff --git a/packages/tx/test/base.spec.ts b/packages/tx/test/base.spec.ts index a3bceeb764..ea38403bd4 100644 --- a/packages/tx/test/base.spec.ts +++ b/packages/tx/test/base.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { MAX_INTEGER, MAX_UINT64, @@ -13,11 +13,21 @@ import { import { assert, describe, it } from 'vitest' import { - AccessListEIP2930Transaction, + AccessList2930Transaction, Capability, - FeeMarketEIP1559Transaction, - LegacyTransaction, + FeeMarket1559Tx, + LegacyTx, TransactionType, + create1559FeeMarketTxFromBytesArray, + createAccessList2930Tx, + createAccessList2930TxFromBytesArray, + createAccessList2930TxFromRLP, + createFeeMarket1559Tx, + createFeeMarket1559TxFromRLP, + createLegacyTx, + createLegacyTxFromBytesArray, + createLegacyTxFromRLP, + paramsTx, } from '../src/index.js' import eip1559Fixtures from './json/eip1559txs.json' @@ -25,41 +35,42 @@ import eip2930Fixtures from './json/eip2930txs.json' import legacyFixtures from './json/txs.json' import type { BaseTransaction } from '../src/baseTransaction.js' -import type { AccessListEIP2930TxData, FeeMarketEIP1559TxData, LegacyTxData } from '../src/index.js' +import type { AccessList2930TxData, FeeMarketEIP1559TxData, LegacyTxData } from '../src/index.js' describe('[BaseTransaction]', () => { // EIP-2930 is not enabled in Common by default (2021-03-06) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const legacyTxs: BaseTransaction[] = [] for (const tx of legacyFixtures.slice(0, 4)) { - legacyTxs.push(LegacyTransaction.fromTxData(tx.data as LegacyTxData, { common })) + legacyTxs.push(createLegacyTx(tx.data as LegacyTxData, { common })) } const eip2930Txs: BaseTransaction[] = [] for (const tx of eip2930Fixtures) { - eip2930Txs.push( - AccessListEIP2930Transaction.fromTxData(tx.data as AccessListEIP2930TxData, { common }) - ) + eip2930Txs.push(createAccessList2930Tx(tx.data as AccessList2930TxData, { common })) } const eip1559Txs: BaseTransaction[] = [] for (const tx of eip1559Fixtures) { - eip1559Txs.push( - FeeMarketEIP1559Transaction.fromTxData(tx.data as FeeMarketEIP1559TxData, { common }) - ) + eip1559Txs.push(createFeeMarket1559Tx(tx.data as FeeMarketEIP1559TxData, { common })) } const zero = new Uint8Array(0) const txTypes = [ { - class: LegacyTransaction, - name: 'LegacyTransaction', + class: LegacyTx, + name: 'LegacyTx', type: TransactionType.Legacy, values: Array(6).fill(zero), txs: legacyTxs, fixtures: legacyFixtures, activeCapabilities: [], + create: { + txData: createLegacyTx, + rlp: createLegacyTxFromRLP, + bytesArray: createLegacyTxFromBytesArray, + }, notActiveCapabilities: [ Capability.EIP1559FeeMarket, Capability.EIP2718TypedTransaction, @@ -68,18 +79,23 @@ describe('[BaseTransaction]', () => { ], }, { - class: AccessListEIP2930Transaction, - name: 'AccessListEIP2930Transaction', + class: AccessList2930Transaction, + name: 
'AccessList2930Transaction', type: TransactionType.AccessListEIP2930, values: [new Uint8Array([1])].concat(Array(7).fill(zero)), txs: eip2930Txs, fixtures: eip2930Fixtures, activeCapabilities: [Capability.EIP2718TypedTransaction, Capability.EIP2930AccessLists], + create: { + txData: createAccessList2930Tx, + rlp: createAccessList2930TxFromRLP, + bytesArray: createAccessList2930TxFromBytesArray, + }, notActiveCapabilities: [Capability.EIP1559FeeMarket, 9999], }, { - class: FeeMarketEIP1559Transaction, - name: 'FeeMarketEIP1559Transaction', + class: FeeMarket1559Tx, + name: 'FeeMarket1559Tx', type: TransactionType.FeeMarketEIP1559, values: [new Uint8Array([1])].concat(Array(8).fill(zero)), txs: eip1559Txs, @@ -89,71 +105,81 @@ describe('[BaseTransaction]', () => { Capability.EIP2718TypedTransaction, Capability.EIP2930AccessLists, ], + create: { + txData: createFeeMarket1559Tx, + rlp: createFeeMarket1559TxFromRLP, + bytesArray: create1559FeeMarketTxFromBytesArray, + }, notActiveCapabilities: [9999], }, ] it('Initialization', () => { for (const txType of txTypes) { - let tx = txType.class.fromTxData({}, { common }) + let tx = txType.create.txData({}, { common }) assert.equal( tx.common.hardfork(), 'london', - `${txType.name}: should initialize with correct HF provided` + `${txType.name}: should initialize with correct HF provided`, ) assert.ok(Object.isFrozen(tx), `${txType.name}: tx should be frozen by default`) const initCommon = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, }) - tx = txType.class.fromTxData({}, { common: initCommon }) + tx = txType.create.txData({}, { common: initCommon }) assert.equal( tx.common.hardfork(), 'london', - `${txType.name}: should initialize with correct HF provided` + `${txType.name}: should initialize with correct HF provided`, ) initCommon.setHardfork(Hardfork.Byzantium) assert.equal( tx.common.hardfork(), 'london', - `${txType.name}: should stay on correct HF if outer common HF changes` + `${txType.name}: should stay on correct HF if outer common HF changes`, ) - tx = txType.class.fromTxData({}, { common, freeze: false }) + tx = txType.create.txData({}, { common, freeze: false }) assert.ok( !Object.isFrozen(tx), - `${txType.name}: tx should not be frozen when freeze deactivated in options` + `${txType.name}: tx should not be frozen when freeze deactivated in options`, ) + const params = JSON.parse(JSON.stringify(paramsTx)) + params['1']['txGas'] = 30000 // 21000 + tx = txType.create.txData({}, { common, params }) + assert.equal(tx.common.param('txGas'), BigInt(30000), 'should use custom parameters provided') + // Perform the same test as above, but now using a different construction method. This also implies that passing on the // options object works as expected. 
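> Editor's note: the `txType.create.*` helpers above encode the mapping from the removed static factory methods to the new standalone constructors. A sketch of that mapping for the legacy type (the 2930/1559 variants follow the same pattern; import paths and values are illustrative assumptions):

```ts
import { Common, Hardfork, Mainnet } from '@ethereumjs/common'
import {
  createLegacyTx,
  createLegacyTxFromBytesArray,
  createLegacyTxFromRLP,
} from '@ethereumjs/tx'

const common = new Common({ chain: Mainnet, hardfork: Hardfork.London })

// Previously: LegacyTransaction.fromTxData(txData, { common })
const tx = createLegacyTx({ gasPrice: 100, gasLimit: 21000 }, { common })

// Previously: LegacyTransaction.fromSerializedTx(serialized, { common })
const fromRlp = createLegacyTxFromRLP(tx.serialize(), { common })

// Previously: LegacyTransaction.fromValuesArray(values, { common })
const fromValues = createLegacyTxFromBytesArray(tx.raw(), { common })

console.log(fromRlp.type === tx.type && fromValues.type === tx.type) // true
```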
- tx = txType.class.fromTxData({}, { common, freeze: false }) + tx = txType.create.txData({}, { common, freeze: false }) const rlpData = tx.serialize() - tx = txType.class.fromSerializedTx(rlpData, { common }) + tx = txType.create.rlp(rlpData, { common }) assert.equal( tx.type, txType.type, - `${txType.name}: fromSerializedTx() -> should initialize correctly` + `${txType.name}: fromSerializedTx() -> should initialize correctly`, ) assert.ok(Object.isFrozen(tx), `${txType.name}: tx should be frozen by default`) - tx = txType.class.fromSerializedTx(rlpData, { common, freeze: false }) + tx = txType.create.rlp(rlpData, { common, freeze: false }) assert.ok( !Object.isFrozen(tx), - `${txType.name}: tx should not be frozen when freeze deactivated in options` + `${txType.name}: tx should not be frozen when freeze deactivated in options`, ) - tx = txType.class.fromValuesArray(txType.values as any, { common }) + tx = txType.create.bytesArray(txType.values as any, { common }) assert.ok(Object.isFrozen(tx), `${txType.name}: tx should be frozen by default`) - tx = txType.class.fromValuesArray(txType.values as any, { common, freeze: false }) + tx = txType.create.bytesArray(txType.values as any, { common, freeze: false }) assert.ok( !Object.isFrozen(tx), - `${txType.name}: tx should not be frozen when freeze deactivated in options` + `${txType.name}: tx should not be frozen when freeze deactivated in options`, ) } }) @@ -162,45 +188,45 @@ describe('[BaseTransaction]', () => { let rlpData: any = legacyTxs[0].raw() rlpData[0] = toBytes('0x0') try { - LegacyTransaction.fromValuesArray(rlpData) + createLegacyTxFromBytesArray(rlpData) assert.fail('should have thrown when nonce has leading zeroes') } catch (err: any) { assert.ok( err.message.includes('nonce cannot have leading zeroes'), - 'should throw with nonce with leading zeroes' + 'should throw with nonce with leading zeroes', ) } rlpData[0] = toBytes('0x') rlpData[6] = toBytes('0x0') try { - LegacyTransaction.fromValuesArray(rlpData) + createLegacyTxFromBytesArray(rlpData) assert.fail('should have thrown when v has leading zeroes') } catch (err: any) { assert.ok( err.message.includes('v cannot have leading zeroes'), - 'should throw with v with leading zeroes' + 'should throw with v with leading zeroes', ) } rlpData = eip2930Txs[0].raw() rlpData[3] = toBytes('0x0') try { - AccessListEIP2930Transaction.fromValuesArray(rlpData) + createAccessList2930TxFromBytesArray(rlpData) assert.fail('should have thrown when gasLimit has leading zeroes') } catch (err: any) { assert.ok( err.message.includes('gasLimit cannot have leading zeroes'), - 'should throw with gasLimit with leading zeroes' + 'should throw with gasLimit with leading zeroes', ) } rlpData = eip1559Txs[0].raw() rlpData[2] = toBytes('0x0') try { - FeeMarketEIP1559Transaction.fromValuesArray(rlpData) + create1559FeeMarketTxFromBytesArray(rlpData) assert.fail('should have thrown when maxPriorityFeePerGas has leading zeroes') } catch (err: any) { assert.ok( err.message.includes('maxPriorityFeePerGas cannot have leading zeroes'), - 'should throw with maxPriorityFeePerGas with leading zeroes' + 'should throw with maxPriorityFeePerGas with leading zeroes', ) } }) @@ -209,12 +235,12 @@ describe('[BaseTransaction]', () => { for (const txType of txTypes) { for (const tx of txType.txs) { assert.ok( - txType.class.fromSerializedTx(tx.serialize(), { common }), - `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()` + txType.create.rlp(tx.serialize(), { common }), + `${txType.name}: should do 
roundtrip serialize() -> fromSerializedTx()`, ) assert.ok( - txType.class.fromSerializedTx(tx.serialize(), { common }), - `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()` + txType.create.rlp(tx.serialize(), { common }), + `${txType.name}: should do roundtrip serialize() -> fromSerializedTx()`, ) } } @@ -226,13 +252,13 @@ describe('[BaseTransaction]', () => { for (const activeCapability of txType.activeCapabilities) { assert.ok( tx.supports(activeCapability), - `${txType.name}: should recognize all supported capabilities` + `${txType.name}: should recognize all supported capabilities`, ) } for (const notActiveCapability of txType.notActiveCapabilities) { assert.notOk( tx.supports(notActiveCapability), - `${txType.name}: should reject non-active existing and not existing capabilities` + `${txType.name}: should reject non-active existing and not existing capabilities`, ) } } @@ -243,8 +269,8 @@ describe('[BaseTransaction]', () => { for (const txType of txTypes) { for (const tx of txType.txs) { assert.ok( - txType.class.fromValuesArray(tx.raw() as any, { common }), - `${txType.name}: should do roundtrip raw() -> fromValuesArray()` + txType.create.bytesArray(tx.raw() as any, { common }), + `${txType.name}: should do roundtrip raw() -> fromValuesArray()`, ) } } @@ -263,11 +289,11 @@ describe('[BaseTransaction]', () => { for (const txFixture of txType.fixtures.slice(0, 4)) { // set `s` to a single zero txFixture.data.s = '0x' + '0' - const tx = txType.class.fromTxData((txFixture as any).data, { common }) + const tx = txType.create.txData((txFixture as any).data, { common }) assert.equal(tx.verifySignature(), false, `${txType.name}: signature should not be valid`) assert.ok( tx.getValidationErrors().includes('Invalid Signature'), - `${txType.name}: should return an error string about not verifying signatures` + `${txType.name}: should return an error string about not verifying signatures`, ) assert.notOk(tx.isValid(), `${txType.name}: should not validate correctly`) } @@ -286,7 +312,7 @@ describe('[BaseTransaction]', () => { () => tx.sign(utf8ToBytes('invalid')), undefined, undefined, - `${txType.name}: should fail with invalid PK` + `${txType.name}: should fail with invalid PK`, ) } } @@ -298,19 +324,19 @@ describe('[BaseTransaction]', () => { ...txType.txs, // add unsigned variants ...txType.txs.map((tx) => - txType.class.fromTxData({ + txType.create.txData({ ...tx, v: undefined, r: undefined, s: undefined, - }) + }), ), ] for (const tx of txs) { assert.equal( tx.isSigned(), tx.v !== undefined && tx.r !== undefined && tx.s !== undefined, - 'isSigned() returns correctly' + 'isSigned() returns correctly', ) } } @@ -325,7 +351,7 @@ describe('[BaseTransaction]', () => { assert.equal( signedTx.getSenderAddress().toString(), `0x${sendersAddress}`, - `${txType.name}: should get sender's address after signing it` + `${txType.name}: should get sender's address after signing it`, ) } } @@ -342,7 +368,7 @@ describe('[BaseTransaction]', () => { const pubKeyFromPriv = privateToPublic(hexToBytes(`0x${privateKey}`)) assert.ok( equalsBytes(txPubKey, pubKeyFromPriv), - `${txType.name}: should get sender's public key after signing it` + `${txType.name}: should get sender's public key after signing it`, ) } } @@ -365,7 +391,7 @@ describe('[BaseTransaction]', () => { }, undefined, undefined, - 'should throw when s-value is greater than secp256k1n/2' + 'should throw when s-value is greater than secp256k1n/2', ) } } @@ -386,7 +412,7 @@ describe('[BaseTransaction]', () => { it('initialization 
with defaults', () => { const bufferZero = toBytes('0x') - const tx = LegacyTransaction.fromTxData({ + const tx = createLegacyTx({ nonce: undefined, gasLimit: undefined, gasPrice: undefined, @@ -409,13 +435,13 @@ describe('[BaseTransaction]', () => { }) it('_validateCannotExceedMaxInteger()', () => { - const tx = FeeMarketEIP1559Transaction.fromTxData(eip1559Txs[0]) + const tx = createFeeMarket1559Tx(eip1559Txs[0]) try { ;(tx as any)._validateCannotExceedMaxInteger({ a: MAX_INTEGER }, 256, true) } catch (err: any) { assert.ok( err.message.includes('equal or exceed MAX_INTEGER'), - 'throws when value equals or exceeds MAX_INTEGER' + 'throws when value equals or exceeds MAX_INTEGER', ) } try { @@ -428,7 +454,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('unimplemented bits value'), - 'throws when bits value other than 64 or 256 provided' + 'throws when bits value other than 64 or 256 provided', ) } try { @@ -441,7 +467,7 @@ describe('[BaseTransaction]', () => { } catch (err: any) { assert.ok( err.message.includes('2^64'), - 'throws when 64 bit integer equals or exceeds MAX_UINT64' + 'throws when 64 bit integer equals or exceeds MAX_UINT64', ) } }) diff --git a/packages/tx/test/eip1559.spec.ts b/packages/tx/test/eip1559.spec.ts index a0833bfef6..dfedd47708 100644 --- a/packages/tx/test/eip1559.spec.ts +++ b/packages/tx/test/eip1559.spec.ts @@ -1,23 +1,23 @@ -import { Hardfork, createCustomCommon } from '@ethereumjs/common' +import { Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { TWO_POW256, ecsign, equalsBytes, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { FeeMarketEIP1559Transaction } from '../src/index.js' +import { createFeeMarket1559Tx } from '../src/index.js' import testdata from './json/eip1559.json' // Source: Besu import type { FeeMarketEIP1559TxData, JsonTx } from '../src/index.js' import type { PrefixedHexString } from '@ethereumjs/util' -const common = createCustomCommon({ chainId: 4 }) +const common = createCustomCommon({ chainId: 4 }, Mainnet) common.setHardfork(Hardfork.London) const validAddress = hexToBytes(`0x${'01'.repeat(20)}`) const validSlot = hexToBytes(`0x${'01'.repeat(32)}`) const chainId = BigInt(4) -describe('[FeeMarketEIP1559Transaction]', () => { +describe('[FeeMarket1559Tx]', () => { it(`cannot input decimal or negative values`, () => { const values = [ 'maxFeePerGas', @@ -60,7 +60,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { ) { txData[value] = testCase assert.throws(() => { - FeeMarketEIP1559Transaction.fromTxData(txData) + createFeeMarket1559Tx(txData) }) } } @@ -68,14 +68,14 @@ describe('[FeeMarketEIP1559Transaction]', () => { }) it('getUpfrontCost()', () => { - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { maxFeePerGas: 10, maxPriorityFeePerGas: 8, gasLimit: 100, value: 6, }, - { common } + { common }, ) assert.equal(tx.getUpfrontCost(), BigInt(806), 'correct upfront cost with default base fee') let baseFee = BigInt(0) @@ -84,17 +84,17 @@ describe('[FeeMarketEIP1559Transaction]', () => { assert.equal( tx.getUpfrontCost(baseFee), BigInt(1006), - 'correct upfront cost with cost-changing base fee value' + 'correct upfront cost with cost-changing base fee value', ) }) it('getEffectivePriorityFee()', () => { - const tx = FeeMarketEIP1559Transaction.fromTxData( + const tx = createFeeMarket1559Tx( { maxFeePerGas: 10, maxPriorityFeePerGas: 8, }, - { 
common } + { common }, ) assert.equal(tx.getEffectivePriorityFee(BigInt(10)), BigInt(0)) assert.equal(tx.getEffectivePriorityFee(BigInt(9)), BigInt(1)) @@ -109,19 +109,19 @@ describe('[FeeMarketEIP1559Transaction]', () => { for (let index = 0; index < testdata.length; index++) { const data = testdata[index] const pkey = hexToBytes(data.privateKey as PrefixedHexString) - const txn = FeeMarketEIP1559Transaction.fromTxData(data as FeeMarketEIP1559TxData, { common }) + const txn = createFeeMarket1559Tx(data as FeeMarketEIP1559TxData, { common }) const signed = txn.sign(pkey) const rlpSerialized = RLP.encode(Uint8Array.from(signed.serialize())) assert.ok( equalsBytes(rlpSerialized, hexToBytes(data.signedTransactionRLP as PrefixedHexString)), - 'Should sign txs correctly' + 'Should sign txs correctly', ) } }) it('addSignature() -> correctly adds correct signature values', () => { const privKey = hexToBytes(testdata[0].privateKey as PrefixedHexString) - const tx = FeeMarketEIP1559Transaction.fromTxData({}) + const tx = createFeeMarket1559Tx({}) const signedTx = tx.sign(privKey) const addSignatureTx = tx.addSignature(signedTx.v!, signedTx.r!, signedTx.s!) @@ -130,7 +130,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { it('addSignature() -> correctly converts raw ecrecover values', () => { const privKey = hexToBytes(testdata[0].privateKey as PrefixedHexString) - const tx = FeeMarketEIP1559Transaction.fromTxData({}) + const tx = createFeeMarket1559Tx({}) const msgHash = tx.getHashedMessageToSign() const { v, r, s } = ecsign(msgHash, privKey) @@ -143,7 +143,7 @@ describe('[FeeMarketEIP1559Transaction]', () => { it('addSignature() -> throws when adding the wrong v value', () => { const privKey = hexToBytes(testdata[0].privateKey as PrefixedHexString) - const tx = FeeMarketEIP1559Transaction.fromTxData({}) + const tx = createFeeMarket1559Tx({}) const msgHash = tx.getHashedMessageToSign() const { v, r, s } = ecsign(msgHash, privKey) @@ -157,30 +157,30 @@ describe('[FeeMarketEIP1559Transaction]', () => { it('hash()', () => { const data = testdata[0] const pkey = hexToBytes(data.privateKey as PrefixedHexString) - let txn = FeeMarketEIP1559Transaction.fromTxData(data as FeeMarketEIP1559TxData, { common }) + let txn = createFeeMarket1559Tx(data as FeeMarketEIP1559TxData, { common }) let signed = txn.sign(pkey) const expectedHash = hexToBytes( - '0x2e564c87eb4b40e7f469b2eec5aa5d18b0b46a24e8bf0919439cfb0e8fcae446' + '0x2e564c87eb4b40e7f469b2eec5aa5d18b0b46a24e8bf0919439cfb0e8fcae446', ) assert.ok( equalsBytes(signed.hash(), expectedHash), - 'Should provide the correct hash when frozen' + 'Should provide the correct hash when frozen', ) - txn = FeeMarketEIP1559Transaction.fromTxData(data as FeeMarketEIP1559TxData, { + txn = createFeeMarket1559Tx(data as FeeMarketEIP1559TxData, { common, freeze: false, }) signed = txn.sign(pkey) assert.ok( equalsBytes(signed.hash(), expectedHash), - 'Should provide the correct hash when not frozen' + 'Should provide the correct hash when not frozen', ) }) it('freeze property propagates from unsigned tx to signed tx', () => { const data = testdata[0] const pkey = hexToBytes(data.privateKey as PrefixedHexString) - const txn = FeeMarketEIP1559Transaction.fromTxData(data as FeeMarketEIP1559TxData, { + const txn = createFeeMarket1559Tx(data as FeeMarketEIP1559TxData, { common, freeze: false, }) @@ -192,12 +192,12 @@ describe('[FeeMarketEIP1559Transaction]', () => { it('common propagates from the common of tx, not the common in TxOptions', () => { const data = testdata[0] const 
pkey = hexToBytes(data.privateKey as PrefixedHexString) - const txn = FeeMarketEIP1559Transaction.fromTxData(data as FeeMarketEIP1559TxData, { + const txn = createFeeMarket1559Tx(data as FeeMarketEIP1559TxData, { common, freeze: false, }) - const newCommon = createCustomCommon({ chainId: 4 }) + const newCommon = createCustomCommon({ chainId: 4 }, Mainnet) newCommon.setHardfork(Hardfork.Paris) assert.notDeepEqual(newCommon, common, 'new common is different than original common') @@ -209,39 +209,39 @@ describe('[FeeMarketEIP1559Transaction]', () => { const signedTxn = txn.sign(pkey) assert.ok( signedTxn.common.hardfork() === Hardfork.Paris, - 'signed tx common is taken from tx.common' + 'signed tx common is taken from tx.common', ) }) it('unsigned tx -> getMessageToSign()/getHashedMessageToSign()', () => { - const unsignedTx = FeeMarketEIP1559Transaction.fromTxData( + const unsignedTx = createFeeMarket1559Tx( { data: hexToBytes('0x010200'), to: validAddress, accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) const expectedHash = hexToBytes( - '0xfa81814f7dd57bad435657a05eabdba2815f41e3f15ddd6139027e7db56b0dea' + '0xfa81814f7dd57bad435657a05eabdba2815f41e3f15ddd6139027e7db56b0dea', ) assert.deepEqual(unsignedTx.getHashedMessageToSign(), expectedHash), 'correct hashed version' const expectedSerialization = hexToBytes( - '0x02f85904808080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101' + '0x02f85904808080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101', ) assert.deepEqual( unsignedTx.getMessageToSign(), expectedSerialization, - 'correct serialized unhashed version' + 'correct serialized unhashed version', ) }) it('toJSON()', () => { const data = testdata[0] const pkey = hexToBytes(data.privateKey as PrefixedHexString) - const txn = FeeMarketEIP1559Transaction.fromTxData(data as FeeMarketEIP1559TxData, { common }) + const txn = createFeeMarket1559Tx(data as FeeMarketEIP1559TxData, { common }) const signed = txn.sign(pkey) const json = signed.toJSON() @@ -266,47 +266,47 @@ describe('[FeeMarketEIP1559Transaction]', () => { it('Fee validation', () => { assert.doesNotThrow(() => { - FeeMarketEIP1559Transaction.fromTxData( + createFeeMarket1559Tx( { maxFeePerGas: TWO_POW256 - BigInt(1), maxPriorityFeePerGas: 100, gasLimit: 1, value: 6, }, - { common } + { common }, ) }, 'fee can be 2^256 - 1') assert.throws( () => { - FeeMarketEIP1559Transaction.fromTxData( + createFeeMarket1559Tx( { maxFeePerGas: TWO_POW256 - BigInt(1), maxPriorityFeePerGas: 100, gasLimit: 100, value: 6, }, - { common } + { common }, ) }, undefined, undefined, - 'fee must be less than 2^256' + 'fee must be less than 2^256', ) assert.throws( () => { - FeeMarketEIP1559Transaction.fromTxData( + createFeeMarket1559Tx( { maxFeePerGas: 1, maxPriorityFeePerGas: 2, gasLimit: 100, value: 6, }, - { common } + { common }, ) }, undefined, undefined, - 'total fee must be the larger of the two' + 'total fee must be the larger of the two', ) }) }) diff --git a/packages/tx/test/eip3860.spec.ts b/packages/tx/test/eip3860.spec.ts index fd7c7edbb3..a9c1ab0df7 100644 --- a/packages/tx/test/eip3860.spec.ts +++ b/packages/tx/test/eip3860.spec.ts @@ -1,30 +1,31 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address } from '@ethereumjs/util' +import { 
Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createZeroAddress } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { TransactionFactory, TransactionType } from '../src/index.js' +import { TransactionType, createTxFromTxData, paramsTx } from '../src/index.js' const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.Paris, eips: [3860, 4844, 4895], + params: paramsTx, }) -const maxInitCodeSize = common.param('vm', 'maxInitCodeSize') +const maxInitCodeSize = common.param('maxInitCodeSize') const txTypes = [ TransactionType.Legacy, TransactionType.AccessListEIP2930, TransactionType.FeeMarketEIP1559, //TransactionType.BlobEIP4844, // Explicitly commented out: BlobEIP4844 txs cannot create contracts ] -const addressZero = Address.zero() +const addressZero = createZeroAddress() describe('[EIP3860 tests]', () => { it(`Should instantiate create txs with MAX_INITCODE_SIZE`, () => { const data = new Uint8Array(Number(maxInitCodeSize)) for (const txType of txTypes) { try { - TransactionFactory.fromTxData({ data, type: txType }, { common }) + createTxFromTxData({ data, type: txType }, { common }) assert.ok('Instantiated create tx with MAX_INITCODE_SIZE data') } catch (e) { assert.fail('Did not instantiate create tx with MAX_INITCODE_SIZE') @@ -36,7 +37,7 @@ describe('[EIP3860 tests]', () => { const data = new Uint8Array(Number(maxInitCodeSize)) for (const txType of txTypes) { try { - TransactionFactory.fromTxData({ data, type: txType, to: addressZero }, { common }) + createTxFromTxData({ data, type: txType, to: addressZero }, { common }) assert.ok('Instantiated tx with MAX_INITCODE_SIZE') } catch (e) { assert.fail('Did not instantiated tx with MAX_INITCODE_SIZE') @@ -48,7 +49,7 @@ describe('[EIP3860 tests]', () => { const data = new Uint8Array(Number(maxInitCodeSize) + 1) for (const txType of txTypes) { try { - TransactionFactory.fromTxData({ data, type: txType }, { common }) + createTxFromTxData({ data, type: txType }, { common }) assert.fail('Instantiated create tx with MAX_INITCODE_SIZE+1') } catch (e) { assert.ok('Did not instantiate create tx with MAX_INITCODE_SIZE+1') @@ -60,7 +61,7 @@ describe('[EIP3860 tests]', () => { const data = new Uint8Array(Number(maxInitCodeSize) + 1) for (const txType of txTypes) { try { - TransactionFactory.fromTxData({ data, type: txType, to: addressZero }, { common }) + createTxFromTxData({ data, type: txType, to: addressZero }, { common }) assert.ok('Instantiated tx with MAX_INITCODE_SIZE+1') } catch (e) { assert.fail('Did not instantiate tx with MAX_INITCODE_SIZE+1') @@ -73,10 +74,7 @@ describe('[EIP3860 tests]', () => { const data = new Uint8Array(Number(maxInitCodeSize) + 1) for (const txType of txTypes) { try { - TransactionFactory.fromTxData( - { data, type: txType }, - { common, allowUnlimitedInitCodeSize: true } - ) + createTxFromTxData({ data, type: txType }, { common, allowUnlimitedInitCodeSize: true }) assert.ok('Instantiated create tx with MAX_INITCODE_SIZE+1') } catch (e) { assert.fail('Did not instantiate tx with MAX_INITCODE_SIZE+1') @@ -89,17 +87,17 @@ describe('[EIP3860 tests]', () => { it('should work', () => { const data = new Uint8Array(Number(maxInitCodeSize)) for (const txType of txTypes) { - const eip3860ActiveTx = TransactionFactory.fromTxData( + const eip3860ActiveTx = createTxFromTxData( { data, type: txType }, - { common, allowUnlimitedInitCodeSize: true } + { common, allowUnlimitedInitCodeSize: true }, ) - const eip3860DeactivedTx = 
TransactionFactory.fromTxData( + const eip3860DeactivatedTx = createTxFromTxData( { data, type: txType }, - { common, allowUnlimitedInitCodeSize: false } + { common, allowUnlimitedInitCodeSize: false }, ) assert.ok( - eip3860ActiveTx.getDataFee() === eip3860DeactivedTx.getDataFee(), - 'charged initcode analysis gas' + eip3860ActiveTx.getDataGas() === eip3860DeactivatedTx.getDataGas(), + 'charged initcode analysis gas', ) } }) diff --git a/packages/tx/test/eip4844.spec.ts b/packages/tx/test/eip4844.spec.ts index f3aca67c40..7a31570cdb 100644 --- a/packages/tx/test/eip4844.spec.ts +++ b/packages/tx/test/eip4844.spec.ts @@ -1,11 +1,11 @@ import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common' import { - Address, blobsToCommitments, blobsToProofs, bytesToHex, commitmentsToVersionedHashes, concatBytes, + createZeroAddress, ecsign, equalsBytes, getBlobs, @@ -16,7 +16,15 @@ import { loadKZG } from 'kzg-wasm' import { assert, beforeAll, describe, it } from 'vitest' import gethGenesis from '../../block/test/testdata/4844-hardfork.json' -import { BlobEIP4844Transaction, TransactionFactory } from '../src/index.js' +import { + blobTxNetworkWrapperToJSON, + createBlob4844Tx, + createBlob4844TxFromRLP, + createBlob4844TxFromSerializedNetworkWrapper, + createMinimal4844TxFromNetworkWrapper, + createTxFromTxData, + paramsTx, +} from '../src/index.js' import blobTx from './json/serialized4844tx.json' @@ -37,12 +45,12 @@ describe('EIP4844 addSignature tests', () => { }) it('addSignature() -> correctly adds correct signature values', () => { const privateKey = pk - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { - to: Address.zero(), + to: createZeroAddress(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) const signedTx = tx.sign(privateKey) const addSignatureTx = tx.addSignature(signedTx.v!, signedTx.r!, signedTx.s!) 
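> Editor's note: as the reworked EIP-4844 tests show, blob transactions now go through `createBlob4844Tx` and need a Common wired up with a KZG backend. A minimal creation/signing sketch under stated assumptions — the genesis JSON path is illustrative, the random private key is a stand-in, and package-level imports are assumed:

```ts
import { Hardfork, createCommonFromGethGenesis } from '@ethereumjs/common'
import { concatBytes, createZeroAddress, randomBytes } from '@ethereumjs/util'
import { loadKZG } from 'kzg-wasm'
import { createBlob4844Tx, paramsTx } from '@ethereumjs/tx'
import gethGenesis from './4844-hardfork.json' // illustrative path to a Cancun genesis config

const kzg = await loadKZG()
const common = createCommonFromGethGenesis(gethGenesis, {
  chain: 'customChain',
  hardfork: Hardfork.Cancun,
  params: paramsTx,
  customCrypto: { kzg }, // a KZG backend is required for blob tx validation
})

// A blob tx needs a `to` address and at least one 32-byte versioned hash
// (0x01 version byte marking a KZG commitment, followed by 31 hash bytes)
const tx = createBlob4844Tx(
  {
    to: createZeroAddress(),
    blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))],
    maxFeePerBlobGas: 1n,
  },
  { common },
)
const signedTx = tx.sign(randomBytes(32)) // replace with a real private key
console.log(signedTx.getSenderAddress().toString())
```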
@@ -52,12 +60,12 @@ describe('EIP4844 addSignature tests', () => { it('addSignature() -> correctly converts raw ecrecover values', () => { const privKey = pk - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { - to: Address.zero(), + to: createZeroAddress(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) const msgHash = tx.getHashedMessageToSign() @@ -71,12 +79,12 @@ describe('EIP4844 addSignature tests', () => { it('addSignature() -> throws when adding the wrong v value', () => { const privKey = pk - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { - to: Address.zero(), + to: createZeroAddress(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) const msgHash = tx.getHashedMessageToSign() @@ -104,25 +112,25 @@ describe('EIP4844 constructor tests - valid scenarios', () => { type: 0x03, blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], maxFeePerBlobGas: 1n, - to: Address.zero(), + to: createZeroAddress(), } - const tx = BlobEIP4844Transaction.fromTxData(txData, { common }) + const tx = createBlob4844Tx(txData, { common }) assert.equal(tx.type, 3, 'successfully instantiated a blob transaction from txData') - const factoryTx = TransactionFactory.fromTxData(txData, { common }) + const factoryTx = createTxFromTxData(txData, { common }) assert.equal(factoryTx.type, 3, 'instantiated a blob transaction from the tx factory') const serializedTx = tx.serialize() assert.equal(serializedTx[0], 3, 'successfully serialized a blob tx') - const deserializedTx = BlobEIP4844Transaction.fromSerializedTx(serializedTx, { common }) + const deserializedTx = createBlob4844TxFromRLP(serializedTx, { common }) assert.equal(deserializedTx.type, 3, 'deserialized a blob tx') const signedTx = tx.sign(pk) const sender = signedTx.getSenderAddress().toString() - const decodedTx = BlobEIP4844Transaction.fromSerializedTx(signedTx.serialize(), { common }) + const decodedTx = createBlob4844TxFromRLP(signedTx.serialize(), { common }) assert.equal( decodedTx.getSenderAddress().toString(), sender, - 'signature and sender were deserialized correctly' + 'signature and sender were deserialized correctly', ) }) }) @@ -168,7 +176,7 @@ describe('fromTxData using from a json', () => { chainId: Number(txData.chainId), }) try { - const tx = BlobEIP4844Transaction.fromTxData(txData as BlobEIP4844TxData, { common: c }) + const tx = createBlob4844Tx(txData as BlobEIP4844TxData, { common: c }) assert.ok(true, 'Should be able to parse a json data and hash it') assert.equal(typeof tx.maxFeePerBlobGas, 'bigint', 'should be able to parse correctly') @@ -181,17 +189,17 @@ describe('fromTxData using from a json', () => { assert.deepEqual( { ...txData, accessList: [] }, { gasPrice: null, ...jsonData }, - 'toJSON should give correct json' + 'toJSON should give correct json', ) - const fromSerializedTx = BlobEIP4844Transaction.fromSerializedTx( + const fromSerializedTx = createBlob4844TxFromRLP( hexToBytes(txMeta.serialized as PrefixedHexString), - { common: c } + { common: c }, ) assert.equal( bytesToHex(fromSerializedTx.hash()), txMeta.hash, - 'fromSerializedTx hash should match' + 'fromSerializedTx hash should match', ) } catch (e) { assert.fail('failed to parse json data') @@ -213,7 +221,7 @@ describe('EIP4844 constructor tests - invalid scenarios', () => { const baseTxData = { type: 0x03, maxFeePerBlobGas: 1n, - to: Address.zero(), + to: 
createZeroAddress(), } const shortVersionHash = { blobVersionedHashes: [concatBytes(new Uint8Array([3]), randomBytes(3))], @@ -229,27 +237,27 @@ describe('EIP4844 constructor tests - invalid scenarios', () => { ], } try { - BlobEIP4844Transaction.fromTxData({ ...baseTxData, ...shortVersionHash }, { common }) + createBlob4844Tx({ ...baseTxData, ...shortVersionHash }, { common }) } catch (err: any) { assert.ok( err.message.includes('versioned hash is invalid length'), - 'throws on invalid versioned hash length' + 'throws on invalid versioned hash length', ) } try { - BlobEIP4844Transaction.fromTxData({ ...baseTxData, ...invalidVersionHash }, { common }) + createBlob4844Tx({ ...baseTxData, ...invalidVersionHash }, { common }) } catch (err: any) { assert.ok( err.message.includes('does not start with KZG commitment'), - 'throws on invalid commitment version' + 'throws on invalid commitment version', ) } try { - BlobEIP4844Transaction.fromTxData({ ...baseTxData, ...tooManyBlobs }, { common }) + createBlob4844Tx({ ...baseTxData, ...tooManyBlobs }, { common }) } catch (err: any) { assert.ok( err.message.includes('tx can contain at most'), - 'throws on too many versioned hashes' + 'throws on too many versioned hashes', ) } }) @@ -263,6 +271,7 @@ describe('Network wrapper tests', () => { common = createCommonFromGethGenesis(gethGenesis, { chain: 'customChain', hardfork: Hardfork.Cancun, + params: paramsTx, customCrypto: { kzg }, }) }) @@ -271,7 +280,7 @@ describe('Network wrapper tests', () => { const commitments = blobsToCommitments(kzg, blobs) const blobVersionedHashes = commitmentsToVersionedHashes(commitments) const proofs = blobsToProofs(kzg, blobs, commitments) - const unsignedTx = BlobEIP4844Transaction.fromTxData( + const unsignedTx = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -281,14 +290,14 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) const signedTx = unsignedTx.sign(pk) const sender = signedTx.getSenderAddress().toString() const wrapper = signedTx.serializeNetworkWrapper() - const jsonData = BlobEIP4844Transaction.networkWrapperToJson(wrapper, { common }) + const jsonData = blobTxNetworkWrapperToJSON(wrapper, { common }) assert.equal(jsonData.blobs?.length, blobs.length, 'contains the correct number of blobs') for (let i = 0; i < jsonData.blobs.length; i++) { const b1 = jsonData.blobs[i] @@ -298,7 +307,7 @@ describe('Network wrapper tests', () => { assert.equal( jsonData.kzgCommitments.length, signedTx.kzgCommitments!.length, - 'contains the correct number of commitments' + 'contains the correct number of commitments', ) for (let i = 0; i < jsonData.kzgCommitments.length; i++) { const c1 = jsonData.kzgCommitments[i] @@ -308,7 +317,7 @@ describe('Network wrapper tests', () => { assert.equal( jsonData.kzgProofs?.length, signedTx.kzgProofs!.length, - 'contains the correct number of proofs' + 'contains the correct number of proofs', ) for (let i = 0; i < jsonData.kzgProofs.length; i++) { const p1 = jsonData.kzgProofs[i] @@ -316,47 +325,47 @@ describe('Network wrapper tests', () => { assert.equal(p1, p2, 'contains the same proofs') } - const deserializedTx = BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper(wrapper, { + const deserializedTx = createBlob4844TxFromSerializedNetworkWrapper(wrapper, { common, }) assert.equal( deserializedTx.type, 0x03, - 'successfully deserialized a blob transaction network wrapper' + 'successfully deserialized a blob transaction network wrapper', ) 
assert.equal(deserializedTx.blobs?.length, blobs.length, 'contains the correct number of blobs') assert.equal( deserializedTx.getSenderAddress().toString(), sender, - 'decoded sender address correctly' + 'decoded sender address correctly', ) - const minimalTx = BlobEIP4844Transaction.minimalFromNetworkWrapper(deserializedTx, { common }) + const minimalTx = createMinimal4844TxFromNetworkWrapper(deserializedTx, { common }) assert.ok(minimalTx.blobs === undefined, 'minimal representation contains no blobs') assert.ok( equalsBytes(minimalTx.hash(), deserializedTx.hash()), - 'has the same hash as the network wrapper version' + 'has the same hash as the network wrapper version', ) - const simpleBlobTx = BlobEIP4844Transaction.fromTxData( + const simpleBlobTx = createBlob4844Tx( { blobsData: ['hello world'], maxFeePerBlobGas: 100000000n, gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) assert.equal( bytesToHex(unsignedTx.blobVersionedHashes[0]), bytesToHex(simpleBlobTx.blobVersionedHashes[0]), - 'tx versioned hash for simplified blob txData constructor matches fully specified versioned hashes' + 'tx versioned hash for simplified blob txData constructor matches fully specified versioned hashes', ) assert.throws( () => - BlobEIP4844Transaction.fromTxData( + createBlob4844Tx( { blobsData: ['hello world'], blobs: ['hello world' as any], @@ -364,16 +373,16 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'encoded blobs', undefined, - 'throws on blobsData and blobs in txData' + 'throws on blobsData and blobs in txData', ) assert.throws( () => - BlobEIP4844Transaction.fromTxData( + createBlob4844Tx( { blobsData: ['hello world'], kzgCommitments: ['0xabcd'], @@ -381,16 +390,16 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'KZG commitments', undefined, - 'throws on blobsData and KZG commitments in txData' + 'throws on blobsData and KZG commitments in txData', ) assert.throws( () => - BlobEIP4844Transaction.fromTxData( + createBlob4844Tx( { blobsData: ['hello world'], blobVersionedHashes: ['0x01cd'], @@ -398,16 +407,16 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'versioned hashes', undefined, - 'throws on blobsData and versioned hashes in txData' + 'throws on blobsData and versioned hashes in txData', ) assert.throws( () => - BlobEIP4844Transaction.fromTxData( + createBlob4844Tx( { blobsData: ['hello world'], kzgProofs: ['0x01cd'], @@ -415,16 +424,16 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ), 'KZG proofs', undefined, - 'throws on blobsData and KZG proofs in txData' + 'throws on blobsData and KZG proofs in txData', ) assert.throws( () => { - BlobEIP4844Transaction.fromTxData( + createBlob4844Tx( { blobVersionedHashes: [], blobs: [], @@ -434,15 +443,15 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) }, 'tx should contain at least one blob', undefined, - 'throws a transaction with no blobs' + 'throws a transaction with no blobs', ) - const txWithMissingBlob = BlobEIP4844Transaction.fromTxData( + const txWithMissingBlob = createBlob4844Tx( { blobVersionedHashes, blobs: blobs.slice(1), @@ -452,25 +461,25 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) 
const serializedWithMissingBlob = txWithMissingBlob.serializeNetworkWrapper() assert.throws( () => - BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper(serializedWithMissingBlob, { + createBlob4844TxFromSerializedNetworkWrapper(serializedWithMissingBlob, { common, }), 'Number of blobVersionedHashes, blobs, and commitments not all equal', undefined, - 'throws when blobs/commitments/hashes mismatch' + 'throws when blobs/commitments/hashes mismatch', ) const mangledValue = commitments[0][0] commitments[0][0] = 154 - const txWithInvalidCommitment = BlobEIP4844Transaction.fromTxData( + const txWithInvalidCommitment = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -480,25 +489,25 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) const serializedWithInvalidCommitment = txWithInvalidCommitment.serializeNetworkWrapper() assert.throws( () => - BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper(serializedWithInvalidCommitment, { + createBlob4844TxFromSerializedNetworkWrapper(serializedWithInvalidCommitment, { common, }), 'KZG proof cannot be verified from blobs/commitments', undefined, - 'throws when kzg proof cant be verified' + 'throws when kzg proof cant be verified', ) blobVersionedHashes[0][1] = 2 commitments[0][0] = mangledValue - const txWithInvalidVersionedHashes = BlobEIP4844Transaction.fromTxData( + const txWithInvalidVersionedHashes = createBlob4844Tx( { blobVersionedHashes, blobs, @@ -508,22 +517,19 @@ describe('Network wrapper tests', () => { gasLimit: 0xffffffn, to: randomBytes(20), }, - { common } + { common }, ) const serializedWithInvalidVersionedHashes = txWithInvalidVersionedHashes.serializeNetworkWrapper() assert.throws( () => - BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper( - serializedWithInvalidVersionedHashes, - { - common, - } - ), + createBlob4844TxFromSerializedNetworkWrapper(serializedWithInvalidVersionedHashes, { + common, + }), 'commitment for blob at index 0 does not match versionedHash', undefined, - 'throws when versioned hashes dont match kzg commitments' + "throws when versioned hashes don't match kzg commitments", ) }) }) @@ -539,7 +545,7 @@ describe('hash() and signature verification', () => { }) }) it('should work', async () => { - const unsignedTx = BlobEIP4844Transaction.fromTxData( + const unsignedTx = createBlob4844Tx( { chainId: 1, nonce: 1, @@ -556,23 +562,23 @@ describe('hash() and signature verification', () => { storageKeys: ['0x0000000000000000000000000000000000000000000000000000000000000000'], }, ], - to: Address.zero(), + to: createZeroAddress(), }, - { common } + { common }, ) assert.equal( bytesToHex(unsignedTx.getHashedMessageToSign()), '0x02560c5173b0d793ce019cfa515ece6a04a4b3f3d67eab67fbca78dd92d4ed76', - 'produced the correct transaction hash' + 'produced the correct transaction hash', ) const signedTx = unsignedTx.sign( - hexToBytes('0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8') + hexToBytes('0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8'), ) assert.equal( signedTx.getSenderAddress().toString(), '0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b', - 'was able to recover sender address' + 'was able to recover sender address', ) assert.ok(signedTx.verifySignature(), 'signature is valid') }) @@ -585,14 +591,14 @@ it('getEffectivePriorityFee()', async () => { hardfork: Hardfork.Cancun, customCrypto: { kzg }, }) - const tx = BlobEIP4844Transaction.fromTxData( + const tx = createBlob4844Tx( { maxFeePerGas: 10, 
maxPriorityFeePerGas: 8, - to: Address.zero(), + to: createZeroAddress(), blobVersionedHashes: [concatBytes(new Uint8Array([1]), randomBytes(31))], }, - { common } + { common }, ) assert.equal(tx.getEffectivePriorityFee(BigInt(10)), BigInt(0)) assert.equal(tx.getEffectivePriorityFee(BigInt(9)), BigInt(1)) @@ -611,6 +617,7 @@ describe('Network wrapper deserialization test', () => { common = createCommonFromGethGenesis(gethGenesis, { chain: 'customChain', hardfork: Hardfork.Cancun, + params: paramsTx, customCrypto: { kzg, }, @@ -651,7 +658,7 @@ describe('Network wrapper deserialization test', () => { const proofs = blobsToProofs(kzg, blobs, commitments) const wrapper = hexToBytes(blobTx.tx as PrefixedHexString) - const deserializedTx = BlobEIP4844Transaction.fromSerializedBlobTxNetworkWrapper(wrapper, { + const deserializedTx = createBlob4844TxFromSerializedNetworkWrapper(wrapper, { common, }) const jsonData = deserializedTx.toJSON() @@ -661,7 +668,7 @@ describe('Network wrapper deserialization test', () => { assert.ok(equalsBytes(deserializedTx.blobs![0], blobs[0]), 'blobs should match') assert.ok( equalsBytes(deserializedTx.kzgCommitments![0], commitments[0]), - 'commitments should match' + 'commitments should match', ) assert.ok(equalsBytes(deserializedTx.kzgProofs![0], proofs[0]), 'proofs should match') @@ -681,7 +688,7 @@ describe('Network wrapper deserialization test', () => { sender, networkSerializedHexLength: networkSerialized.length, }, - 'txMeta should match' + 'txMeta should match', ) }) }) diff --git a/packages/tx/test/eip7702.spec.ts b/packages/tx/test/eip7702.spec.ts index 3e7383681b..d2c3935388 100644 --- a/packages/tx/test/eip7702.spec.ts +++ b/packages/tx/test/eip7702.spec.ts @@ -1,21 +1,43 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' -import { Address, hexToBytes, privateToAddress } from '@ethereumjs/util' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { createAddressFromPrivateKey, createZeroAddress, hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { EOACodeEIP7702Transaction } from '../src/index.js' +import { createEOACode7702Tx } from '../src/index.js' +import type { TxData } from '../src/7702/tx.js' +import type { AuthorizationListItem } from '../src/index.js' import type { PrefixedHexString } from '@ethereumjs/util' -const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Cancun, eips: [7702] }) +const common = new Common({ chain: Mainnet, hardfork: Hardfork.Cancun, eips: [7702] }) -const pkey = hexToBytes('0x' + '20'.repeat(32)) -const addr = new Address(privateToAddress(pkey)) +const pkey = hexToBytes(`0x${'20'.repeat(32)}`) +const addr = createAddressFromPrivateKey(pkey) const ones32 = `0x${'01'.repeat(32)}` as PrefixedHexString -describe('[EOACodeEIP7702Transaction]', () => { +function getTxData(override: Partial<AuthorizationListItem> = {}): TxData { + const validAuthorizationList: AuthorizationListItem = { + chainId: '0x', + address: `0x${'20'.repeat(20)}`, + nonce: ['0x1'], + yParity: '0x1', + r: ones32, + s: ones32, + } + + return { + authorizationList: [ + { + ...validAuthorizationList, + ...override, + }, + ], + } +} + +describe('[EOACode7702Transaction]', () => { it('sign()', () => { - const txn = EOACodeEIP7702Transaction.fromTxData( + const txn = createEOACode7702Tx( { value: 1, maxFeePerGas: 1, @@ -24,10 +46,10 @@ describe('[EOACodeEIP7702Transaction]', () => { authorizationList: [], chainId: 1, gasLimit: 100000, - to: Address.zero(), + to: createZeroAddress(), data: new
Uint8Array(1), }, - { common } + { common }, ) const signed = txn.sign(pkey) assert.ok(signed.getSenderAddress().equals(addr)) @@ -36,166 +58,37 @@ describe('[EOACodeEIP7702Transaction]', () => { }) it('valid and invalid authorizationList values', () => { - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(21)}`, - nonce: [], - yParity: '0x1', - r: ones32, - s: ones32, - }, - ], - }, - { common } - ) - }, 'address length should be 20 bytes') - - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(20)}`, - nonce: ['0x1', '0x2'], - yParity: '0x1', - r: ones32, - s: ones32, - }, - ], - }, - { common } - ) - }, 'nonce list should consist of at most 1 item') - - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(20)}`, - nonce: ['0x1'], - yParity: '0x1', - r: ones32, - s: undefined as never, - }, - ], - }, - { common } - ) - }, 's is not defined') - - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(20)}`, - nonce: ['0x1'], - yParity: '0x1', - r: undefined as never, - s: ones32, - }, - ], - }, - { common } - ) - }, 'r is not defined') - - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(20)}`, - nonce: ['0x1'], - yParity: undefined as never, - r: ones32, - s: ones32, - }, - ], - }, - { common } - ) - }, 'yParity is not defined') - - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( + const tests: [Partial<AuthorizationListItem>, string][] = [ + [ { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(20)}`, - nonce: undefined as never, - yParity: '0x1', - r: ones32, - s: ones32, - }, - ], + address: `0x${'20'.repeat(21)}`, }, - { common } - ) - }, 'nonce is not defined') - - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( + 'address length should be 20 bytes', + ], + [ { - authorizationList: [ - { - chainId: '0x', - address: undefined as never, - nonce: ['0x1'], - yParity: '0x1', - r: ones32, - s: ones32, - }, - ], + nonce: ['0x1', '0x2'], }, - { common } - ) - }, 'address is not defined') + 'nonce list should consist of at most 1 item', + ], + [{ s: undefined as never }, 's is not defined'], + [{ r: undefined as never }, 'r is not defined'], + [{ yParity: undefined as never }, 'yParity is not defined'], + [{ nonce: undefined as never }, 'nonce is not defined'], + [{ address: undefined as never }, 'address is not defined'], + [{ chainId: undefined as never }, 'chainId is not defined'], + ] - assert.throws(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: undefined as never, - address: `0x${'20'.repeat(20)}`, - nonce: ['0x1'], - yParity: '0x1', - r: ones32, - s: ones32, - }, - ], - }, - { common } - ) - }, 'chainId is not defined') + for (const test of tests) { + const txData = getTxData(test[0]) + const testName = test[1] + assert.throws(() => { + createEOACode7702Tx(txData, { common }) + }, testName) + } assert.doesNotThrow(() => { - EOACodeEIP7702Transaction.fromTxData( - { - authorizationList: [ - { - chainId: '0x', - address: `0x${'20'.repeat(20)}`, - nonce: ['0x1'], - yParity: '0x1', - r: ones32, - s: ones32, - }, - ], - }, - { common } - ) + createEOACode7702Tx(getTxData(), { common }) }) }) })
diff --git a/packages/tx/test/fromRpc.spec.ts b/packages/tx/test/fromRpc.spec.ts index a0940408b8..05786b983f 100644 --- a/packages/tx/test/fromRpc.spec.ts +++ b/packages/tx/test/fromRpc.spec.ts @@ -1,9 +1,14 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' import { bytesToHex, randomBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { normalizeTxParams } from '../src/fromRpc.js' -import { TransactionFactory, TransactionType } from '../src/index.js' +import { + TransactionType, + createTxFromJsonRpcProvider, + createTxFromRPC, + createTxFromTxData, +} from '../src/index.js' +import { normalizeTxParams } from '../src/util.js' import optimismTx from './json/optimismTx.json' import rpcTx from './json/rpcTx.json' @@ -19,7 +24,7 @@ const txTypes = [ describe('[fromJsonRpcProvider]', () => { it('should work', async () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const provider = 'https://my.json.rpc.provider.com:8545' const realFetch = global.fetch @@ -51,15 +56,15 @@ describe('[fromJsonRpcProvider]', () => { } const txHash = '0xed1960aa7d0d7b567c946d94331dddb37a1c67f51f30bf51f256ea40db88cfb0' - const tx = await TransactionFactory.fromJsonRpcProvider(provider, txHash, { common }) + const tx = await createTxFromJsonRpcProvider(provider, txHash, { common }) assert.equal(bytesToHex(tx.hash()), txHash, 'generated correct tx from transaction RPC data') try { - await TransactionFactory.fromJsonRpcProvider(provider, bytesToHex(randomBytes(32)), {}) + await createTxFromJsonRpcProvider(provider, bytesToHex(randomBytes(32)), {}) assert.fail('should throw') } catch (err: any) { assert.ok( err.message.includes('No data returned from provider'), - 'throws correct error when no tx returned' + 'throws correct error when no tx returned', ) } global.fetch = realFetch @@ -69,17 +74,21 @@ describe('[fromJsonRpcProvider]', () => { describe('[normalizeTxParams]', () => { it('should work', () => { const normedTx = normalizeTxParams(rpcTx) - const tx = TransactionFactory.fromTxData(normedTx) + const tx = createTxFromTxData(normedTx) assert.equal(normedTx.gasLimit, 21000n, 'correctly converted "gas" to "gasLimit"') - assert.equal(bytesToHex(tx.hash()), rpcTx.hash, 'converted normed tx data to transaction objec') + assert.equal( + bytesToHex(tx.hash()), + rpcTx.hash, + 'converted normed tx data to transaction object', + ) }) }) -describe('fromRPC: interpret v/r/s vals of 0x0 as undefined for Optimism system txs', () => { +describe('fromRPC: interpret v/r/s values of 0x0 as undefined for Optimism system txs', () => { it('should work', async () => { for (const txType of txTypes) { ;(optimismTx as any).type = txType - const tx = await TransactionFactory.fromRPC(optimismTx as TypedTxData) + const tx = await createTxFromRPC(optimismTx as TypedTxData) assert.ok(tx.v === undefined) assert.ok(tx.s === undefined) assert.ok(tx.r === undefined) @@ -98,7 +107,8 @@ describe('fromRPC: ensure `v="0x0"` is correctly decoded for signed txs', () => continue } ;(v0Tx as any).type = txType - const tx = await TransactionFactory.fromRPC(v0Tx as TypedTxData) + const common = createCustomCommon({ chainId: 0x10f2c }, Mainnet) + const tx = await createTxFromRPC(v0Tx as TypedTxData, { common }) assert.ok(tx.isSigned()) } }) diff --git a/packages/tx/test/inputValue.spec.ts 
b/packages/tx/test/inputValue.spec.ts index b5f3aaf53f..e0edc4f32b 100644 --- a/packages/tx/test/inputValue.spec.ts +++ b/packages/tx/test/inputValue.spec.ts @@ -1,13 +1,14 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { Address, hexToBytes, toBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { - AccessListEIP2930Transaction, - FeeMarketEIP1559Transaction, - LegacyTransaction, - TransactionFactory, TransactionType, + create1559FeeMarketTxFromBytesArray, + createAccessList2930TxFromBytesArray, + createLegacyTx, + createLegacyTxFromBytesArray, + createTxFromTxData, } from '../src/index.js' import type { TxValuesArray } from '../src/index.js' @@ -110,20 +111,20 @@ const eip1559TxValues = { describe('[Transaction Input Values]', () => { it('Legacy Transaction Values', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Homestead }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Homestead }) const options = { ...baseTxValues, ...legacyTxValues, type: '0' } const legacyTxData = generateCombinations({ options, }) const randomSample = getRandomSubarray(legacyTxData, 100) for (const txData of randomSample) { - const tx = LegacyTransaction.fromTxData(txData, { common }) + const tx = createLegacyTx(txData, { common }) assert.throws(() => tx.hash(), undefined, undefined, 'tx.hash() throws if tx is unsigned') } }) it('EIP-1559 Transaction Values', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const options = { ...baseTxValues, ...accessListEip2930TxValues, @@ -136,7 +137,7 @@ describe('[Transaction Input Values]', () => { const randomSample = getRandomSubarray(eip1559TxData, 100) for (const txData of randomSample) { - const tx = LegacyTransaction.fromTxData(txData, { common }) + const tx = createLegacyTx(txData, { common }) assert.throws(() => tx.hash(), undefined, undefined, 'tx.hash() should throw if unsigned') } }) @@ -151,7 +152,7 @@ describe('[Invalid Array Input values]', () => { ] for (const signed of [false, true]) { for (const txType of txTypes) { - let tx = TransactionFactory.fromTxData({ type: txType }) + let tx = createTxFromTxData({ type: txType }) if (signed) { tx = tx.sign(hexToBytes(`0x${'42'.repeat(32)}`)) } @@ -161,23 +162,21 @@ describe('[Invalid Array Input values]', () => { switch (txType) { case TransactionType.Legacy: assert.throws(() => - LegacyTransaction.fromValuesArray( - rawValues as TxValuesArray[TransactionType.Legacy] - ) + createLegacyTxFromBytesArray(rawValues as TxValuesArray[TransactionType.Legacy]), ) break case TransactionType.AccessListEIP2930: assert.throws(() => - AccessListEIP2930Transaction.fromValuesArray( - rawValues as TxValuesArray[TransactionType.AccessListEIP2930] - ) + createAccessList2930TxFromBytesArray( + rawValues as TxValuesArray[TransactionType.AccessListEIP2930], + ), ) break case TransactionType.FeeMarketEIP1559: assert.throws(() => - FeeMarketEIP1559Transaction.fromValuesArray( - rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559] - ) + create1559FeeMarketTxFromBytesArray( + rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559], + ), ) break } @@ -217,7 +216,7 @@ describe('[Invalid Access Lists]', () => { for (const invalidAccessListItem of invalidAccessLists) { let tx: any try { - tx = TransactionFactory.fromTxData({ + tx = createTxFromTxData({ 
type: txType, accessList: invalidAccessListItem, }) @@ -227,7 +226,7 @@ describe('[Invalid Access Lists]', () => { assert.fail('did not fail on `fromTxData`') } catch (e: any) { assert.ok(true, 'failed ok on decoding in `fromTxData`') - tx = TransactionFactory.fromTxData({ type: txType }) + tx = createTxFromTxData({ type: txType }) if (signed) { tx = tx.sign(hexToBytes(`0x${'42'.repeat(32)}`)) } @@ -243,16 +242,16 @@ describe('[Invalid Access Lists]', () => { switch (txType) { case TransactionType.AccessListEIP2930: assert.throws(() => - AccessListEIP2930Transaction.fromValuesArray( - rawValues as TxValuesArray[TransactionType.AccessListEIP2930] - ) + createAccessList2930TxFromBytesArray( + rawValues as TxValuesArray[TransactionType.AccessListEIP2930], + ), ) break case TransactionType.FeeMarketEIP1559: assert.throws(() => - FeeMarketEIP1559Transaction.fromValuesArray( - rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559] - ) + create1559FeeMarketTxFromBytesArray( + rawValues as TxValuesArray[TransactionType.FeeMarketEIP1559], + ), ) break } diff --git a/packages/tx/test/legacy.spec.ts b/packages/tx/test/legacy.spec.ts index c3d23bfc7a..c9253deadb 100644 --- a/packages/tx/test/legacy.spec.ts +++ b/packages/tx/test/legacy.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork, createCustomCommon } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Mainnet, Sepolia, createCustomCommon } from '@ethereumjs/common' import { RLP } from '@ethereumjs/rlp' import { bytesToBigInt, @@ -11,9 +11,13 @@ import { } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' -import { LegacyTransaction } from '../src/index.js' +import { + createLegacyTx, + createLegacyTxFromBytesArray, + createLegacyTxFromRLP, +} from '../src/index.js' -import txFixturesEip155 from './json/ttTransactionTestEip155VitaliksTests.json' +import txFixturesEip155 from './json/ttTransactionTestEip155VitaliksTests.json' // cspell:disable-line import txFixtures from './json/txs.json' import type { TransactionType, TxData, TypedTransaction } from '../src/index.js' @@ -48,55 +52,57 @@ describe('[Transaction]', () => { for (const testCase of cases) { txData[value] = testCase assert.throws(() => { - LegacyTransaction.fromTxData(txData) + createLegacyTx(txData) }) } } }) it('Initialization', () => { - const nonEIP2930Common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const nonEIP2930Common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.ok( - LegacyTransaction.fromTxData({}, { common: nonEIP2930Common }), - 'should initialize on a pre-Berlin Harfork (EIP-2930 not activated)' + createLegacyTx({}, { common: nonEIP2930Common }), + 'should initialize on a pre-Berlin Hardfork (EIP-2930 not activated)', ) - + let common = new Common({ chain: Goerli }) const txData = txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) txData[6] = intToBytes(45) // v with 0-parity and chain ID 5 - let tx = LegacyTransaction.fromValuesArray(txData) + let tx = createLegacyTxFromBytesArray(txData, { common }) assert.ok( tx.common.chainId() === BigInt(5), - 'should initialize Common with chain ID (supported) derived from v value (v with 0-parity)' + 'should initialize Common with chain ID (supported) derived from v value (v with 0-parity)', ) txData[6] = intToBytes(46) // v with 1-parity and chain ID 5 - tx = LegacyTransaction.fromValuesArray(txData) + tx = createLegacyTxFromBytesArray(txData, { common }) assert.ok( tx.common.chainId() === BigInt(5), - 
'should initialize Common with chain ID (supported) derived from v value (v with 1-parity)' + 'should initialize Common with chain ID (supported) derived from v value (v with 1-parity)', ) + common = createCustomCommon({ chainId: 999 }, Mainnet) + txData[6] = intToBytes(2033) // v with 0-parity and chain ID 999 - tx = LegacyTransaction.fromValuesArray(txData) + tx = createLegacyTxFromBytesArray(txData, { common }) assert.equal( tx.common.chainId(), BigInt(999), - 'should initialize Common with chain ID (unsupported) derived from v value (v with 0-parity)' + 'should initialize Common with chain ID (unsupported) derived from v value (v with 0-parity)', ) txData[6] = intToBytes(2034) // v with 1-parity and chain ID 999 - tx = LegacyTransaction.fromValuesArray(txData) + tx = createLegacyTxFromBytesArray(txData, { common }) assert.equal( tx.common.chainId(), BigInt(999), - 'should initialize Common with chain ID (unsupported) derived from v value (v with 1-parity)' + 'should initialize Common with chain ID (unsupported) derived from v value (v with 1-parity)', ) }) it('Initialization -> decode with fromValuesArray()', () => { for (const tx of txFixtures.slice(0, 4)) { const txData = tx.raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) - const pt = LegacyTransaction.fromValuesArray(txData) + const pt = createLegacyTxFromBytesArray(txData) assert.equal(bytesToHex(unpadBytes(toBytes(pt.nonce))), tx.raw[0]) assert.equal(bytesToHex(toBytes(pt.gasPrice)), tx.raw[1]) @@ -113,31 +119,31 @@ describe('[Transaction]', () => { }) it('Initialization -> should accept lesser r values', () => { - const tx = LegacyTransaction.fromTxData({ r: bytesToBigInt(hexToBytes('0x0005')) }) + const tx = createLegacyTx({ r: bytesToBigInt(hexToBytes('0x0005')) }) assert.equal(tx.r!.toString(16), '5') }) it('Initialization -> throws when creating a a transaction with incompatible chainid and v value', () => { - let common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Petersburg }) - let tx = LegacyTransaction.fromTxData({}, { common }) + let common = new Common({ chain: Goerli, hardfork: Hardfork.Petersburg }) + let tx = createLegacyTx({}, { common }) assert.equal(tx.common.chainId(), BigInt(5)) const privKey = hexToBytes(`0x${txFixtures[0].privateKey}`) tx = tx.sign(privKey) const serialized = tx.serialize() - common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Petersburg }) - assert.throws(() => LegacyTransaction.fromSerializedTx(serialized, { common })) + common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) + assert.throws(() => createLegacyTxFromRLP(serialized, { common })) }) it('Initialization -> throws if v is set to an EIP155-encoded value incompatible with the chain id', () => { assert.throws(() => { - const common = new Common({ chain: 42, hardfork: Hardfork.Petersburg }) - LegacyTransaction.fromTxData({ v: BigInt(1) }, { common }) + const common = new Common({ chain: Sepolia, hardfork: Hardfork.Petersburg }) + createLegacyTx({ v: BigInt(1) }, { common }) }) }) it('addSignature() -> correctly adds correct signature values', () => { const privKey = hexToBytes(`0x${txFixtures[0].privateKey}`) - const tx = LegacyTransaction.fromTxData({}) + const tx = createLegacyTx({}) const signedTx = tx.sign(privKey) const addSignatureTx = tx.addSignature(signedTx.v!, signedTx.r!, signedTx.s!) 
@@ -146,7 +152,7 @@ describe('[Transaction]', () => { it('addSignature() -> correctly adds correct signature values from ecrecover with ChainID protection enabled', () => { const privKey = hexToBytes(`0x${txFixtures[0].privateKey}`) - const tx = LegacyTransaction.fromTxData({}, { common: new Common({ chain: Chain.Sepolia }) }) + const tx = createLegacyTx({}, { common: new Common({ chain: Sepolia }) }) const signedTx = tx.sign(privKey) // `convertV` set to false, since we use the raw value from the signed tx const addSignatureTx = tx.addSignature(signedTx.v!, signedTx.r!, signedTx.s!, false) @@ -157,7 +163,7 @@ describe('[Transaction]', () => { it('addSignature() -> throws when adding the wrong v value', () => { const privKey = hexToBytes(`0x${txFixtures[0].privateKey}`) - const tx = LegacyTransaction.fromTxData({}, { common: new Common({ chain: Chain.Sepolia }) }) + const tx = createLegacyTx({}, { common: new Common({ chain: Sepolia }) }) const signedTx = tx.sign(privKey) // `convertV` set to true: this will apply EIP-155 replay transaction twice, so it should throw! assert.throws(() => { @@ -177,56 +183,56 @@ describe('[Transaction]', () => { } }) - it('getBaseFee() -> should return base fee', () => { - const tx = LegacyTransaction.fromTxData({}) - assert.equal(tx.getBaseFee(), BigInt(53000)) + it('getIntrinsicGas() -> should return base fee', () => { + const tx = createLegacyTx({}) + assert.equal(tx.getIntrinsicGas(), BigInt(53000)) }) - it('getDataFee() -> should return data fee', () => { - let tx = LegacyTransaction.fromTxData({}) - assert.equal(tx.getDataFee(), BigInt(0)) + it('getDataGas() -> should return data fee', () => { + let tx = createLegacyTx({}) + assert.equal(tx.getDataGas(), BigInt(0)) - tx = LegacyTransaction.fromValuesArray( - txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) + tx = createLegacyTxFromBytesArray( + txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), ) - assert.equal(tx.getDataFee(), BigInt(1716)) + assert.equal(tx.getDataGas(), BigInt(1716)) - tx = LegacyTransaction.fromValuesArray( + tx = createLegacyTxFromBytesArray( txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), - { freeze: false } + { freeze: false }, ) - assert.equal(tx.getDataFee(), BigInt(1716)) + assert.equal(tx.getDataGas(), BigInt(1716)) }) - it('getDataFee() -> should return correct data fee for istanbul', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) - let tx = LegacyTransaction.fromTxData({}, { common }) - assert.equal(tx.getDataFee(), BigInt(0)) + it('getDataGas() -> should return correct data fee for istanbul', () => { + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) + let tx = createLegacyTx({}, { common }) + assert.equal(tx.getDataGas(), BigInt(0)) - tx = LegacyTransaction.fromValuesArray( + tx = createLegacyTxFromBytesArray( txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) - assert.equal(tx.getDataFee(), BigInt(1716)) + assert.equal(tx.getDataGas(), BigInt(1716)) }) - it('getDataFee() -> should invalidate cached value on hardfork change', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Byzantium }) - const tx = LegacyTransaction.fromValuesArray( + it('getDataGas() -> should invalidate cached value on hardfork change', () => { + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Byzantium }) + const tx = 
createLegacyTxFromBytesArray( txFixtures[0].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) - assert.equal(tx.getDataFee(), BigInt(656)) + assert.equal(tx.getDataGas(), BigInt(656)) tx.common.setHardfork(Hardfork.Istanbul) - assert.equal(tx.getDataFee(), BigInt(240)) + assert.equal(tx.getDataGas(), BigInt(240)) }) it('getEffectivePriorityFee() -> should return correct values', () => { - const tx = LegacyTransaction.fromTxData({ + const tx = createLegacyTx({ gasPrice: BigInt(100), }) @@ -237,7 +243,7 @@ describe('[Transaction]', () => { }) it('getUpfrontCost() -> should return upfront cost', () => { - const tx = LegacyTransaction.fromTxData({ + const tx = createLegacyTx({ gasPrice: 1000, gasLimit: 10000000, value: 42, @@ -254,10 +260,10 @@ describe('[Transaction]', () => { }) it('serialize() -> should round trip decode a tx', () => { - const tx = LegacyTransaction.fromTxData({ value: 5000 }) + const tx = createLegacyTx({ value: 5000 }) const s1 = tx.serialize() - const tx2 = LegacyTransaction.fromSerializedTx(s1) + const tx2 = createLegacyTxFromRLP(s1) const s2 = tx2.serialize() assert.ok(equalsBytes(s1, s2)) @@ -265,15 +271,15 @@ describe('[Transaction]', () => { it('hash() / getHashedMessageToSign() / getMessageToSign()', () => { const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.TangerineWhistle, }) - let tx = LegacyTransaction.fromValuesArray( + let tx = createLegacyTxFromBytesArray( txFixtures[3].raw.slice(0, 6).map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) assert.throws( () => { @@ -281,50 +287,50 @@ describe('[Transaction]', () => { }, undefined, undefined, - 'should throw calling hash with unsigned tx' + 'should throw calling hash with unsigned tx', ) - tx = LegacyTransaction.fromValuesArray( + tx = createLegacyTxFromBytesArray( txFixtures[3].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) assert.deepEqual( tx.hash(), - hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa') + hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa'), ) assert.deepEqual( tx.getHashedMessageToSign(), - hexToBytes('0x61e1ec33764304dddb55348e7883d4437426f44ab3ef65e6da1e025734c03ff0') + hexToBytes('0x61e1ec33764304dddb55348e7883d4437426f44ab3ef65e6da1e025734c03ff0'), ) assert.equal(tx.getMessageToSign().length, 6) assert.deepEqual( tx.hash(), - hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa') + hexToBytes('0x375a8983c9fc56d7cfd118254a80a8d7403d590a6c9e105532b67aca1efb97aa'), ) }) it('hash() -> with defined chainId', () => { - const tx = LegacyTransaction.fromValuesArray( - txFixtures[4].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) + const tx = createLegacyTxFromBytesArray( + txFixtures[4].raw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), ) assert.equal( bytesToHex(tx.hash()), - '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4' + '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4', ) assert.equal( bytesToHex(tx.hash()), - '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4' + '0x0f09dc98ea85b7872f4409131a790b91e7540953992886fc268b7ba5c96820e4', ) assert.equal( bytesToHex(tx.getHashedMessageToSign()), - '0xf97c73fdca079da7652dbc61a46cd5aeef804008e057be3e712c43eac389aaf0' + '0xf97c73fdca079da7652dbc61a46cd5aeef804008e057be3e712c43eac389aaf0', ) }) it("getHashedMessageToSign(), 
getSenderPublicKey() (implicit call) -> verify EIP155 signature based on Vitalik's tests", () => { for (const tx of txFixturesEip155) { - const pt = LegacyTransaction.fromSerializedTx(hexToBytes(tx.rlp as PrefixedHexString)) + const pt = createLegacyTxFromRLP(hexToBytes(tx.rlp as PrefixedHexString)) assert.equal(bytesToHex(pt.getHashedMessageToSign()), `0x${tx.hash}`) assert.equal(bytesToHex(pt.serialize()), tx.rlp) assert.equal(pt.getSenderAddress().toString(), `0x${tx.sender}`) @@ -342,38 +348,38 @@ describe('[Transaction]', () => { '0x', ] const privateKey = hexToBytes( - '0x4646464646464646464646464646464646464646464646464646464646464646' + '0x4646464646464646464646464646464646464646464646464646464646464646', ) - const pt = LegacyTransaction.fromValuesArray( - txRaw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)) + const pt = createLegacyTxFromBytesArray( + txRaw.map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), ) // Note that Vitalik's example has a very similar value denoted "signing data". // It's not the output of `serialize()`, but the pre-image of the hash returned by `tx.hash(false)`. - // We don't have a getter for such a value in LegacyTransaction. + // We don't have a getter for such a value in LegacyTx. assert.equal( bytesToHex(pt.serialize()), - '0xec098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a764000080808080' + '0xec098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a764000080808080', ) const signedTx = pt.sign(privateKey) assert.equal( bytesToHex(signedTx.getHashedMessageToSign()), - '0xdaf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53' + '0xdaf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53', ) assert.equal( bytesToHex(signedTx.serialize()), - '0xf86c098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a76400008025a028ef61340bd939bc2195fe537567866003e1a15d3c71ff63e1590620aa636276a067cbe9d8997f761aecb703304b3800ccf555c9f3dc64214b297fb1966a3b6d83' + '0xf86c098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a76400008025a028ef61340bd939bc2195fe537567866003e1a15d3c71ff63e1590620aa636276a067cbe9d8997f761aecb703304b3800ccf555c9f3dc64214b297fb1966a3b6d83', ) }) it('sign(), getSenderPublicKey() (implicit call) -> EIP155 hashing when singing', () => { - const common = new Common({ chain: 1, hardfork: Hardfork.Petersburg }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.Petersburg }) for (const txData of txFixtures.slice(0, 3)) { - const tx = LegacyTransaction.fromValuesArray( + const tx = createLegacyTxFromBytesArray( txData.raw.slice(0, 6).map((rawTxData) => hexToBytes(rawTxData as PrefixedHexString)), { common, - } + }, ) const privKey = hexToBytes(`0x${txData.privateKey}`) @@ -382,7 +388,7 @@ describe('[Transaction]', () => { assert.equal( txSigned.getSenderAddress().toString(), '0x' + txData.sendersAddress, - "computed sender address should equal the fixture's one" + "computed sender address should equal the fixture's one", ) } }) @@ -397,17 +403,17 @@ describe('[Transaction]', () => { '0x', ] const privateKey = hexToBytes( - '0xDE3128752F183E8930D7F00A2AAA302DCB5E700B2CBA2D8CA5795660F07DEFD5' + '0xDE3128752F183E8930D7F00A2AAA302DCB5E700B2CBA2D8CA5795660F07DEFD5', ) - const common = createCustomCommon({ chainId: 3 }) - const tx = LegacyTransaction.fromValuesArray( + const common = createCustomCommon({ chainId: 3 }, Mainnet) + const tx = createLegacyTxFromBytesArray( txRaw.map((rawTxData) => hexToBytes(rawTxData as 
PrefixedHexString)), - { common } + { common }, ) const signedTx = tx.sign(privateKey) assert.equal( bytesToHex(signedTx.serialize()), - '0xf86c018502540be40082520894d7250824390ec5c8b71d856b5de895e271170d9d880de0b6b3a76400008029a0d3512c68099d184ccf54f44d9d6905bff303128574b663dcf10b4c726ddd8133a0628acc8f481dea593f13309dfc5f0340f83fdd40cf9fbe47f782668f6f3aec74' + '0xf86c018502540be40082520894d7250824390ec5c8b71d856b5de895e271170d9d880de0b6b3a76400008029a0d3512c68099d184ccf54f44d9d6905bff303128574b663dcf10b4c726ddd8133a0628acc8f481dea593f13309dfc5f0340f83fdd40cf9fbe47f782668f6f3aec74', ) }) @@ -422,50 +428,48 @@ describe('[Transaction]', () => { } const privateKey = hexToBytes( - '0x4646464646464646464646464646464646464646464646464646464646464646' + '0x4646464646464646464646464646464646464646464646464646464646464646', ) const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.TangerineWhistle, }) - const fixtureTxSignedWithoutEIP155 = LegacyTransaction.fromTxData(txData, { + const fixtureTxSignedWithoutEIP155 = createLegacyTx(txData, { common, }).sign(privateKey) - let signedWithEIP155 = LegacyTransaction.fromTxData(txData).sign(privateKey) + let signedWithEIP155 = createLegacyTx(txData).sign(privateKey) assert.isTrue(signedWithEIP155.verifySignature()) assert.notEqual(signedWithEIP155.v?.toString(16), '1c') assert.notEqual(signedWithEIP155.v?.toString(16), '1b') - signedWithEIP155 = LegacyTransaction.fromTxData( - fixtureTxSignedWithoutEIP155.toJSON() - ).sign(privateKey) + signedWithEIP155 = createLegacyTx(fixtureTxSignedWithoutEIP155.toJSON()).sign(privateKey) assert.isTrue(signedWithEIP155.verifySignature()) assert.notEqual(signedWithEIP155.v?.toString(16), '1c') assert.notEqual(signedWithEIP155.v?.toString(16), '1b') - let signedWithoutEIP155 = LegacyTransaction.fromTxData(txData, { + let signedWithoutEIP155 = createLegacyTx(txData, { common, }).sign(privateKey) assert.isTrue(signedWithoutEIP155.verifySignature()) assert.isTrue( signedWithoutEIP155.v?.toString(16) === '1c' || signedWithoutEIP155.v?.toString(16) === '1b', - "v shouldn't be EIP155 encoded" + "v shouldn't be EIP155 encoded", ) - signedWithoutEIP155 = LegacyTransaction.fromTxData(txData, { + signedWithoutEIP155 = createLegacyTx(txData, { common, }).sign(privateKey) assert.isTrue(signedWithoutEIP155.verifySignature()) assert.isTrue( signedWithoutEIP155.v?.toString(16) === '1c' || signedWithoutEIP155.v?.toString(16) === '1b', - "v shouldn't be EIP155 encoded" + "v shouldn't be EIP155 encoded", ) }) @@ -476,19 +480,19 @@ describe('[Transaction]', () => { } } for (let n = 0; n < 27; n++) { - assert.throws(() => LegacyTransaction.fromTxData(getTxData(n))) + assert.throws(() => createLegacyTx(getTxData(n))) } - assert.throws(() => LegacyTransaction.fromTxData(getTxData(29))) - assert.throws(() => LegacyTransaction.fromTxData(getTxData(36))) + assert.throws(() => createLegacyTx(getTxData(29))) + assert.throws(() => createLegacyTx(getTxData(36))) - assert.doesNotThrow(() => LegacyTransaction.fromTxData(getTxData(27))) - assert.doesNotThrow(() => LegacyTransaction.fromTxData(getTxData(28))) - assert.doesNotThrow(() => LegacyTransaction.fromTxData(getTxData(37))) + assert.doesNotThrow(() => createLegacyTx(getTxData(27))) + assert.doesNotThrow(() => createLegacyTx(getTxData(28))) + assert.doesNotThrow(() => createLegacyTx(getTxData(37))) }) it('sign(), verifySignature(): sign tx with chainId specified in params', () => { - const common = new Common({ chain: Chain.Goerli, hardfork: Hardfork.Petersburg }) 
- let tx = LegacyTransaction.fromTxData({}, { common }) + const common = new Common({ chain: Goerli, hardfork: Hardfork.Petersburg }) + let tx = createLegacyTx({}, { common }) assert.equal(tx.common.chainId(), BigInt(5)) const privKey = hexToBytes(`0x${txFixtures[0].privateKey}`) @@ -496,13 +500,13 @@ describe('[Transaction]', () => { const serialized = tx.serialize() - const reTx = LegacyTransaction.fromSerializedTx(serialized, { common }) + const reTx = createLegacyTxFromRLP(serialized, { common }) assert.equal(reTx.verifySignature(), true) assert.equal(reTx.common.chainId(), BigInt(5)) }) it('freeze property propagates from unsigned tx to signed tx', () => { - const tx = LegacyTransaction.fromTxData({}, { freeze: false }) + const tx = createLegacyTx({}, { freeze: false }) assert.notOk(Object.isFrozen(tx), 'tx object is not frozen') const privKey = hexToBytes(`0x${txFixtures[0].privateKey}`) const signedTxn = tx.sign(privKey) @@ -510,10 +514,10 @@ describe('[Transaction]', () => { }) it('common propagates from the common of tx, not the common in TxOptions', () => { - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.London }) const pkey = hexToBytes(`0x${txFixtures[0].privateKey}`) - const txn = LegacyTransaction.fromTxData({}, { common, freeze: false }) - const newCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) + const txn = createLegacyTx({}, { common, freeze: false }) + const newCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Paris }) assert.notDeepEqual(newCommon, common, 'new common is different than original common') Object.defineProperty(txn, 'common', { get() { @@ -523,12 +527,12 @@ describe('[Transaction]', () => { const signedTxn = txn.sign(pkey) assert.ok( signedTxn.common.hardfork() === Hardfork.Paris, - 'signed tx common is taken from tx.common' + 'signed tx common is taken from tx.common', ) }) it('isSigned() -> returns correct values', () => { - let tx = LegacyTransaction.fromTxData({}) + let tx = createLegacyTx({}) assert.notOk(tx.isSigned()) const txData: TxData[TransactionType.Legacy] = { @@ -540,31 +544,31 @@ describe('[Transaction]', () => { value: '0x0', } const privateKey = hexToBytes( - '0x4646464646464646464646464646464646464646464646464646464646464646' + '0x4646464646464646464646464646464646464646464646464646464646464646', ) - tx = LegacyTransaction.fromTxData(txData) + tx = createLegacyTx(txData) assert.notOk(tx.isSigned()) tx = tx.sign(privateKey) assert.ok(tx.isSigned()) - tx = LegacyTransaction.fromTxData(txData) + tx = createLegacyTx(txData) assert.notOk(tx.isSigned()) const rawUnsigned = tx.serialize() tx = tx.sign(privateKey) const rawSigned = tx.serialize() assert.ok(tx.isSigned()) - tx = LegacyTransaction.fromSerializedTx(rawUnsigned) + tx = createLegacyTxFromRLP(rawUnsigned) assert.notOk(tx.isSigned()) tx = tx.sign(privateKey) assert.ok(tx.isSigned()) - tx = LegacyTransaction.fromSerializedTx(rawSigned) + tx = createLegacyTxFromRLP(rawSigned) assert.ok(tx.isSigned()) const signedValues = RLP.decode(Uint8Array.from(rawSigned)) as Uint8Array[] - tx = LegacyTransaction.fromValuesArray(signedValues) + tx = createLegacyTxFromBytesArray(signedValues) assert.ok(tx.isSigned()) - tx = LegacyTransaction.fromValuesArray(signedValues.slice(0, 6)) + tx = createLegacyTxFromBytesArray(signedValues.slice(0, 6)) assert.notOk(tx.isSigned()) }) }) diff --git a/packages/tx/test/testLoader.ts b/packages/tx/test/testLoader.ts index 
62bcaa6adf..eea8a394fe 100644 --- a/packages/tx/test/testLoader.ts +++ b/packages/tx/test/testLoader.ts @@ -23,7 +23,7 @@ export async function getTests( fileFilter: RegExp | string[] = /.json$/, skipPredicate: (...args: any[]) => boolean = falsePredicate, directory: string, - excludeDir: RegExp | string[] = [] + excludeDir: RegExp | string[] = [], ): Promise { const options = { match: fileFilter, @@ -41,7 +41,7 @@ export async function getTests( err: Error | undefined, content: string | Uint8Array, fileName: string, - next: Function + next: Function, ) => { if (err) { reject(err) diff --git a/packages/tx/test/transactionFactory.spec.ts b/packages/tx/test/transactionFactory.spec.ts index a7764f5358..ac12f38a73 100644 --- a/packages/tx/test/transactionFactory.spec.ts +++ b/packages/tx/test/transactionFactory.spec.ts @@ -1,53 +1,55 @@ -import { Chain, Common, Hardfork } from '@ethereumjs/common' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' import { hexToBytes } from '@ethereumjs/util' import { assert, describe, it } from 'vitest' import { - AccessListEIP2930Transaction, - FeeMarketEIP1559Transaction, - LegacyTransaction, - TransactionFactory, + AccessList2930Transaction, + FeeMarket1559Tx, + LegacyTx, TransactionType, + createAccessList2930Tx, + createFeeMarket1559Tx, + createLegacyTx, + createTxFromBlockBodyData, + createTxFromSerializedData, + createTxFromTxData, } from '../src/index.js' const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, }) const pKey = hexToBytes('0x4646464646464646464646464646464646464646464646464646464646464646') -const unsignedLegacyTx = LegacyTransaction.fromTxData({}) +const unsignedLegacyTx = createLegacyTx({}) const signedLegacyTx = unsignedLegacyTx.sign(pKey) -const unsignedEIP2930Tx = AccessListEIP2930Transaction.fromTxData( - { chainId: BigInt(1) }, - { common } -) +const unsignedEIP2930Tx = createAccessList2930Tx({ chainId: BigInt(1) }, { common }) const signedEIP2930Tx = unsignedEIP2930Tx.sign(pKey) -const unsignedEIP1559Tx = FeeMarketEIP1559Transaction.fromTxData({ chainId: BigInt(1) }, { common }) +const unsignedEIP1559Tx = createFeeMarket1559Tx({ chainId: BigInt(1) }, { common }) const signedEIP1559Tx = unsignedEIP1559Tx.sign(pKey) const txTypes = [ { - class: LegacyTransaction, - name: 'LegacyTransaction', + class: LegacyTx, + name: 'LegacyTx', unsigned: unsignedLegacyTx, signed: signedLegacyTx, eip2718: false, type: TransactionType.Legacy, }, { - class: AccessListEIP2930Transaction, - name: 'AccessListEIP2930Transaction', + class: AccessList2930Transaction, + name: 'AccessList2930Transaction', unsigned: unsignedEIP2930Tx, signed: signedEIP2930Tx, eip2718: true, type: TransactionType.AccessListEIP2930, }, { - class: FeeMarketEIP1559Transaction, - name: 'FeeMarketEIP1559Transaction', + class: FeeMarket1559Tx, + name: 'FeeMarket1559Tx', unsigned: unsignedEIP1559Tx, signed: signedEIP1559Tx, eip2718: true, @@ -59,11 +61,11 @@ describe('[TransactionFactory]: Basic functions', () => { it('fromSerializedData() -> success cases', () => { for (const txType of txTypes) { const serialized = txType.unsigned.serialize() - const factoryTx = TransactionFactory.fromSerializedData(serialized, { common }) + const factoryTx = createTxFromSerializedData(serialized, { common }) assert.equal( factoryTx.constructor.name, txType.class.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) } }) @@ -71,27 +73,27 @@ describe('[TransactionFactory]: Basic functions', 
() => { it('fromSerializedData() -> error cases', () => { for (const txType of txTypes) { if (txType.eip2718) { - const unsupportedCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const unsupportedCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws( () => { - TransactionFactory.fromSerializedData(txType.unsigned.serialize(), { + createTxFromSerializedData(txType.unsigned.serialize(), { common: unsupportedCommon, }) }, undefined, undefined, - `should throw when trying to create typed tx when not allowed in Common (${txType.name})` + `should throw when trying to create typed tx when not allowed in Common (${txType.name})`, ) assert.throws( () => { const serialized = txType.unsigned.serialize() serialized[0] = 99 // edit the transaction type - TransactionFactory.fromSerializedData(serialized, { common }) + createTxFromSerializedData(serialized, { common }) }, undefined, undefined, - `should throw when trying to create typed tx with wrong type (${txType.name})` + `should throw when trying to create typed tx with wrong type (${txType.name})`, ) } } @@ -105,23 +107,23 @@ describe('[TransactionFactory]: Basic functions', () => { } else { rawTx = txType.signed.raw() as Uint8Array[] } - const tx = TransactionFactory.fromBlockBodyData(rawTx, { common }) + const tx = createTxFromBlockBodyData(rawTx, { common }) assert.equal( tx.constructor.name, txType.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) if (txType.eip2718) { assert.deepEqual( tx.serialize(), rawTx, - `round-trip serialization should match (${txType.name})` + `round-trip serialization should match (${txType.name})`, ) } else { assert.deepEqual( tx.raw(), rawTx as Uint8Array[], - `round-trip raw() creation should match (${txType.name})` + `round-trip raw() creation should match (${txType.name})`, ) } } @@ -129,35 +131,35 @@ describe('[TransactionFactory]: Basic functions', () => { it('fromTxData() -> success cases', () => { for (const txType of txTypes) { - const tx = TransactionFactory.fromTxData({ type: txType.type }, { common }) + const tx = createTxFromTxData({ type: txType.type }, { common }) assert.equal( tx.constructor.name, txType.class.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) if (!txType.eip2718) { - const tx = TransactionFactory.fromTxData({}) + const tx = createTxFromTxData({}) assert.equal( tx.constructor.name, txType.class.name, - `should return the right type (${txType.name})` + `should return the right type (${txType.name})`, ) } } }) it('fromTxData() -> error cases', () => { - const unsupportedCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const unsupportedCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws(() => { - TransactionFactory.fromTxData({ type: 1 }, { common: unsupportedCommon }) + createTxFromTxData({ type: 1 }, { common: unsupportedCommon }) }) assert.throws(() => { - TransactionFactory.fromTxData({ type: 999 }) + createTxFromTxData({ type: 999 }) }) assert.throws(() => { - TransactionFactory.fromTxData({ value: BigInt('-100') }) + createTxFromTxData({ value: BigInt('-100') }) }) }) }) diff --git a/packages/tx/test/transactionRunner.spec.ts b/packages/tx/test/transactionRunner.spec.ts index 24bdc3dfcd..7090cdc50d 100644 --- a/packages/tx/test/transactionRunner.spec.ts +++ b/packages/tx/test/transactionRunner.spec.ts @@ -1,9 +1,9 @@ -import { Common } from 
'@ethereumjs/common' +import { Common, Mainnet } from '@ethereumjs/common' import { bytesToHex, hexToBytes } from '@ethereumjs/util' import minimist from 'minimist' import { assert, describe, it } from 'vitest' -import { TransactionFactory } from '../src/index.js' +import { createTxFromSerializedData } from '../src/transactionFactory.js' import { getTests } from './testLoader.js' @@ -52,7 +52,7 @@ describe('TransactionTests', async () => { _filename: string, subDir: string, testName: string, - testData: OfficialTransactionTestData + testData: OfficialTransactionTestData, ) => { it(testName, () => { for (const forkName of forkNames) { @@ -65,12 +65,12 @@ describe('TransactionTests', async () => { try { const rawTx = hexToBytes(testData.txbytes as PrefixedHexString) const hardfork = forkNameMap[forkName] - const common = new Common({ chain: 1, hardfork }) + const common = new Common({ chain: Mainnet, hardfork }) const activateEIPs = EIPs[forkName] if (activateEIPs !== undefined) { common.setEIPs(activateEIPs) } - const tx = TransactionFactory.fromSerializedData(rawTx, { common }) + const tx = createTxFromSerializedData(rawTx, { common }) const sender = tx.getSenderAddress().toString() const hash = bytesToHex(tx.hash()) const txIsValid = tx.isValid() @@ -83,7 +83,7 @@ describe('TransactionTests', async () => { } else { assert.ok( hashAndSenderAreCorrect && txIsValid, - `Transaction should be valid on ${forkName}` + `Transaction should be valid on ${forkName}`, ) } } catch (e: any) { @@ -98,6 +98,6 @@ describe('TransactionTests', async () => { }, fileFilterRegex, undefined, - 'TransactionTests' + 'TransactionTests', ) }) diff --git a/packages/tx/test/typedTxsAndEIP2930.spec.ts b/packages/tx/test/typedTxsAndEIP2930.spec.ts index 63199482ce..c55d71764e 100644 --- a/packages/tx/test/typedTxsAndEIP2930.spec.ts +++ b/packages/tx/test/typedTxsAndEIP2930.spec.ts @@ -1,4 +1,4 @@ -import { Chain, Common, Hardfork, createCustomCommon } from '@ethereumjs/common' +import { Common, Goerli, Hardfork, Mainnet, createCustomCommon } from '@ethereumjs/common' import { Address, MAX_INTEGER, @@ -15,9 +15,15 @@ import { import { assert, describe, it } from 'vitest' import { - AccessListEIP2930Transaction, - FeeMarketEIP1559Transaction, + AccessList2930Transaction, + FeeMarket1559Tx, TransactionType, + createAccessList2930Tx, + createAccessList2930TxFromBytesArray, + createAccessList2930TxFromRLP, + createFeeMarket1559Tx, + createFeeMarket1559TxFromRLP, + paramsTx, } from '../src/index.js' import type { AccessList, AccessListBytesItem, JsonTx } from '../src/index.js' @@ -26,85 +32,89 @@ const pKey = hexToBytes('0x46464646464646464646464646464646464646464646464646464 const address = privateToAddress(pKey) const common = new Common({ - chain: Chain.Mainnet, + chain: Mainnet, hardfork: Hardfork.London, + params: paramsTx, }) const txTypes = [ { - class: AccessListEIP2930Transaction, - name: 'AccessListEIP2930Transaction', + class: AccessList2930Transaction, + name: 'AccessList2930Transaction', type: TransactionType.AccessListEIP2930, + create: { + txData: createAccessList2930Tx, + rlp: createAccessList2930TxFromRLP, + }, }, { - class: FeeMarketEIP1559Transaction, - name: 'FeeMarketEIP1559Transaction', + class: FeeMarket1559Tx, + name: 'FeeMarket1559Tx', type: TransactionType.FeeMarketEIP1559, + create: { + txData: createFeeMarket1559Tx, + rlp: createFeeMarket1559TxFromRLP, + }, }, ] const validAddress = hexToBytes(`0x${'01'.repeat(20)}`) const validSlot = hexToBytes(`0x${'01'.repeat(32)}`) -const chainId = 
BigInt(Chain.Mainnet) +const chainId = 1 -describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-2930 Compatibility', () => { +describe('[AccessList2930Transaction / FeeMarket1559Tx] -> EIP-2930 Compatibility', () => { it('Initialization / Getter -> fromTxData()', () => { for (const txType of txTypes) { - let tx = txType.class.fromTxData({}, { common }) + let tx = txType.create.txData({}, { common }) assert.ok(tx, `should initialize correctly (${txType.name})`) - tx = txType.class.fromTxData({ - chainId: Chain.Goerli, - }) - assert.ok( - tx.common.chainId() === BigInt(5), - 'should initialize Common with chain ID provided (supported chain ID)' + tx = txType.create.txData( + { + chainId: 5, + }, + { common: new Common({ chain: Goerli }) }, ) - - tx = txType.class.fromTxData({ - chainId: 99999, - }) assert.ok( - tx.common.chainId() === BigInt(99999), - 'should initialize Common with chain ID provided (unsupported chain ID)' + tx.common.chainId() === BigInt(5), + 'should initialize Common with chain ID provided (supported chain ID)', ) - const nonEIP2930Common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Istanbul }) + const nonEIP2930Common = new Common({ chain: Mainnet, hardfork: Hardfork.Istanbul }) assert.throws( () => { - txType.class.fromTxData({}, { common: nonEIP2930Common }) + txType.create.txData({}, { common: nonEIP2930Common }) }, undefined, undefined, - `should throw on a pre-Berlin Hardfork (EIP-2930 not activated) (${txType.name})` + `should throw on a pre-Berlin Hardfork (EIP-2930 not activated) (${txType.name})`, ) assert.throws( () => { - txType.class.fromTxData( + txType.create.txData( { - chainId: chainId + BigInt(1), + chainId: chainId + 1, }, - { common } + { common }, ) }, undefined, undefined, - `should reject transactions with wrong chain ID (${txType.name})` + `should reject transactions with wrong chain ID (${txType.name})`, ) assert.throws( () => { - txType.class.fromTxData( + txType.create.txData( { v: 2, }, - { common } + { common }, ) }, undefined, undefined, - `should reject transactions with invalid yParity (v) values (${txType.name})` + `should reject transactions with invalid yParity (v) values (${txType.name})`, ) } }) @@ -141,7 +151,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 ) { txData[value] = testCase assert.throws(() => { - AccessListEIP2930Transaction.fromTxData(txData) + createAccessList2930Tx(txData) }) } } @@ -151,33 +161,33 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 it('Initialization / Getter -> fromSerializedTx()', () => { for (const txType of txTypes) { try { - txType.class.fromSerializedTx(new Uint8Array([99]), {}) + txType.create.rlp(new Uint8Array([99]), {}) } catch (e: any) { assert.ok( e.message.includes('wrong tx type'), - `should throw on wrong tx type (${txType.name})` + `should throw on wrong tx type (${txType.name})`, ) } try { // Correct tx type + RLP-encoded 5 const serialized = concatBytes(new Uint8Array([txType.type]), new Uint8Array([5])) - txType.class.fromSerializedTx(serialized, {}) + txType.create.rlp(serialized, {}) } catch (e: any) { assert.ok( e.message.includes('must be array'), - `should throw when RLP payload not an array (${txType.name})` + `should throw when RLP payload not an array (${txType.name})`, ) } try { // Correct tx type + RLP-encoded empty list const serialized = concatBytes(new Uint8Array([txType.type]), hexToBytes('0xc0')) - txType.class.fromSerializedTx(serialized, {}) + 
txType.create.rlp(serialized, {}) } catch (e: any) { assert.ok( e.message.includes('values (for unsigned tx)'), - `should throw with invalid number of values (${txType.name})` + `should throw with invalid number of values (${txType.name})`, ) } } @@ -191,12 +201,12 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 storageKeys: [bytesToHex(validSlot)], }, ] - const txn = txType.class.fromTxData( + const txn = txType.create.txData( { accessList: access, - chainId: Chain.Mainnet, + chainId: 1, }, - { common } + { common }, ) // Check if everything is converted @@ -211,12 +221,12 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 // also verify that we can always get the json access list, even if we don't provide one. - const txnRaw = txType.class.fromTxData( + const txnRaw = txType.create.txData( { accessList: bytes, - chainId: Chain.Mainnet, + chainId: 1, }, - { common } + { common }, ) const JSONRaw = txnRaw.AccessListJSON @@ -236,11 +246,11 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 assert.throws( () => { - txType.class.fromTxData({ chainId, accessList }, { common }) + txType.create.txData({ chainId, accessList }, { common }) }, undefined, undefined, - txType.name + txType.name, ) accessList = [ @@ -254,89 +264,89 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 assert.throws( () => { - txType.class.fromTxData({ chainId, accessList }, { common }) + txType.create.txData({ chainId, accessList }, { common }) }, undefined, undefined, - txType.name + txType.name, ) accessList = [[]] // Address does not exist assert.throws( () => { - txType.class.fromTxData({ chainId, accessList }, { common }) + txType.create.txData({ chainId, accessList }, { common }) }, undefined, undefined, - txType.name + txType.name, ) accessList = [[validAddress]] // Slots does not exist assert.throws( () => { - txType.class.fromTxData({ chainId, accessList }, { common }) + txType.create.txData({ chainId, accessList }, { common }) }, undefined, undefined, - txType.name + txType.name, ) accessList = [[validAddress, validSlot]] // Slots is not an array assert.throws( () => { - txType.class.fromTxData({ chainId, accessList }, { common }) + txType.create.txData({ chainId, accessList }, { common }) }, undefined, undefined, - txType.name + txType.name, ) accessList = [[validAddress, [], []]] // 3 items where 2 are expected assert.throws( () => { - txType.class.fromTxData({ chainId, accessList }, { common }) + txType.create.txData({ chainId, accessList }, { common }) }, undefined, undefined, - txType.name + txType.name, ) } }) it('sign()', () => { for (const txType of txTypes) { - let tx = txType.class.fromTxData( + let tx = txType.create.txData( { data: hexToBytes('0x010200'), to: validAddress, accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) let signed = tx.sign(pKey) const signedAddress = signed.getSenderAddress() assert.ok( equalsBytes(signedAddress.bytes, address), - `should sign a transaction (${txType.name})` + `should sign a transaction (${txType.name})`, ) signed.verifySignature() // If this throws, test will not end. 
- tx = txType.class.fromTxData({}, { common }) + tx = txType.create.txData({}, { common }) signed = tx.sign(pKey) assert.deepEqual( tx.accessList, [], - `should create and sign transactions without passing access list value (${txType.name})` + `should create and sign transactions without passing access list value (${txType.name})`, ) assert.deepEqual(signed.accessList, []) - tx = txType.class.fromTxData({}, { common }) + tx = txType.create.txData({}, { common }) assert.throws( () => { @@ -344,7 +354,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }, undefined, undefined, - `should throw calling hash with unsigned tx (${txType.name})` + `should throw calling hash with unsigned tx (${txType.name})`, ) assert.throws(() => { @@ -354,20 +364,20 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 assert.throws( () => { const high = SECP256K1_ORDER_DIV_2 + BigInt(1) - const tx = txType.class.fromTxData({ s: high, r: 1, v: 1 }, { common }) + const tx = txType.create.txData({ s: high, r: 1, v: 1 }, { common }) const signed = tx.sign(pKey) signed.getSenderPublicKey() }, undefined, undefined, - `should throw with invalid s value (${txType.name})` + `should throw with invalid s value (${txType.name})`, ) } }) it('addSignature() -> correctly adds correct signature values', () => { const privateKey = pKey - const tx = AccessListEIP2930Transaction.fromTxData({}) + const tx = createAccessList2930Tx({}) const signedTx = tx.sign(privateKey) const addSignatureTx = tx.addSignature(signedTx.v!, signedTx.r!, signedTx.s!) @@ -376,7 +386,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 it('addSignature() -> correctly converts raw ecrecover values', () => { const privKey = pKey - const tx = AccessListEIP2930Transaction.fromTxData({}) + const tx = createAccessList2930Tx({}) const msgHash = tx.getHashedMessageToSign() const { v, r, s } = ecsign(msgHash, privKey) @@ -389,7 +399,7 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 it('addSignature() -> throws when adding the wrong v value', () => { const privKey = pKey - const tx = AccessListEIP2930Transaction.fromTxData({}) + const tx = createAccessList2930Tx({}) const msgHash = tx.getHashedMessageToSign() const { v, r, s } = ecsign(msgHash, privKey) @@ -400,35 +410,35 @@ describe('[AccessListEIP2930Transaction / FeeMarketEIP1559Transaction] -> EIP-29 }) }) - it('getDataFee()', () => { + it('getDataGas()', () => { for (const txType of txTypes) { - let tx = txType.class.fromTxData({}, { common }) - assert.equal(tx.getDataFee(), BigInt(0), 'Should return data fee when frozen') + let tx = txType.create.txData({}, { common }) + assert.equal(tx.getDataGas(), BigInt(0), 'Should return data fee when frozen') - tx = txType.class.fromTxData({}, { common, freeze: false }) - assert.equal(tx.getDataFee(), BigInt(0), 'Should return data fee when not frozen') + tx = txType.create.txData({}, { common, freeze: false }) + assert.equal(tx.getDataGas(), BigInt(0), 'Should return data fee when not frozen') - const mutableCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.London }) - tx = txType.class.fromTxData({}, { common: mutableCommon }) + const mutableCommon = new Common({ chain: Mainnet, hardfork: Hardfork.London }) + tx = txType.create.txData({}, { common: mutableCommon }) tx.common.setHardfork(Hardfork.Istanbul) - assert.equal(tx.getDataFee(), BigInt(0), 'Should invalidate cached value on hardfork change') + 
assert.equal(tx.getDataGas(), BigInt(0), 'Should invalidate cached value on hardfork change') } }) }) -describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { +describe('[AccessList2930Transaction] -> Class Specific Tests', () => { it(`Initialization`, () => { - const tx = AccessListEIP2930Transaction.fromTxData({}, { common }) + const tx = createAccessList2930Tx({}, { common }) assert.ok( - AccessListEIP2930Transaction.fromTxData(tx, { common }), - 'should initialize correctly from its own data' + createAccessList2930Tx(tx, { common }), + 'should initialize correctly from its own data', ) const validAddress = hexToBytes(`0x${'01'.repeat(20)}`) const validSlot = hexToBytes(`0x${'01'.repeat(32)}`) const chainId = BigInt(1) try { - AccessListEIP2930Transaction.fromTxData( + createAccessList2930Tx( { data: hexToBytes('0x010200'), to: validAddress, @@ -437,12 +447,12 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { gasLimit: MAX_UINT64, gasPrice: MAX_INTEGER, }, - { common } + { common }, ) } catch (err: any) { assert.ok( err.message.includes('gasLimit * gasPrice cannot exceed MAX_INTEGER'), - 'throws when gasLimit * gasPrice exceeds MAX_INTEGER' + 'throws when gasLimit * gasPrice exceeds MAX_INTEGER', ) } }) @@ -453,72 +463,70 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { const address = new Uint8Array(0) const storageKeys = [new Uint8Array(0), new Uint8Array(0)] const aclBytes: AccessListBytesItem = [address, storageKeys] - AccessListEIP2930Transaction.fromValuesArray( + createAccessList2930TxFromBytesArray( [bytes, bytes, bytes, bytes, bytes, bytes, bytes, [aclBytes], bytes], - {} + {}, ) }, undefined, undefined, - 'should throw with values array with length different than 8 or 11' + 'should throw with values array with length different than 8 or 11', ) it(`should return right upfront cost`, () => { - let tx = AccessListEIP2930Transaction.fromTxData( + let tx = createAccessList2930Tx( { data: hexToBytes('0x010200'), to: validAddress, accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) // Cost should be: // Base fee + 2*TxDataNonZero + TxDataZero + AccessListAddressCost + AccessListSlotCost - const txDataZero: number = Number(common.param('gasPrices', 'txDataZero')) - const txDataNonZero: number = Number(common.param('gasPrices', 'txDataNonZero')) - const accessListStorageKeyCost: number = Number( - common.param('gasPrices', 'accessListStorageKeyCost') - ) - const accessListAddressCost: number = Number(common.param('gasPrices', 'accessListAddressCost')) - const baseFee: number = Number(common.param('gasPrices', 'tx')) - const creationFee: number = Number(common.param('gasPrices', 'txCreation')) + const txDataZero: number = Number(common.param('txDataZeroGas')) + const txDataNonZero: number = Number(common.param('txDataNonZeroGas')) + const accessListStorageKeyCost: number = Number(common.param('accessListStorageKeyGas')) + const accessListAddressCost: number = Number(common.param('accessListAddressGas')) + const baseFee: number = Number(common.param('txGas')) + const creationFee: number = Number(common.param('txCreationGas')) assert.ok( - tx.getBaseFee() === + tx.getIntrinsicGas() === BigInt( txDataNonZero * 2 + txDataZero + baseFee + accessListAddressCost + - accessListStorageKeyCost - ) + accessListStorageKeyCost, + ), ) // In this Tx, `to` is `undefined`, so we should charge homestead creation gas. 
- tx = AccessListEIP2930Transaction.fromTxData( + tx = createAccessList2930Tx( { data: hexToBytes('0x010200'), accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) assert.ok( - tx.getBaseFee() === + tx.getIntrinsicGas() === BigInt( txDataNonZero * 2 + txDataZero + creationFee + baseFee + accessListAddressCost + - accessListStorageKeyCost - ) + accessListStorageKeyCost, + ), ) // Explicitly check that even if we have duplicates in our list, we still charge for those - tx = AccessListEIP2930Transaction.fromTxData( + tx = createAccessList2930Tx( { to: validAddress, accessList: [ @@ -527,16 +535,17 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { ], chainId, }, - { common } + { common }, ) assert.ok( - tx.getBaseFee() === BigInt(baseFee + accessListAddressCost * 2 + accessListStorageKeyCost * 3) + tx.getIntrinsicGas() === + BigInt(baseFee + accessListAddressCost * 2 + accessListStorageKeyCost * 3), ) }) it('getEffectivePriorityFee() -> should return correct values', () => { - const tx = AccessListEIP2930Transaction.fromTxData({ + const tx = createAccessList2930Tx({ gasPrice: BigInt(100), }) @@ -547,39 +556,39 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { }) it('getUpfrontCost() -> should return upfront cost', () => { - const tx = AccessListEIP2930Transaction.fromTxData( + const tx = createAccessList2930Tx( { gasPrice: 1000, gasLimit: 10000000, value: 42, }, - { common } + { common }, ) assert.equal(tx.getUpfrontCost(), BigInt(10000000042)) }) it('unsigned tx -> getHashedMessageToSign()/getMessageToSign()', () => { - const unsignedTx = AccessListEIP2930Transaction.fromTxData( + const unsignedTx = createAccessList2930Tx( { data: hexToBytes('0x010200'), to: validAddress, accessList: [[validAddress, [validSlot]]], chainId, }, - { common } + { common }, ) const expectedHash = hexToBytes( - '0x78528e2724aa359c58c13e43a7c467eb721ce8d410c2a12ee62943a3aaefb60b' + '0x78528e2724aa359c58c13e43a7c467eb721ce8d410c2a12ee62943a3aaefb60b', ) assert.deepEqual(unsignedTx.getHashedMessageToSign(), expectedHash), 'correct hashed version' const expectedSerialization = hexToBytes( - '0x01f858018080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101' + '0x01f858018080809401010101010101010101010101010101010101018083010200f838f7940101010101010101010101010101010101010101e1a00101010101010101010101010101010101010101010101010101010101010101', ) assert.deepEqual( unsignedTx.getMessageToSign(), expectedSerialization, - 'correct serialized unhashed version' + 'correct serialized unhashed version', ) }) @@ -602,40 +611,39 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { const customChainParams = { name: 'custom', - chainId: txData.chainId, + chainId: txData.chainId.toString(), eips: [2718, 2929, 2930], } - const usedCommon = createCustomCommon(customChainParams, { - baseChain: Chain.Mainnet, + const usedCommon = createCustomCommon(customChainParams, Mainnet, { hardfork: Hardfork.Berlin, }) usedCommon.setEIPs([2718, 2929, 2930]) const expectedUnsignedRaw = hexToBytes( - '0x01f86587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a00000000000000000000000000000000000000000000000000000000000000000808080' + 
'0x01f86587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a00000000000000000000000000000000000000000000000000000000000000000808080', ) const pkey = hexToBytes('0xfad9c8855b740a0b7ed4c221dbad0f33a83a49cad6b3fe8d5817ac83d38b6a19') const expectedSigned = hexToBytes( - '0x01f8a587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a0000000000000000000000000000000000000000000000000000000000000000080a0294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938da00be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d' + '0x01f8a587796f6c6f76337880843b9aca008262d494df0a88b2b68c673713a8ec826003676f272e35730180f838f7940000000000000000000000000000000000001337e1a0000000000000000000000000000000000000000000000000000000000000000080a0294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938da00be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d', ) const expectedHash = hexToBytes( - '0xbbd570a3c6acc9bb7da0d5c0322fe4ea2a300db80226f7df4fef39b2d6649eec' + '0xbbd570a3c6acc9bb7da0d5c0322fe4ea2a300db80226f7df4fef39b2d6649eec', ) const v = BigInt(0) const r = bytesToBigInt( - hexToBytes('0x294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938d') + hexToBytes('0x294ac94077b35057971e6b4b06dfdf55a6fbed819133a6c1d31e187f1bca938d'), ) const s = bytesToBigInt( - hexToBytes('0x0be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d') + hexToBytes('0x0be950468ba1c25a5cb50e9f6d8aa13c8cd21f24ba909402775b262ac76d374d'), ) - const unsignedTx = AccessListEIP2930Transaction.fromTxData(txData, { common: usedCommon }) + const unsignedTx = createAccessList2930Tx(txData, { common: usedCommon }) const serializedMessageRaw = unsignedTx.serialize() assert.ok( equalsBytes(expectedUnsignedRaw, serializedMessageRaw), - 'serialized unsigned message correct' + 'serialized unsigned message correct', ) const signed = unsignedTx.sign(pkey) @@ -671,15 +679,15 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { }) it('freeze property propagates from unsigned tx to signed tx', () => { - const tx = AccessListEIP2930Transaction.fromTxData({}, { freeze: false }) + const tx = createAccessList2930Tx({}, { freeze: false }) assert.notOk(Object.isFrozen(tx), 'tx object is not frozen') const signedTxn = tx.sign(pKey) assert.notOk(Object.isFrozen(signedTxn), 'tx object is not frozen') }) it('common propagates from the common of tx, not the common in TxOptions', () => { - const txn = AccessListEIP2930Transaction.fromTxData({}, { common, freeze: false }) - const newCommon = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Paris }) + const txn = createAccessList2930Tx({}, { common, freeze: false }) + const newCommon = new Common({ chain: Mainnet, hardfork: Hardfork.Paris }) assert.notDeepEqual(newCommon, common, 'new common is different than original common') Object.defineProperty(txn, 'common', { get() { @@ -689,7 +697,7 @@ describe('[AccessListEIP2930Transaction] -> Class Specific Tests', () => { const signedTxn = txn.sign(pKey) assert.ok( signedTxn.common.hardfork() === Hardfork.Paris, - 'signed tx common is taken from tx.common' + 'signed tx common is taken from tx.common', ) }) }) diff --git a/packages/tx/test/types.ts b/packages/tx/test/types.ts index c37d36a18b..d0974b38a7 100644 --- a/packages/tx/test/types.ts +++ b/packages/tx/test/types.ts @@ -26,8 +26,10 @@ export interface TxData { s: string } -// The type of 
each entry from ./ttTransactionTestEip155VitaliksTests.json +// The type of each entry from ./ttTransactionTestEip155VitaliksTests.json // cspell:disable-line +// cspell:disable export interface VitaliksTestsDataEntry { + // cspell:enable blocknumber: string hash: string rlp: string @@ -52,8 +54,8 @@ export type ForksData = { export type OfficialTransactionTestData = { _info: { comment: string - filledwith: string - lllcversion: string + filledwith: string // cspell:disable-line + lllcversion: string // cspell:disable-line source: string sourceHash: string } diff --git a/packages/tx/tsconfig.lint.json b/packages/tx/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/tx/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/util/.eslintrc.cjs b/packages/util/.eslintrc.cjs index 80869b21ea..ed6ce7f539 100644 --- a/packages/util/.eslintrc.cjs +++ b/packages/util/.eslintrc.cjs @@ -1 +1,15 @@ -module.exports = require('../../config/eslint.cjs') +module.exports = { + extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, + overrides: [ + { + files: ['examples/**/*'], + rules: { + 'no-console': 'off', + '@typescript-eslint/no-unused-vars': 'off', + }, + }, + ], + } \ No newline at end of file diff --git a/packages/util/CHANGELOG.md b/packages/util/CHANGELOG.md index 2e4fb43ece..98fbb516db 100644 --- a/packages/util/CHANGELOG.md +++ b/packages/util/CHANGELOG.md @@ -6,7 +6,52 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 9.0.3 - 2024-03-05 +## 9.1.0 - 2024-08-15 + +### Support for Partial Accounts + +For Verkle or other contexts it can be useful to create partial accounts not containing all the account parameters. This is now supported starting with this release, see PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269): + +```ts +import { Account } from '@ethereumjs/util' + +const account = Account.fromPartialAccountData({ + nonce: '0x02', + balance: '0x0384', +}) +console.log(`Partial account with nonce=${account.nonce} and balance=${account.balance} created`) +``` + +### New `requests` Module + +This release introduces a new `requests` module (see PRs [#3372](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3372), [#3393](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3393), [#3398](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3398) and [#3477](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3477)) with various type and an abstract base class for [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) general purpose execution layer requests to the CL (Prague hardfork) as well as concrete implementations for the currently supported request types: + +- [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110): `DepositRequest` (Prague Hardfork) +- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002): `WithdrawalRequest` (Prague Hardfork) +- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251): `ConsolidationRequest` (Prague Hardfork) + +These request types are mainly used within the [@ethereumjs/block](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/block) library where applied usage instructions are provided in the README. 
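+As a rough illustration (a minimal sketch only: the `DepositRequest.fromRequestData()` helper and its field names are assumptions based on the EIP-6110 request layout, not verbatim from this release, and should be checked against the module's docs), creating and serializing one of the new request types could look like this:
+
+```ts
+import { DepositRequest, bytesToBigInt, randomBytes } from '@ethereumjs/util'
+
+// Assumed field layout for an EIP-6110 deposit request:
+// 48-byte pubkey, 32-byte withdrawal credentials, 96-byte signature,
+// plus bigint amount and index values
+const request = DepositRequest.fromRequestData({
+  pubkey: randomBytes(48),
+  withdrawalCredentials: randomBytes(32),
+  amount: bytesToBigInt(randomBytes(8)),
+  signature: randomBytes(96),
+  index: bytesToBigInt(randomBytes(8)),
+})
+
+// serialize() is expected to return the EIP-7685 encoding (request type byte + encoded payload)
+console.log(`Serialized deposit request: ${request.serialize().length} bytes`)
+```
+
+If the API follows the same shape, `WithdrawalRequest` and `ConsolidationRequest` would be constructed analogously with their respective EIP-7002/EIP-7251 fields.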
+ +### Verkle Updates + +- Update `kzg-wasm` to `0.4.0`, PR [#3358](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3358) +- Shift Verkle to `osaka` hardfork, PR [#3371](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3371) +- New `verkle` module with utility methods and interfaces, PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) +- Rename verkle utils and refactor, PR [#3468](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3468) + +### Other Features + +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Other Changes + +- Adjust `Account.isContract()` (in Verkle context work), PR [#3343](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3343) +- Rename deposit receipt to deposit request, PR [#3408](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3408) +- Adjust `Account.isEmpty()` to also work for partial accounts, PR [#3405](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3405) +- Enhances typing of CL requests, PR [#3398](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3398) +- Rename withdrawal request's `validatorPublicKey` to `validatorPubkey`, PR [#3474](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3474) + +## 9.0.3 - 2024-03-18 - Allow optional `trustedSetupPath` for the `initKZG()` method, PR [#3296](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3296) @@ -32,7 +77,7 @@ This release contains various fixes and spec updates related to the Dencun (Dene ### Other Changes -- Performance: New reoccurringly used BigInt constants (`BIGINT_0`, `BIGINT_32`, `BIGINT_2EXP96`,...) in the `bytes` module for reusage along performance optimizations, PR [#3050](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3050) +- Performance: New BigInt constants (`BIGINT_0`, `BIGINT_32`, `BIGINT_2EXP96`,...) in the `bytes` module for re-usage along performance optimizations, PR [#3050](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3050) - Performance: `bytesToBigInt()` performance optimization for 1-byte bytes, PR [#3054](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3054) - Fix a bug in `fromUtf8()`, PR [#3112](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3112) @@ -56,7 +101,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. 
Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -122,7 +167,7 @@ new Address() Address.fromPublicKey(pubKey: Uint8Array): Address Address.fromPrivateKey(privateKey: Uint8Array): Address Address.generate2(from: Address, salt: Uint8Array, initCode: Uint8Array): Address -Adress.toBytes // old: Address.toBuffer() +Address.toBytes // old: Address.toBuffer() // bytes // All Buffer related functionality removed, do "Buffer" search @@ -179,7 +224,7 @@ We have converted existing Buffer conversion methods to Uint8Array conversion me - New `GWEI_TO_WEI` constant in a newly created `units` module, PR [#2483](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2483) - Change withdrawal amount representation from Wei to Gwei (see EIP-4895 PR [#6325](https://github.com/ethereum/EIPs/pull/6325)) in `withdrawal` module `Withdrawal` class, PR [#2483](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2483) ) -- Added `@chainsafe/ssz` dependency, new prepartory `ssz` container module, PR [#2488](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2488) +- Added `@chainsafe/ssz` dependency, new preparatory `ssz` container module, PR [#2488](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2488) - Use literal value instead of formula for `MAX_INTEGER_BIGINT`, PR [#2536](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2536) ## 8.0.3 - 2022-12-09 @@ -228,7 +273,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -396,7 +441,7 @@ See: PR [#1517](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1517) We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. 
+Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. #### Included Source Files @@ -423,7 +468,7 @@ Function signatures for methods in `address` and `signature` are therefore expan All function signatures are still taking in a `number` input for backwards-compatibility reasons. If you use one of the following functions to implement generic use cases in your library where the chain ID is not yet known it is recommended to updated to one of the other input types (with plain `Buffer` likely be the most future-proof). Note that on some functions this changes the return value as well. -- `account`: `toChecksumAddresss(hexAddress: string, eip1191ChainId?: number): string` +- `account`: `toChecksumAddress(hexAddress: string, eip1191ChainId?: number): string` - -> `toChecksumAddress = function(hexAddress: string, eip1191ChainId?: BNLike): string` - `account`: `isValidChecksumAddress(hexAddress: string, eip1191ChainId?: number)` - -> `isValidChecksumAddress(hexAddress: string, eip1191ChainId?: BNLike)` @@ -443,7 +488,7 @@ Along there is a new `toType()` helper function which can be used to easily conv ## [7.0.8] - 2021-02-01 -- New `Address.equals(address: Address)` function for easier address equality comparions, PR [#285](https://github.com/ethereumjs/ethereumjs-util/pull/285) +- New `Address.equals(address: Address)` function for easier address equality comparisons, PR [#285](https://github.com/ethereumjs/ethereumjs-util/pull/285) - Fixed a bug in `fromRpcSig()` in the `signature` module not working correctly for chain IDs greater than 110, PR [#287](https://github.com/ethereumjs/ethereumjs-util/pull/287) [7.0.8]: https://github.com/ethereumjs/ethereumjs-util/compare/v7.0.7...v7.0.8 @@ -477,7 +522,7 @@ const account = new Account( new BN(0), // nonce, default: 0 new BN(10).pow(new BN(18)), // balance, default: 0 undefined, // stateRoot, default: KECCAK256_RLP (hash of RLP of null) - undefined // codeHash, default: KECCAK256_NULL (hash of null) + undefined, // codeHash, default: KECCAK256_NULL (hash of null) ) ``` @@ -515,7 +560,7 @@ import { Address } from 'ethereumjs-util' const pubKey = Buffer.from( '3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', - 'hex' + 'hex', ) const address = Address.fromPublicKey(pubKey) ``` @@ -610,14 +655,14 @@ Changes to the API have been discussed in Issue [#172](https://github.com/ethereumjs/ethereumjs-util/issues/172) and are guided by the principles of: -- Make the API more typestrict +- Make the API more type-strict - Be less ambiguous regarding accepted values - Avoid implicit type conversions - Be more explicit on wrong input (just: throw) While the implemented changes come with some additional need for manual type conversions depending on the usage context, they should finally lead to -cleaner usage patterns on the cosuming side and a more predictable, robust and +cleaner usage patterns on the consuming side and a more predictable, robust and less error-prone control flow. 
Some note: for methods where `Buffer` usage is now enforced you can use the @@ -655,7 +700,7 @@ and `Buffer` inputs are now enforced: #### Bytes Module -##### Typestrict Methods and Type-Explicit Method Split-Up +##### Type-strict Methods and Type-Explicit Method Split-Up PR: [#244](https://github.com/ethereumjs/ethereumjs-util/pull/244) @@ -669,7 +714,7 @@ PR: [#244](https://github.com/ethereumjs/ethereumjs-util/pull/244) #### Hash Module -##### Typestrict Methods and Type-Explicit Method Split-Up +##### Type-strict Methods and Type-Explicit Method Split-Up PR [#247](https://github.com/ethereumjs/ethereumjs-util/pull/247) @@ -704,10 +749,10 @@ The following methods are now `Buffer`-only: PR [#228](https://github.com/ethereumjs/ethereumjs-util/pull/228) - Updated `BN.js` library re-export from `4.x` to `5.x`, PR [#249], https://github.com/ethereumjs/ethereumjs-util/pull/249 -- Removed `secp2561` re-export (use methods provided or import directly), +- Removed `secp256k1` re-export (use methods provided or import directly), PR [#228](https://github.com/ethereumjs/ethereumjs-util/pull/228) -### Cryto Library Updates: Keccak, secp2561 +### Crypto Library Updates: Keccak, secp256k1 `Keccak` dependency has been updated from `2.1.0` to `3.0.0`. This version comes with prebuilds for Linux, MacOS and Windows so most users won't need @@ -716,7 +761,7 @@ to have `node-gyp` run on installation. The version update also brings in feature compatibility with newer Node.js versions. -The `secp2561` ECDSA dependency has been updated from `3.0.1` to `4.0.1`. +The `secp256k1` ECDSA dependency has been updated from `3.0.1` to `4.0.1`. ### Developer Improvements @@ -877,7 +922,7 @@ see PR [#170](https://github.com/ethereumjs/ethereumjs-util/pull/170). ## [5.1.1] - 2017-02-10 - Use hex utils from `ethjs-util` -- Move secp vars into functions +- Move secp256k1 vars into functions - Dependency updates [5.1.1]: https://github.com/ethereumjs/ethereumjs-util/compare/v5.1.0...v5.1.1 diff --git a/packages/util/README.md b/packages/util/README.md index c493df976b..63ff8bdf8b 100644 --- a/packages/util/README.md +++ b/packages/util/README.md @@ -45,6 +45,20 @@ const account = Account.fromAccountData({ console.log(`Account with nonce=${account.nonce} and balance=${account.balance} created`) ``` +For Verkle or other contexts it can be useful to create partial accounts not containing all the account parameters. This is supported starting with v9.1.0: + +```ts +// ./examples/accountPartial.ts + +import { Account } from '@ethereumjs/util' + +const account = Account.fromPartialAccountData({ + nonce: '0x02', + balance: '0x0384', +}) +console.log(`Partial account with nonce=${account.nonce} and balance=${account.balance} created`) +``` + ### Module: [address](src/address.ts) Class representing an Ethereum `Address` with instantiation helpers and validation methods. @@ -127,6 +141,16 @@ KZG interface (used for 4844 blob txs), see [@ethereumjs/tx](https://github.com/ Simple map DB implementation using the `DB` interface (see above). 
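+As a rough usage sketch (assuming the exported `MapDB` class is generic and exposes async `put`/`get`/`del` methods implementing the `DB` interface; verify against the API docs):
+
+```ts
+import { MapDB } from '@ethereumjs/util'
+
+const main = async () => {
+  // In-memory key/value store backed by a JavaScript Map
+  const db = new MapDB<string, string>()
+  await db.put('greeting', 'hello')
+  console.log(await db.get('greeting')) // 'hello'
+  await db.del('greeting')
+}
+
+void main()
+```
+
+Because `MapDB` fulfills the generic `DB` interface, it can be handed to any consumer that expects a `DB` backend, e.g. for tests that should not touch a persistent store.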
+### Module: [requests](src/requests.ts) + +Module with various type and an abstract base class for [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) general purpose execution layer requests to the CL (Prague hardfork) as well as concrete implementations for the currently supported request types: + +- [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110): `DepositRequest` (Prague Hardfork) +- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002): `WithdrawalRequest` (Prague Hardfork) +- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251): `ConsolidationRequest` (Prague Hardfork) + +These request types are mainly used within the [@ethereumjs/block](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/block) library where applied usage instructions are provided in the README. + ### Module: [signature](src/signature.ts) Functionality for signing, signature validation, conversion, recovery. diff --git a/packages/util/examples/account.ts b/packages/util/examples/account.ts index 088e793049..b0d1f25cbb 100644 --- a/packages/util/examples/account.ts +++ b/packages/util/examples/account.ts @@ -1,6 +1,6 @@ -import { Account } from '@ethereumjs/util' +import { createAccount } from '@ethereumjs/util' -const account = Account.fromAccountData({ +const account = createAccount({ nonce: '0x02', balance: '0x0384', storageRoot: '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', diff --git a/packages/util/examples/accountPartial.ts b/packages/util/examples/accountPartial.ts new file mode 100644 index 0000000000..9693f707bb --- /dev/null +++ b/packages/util/examples/accountPartial.ts @@ -0,0 +1,7 @@ +import { createPartialAccount } from '@ethereumjs/util' + +const account = createPartialAccount({ + nonce: '0x02', + balance: '0x0384', +}) +console.log(`Partial account with nonce=${account.nonce} and balance=${account.balance} created`) diff --git a/packages/util/examples/address.ts b/packages/util/examples/address.ts index 86362e4d44..853004f109 100644 --- a/packages/util/examples/address.ts +++ b/packages/util/examples/address.ts @@ -1,4 +1,4 @@ -import { Address } from '@ethereumjs/util' +import { createAddressFromString } from '@ethereumjs/util' -const address = Address.fromString('0x2f015c60e0be116b1f0cd534704db9c92118fb6a') +const address = createAddressFromString('0x2f015c60e0be116b1f0cd534704db9c92118fb6a') console.log(`Ethereum address ${address.toString()} created`) diff --git a/packages/util/examples/browser.html b/packages/util/examples/browser.html index 6d8fa69162..1f7436a9db 100644 --- a/packages/util/examples/browser.html +++ b/packages/util/examples/browser.html @@ -4,12 +4,12 @@ EthereumJS Browser Examples @@ -38,4 +38,4 @@

Interactive CLI

- \ No newline at end of file + diff --git a/packages/util/examples/signature.ts b/packages/util/examples/signature.ts index 2ff0a1a4c2..2c3faf77f9 100644 --- a/packages/util/examples/signature.ts +++ b/packages/util/examples/signature.ts @@ -2,11 +2,11 @@ import { bytesToHex, ecrecover, hexToBytes } from '@ethereumjs/util' const chainId = BigInt(3) // Ropsten -const echash = hexToBytes('0x82ff40c0a986c6a5cfad4ddf4c3aa6996f1a7837f9c398e17e5de5cbd5a12b28') +const ecHash = hexToBytes('0x82ff40c0a986c6a5cfad4ddf4c3aa6996f1a7837f9c398e17e5de5cbd5a12b28') const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') const v = BigInt(41) -const pubkey = ecrecover(echash, v, r, s, chainId) +const pubkey = ecrecover(ecHash, v, r, s, chainId) console.log(`Recovered public key ${bytesToHex(pubkey)} from valid signature values`) diff --git a/packages/util/package.json b/packages/util/package.json index 785eb9a4d7..eac3280c82 100644 --- a/packages/util/package.json +++ b/packages/util/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/util", - "version": "9.0.3", + "version": "9.1.0", "description": "A collection of utility functions for Ethereum", "keywords": [ "ethereum", diff --git a/packages/util/src/account.ts b/packages/util/src/account.ts index 0397805c7e..081288cba3 100644 --- a/packages/util/src/account.ts +++ b/packages/util/src/account.ts @@ -124,148 +124,6 @@ export class Account { this._codeSize = _codeSize } - static fromAccountData(accountData: AccountData) { - const { nonce, balance, storageRoot, codeHash } = accountData - if (nonce === null || balance === null || storageRoot === null || codeHash === null) { - throw Error(`Partial fields not supported in fromAccountData`) - } - - return new Account( - nonce !== undefined ? bytesToBigInt(toBytes(nonce)) : undefined, - balance !== undefined ? bytesToBigInt(toBytes(balance)) : undefined, - storageRoot !== undefined ? toBytes(storageRoot) : undefined, - codeHash !== undefined ? toBytes(codeHash) : undefined - ) - } - - static fromPartialAccountData(partialAccountData: PartialAccountData) { - const { nonce, balance, storageRoot, codeHash, codeSize, version } = partialAccountData - - if ( - nonce === null && - balance === null && - storageRoot === null && - codeHash === null && - codeSize === null && - version === null - ) { - throw Error(`All partial fields null`) - } - - return new Account( - nonce !== undefined && nonce !== null ? bytesToBigInt(toBytes(nonce)) : nonce, - balance !== undefined && balance !== null ? bytesToBigInt(toBytes(balance)) : balance, - storageRoot !== undefined && storageRoot !== null ? toBytes(storageRoot) : storageRoot, - codeHash !== undefined && codeHash !== null ? toBytes(codeHash) : codeHash, - codeSize !== undefined && codeSize !== null ? bytesToInt(toBytes(codeSize)) : codeSize, - version !== undefined && version !== null ? bytesToInt(toBytes(version)) : version - ) - } - - public static fromRlpSerializedAccount(serialized: Uint8Array) { - const values = RLP.decode(serialized) as Uint8Array[] - - if (!Array.isArray(values)) { - throw new Error('Invalid serialized account input. Must be array') - } - - return this.fromValuesArray(values) - } - - public static fromRlpSerializedPartialAccount(serialized: Uint8Array) { - const values = RLP.decode(serialized) as Uint8Array[][] - - if (!Array.isArray(values)) { - throw new Error('Invalid serialized account input. 
Must be array') - } - - let nonce = null - if (!Array.isArray(values[0])) { - throw new Error('Invalid partial nonce encoding. Must be array') - } else { - const isNotNullIndicator = bytesToInt(values[0][0]) - if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { - throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for nonce`) - } - if (isNotNullIndicator === 1) { - nonce = bytesToBigInt(values[0][1]) - } - } - - let balance = null - if (!Array.isArray(values[1])) { - throw new Error('Invalid partial balance encoding. Must be array') - } else { - const isNotNullIndicator = bytesToInt(values[1][0]) - if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { - throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for balance`) - } - if (isNotNullIndicator === 1) { - balance = bytesToBigInt(values[1][1]) - } - } - - let storageRoot = null - if (!Array.isArray(values[2])) { - throw new Error('Invalid partial storageRoot encoding. Must be array') - } else { - const isNotNullIndicator = bytesToInt(values[2][0]) - if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { - throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for storageRoot`) - } - if (isNotNullIndicator === 1) { - storageRoot = values[2][1] - } - } - - let codeHash = null - if (!Array.isArray(values[3])) { - throw new Error('Invalid partial codeHash encoding. Must be array') - } else { - const isNotNullIndicator = bytesToInt(values[3][0]) - if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { - throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for codeHash`) - } - if (isNotNullIndicator === 1) { - codeHash = values[3][1] - } - } - - let codeSize = null - if (!Array.isArray(values[4])) { - throw new Error('Invalid partial codeSize encoding. Must be array') - } else { - const isNotNullIndicator = bytesToInt(values[4][0]) - if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { - throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for codeSize`) - } - if (isNotNullIndicator === 1) { - codeSize = bytesToInt(values[4][1]) - } - } - - let version = null - if (!Array.isArray(values[5])) { - throw new Error('Invalid partial version encoding. Must be array') - } else { - const isNotNullIndicator = bytesToInt(values[5][0]) - if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { - throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for version`) - } - if (isNotNullIndicator === 1) { - version = bytesToInt(values[5][1]) - } - } - - return this.fromPartialAccountData({ balance, nonce, storageRoot, codeHash, codeSize, version }) - } - - public static fromValuesArray(values: Uint8Array[]) { - const [nonce, balance, storageRoot, codeHash] = values - - return new Account(bytesToBigInt(nonce), bytesToBigInt(balance), storageRoot, codeHash) - } - /** * This constructor assigns and validates the values. * Use the static factory methods to assist in creating an Account from varying data types. 
@@ -277,7 +135,7 @@ export class Account { storageRoot: Uint8Array | null = KECCAK256_RLP, codeHash: Uint8Array | null = KECCAK256_NULL, codeSize: number | null = null, - version: number | null = 0 + version: number | null = 0, ) { this._nonce = nonce this._balance = balance @@ -410,6 +268,150 @@ export class Account { } } +// Account constructors + +export function createAccount(accountData: AccountData) { + const { nonce, balance, storageRoot, codeHash } = accountData + if (nonce === null || balance === null || storageRoot === null || codeHash === null) { + throw Error(`Partial fields not supported in fromAccountData`) + } + + return new Account( + nonce !== undefined ? bytesToBigInt(toBytes(nonce)) : undefined, + balance !== undefined ? bytesToBigInt(toBytes(balance)) : undefined, + storageRoot !== undefined ? toBytes(storageRoot) : undefined, + codeHash !== undefined ? toBytes(codeHash) : undefined, + ) +} + +export function createAccountFromBytesArray(values: Uint8Array[]) { + const [nonce, balance, storageRoot, codeHash] = values + + return new Account(bytesToBigInt(nonce), bytesToBigInt(balance), storageRoot, codeHash) +} + +export function createPartialAccount(partialAccountData: PartialAccountData) { + const { nonce, balance, storageRoot, codeHash, codeSize, version } = partialAccountData + + if ( + nonce === null && + balance === null && + storageRoot === null && + codeHash === null && + codeSize === null && + version === null + ) { + throw Error(`All partial fields null`) + } + + return new Account( + nonce !== undefined && nonce !== null ? bytesToBigInt(toBytes(nonce)) : nonce, + balance !== undefined && balance !== null ? bytesToBigInt(toBytes(balance)) : balance, + storageRoot !== undefined && storageRoot !== null ? toBytes(storageRoot) : storageRoot, + codeHash !== undefined && codeHash !== null ? toBytes(codeHash) : codeHash, + codeSize !== undefined && codeSize !== null ? bytesToInt(toBytes(codeSize)) : codeSize, + version !== undefined && version !== null ? bytesToInt(toBytes(version)) : version, + ) +} + +export function createAccountFromRLP(serialized: Uint8Array) { + const values = RLP.decode(serialized) as Uint8Array[] + + if (!Array.isArray(values)) { + throw new Error('Invalid serialized account input. Must be array') + } + + return createAccountFromBytesArray(values) +} + +export function createPartialAccountFromRLP(serialized: Uint8Array) { + const values = RLP.decode(serialized) as Uint8Array[][] + + if (!Array.isArray(values)) { + throw new Error('Invalid serialized account input. Must be array') + } + + let nonce = null + if (!Array.isArray(values[0])) { + throw new Error('Invalid partial nonce encoding. Must be array') + } else { + const isNotNullIndicator = bytesToInt(values[0][0]) + if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { + throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for nonce`) + } + if (isNotNullIndicator === 1) { + nonce = bytesToBigInt(values[0][1]) + } + } + + let balance = null + if (!Array.isArray(values[1])) { + throw new Error('Invalid partial balance encoding. Must be array') + } else { + const isNotNullIndicator = bytesToInt(values[1][0]) + if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { + throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for balance`) + } + if (isNotNullIndicator === 1) { + balance = bytesToBigInt(values[1][1]) + } + } + + let storageRoot = null + if (!Array.isArray(values[2])) { + throw new Error('Invalid partial storageRoot encoding. 
Must be array') + } else { + const isNotNullIndicator = bytesToInt(values[2][0]) + if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { + throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for storageRoot`) + } + if (isNotNullIndicator === 1) { + storageRoot = values[2][1] + } + } + + let codeHash = null + if (!Array.isArray(values[3])) { + throw new Error('Invalid partial codeHash encoding. Must be array') + } else { + const isNotNullIndicator = bytesToInt(values[3][0]) + if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { + throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for codeHash`) + } + if (isNotNullIndicator === 1) { + codeHash = values[3][1] + } + } + + let codeSize = null + if (!Array.isArray(values[4])) { + throw new Error('Invalid partial codeSize encoding. Must be array') + } else { + const isNotNullIndicator = bytesToInt(values[4][0]) + if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { + throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for codeSize`) + } + if (isNotNullIndicator === 1) { + codeSize = bytesToInt(values[4][1]) + } + } + + let version = null + if (!Array.isArray(values[5])) { + throw new Error('Invalid partial version encoding. Must be array') + } else { + const isNotNullIndicator = bytesToInt(values[5][0]) + if (isNotNullIndicator !== 0 && isNotNullIndicator !== 1) { + throw new Error(`Invalid isNullIndicator=${isNotNullIndicator} for version`) + } + if (isNotNullIndicator === 1) { + version = bytesToInt(values[5][1]) + } + } + + return createPartialAccount({ balance, nonce, storageRoot, codeHash, codeSize, version }) +} + /** * Checks if the address is a valid. Accepts checksummed addresses too. */ @@ -437,7 +439,7 @@ export const isValidAddress = function (hexAddress: string): hexAddress is Prefi */ export const toChecksumAddress = function ( hexAddress: string, - eip1191ChainId?: BigIntLike + eip1191ChainId?: BigIntLike, ): PrefixedHexString { assertIsHexString(hexAddress) const address = stripHexPrefix(hexAddress).toLowerCase() @@ -470,7 +472,7 @@ export const toChecksumAddress = function ( */ export const isValidChecksumAddress = function ( hexAddress: string, - eip1191ChainId?: BigIntLike + eip1191ChainId?: BigIntLike, ): boolean { return isValidAddress(hexAddress) && toChecksumAddress(hexAddress, eip1191ChainId) === hexAddress } @@ -503,7 +505,7 @@ export const generateAddress = function (from: Uint8Array, nonce: Uint8Array): U export const generateAddress2 = function ( from: Uint8Array, salt: Uint8Array, - initCode: Uint8Array + initCode: Uint8Array, ): Uint8Array { assertIsBytes(from) assertIsBytes(salt) diff --git a/packages/util/src/address.ts b/packages/util/src/address.ts index 2cb37d498e..795052e436 100644 --- a/packages/util/src/address.ts +++ b/packages/util/src/address.ts @@ -11,6 +11,7 @@ import { bytesToHex, equalsBytes, hexToBytes, + setLengthLeft, zeros, } from './bytes.js' import { BIGINT_0 } from './constants.js' @@ -30,76 +31,6 @@ export class Address { this.bytes = bytes } - /** - * Returns the zero address. - */ - static zero(): Address { - return new Address(zeros(20)) - } - - /** - * Returns an Address object from a hex-encoded string. - * @param str - Hex-encoded address - */ - static fromString(str: string): Address { - if (!isValidAddress(str)) { - throw new Error(`Invalid address input=${str}`) - } - return new Address(hexToBytes(str)) - } - - /** - * Returns an address for a given public key. 
- * @param pubKey The two points of an uncompressed key - */ - static fromPublicKey(pubKey: Uint8Array): Address { - if (!(pubKey instanceof Uint8Array)) { - throw new Error('Public key should be Uint8Array') - } - const bytes = pubToAddress(pubKey) - return new Address(bytes) - } - - /** - * Returns an address for a given private key. - * @param privateKey A private key must be 256 bits wide - */ - static fromPrivateKey(privateKey: Uint8Array): Address { - if (!(privateKey instanceof Uint8Array)) { - throw new Error('Private key should be Uint8Array') - } - const bytes = privateToAddress(privateKey) - return new Address(bytes) - } - - /** - * Generates an address for a newly created contract. - * @param from The address which is creating this new address - * @param nonce The nonce of the from account - */ - static generate(from: Address, nonce: bigint): Address { - if (typeof nonce !== 'bigint') { - throw new Error('Expected nonce to be a bigint') - } - return new Address(generateAddress(from.bytes, bigIntToBytes(nonce))) - } - - /** - * Generates an address for a contract created using CREATE2. - * @param from The address which is creating this new address - * @param salt A salt - * @param initCode The init code of the contract being created - */ - static generate2(from: Address, salt: Uint8Array, initCode: Uint8Array): Address { - if (!(salt instanceof Uint8Array)) { - throw new Error('Expected salt to be a Uint8Array') - } - if (!(initCode instanceof Uint8Array)) { - throw new Error('Expected initCode to be a Uint8Array') - } - return new Address(generateAddress2(from.bytes, salt, initCode)) - } - /** * Is address equal to another. */ @@ -111,7 +42,7 @@ export class Address { * Is address zero. */ isZero(): boolean { - return this.equals(Address.zero()) + return this.equals(new Address(zeros(20))) } /** @@ -139,3 +70,89 @@ export class Address { return new Uint8Array(this.bytes) } } + +/** + * Returns the zero address. + */ +export function createZeroAddress(): Address { + return new Address(zeros(20)) +} + +/** + * Returns an Address object from a bigint address (they are stored as bigints on the stack) + * @param value The bigint address + */ +export function createAddressFromBigInt(value: bigint): Address { + const bytes = bigIntToBytes(value) + if (bytes.length > 20) { + throw new Error(`Invalid address, too long: ${bytes.length}`) + } + return new Address(setLengthLeft(bytes, 20)) +} + +/** + * Returns an Address object from a hex-encoded string. + * @param str - Hex-encoded address + */ +export function createAddressFromString(str: string): Address { + if (!isValidAddress(str)) { + throw new Error(`Invalid address input=${str}`) + } + return new Address(hexToBytes(str)) +} + +/** + * Returns an address for a given public key. + * @param pubKey The two points of an uncompressed key + */ +export function createAddressFromPublicKey(pubKey: Uint8Array): Address { + if (!(pubKey instanceof Uint8Array)) { + throw new Error('Public key should be Uint8Array') + } + const bytes = pubToAddress(pubKey) + return new Address(bytes) +} + +/** + * Returns an address for a given private key. + * @param privateKey A private key must be 256 bits wide + */ +export function createAddressFromPrivateKey(privateKey: Uint8Array): Address { + if (!(privateKey instanceof Uint8Array)) { + throw new Error('Private key should be Uint8Array') + } + const bytes = privateToAddress(privateKey) + return new Address(bytes) +} + +/** + * Generates an address for a newly created contract. 
+ * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ +export function createContractAddress(from: Address, nonce: bigint): Address { + if (typeof nonce !== 'bigint') { + throw new Error('Expected nonce to be a bigint') + } + return new Address(generateAddress(from.bytes, bigIntToBytes(nonce))) +} + +/** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ +export function createContractAddress2( + from: Address, + salt: Uint8Array, + initCode: Uint8Array, +): Address { + if (!(salt instanceof Uint8Array)) { + throw new Error('Expected salt to be a Uint8Array') + } + if (!(initCode instanceof Uint8Array)) { + throw new Error('Expected initCode to be a Uint8Array') + } + return new Address(generateAddress2(from.bytes, salt, initCode)) +} diff --git a/packages/util/src/asyncEventEmitter.ts b/packages/util/src/asyncEventEmitter.ts index 7085b0a672..e5eefc1ffe 100644 --- a/packages/util/src/asyncEventEmitter.ts +++ b/packages/util/src/asyncEventEmitter.ts @@ -18,7 +18,7 @@ export interface EventMap { async function runInSeries( context: any, tasks: Array<(data: unknown, callback?: (error?: Error) => void) => void>, - data: unknown + data: unknown, ): Promise { let error: Error | undefined for await (const task of tasks) { @@ -132,7 +132,7 @@ export class AsyncEventEmitter extends EventEmitter { event: E & string, target: T[E], listener: T[E], - beforeOrAfter?: string + beforeOrAfter?: string, ) { let listeners = (this as any)._events[event] ?? [] let i diff --git a/packages/util/src/blobs.ts b/packages/util/src/blobs.ts index 1aec3aeb5c..71c98f895e 100644 --- a/packages/util/src/blobs.ts +++ b/packages/util/src/blobs.ts @@ -15,10 +15,10 @@ const MAX_USEFUL_BYTES_PER_TX = USEFUL_BYTES_PER_BLOB * MAX_BLOBS_PER_TX - 1 const BLOB_SIZE = BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB function get_padded(data: Uint8Array, blobs_len: number): Uint8Array { - const pdata = new Uint8Array(blobs_len * USEFUL_BYTES_PER_BLOB).fill(0) - pdata.set(data) - pdata[data.byteLength] = 0x80 - return pdata + const pData = new Uint8Array(blobs_len * USEFUL_BYTES_PER_BLOB).fill(0) + pData.set(data) + pData[data.byteLength] = 0x80 + return pData } function get_blob(data: Uint8Array): Uint8Array { @@ -44,11 +44,11 @@ export const getBlobs = (input: string) => { const blobs_len = Math.ceil(len / USEFUL_BYTES_PER_BLOB) - const pdata = get_padded(data, blobs_len) + const pData = get_padded(data, blobs_len) const blobs: Uint8Array[] = [] for (let i = 0; i < blobs_len; i++) { - const chunk = pdata.subarray(i * USEFUL_BYTES_PER_BLOB, (i + 1) * USEFUL_BYTES_PER_BLOB) + const chunk = pData.subarray(i * USEFUL_BYTES_PER_BLOB, (i + 1) * USEFUL_BYTES_PER_BLOB) const blob = get_blob(chunk) blobs.push(blob) } diff --git a/packages/util/src/bytes.ts b/packages/util/src/bytes.ts index 907bf2ad3d..7bc047c55f 100644 --- a/packages/util/src/bytes.ts +++ b/packages/util/src/bytes.ts @@ -5,7 +5,7 @@ import { bytesToHex as _bytesToUnprefixedHex } from 'ethereum-cryptography/utils import { assertIsArray, assertIsBytes, assertIsHexString } from './helpers.js' import { isHexString, padToEven, stripHexPrefix } from './internal.js' -import type { PrefixedHexString, TransformabletoBytes } from './types.js' +import type { PrefixedHexString, TransformableToBytes } from './types.js' const BIGINT_0 = BigInt(0) @@ -107,14 
+107,13 @@ export const bytesToInt = (bytes: Uint8Array): number => { return res } -// TODO: Restrict the input type to only PrefixedHexString /** * Converts a {@link PrefixedHexString} to a {@link Uint8Array} - * @param {PrefixedHexString | string} hex The 0x-prefixed hex string to convert + * @param {PrefixedHexString} hex The 0x-prefixed hex string to convert * @returns {Uint8Array} The converted bytes * @throws If the input is not a valid 0x-prefixed hex string */ -export const hexToBytes = (hex: PrefixedHexString | string): Uint8Array => { +export const hexToBytes = (hex: PrefixedHexString): Uint8Array => { if (typeof hex !== 'string') { throw new Error(`hex argument type ${typeof hex} must be of type string`) } @@ -126,7 +125,7 @@ export const hexToBytes = (hex: PrefixedHexString | string): Uint8Array => { const unprefixedHex = hex.slice(2) return _unprefixedHexToBytes( - unprefixedHex.length % 2 === 0 ? unprefixedHex : padToEven(unprefixedHex) + unprefixedHex.length % 2 === 0 ? unprefixedHex : padToEven(unprefixedHex), ) } @@ -227,7 +226,7 @@ export const setLengthRight = (msg: Uint8Array, length: number): Uint8Array => { * @return {Uint8Array|number[]|string} */ const stripZeros = ( - a: T + a: T, ): T => { let first = a[0] while (a.length > 0 && first.toString() === '0') { @@ -257,26 +256,23 @@ export const unpadArray = (a: number[]): number[] => { return stripZeros(a) } -// TODO: Restrict the input type to only PrefixedHexString /** * Trims leading zeros from a `PrefixedHexString`. - * @param {PrefixedHexString | string} a + * @param {PrefixedHexString} a * @return {PrefixedHexString} */ -export const unpadHex = (a: PrefixedHexString | string): PrefixedHexString => { +export const unpadHex = (a: PrefixedHexString): PrefixedHexString => { assertIsHexString(a) return `0x${stripZeros(stripHexPrefix(a))}` } -// TODO: remove the string type from this function (only keep PrefixedHexString) export type ToBytesInputTypes = | PrefixedHexString - | string | number | bigint | Uint8Array | number[] - | TransformabletoBytes + | TransformableToBytes | null | undefined @@ -300,7 +296,7 @@ export const toBytes = (v: ToBytesInputTypes): Uint8Array => { if (typeof v === 'string') { if (!isHexString(v)) { throw new Error( - `Cannot convert string to Uint8Array. toBytes only supports 0x-prefixed hex strings and this string was given: ${v}` + `Cannot convert string to Uint8Array. toBytes only supports 0x-prefixed hex strings and this string was given: ${v}`, ) } return hexToBytes(v) @@ -477,7 +473,7 @@ export const randomBytes = (length: number): Uint8Array => { /** * This mirrors the functionality of the `ethereum-cryptography` export except - * it skips the check to validate that every element of `arrays` is indead a `uint8Array` + * it skips the check to validate that every element of `arrays` is indeed a `uint8Array` * Can give small performance gains on large arrays * @param {Uint8Array[]} arrays an array of Uint8Arrays * @returns {Uint8Array} one Uint8Array with all the elements of the original set @@ -552,7 +548,29 @@ export function bigInt64ToBytes(value: bigint, littleEndian: boolean = false): U // eslint-disable-next-line no-restricted-imports export { bytesToUtf8, equalsBytes, utf8ToBytes } from 'ethereum-cryptography/utils.js' -// TODO: Restrict the input type to only PrefixedHexString -export function hexToBigInt(input: PrefixedHexString | string): bigint { +export function hexToBigInt(input: PrefixedHexString): bigint { return bytesToBigInt(hexToBytes(isHexString(input) ? 
input : `0x${input}`)) } + +/** + * Compares two byte arrays and returns the count of consecutively matching items from the start. + * + * @function + * @param {Uint8Array} bytes1 - The first Uint8Array to compare. + * @param {Uint8Array} bytes2 - The second Uint8Array to compare. + * @returns {number} The count of consecutively matching items from the start. + */ +export function matchingBytesLength(bytes1: Uint8Array, bytes2: Uint8Array): number { + let count = 0 + const minLength = Math.min(bytes1.length, bytes2.length) + + for (let i = 0; i < minLength; i++) { + if (bytes1[i] === bytes2[i]) { + count++ + } else { + // Break early if a mismatch is found + break + } + } + return count +} diff --git a/packages/util/src/constants.ts b/packages/util/src/constants.ts index d1c8110465..b1587c9bc7 100644 --- a/packages/util/src/constants.ts +++ b/packages/util/src/constants.ts @@ -11,7 +11,7 @@ export const MAX_UINT64 = BigInt('0xffffffffffffffff') * The max integer that the evm can handle (2^256-1) */ export const MAX_INTEGER = BigInt( - '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', ) /** @@ -20,7 +20,7 @@ export const MAX_INTEGER = BigInt( * We use literal value instead of calculated value for compatibility issue. */ export const MAX_INTEGER_BIGINT = BigInt( - '115792089237316195423570985008687907853269984665640564039457584007913129639935' + '115792089237316195423570985008687907853269984665640564039457584007913129639935', ) export const SECP256K1_ORDER = secp256k1.CURVE.n @@ -30,7 +30,7 @@ export const SECP256K1_ORDER_DIV_2 = secp256k1.CURVE.n / BigInt(2) * 2^256 */ export const TWO_POW256 = BigInt( - '0x10000000000000000000000000000000000000000000000000000000000000000' + '0x10000000000000000000000000000000000000000000000000000000000000000', ) /** diff --git a/packages/util/src/db.ts b/packages/util/src/db.ts index 9bdfabcd90..cc0e8a2a21 100644 --- a/packages/util/src/db.ts +++ b/packages/util/src/db.ts @@ -3,7 +3,7 @@ export type DBObject = { } export type BatchDBOp< TKey extends Uint8Array | string | number = Uint8Array, - TValue extends Uint8Array | string | DBObject = Uint8Array + TValue extends Uint8Array | string | DBObject = Uint8Array, > = PutBatch | DelBatch export enum KeyEncoding { @@ -24,7 +24,7 @@ export type EncodingOpts = { } export interface PutBatch< TKey extends Uint8Array | string | number = Uint8Array, - TValue extends Uint8Array | string | DBObject = Uint8Array + TValue extends Uint8Array | string | DBObject = Uint8Array, > { type: 'put' key: TKey @@ -40,7 +40,7 @@ export interface DelBatch { /** * Retrieves a raw value from db. 
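Note (not part of the diff): the hunks above replace the static Address constructors with standalone factory functions and add a matchingBytesLength helper to bytes.ts. Below is a minimal usage sketch under the assumption that these helpers are re-exported from the package entry point like the rest of packages/util; the import path and logged values are illustrative, with the CREATE address taken from the updated test vectors further down.

import {
  createAddressFromString,
  createContractAddress,
  createZeroAddress,
  matchingBytesLength,
} from '@ethereumjs/util'

// Standalone factories replace the former Address.fromString() / Address.generate() statics
const from = createAddressFromString('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39')
const contract = createContractAddress(from, BigInt(14))
console.log(contract.toString()) // '0xd658a4b8247c14868f3c512fa5cbb6e458e4a989' (per the test vector)
console.log(createZeroAddress().isZero()) // true

// matchingBytesLength counts the consecutively equal bytes at the start of two arrays
const shared = matchingBytesLength(Uint8Array.from([1, 2, 3]), Uint8Array.from([1, 2, 4]))
console.log(shared) // 2
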
diff --git a/packages/util/src/genesis.ts b/packages/util/src/genesis.ts index 8ad5426d6f..4cfebc2f70 100644 --- a/packages/util/src/genesis.ts +++ b/packages/util/src/genesis.ts @@ -9,7 +9,7 @@ export type AccountState = [ balance: PrefixedHexString, code: PrefixedHexString, storage: Array, - nonce: PrefixedHexString + nonce: PrefixedHexString, ] /** diff --git a/packages/util/src/index.ts b/packages/util/src/index.ts index bc3e1462cf..f1e333bd48 100644 --- a/packages/util/src/index.ts +++ b/packages/util/src/index.ts @@ -65,4 +65,5 @@ export * from './lock.js' export * from './mapDB.js' export * from './provider.js' export * from './requests.js' +export * from './tasks.js' export * from './verkle.js' diff --git a/packages/util/src/internal.ts b/packages/util/src/internal.ts index 6333fe40a5..4af836eaa4 100644 --- a/packages/util/src/internal.ts +++ b/packages/util/src/internal.ts @@ -93,16 +93,16 @@ export function getBinarySize(str: string) { export function arrayContainsArray( superset: unknown[], subset: unknown[], - some?: boolean + some?: boolean, ): boolean { if (Array.isArray(superset) !== true) { throw new Error( - `[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'` + `[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'`, ) } if (Array.isArray(subset) !== true) { throw new Error( - `[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'` + `[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'`, ) } @@ -179,7 +179,7 @@ export function getKeys(params: Record[], key: string, allowEmpt } if (typeof key !== 'string') { throw new Error( - `[getKeys] method expects input 'key' to be type 'string', got ${typeof params}` + `[getKeys] method expects input 'key' to be type 'string', got ${typeof params}`, ) } diff --git a/packages/util/src/kzg.ts b/packages/util/src/kzg.ts index 4930b161ca..a2d91301d7 100644 --- a/packages/util/src/kzg.ts +++ b/packages/util/src/kzg.ts @@ -14,18 +14,18 @@ export interface Kzg { polynomialKzg: Uint8Array, z: Uint8Array, y: Uint8Array, - kzgProof: Uint8Array + kzgProof: Uint8Array, ): boolean verifyBlobKzgProofBatch( blobs: Uint8Array[], expectedKzgCommitments: Uint8Array[], - kzgProofs: Uint8Array[] + kzgProofs: Uint8Array[], ): boolean } /** * @deprecated This initialization method is deprecated since trusted setup loading is done directly in the reference KZG library - * initialization or should othewise be assured independently before KZG libary usage. + * initialization or should otherwise be assured independently before KZG library usage. 
* * @param kzgLib a KZG implementation (defaults to c-kzg) * @param a dictionary of trusted setup options diff --git a/packages/util/src/mapDB.ts b/packages/util/src/mapDB.ts index 4df7954fdb..88957cb19a 100644 --- a/packages/util/src/mapDB.ts +++ b/packages/util/src/mapDB.ts @@ -4,7 +4,7 @@ import type { BatchDBOp, DB, DBObject } from './db.js' export class MapDB< TKey extends Uint8Array | string | number, - TValue extends Uint8Array | string | DBObject + TValue extends Uint8Array | string | DBObject, > implements DB { _database: Map diff --git a/packages/util/src/provider.ts b/packages/util/src/provider.ts index bc8dfa009b..e61e6bfad3 100644 --- a/packages/util/src/provider.ts +++ b/packages/util/src/provider.ts @@ -1,6 +1,6 @@ type rpcParams = { method: string - params: (string | boolean | number)[] + params: (string | string[] | boolean | number)[] } /** @@ -47,8 +47,8 @@ export const fetchFromProvider = async (url: string, params: rpcParams) => { }), }, null, - 2 - )}` + 2, + )}`, ) } const json = await res.json() diff --git a/packages/util/src/requests.ts b/packages/util/src/requests.ts index 576ae52856..3490e06977 100644 --- a/packages/util/src/requests.ts +++ b/packages/util/src/requests.ts @@ -96,7 +96,7 @@ export class DepositRequest extends CLRequest { public readonly withdrawalCredentials: Uint8Array, public readonly amount: bigint, public readonly signature: Uint8Array, - public readonly index: bigint + public readonly index: bigint, ) { super(CLRequestType.Deposit) } @@ -124,7 +124,13 @@ export class DepositRequest extends CLRequest { return concatBytes( Uint8Array.from([this.type]), - RLP.encode([this.pubkey, this.withdrawalCredentials, amountBytes, this.signature, indexBytes]) + RLP.encode([ + this.pubkey, + this.withdrawalCredentials, + amountBytes, + this.signature, + indexBytes, + ]), ) } @@ -140,7 +146,7 @@ export class DepositRequest extends CLRequest { public static deserialize(bytes: Uint8Array): DepositRequest { const [pubkey, withdrawalCredentials, amount, signature, index] = RLP.decode( - bytes.slice(1) + bytes.slice(1), ) as [Uint8Array, Uint8Array, Uint8Array, Uint8Array, Uint8Array] return this.fromRequestData({ pubkey, @@ -156,7 +162,7 @@ export class WithdrawalRequest extends CLRequest { constructor( public readonly sourceAddress: Uint8Array, public readonly validatorPubkey: Uint8Array, - public readonly amount: bigint + public readonly amount: bigint, ) { super(CLRequestType.Withdrawal) } @@ -180,7 +186,7 @@ export class WithdrawalRequest extends CLRequest { return concatBytes( Uint8Array.from([this.type]), - RLP.encode([this.sourceAddress, this.validatorPubkey, amountBytes]) + RLP.encode([this.sourceAddress, this.validatorPubkey, amountBytes]), ) } @@ -196,7 +202,7 @@ export class WithdrawalRequest extends CLRequest { const [sourceAddress, validatorPubkey, amount] = RLP.decode(bytes.slice(1)) as [ Uint8Array, Uint8Array, - Uint8Array + Uint8Array, ] return this.fromRequestData({ sourceAddress, @@ -210,7 +216,7 @@ export class ConsolidationRequest extends CLRequest constructor( public readonly sourceAddress: Uint8Array, public readonly sourcePubkey: Uint8Array, - public readonly targetPubkey: Uint8Array + public readonly targetPubkey: Uint8Array, ) { super(CLRequestType.Consolidation) } @@ -232,7 +238,7 @@ export class ConsolidationRequest extends CLRequest serialize() { return concatBytes( Uint8Array.from([this.type]), - RLP.encode([this.sourceAddress, this.sourcePubkey, this.targetPubkey]) + RLP.encode([this.sourceAddress, this.sourcePubkey, 
this.targetPubkey]), ) } @@ -248,7 +254,7 @@ export class ConsolidationRequest extends CLRequest const [sourceAddress, sourcePubkey, targetPubkey] = RLP.decode(bytes.slice(1)) as [ Uint8Array, Uint8Array, - Uint8Array + Uint8Array, ] return this.fromRequestData({ sourceAddress, diff --git a/packages/util/src/signature.ts b/packages/util/src/signature.ts index 9dd8466836..03d43da741 100644 --- a/packages/util/src/signature.ts +++ b/packages/util/src/signature.ts @@ -37,7 +37,7 @@ export interface ECDSASignature { export function ecsign( msgHash: Uint8Array, privateKey: Uint8Array, - chainId?: bigint + chainId?: bigint, ): ECDSASignature { const sig = secp256k1.sign(msgHash, privateKey) const buf = sig.toCompactRawBytes() @@ -75,7 +75,7 @@ export const ecrecover = function ( v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ): Uint8Array { const signature = concatBytes(setLengthLeft(r, 32), setLengthLeft(s, 32)) const recovery = calculateSigRecovery(v, chainId) @@ -97,7 +97,7 @@ export const toRpcSig = function ( v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ): string { const recovery = calculateSigRecovery(v, chainId) if (!isValidSigRecovery(recovery)) { @@ -118,7 +118,7 @@ export const toCompactSig = function ( v: bigint, r: Uint8Array, s: Uint8Array, - chainId?: bigint + chainId?: bigint, ): string { const recovery = calculateSigRecovery(v, chainId) if (!isValidSigRecovery(recovery)) { @@ -183,7 +183,7 @@ export const isValidSignature = function ( r: Uint8Array, s: Uint8Array, homesteadOrLater: boolean = true, - chainId?: bigint + chainId?: bigint, ): boolean { if (r.length !== 32 || s.length !== 32) { return false diff --git a/packages/trie/src/util/tasks.ts b/packages/util/src/tasks.ts similarity index 100% rename from packages/trie/src/util/tasks.ts rename to packages/util/src/tasks.ts diff --git a/packages/util/src/types.ts b/packages/util/src/types.ts index f81a7ed70e..f9bc736eb7 100644 --- a/packages/util/src/types.ts +++ b/packages/util/src/types.ts @@ -17,7 +17,7 @@ export type BytesLike = | number[] | number | bigint - | TransformabletoBytes + | TransformableToBytes | PrefixedHexString /* @@ -30,7 +30,7 @@ export type PrefixedHexString = `0x${string}` */ export type AddressLike = Address | Uint8Array | PrefixedHexString -export interface TransformabletoBytes { +export interface TransformableToBytes { toBytes?(): Uint8Array } @@ -79,11 +79,11 @@ export function toType(input: null, outputType: T): null export function toType(input: undefined, outputType: T): undefined export function toType( input: ToBytesInputTypes, - outputType: T + outputType: T, ): TypeOutputReturnType[T] export function toType( input: ToBytesInputTypes, - outputType: T + outputType: T, ): TypeOutputReturnType[T] | undefined | null { if (input === null) { return null @@ -96,7 +96,7 @@ export function toType( throw new Error(`A string must be provided with a 0x-prefix, given: ${input}`) } else if (typeof input === 'number' && !Number.isSafeInteger(input)) { throw new Error( - 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)' + 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)', ) } @@ -111,7 +111,7 @@ export function toType( const bigInt = bytesToBigInt(output) if (bigInt > BigInt(Number.MAX_SAFE_INTEGER)) { throw new Error( - 'The provided number is greater than MAX_SAFE_INTEGER (please use an alternative output type)' + 'The provided number is greater than 
MAX_SAFE_INTEGER (please use an alternative output type)', ) } return Number(bigInt) as TypeOutputReturnType[T] diff --git a/packages/util/src/units.ts b/packages/util/src/units.ts index dfbca2d2de..5465baf5b1 100644 --- a/packages/util/src/units.ts +++ b/packages/util/src/units.ts @@ -5,7 +5,7 @@ export const GWEI_TO_WEI = BigInt(1000000000) export function formatBigDecimal( numerator: bigint, denominator: bigint, - maxDecimalFactor: bigint + maxDecimalFactor: bigint, ): string { if (denominator === BIGINT_0) { denominator = BIGINT_1 diff --git a/packages/util/src/verkle.ts b/packages/util/src/verkle.ts index 9f1ce2b569..9c0995848a 100644 --- a/packages/util/src/verkle.ts +++ b/packages/util/src/verkle.ts @@ -27,7 +27,7 @@ export interface VerkleCrypto { commitment: Uint8Array, commitmentIndex: number, oldScalarValue: Uint8Array, - newScalarValue: Uint8Array + newScalarValue: Uint8Array, ) => Uint8Array // Commitment zeroCommitment: Uint8Array verifyExecutionWitnessPreState: (prestateRoot: string, execution_witness_json: string) => boolean @@ -38,7 +38,7 @@ export interface VerkleCrypto { /** * @dev Returns the 31-bytes verkle tree stem for a given address and tree index. * @dev Assumes that the verkle node width = 256 - * @param ffi The verkle ffi object from verkle-crypotography-wasm. + * @param ffi The verkle ffi object from verkle-cryptography-wasm. * @param address The address to generate the tree key for. * @param treeIndex The index of the tree to generate the key for. Defaults to 0. * @return The 31-bytes verkle tree stem as a Uint8Array. @@ -46,7 +46,7 @@ export interface VerkleCrypto { export function getVerkleStem( ffi: VerkleCrypto, address: Address, - treeIndex: number | bigint = 0 + treeIndex: number | bigint = 0, ): Uint8Array { const address32 = setLengthLeft(address.toBytes(), 32) @@ -64,13 +64,13 @@ export function getVerkleStem( /** * Verifies that the executionWitness is valid for the given prestateRoot. - * @param ffi The verkle ffi object from verkle-crypotography-wasm. + * @param ffi The verkle ffi object from verkle-cryptography-wasm. * @param executionWitness The verkle execution witness. * @returns {boolean} Whether or not the executionWitness belongs to the prestateRoot. 
*/ export function verifyVerkleProof( ffi: VerkleCrypto, - executionWitness: VerkleExecutionWitness + executionWitness: VerkleExecutionWitness, ): boolean { const { parentStateRoot, ...parsedExecutionWitness } = executionWitness return ffi.verifyExecutionWitnessPreState(parentStateRoot, JSON.stringify(parsedExecutionWitness)) @@ -197,7 +197,7 @@ export function getVerkleTreeIndicesForCodeChunk(chunkId: number) { export const getVerkleTreeKeyForCodeChunk = async ( address: Address, chunkId: number, - verkleCrypto: VerkleCrypto + verkleCrypto: VerkleCrypto, ) => { const { treeIndex, subIndex } = getVerkleTreeIndicesForCodeChunk(chunkId) return concatBytes(getVerkleStem(verkleCrypto, address, treeIndex), toBytes(subIndex)) @@ -216,7 +216,7 @@ export const chunkifyCode = (code: Uint8Array) => { export const getVerkleTreeKeyForStorageSlot = async ( address: Address, storageKey: bigint, - verkleCrypto: VerkleCrypto + verkleCrypto: VerkleCrypto, ) => { const { treeIndex, subIndex } = getVerkleTreeIndexesForStorageSlot(storageKey) @@ -227,15 +227,15 @@ export function decodeVerkleLeafBasicData(encodedBasicData: Uint8Array): VerkleL const versionBytes = encodedBasicData.slice(0, VERKLE_VERSION_BYTES_LENGTH) const nonceBytes = encodedBasicData.slice( VERKLE_NONCE_OFFSET, - VERKLE_NONCE_OFFSET + VERKLE_NONCE_BYTES_LENGTH + VERKLE_NONCE_OFFSET + VERKLE_NONCE_BYTES_LENGTH, ) const codeSizeBytes = encodedBasicData.slice( VERKLE_CODE_SIZE_OFFSET, - VERKLE_CODE_SIZE_OFFSET + VERKLE_CODE_SIZE_BYTES_LENGTH + VERKLE_CODE_SIZE_OFFSET + VERKLE_CODE_SIZE_BYTES_LENGTH, ) const balanceBytes = encodedBasicData.slice( VERKLE_BALANCE_OFFSET, - VERKLE_BALANCE_OFFSET + VERKLE_BALANCE_BYTES_LENGTH + VERKLE_BALANCE_OFFSET + VERKLE_BALANCE_BYTES_LENGTH, ) const version = bytesToInt32(versionBytes, true) @@ -249,19 +249,19 @@ export function decodeVerkleLeafBasicData(encodedBasicData: Uint8Array): VerkleL export function encodeVerkleLeafBasicData(basicData: VerkleLeafBasicData): Uint8Array { const encodedVersion = setLengthRight( int32ToBytes(basicData.version, true), - VERKLE_VERSION_BYTES_LENGTH + VERKLE_VERSION_BYTES_LENGTH, ) const encodedNonce = setLengthRight( bigIntToBytes(basicData.nonce, true), - VERKLE_NONCE_BYTES_LENGTH + VERKLE_NONCE_BYTES_LENGTH, ) const encodedCodeSize = setLengthRight( int32ToBytes(basicData.codeSize, true), - VERKLE_CODE_SIZE_BYTES_LENGTH + VERKLE_CODE_SIZE_BYTES_LENGTH, ) const encodedBalance = setLengthRight( bigIntToBytes(basicData.balance, true), - VERKLE_BALANCE_BYTES_LENGTH + VERKLE_BALANCE_BYTES_LENGTH, ) return concatBytes(encodedVersion, encodedNonce, encodedCodeSize, encodedBalance) } diff --git a/packages/util/src/withdrawal.ts b/packages/util/src/withdrawal.ts index 1724aef448..daa2e53f10 100644 --- a/packages/util/src/withdrawal.ts +++ b/packages/util/src/withdrawal.ts @@ -43,9 +43,9 @@ export class Withdrawal { public readonly validatorIndex: bigint, public readonly address: Address, /** - * withdrawal amount in Gwei to match the CL repesentation and eventually ssz withdrawalsRoot + * withdrawal amount in Gwei to match the CL representation and eventually ssz withdrawalsRoot */ - public readonly amount: bigint + public readonly amount: bigint, ) {} public static fromWithdrawalData(withdrawalData: WithdrawalData) { diff --git a/packages/util/test/account.spec.ts b/packages/util/test/account.spec.ts index 4d2bca3d4b..ac15005be7 100644 --- a/packages/util/test/account.spec.ts +++ b/packages/util/test/account.spec.ts @@ -10,6 +10,9 @@ import { accountBodyToSlim, bytesToBigInt, 
bytesToHex, + createAccount, + createAccountFromBytesArray, + createAccountFromRLP, equalsBytes, generateAddress, generateAddress2, @@ -45,12 +48,12 @@ describe('Account', () => { assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have storageRoot equal to KECCAK256_RLP' + 'should have storageRoot equal to KECCAK256_RLP', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have codeHash equal to KECCAK256_NULL' + 'should have codeHash equal to KECCAK256_NULL', ) }) @@ -61,19 +64,19 @@ describe('Account', () => { '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', // storageRoot '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', // codeHash ] - const account = Account.fromValuesArray(raw.map((el) => hexToBytes(el))) + const account = createAccountFromBytesArray(raw.map((el) => hexToBytes(el))) assert.equal(account.nonce, BigInt(2), 'should have correct nonce') assert.equal(account.balance, BigInt(900), 'should have correct balance') assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have correct storageRoot' + 'should have correct storageRoot', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) @@ -84,37 +87,37 @@ describe('Account', () => { storageRoot: '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', codeHash: '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', } - const account = Account.fromAccountData(raw) + const account = createAccount(raw) assert.equal(account.nonce, BigInt(2), 'should have correct nonce') assert.equal(account.balance, BigInt(900), 'should have correct balance') assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have correct storageRoot' + 'should have correct storageRoot', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) it('from RLP data', () => { const accountRlp = hexToBytes( - '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) - const account = Account.fromRlpSerializedAccount(accountRlp) + const account = createAccountFromRLP(accountRlp) assert.equal(account.nonce, BigInt(2), 'should have correct nonce') assert.equal(account.balance, BigInt(900), 'should have correct balance') assert.equal( bytesToHex(account.storageRoot), '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', - 'should have correct storageRoot' + 'should have correct storageRoot', ) assert.equal( bytesToHex(account.codeHash), '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', - 'should have correct codeHash' + 'should have correct codeHash', ) }) @@ -125,7 +128,7 @@ describe('Account', () => { storageRoot: '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', codeHash: '0xc5d2461236f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', } - const account 
= Account.fromAccountData(raw) + const account = createAccount(raw) const accountRlp = RLP.encode([raw.nonce, raw.balance, raw.storageRoot, raw.codeHash] as Input) assert.ok(equalsBytes(account.serialize(), accountRlp), 'should serialize correctly') @@ -133,9 +136,9 @@ describe('Account', () => { it('isContract', () => { const accountRlp = hexToBytes( - '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xf84602820384a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) - let account = Account.fromRlpSerializedAccount(accountRlp) + let account = createAccountFromRLP(accountRlp) assert.notOk(account.isContract(), 'should return false for a non-contract account') const raw: AccountData = { @@ -144,7 +147,7 @@ describe('Account', () => { storageRoot: '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', codeHash: '0xc5d2461236f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', } - account = Account.fromAccountData(raw) + account = createAccount(raw) assert.ok(account.isContract(), 'should return true for a contract account') }) @@ -158,7 +161,7 @@ describe('Account', () => { storageRoot: '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', codeHash: '0xd748bf26ab37599c944babfdbeecf6690801bd61bf2670efb0a34adfc6dca10b', } - account = Account.fromAccountData(raw) + account = createAccount(raw) assert.notOk(account.isEmpty(), 'should return false for a non-empty account') }) @@ -169,7 +172,7 @@ describe('Account', () => { }, undefined, undefined, - 'should only accept length 32 buffer for storageRoot' + 'should only accept length 32 buffer for storageRoot', ) assert.throws( @@ -178,17 +181,17 @@ describe('Account', () => { }, undefined, undefined, - 'should only accept length 32 buffer for codeHash' + 'should only accept length 32 buffer for codeHash', ) const data = { balance: BigInt(5) } assert.throws( () => { - Account.fromRlpSerializedAccount(data as any) + createAccountFromRLP(data as any) }, undefined, undefined, - 'should only accept an array in fromRlpSerializedAccount' + 'should only accept an array in fromRlpSerializedAccount', ) assert.throws( @@ -197,7 +200,7 @@ describe('Account', () => { }, undefined, undefined, - 'should not accept nonce less than 0' + 'should not accept nonce less than 0', ) assert.throws( @@ -206,7 +209,7 @@ describe('Account', () => { }, undefined, undefined, - 'should not accept balance less than 0' + 'should not accept balance less than 0', ) }) }) @@ -220,95 +223,95 @@ describe('Utility Functions', () => { assert.notOk( isValidPrivate( hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' - ) + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', + ), ), - 'should fail on too big input' + 'should fail on too big input', ) assert.notOk( isValidPrivate(('WRONG_INPUT_TYPE') as Uint8Array), - 'should fail on wrong input type' + 'should fail on wrong input type', ) assert.notOk( isValidPrivate( - hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000000') + hexToBytes('0x0000000000000000000000000000000000000000000000000000000000000000'), ), - 'should fail on invalid curve (zero)' + 'should fail on invalid curve (zero)', ) assert.notOk( 
isValidPrivate(hexToBytes(`0x${SECP256K1_N.toString(16)}`)), - 'should fail on invalid curve (== N)' + 'should fail on invalid curve (== N)', ) assert.notOk( isValidPrivate(hexToBytes(`0x${(SECP256K1_N + BigInt(1)).toString(16)}`)), - 'should fail on invalid curve (>= N)' + 'should fail on invalid curve (>= N)', ) assert.ok( isValidPrivate(hexToBytes(`0x${(SECP256K1_N - BigInt(1)).toString(16)}`)), - 'should work otherwise (< N)' + 'should work otherwise (< N)', ) }) it('isValidPublic', () => { let pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744', ) assert.notOk(isValidPublic(pubKey), 'should fail on too short input') pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d00' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d00', ) assert.notOk(isValidPublic(pubKey), 'should fail on too big input') pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.notOk(isValidPublic(pubKey), 'should fail on SEC1 key') pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.ok( isValidPublic(pubKey, true), - "shouldn't fail on SEC1 key wt.testh sant.testize enabled" + "shouldn't fail on SEC1 key wt.testh sant.testize enabled", // cspell:disable-line ) pubKey = hexToBytes( - '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) - assert.notOk(isValidPublic(pubKey), 'should fail wt.testh an invalid SEC1 public key') + assert.notOk(isValidPublic(pubKey), 'should fail wt.testh an invalid SEC1 public key') // cspell:disable-line pubKey = hexToBytes('0x03fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f') assert.notOk(isValidPublic(pubKey), 'should fail an invalid 33-byte public key') pubKey = hexToBytes( - '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001' + '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001', ) assert.notOk(isValidPublic(pubKey), 'should fail an invalid 64-byte public key') pubKey = hexToBytes( - '0x04fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001' + '0x04fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000001', ) assert.notOk(isValidPublic(pubKey, true), 'should fail an invalid 65-byte public key') pubKey = 
hexToBytes('0x033a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a') assert.ok( isValidPublic(pubKey, true), - 'should work wt.testh compressed keys wt.testh sant.testize enabled' + 'should work wt.testh compressed keys wt.testh sant.testize enabled', // cspell:disable-line ) pubKey = hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) - assert.ok(isValidPublic(pubKey, true), 'should work wt.testh sant.testize enabled') + assert.ok(isValidPublic(pubKey, true), 'should work wt.testh sant.testize enabled') // cspell:disable-line pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.ok(isValidPublic(pubKey), 'should work otherwise') @@ -319,7 +322,7 @@ describe('Utility Functions', () => { } catch (err: any) { assert.ok( err.message.includes('This method only supports Uint8Array'), - 'should throw if input is not Uint8Array' + 'should throw if input is not Uint8Array', ) } }) @@ -332,34 +335,34 @@ describe('Utility Functions', () => { bytesToHex( importPublic( hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' - ) - ) + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', + ), + ), ), pubKey, - 'should work wt.testh an Ethereum public key' + 'should work wt.testh an Ethereum public key', // cspell:disable-line ) assert.equal( bytesToHex( importPublic( hexToBytes( - '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' - ) - ) + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', + ), + ), ), pubKey, - 'should work wt.testh uncompressed SEC1 keys' + 'should work wt.testh uncompressed SEC1 keys', // cspell:disable-line ) assert.equal( bytesToHex( importPublic( - hexToBytes('0x033a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a') - ) + hexToBytes('0x033a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a'), + ), ), pubKey, - 'should work wt.testh compressed SEC1 keys' + 'should work wt.testh compressed SEC1 keys', // cspell:disable-line ) assert.throws( @@ -368,27 +371,27 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'should throw if input is not Uint8Array' + 'should throw if input is not Uint8Array', ) }) it('publicToAddress', () => { let pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) let address = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' let r = publicToAddress(pubKey) assert.equal(bytesToHex(r), address, 'should produce an address given a public key') pubKey = hexToBytes( - 
'0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x043a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) address = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' r = publicToAddress(pubKey, true) assert.equal(bytesToHex(r), address, 'should produce an address given a SEC1 public key') pubKey = hexToBytes( - '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x023a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) assert.throws( function () { @@ -396,11 +399,11 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce an address given an invalid SEC1 public key" + "shouldn't produce an address given an invalid SEC1 public key", ) pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744', ) assert.throws( function () { @@ -408,7 +411,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce an address given an invalid public key" + "shouldn't produce an address given an invalid public key", ) pubKey = @@ -419,7 +422,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'should throw if input is not a Uint8Array' + 'should throw if input is not a Uint8Array', ) }) @@ -427,7 +430,7 @@ describe('Utility Functions', () => { const pubKey = '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' let privateKey = hexToBytes( - '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f' + '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f', ) const r = privateToPublic(privateKey) assert.equal(bytesToHex(r), pubKey, 'should produce a public key given a private key') @@ -439,7 +442,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce a public key given an invalid private key" + "shouldn't produce a public key given an invalid private key", ) privateKey = hexToBytes('0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c') @@ -449,7 +452,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - "shouldn't produce a public key given an invalid private key" + "shouldn't produce a public key given an invalid private key", ) privateKey = '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f' as any @@ -458,7 +461,7 @@ describe('Utility Functions', () => { } catch (err: any) { assert.ok( err.message.includes('This method only supports Uint8Array'), - 'should throw if private key is not Uint8Array' + 'should throw if private key is not Uint8Array', ) assert.ok(err.message.includes(privateKey), 'should throw if private key is not Uint8Array') } @@ -468,7 +471,7 @@ describe('Utility Functions', () => { const address = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' // Our private key const privateKey = hexToBytes( - '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f' + '0xea54bdc52d163f88c93ab0615782cf718a2efb9e51a7989aab1b08067e9c1c5f', ) const r = privateToAddress(privateKey) 
assert.equal(bytesToHex(r), address, 'should produce an address given a private key') @@ -477,12 +480,12 @@ describe('Utility Functions', () => { it('generateAddress', () => { const addr = generateAddress( utf8ToBytes('990ccf8a0de58091c028d6ff76bb235ee67c1c39'), - toBytes(14) + toBytes(14), ) assert.equal( bytesToHex(addr), '0x936a4295d8d74e310c0c95f0a63e53737b998d12', - 'should produce an address given a public key' + 'should produce an address given a public key', ) }) @@ -491,42 +494,46 @@ describe('Utility Functions', () => { assert.equal( bytesToHex(addr), '0xd658a4b8247c14868f3c512fa5cbb6e458e4a989', - 'should produce an address given a public key' + 'should produce an address given a public key', ) }) + // cspell:disable it('generateAddress wt.testh nonce 0 (special case)', () => { + // cspell:enable const addr = generateAddress(toBytes('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39'), toBytes(0)) assert.equal( bytesToHex(addr), '0xbfa69ba91385206bfdd2d8b9c1a5d6c10097a85b', - 'should produce an address given a public key' + 'should produce an address given a public key', ) }) + // cspell:disable it('generateAddress wt.testh non-buffer inputs', () => { + // cspell:enable assert.throws( function () { generateAddress( ('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') as Uint8Array, - toBytes(0) + toBytes(0), ) }, undefined, undefined, - 'should throw if address is not Uint8Array' + 'should throw if address is not Uint8Array', ) assert.throws( function () { generateAddress( toBytes('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39'), - (0) as Uint8Array + (0) as Uint8Array, ) }, undefined, undefined, - 'should throw if nonce is not Uint8Array' + 'should throw if nonce is not Uint8Array', ) }) @@ -536,7 +543,7 @@ describe('Utility Functions', () => { const addr = generateAddress2( hexToBytes(address as PrefixedHexString), hexToBytes(salt as PrefixedHexString), - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) assert.equal(bytesToHex(addr), result, `${comment}: should generate the addresses provided`) } @@ -550,12 +557,12 @@ describe('Utility Functions', () => { generateAddress2( (address) as Uint8Array, hexToBytes(salt as PrefixedHexString), - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) }, undefined, undefined, - 'should throw if address is not Uint8Array' + 'should throw if address is not Uint8Array', ) assert.throws( @@ -563,12 +570,12 @@ describe('Utility Functions', () => { generateAddress2( hexToBytes(address as PrefixedHexString), (salt) as Uint8Array, - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) }, undefined, undefined, - 'should throw if salt is not Uint8Array' + 'should throw if salt is not Uint8Array', ) assert.throws( @@ -576,12 +583,12 @@ describe('Utility Functions', () => { generateAddress2( hexToBytes(address as PrefixedHexString), hexToBytes(salt as PrefixedHexString), - (initCode) as Uint8Array + (initCode) as Uint8Array, ) }, undefined, undefined, - 'should throw if initCode is not Uint8Array' + 'should throw if initCode is not Uint8Array', ) }) @@ -599,7 +606,7 @@ describe('Utility Functions', () => { '0xD1220A0cf47c7B9Be7A2E6BA89F429762e7b9aDb', ] - const eip1191ChecksummAddresses = { + const eip1191ChecksumAddresses = { 1: [ '0x88021160c5C792225E4E5452585947470010289d', '0x27b1FdB04752bBc536007a920D24ACB045561c26', @@ -648,23 +655,23 @@ describe('Utility Functions', () => { describe('EIP1191', () => { it('Should encode the example addresses 
correctly', () => { - for (const [chainId, addresses] of Object.entries(eip1191ChecksummAddresses)) { + for (const [chainId, addresses] of Object.entries(eip1191ChecksumAddresses)) { for (const addr of addresses) { assert.equal(toChecksumAddress(addr.toLowerCase(), Number(chainId)), addr) assert.equal( toChecksumAddress( addr.toLowerCase(), - hexToBytes(`0x${padToEven(chainId)}`) + hexToBytes(`0x${padToEven(chainId)}`), ).toLowerCase(), - addr.toLowerCase() + addr.toLowerCase(), ) assert.equal( toChecksumAddress(addr.toLowerCase(), BigInt(chainId)).toLowerCase(), - addr.toLowerCase() + addr.toLowerCase(), ) assert.equal( toChecksumAddress(addr.toLowerCase(), `0x${padToEven(chainId)}`).toLowerCase(), - addr.toLowerCase() + addr.toLowerCase(), ) } } @@ -685,7 +692,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'Should throw when the address is not hex-prefixed' + 'Should throw when the address is not hex-prefixed', ) assert.throws( @@ -694,7 +701,7 @@ describe('Utility Functions', () => { }, undefined, undefined, - 'Should throw when the chainId is not hex-prefixed' + 'Should throw when the chainId is not hex-prefixed', ) }) }) @@ -709,13 +716,13 @@ describe('Utility Functions', () => { describe('EIP1191', () => { it('Should return true for the example addresses', () => { - for (const [chainId, addresses] of Object.entries(eip1191ChecksummAddresses)) { + for (const [chainId, addresses] of Object.entries(eip1191ChecksumAddresses)) { for (const addr of addresses) { assert.ok(isValidChecksumAddress(addr, Number(chainId))) assert.ok(isValidChecksumAddress(addr, intToBytes(parseInt(chainId)))) assert.ok(isValidChecksumAddress(addr, BigInt(chainId))) assert.ok( - isValidChecksumAddress(addr, `0x${padToEven(intToHex(parseInt(chainId)).slice(2))}`) + isValidChecksumAddress(addr, `0x${padToEven(intToHex(parseInt(chainId)).slice(2))}`), ) } } @@ -731,7 +738,7 @@ describe('Utility Functions', () => { }) it('Should return false if the wrong chain id is used', () => { - for (const [chainId, addresses] of Object.entries(eip1191ChecksummAddresses)) { + for (const [chainId, addresses] of Object.entries(eip1191ChecksumAddresses)) { for (const addr of addresses) { assert.notOk(isValidChecksumAddress(addr, Number(chainId) + 1)) } @@ -770,12 +777,12 @@ describe('Utility Functions', () => { assert.equal( JSON.stringify(result[2]), JSON.stringify(KECCAK256_RLP), - 'Empty storageRoot should be changed to hash of RLP of null' + 'Empty storageRoot should be changed to hash of RLP of null', ) assert.equal( JSON.stringify(result[3]), JSON.stringify(KECCAK256_NULL), - 'Empty codeRoot should be changed to hash of RLP of null' + 'Empty codeRoot should be changed to hash of RLP of null', ) }) diff --git a/packages/util/test/address.spec.ts b/packages/util/test/address.spec.ts index c0fc0b0e76..a0c3238db8 100644 --- a/packages/util/test/address.spec.ts +++ b/packages/util/test/address.spec.ts @@ -1,6 +1,18 @@ import { assert, describe, it } from 'vitest' -import { Address, equalsBytes, hexToBytes, toBytes } from '../src/index.js' +import { + Address, + createAddressFromBigInt, + createAddressFromPrivateKey, + createAddressFromPublicKey, + createAddressFromString, + createContractAddress, + createContractAddress2, + createZeroAddress, + equalsBytes, + hexToBytes, + toBytes, +} from '../src/index.js' import eip1014Testdata from './testdata/eip1014Examples.json' @@ -11,82 +23,93 @@ describe('Address', () => { it('should validate address length', () => { const str = 
'0x2f015c60e0be116b1f0cd534704db9c92118fb6a11' - assert.throws(() => Address.fromString(str)) + assert.throws(() => createAddressFromString(str)) const shortStr = '0x2f015c60e0be116b1f0cd534704db9c92118fb' - assert.throws(() => Address.fromString(shortStr)) + assert.throws(() => createAddressFromString(shortStr)) const buf = toBytes(str) assert.throws(() => new Address(buf)) }) it('should generate a zero address', () => { - const addr = Address.zero() + const addr = createZeroAddress() assert.deepEqual(addr.bytes, toBytes(ZERO_ADDR_S)) assert.equal(addr.toString(), ZERO_ADDR_S) }) it('should instantiate address from zero address string', () => { - const addr = Address.fromString(ZERO_ADDR_S) + const addr = createAddressFromString(ZERO_ADDR_S) assert.deepEqual(addr.toString(), ZERO_ADDR_S) assert.ok(addr.isZero()) }) it('should detect non-zero address', () => { const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' - const addr = Address.fromString(str) + const addr = createAddressFromString(str) assert.notOk(addr.isZero()) }) + it('should create an address from a bigint', () => { + const addr = createAddressFromBigInt(BigInt(0)) + assert.ok(addr.isZero()) + const addr2 = createAddressFromBigInt(BigInt(1)) + assert.notOk(addr2.isZero()) + }) + + it('should throw if bigint is too long', () => { + assert.throws(() => createAddressFromBigInt(BigInt(2) ** BigInt(160))) + }) + it('should instantiate from public key', () => { const pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae7441e1d', ) const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' - const addr = Address.fromPublicKey(pubKey) + const addr = createAddressFromPublicKey(pubKey) assert.equal(addr.toString(), str) }) it('should fail to instantiate from invalid public key', () => { const pubKey = hexToBytes( - '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744' + '0x3a443d8381a6798a70c6ff9304bdc8cb0163c23211d11628fae52ef9e0dca11a001cf066d56a8156fc201cd5df8a36ef694eecd258903fca7086c1fae744', ) - assert.throws(() => Address.fromPublicKey(pubKey)) + assert.throws(() => createAddressFromPublicKey(pubKey)) }) it('should instantiate from private key', () => { // prettier-ignore const privateKey = Uint8Array.from([234, 84, 189, 197, 45, 22, 63, 136, 201, 58, 176, 97, 87, 130, 207, 113, 138, 46, 251, 158, 81, 167, 152, 154, 171, 27, 8, 6, 126, 156, 28, 95]) const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' - const addr = Address.fromPrivateKey(privateKey) + const addr = createAddressFromPrivateKey(privateKey) assert.equal(addr.toString(), str) }) it('should generate address for created contract', () => { - const from = Address.fromString('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') - const addr = Address.generate(from, BigInt(14)) + const from = createAddressFromString('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') + const addr = createContractAddress(from, BigInt(14)) assert.equal(addr.toString(), '0xd658a4b8247c14868f3c512fa5cbb6e458e4a989') - const addr2 = Address.generate(from, BigInt(0)) + const addr2 = createContractAddress(from, BigInt(0)) assert.equal(addr2.toString(), '0xbfa69ba91385206bfdd2d8b9c1a5d6c10097a85b') }) it('should provide correct precompile check', () => { - const precompile = 
Address.fromString('0x0000000000000000000000000000000000000009') + const precompile = createAddressFromString('0x0000000000000000000000000000000000000009') assert.isTrue(precompile.isPrecompileOrSystemAddress(), 'should detect precompile address') - const nonPrecompile = Address.fromString('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') + const nonPrecompile = createAddressFromString('0x990ccf8a0de58091c028d6ff76bb235ee67c1c39') assert.isFalse( nonPrecompile.isPrecompileOrSystemAddress(), - 'should detect non-precompile address' + 'should detect non-precompile address', ) }) it('should generate address for CREATE2', () => { for (const testdata of eip1014Testdata) { const { address, salt, initCode, result } = testdata - const from = Address.fromString(address) - const addr = Address.generate2( + const from = createAddressFromString(address) + const addr = createContractAddress2( from, hexToBytes(salt as PrefixedHexString), - hexToBytes(initCode as PrefixedHexString) + hexToBytes(initCode as PrefixedHexString), ) assert.equal(addr.toString(), result) } @@ -94,7 +117,7 @@ describe('Address', () => { it('should provide a Uint8Array that does not mutate the original address', () => { const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' - const address = Address.fromString(str) + const address = createAddressFromString(str) const addressBytes = address.toBytes() addressBytes.fill(0) assert.equal(address.toString(), str) @@ -102,19 +125,19 @@ describe('Address', () => { it('should compare equality properly', () => { const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' - const address1 = Address.fromString(str) + const address1 = createAddressFromString(str) const address2 = new Address(hexToBytes(str)) assert.ok(address1.equals(address2)) assert.ok(equalsBytes(address1.bytes, address2.bytes)) const str2 = '0xcd4EC7b66fbc029C116BA9Ffb3e59351c20B5B06' - const address3 = Address.fromString(str2) + const address3 = createAddressFromString(str2) assert.ok(!address1.equals(address3)) - const address3LowerCase = Address.fromString(str2.toLowerCase()) + const address3LowerCase = createAddressFromString(str2.toLowerCase()) assert.ok(address3.equals(address3LowerCase)) - const address4 = Address.zero() + const address4 = createZeroAddress() assert.ok(!address1.equals(address4)) }) }) diff --git a/packages/util/test/bytes.spec.ts b/packages/util/test/bytes.spec.ts index 3d6954b2b6..7f9ece488f 100644 --- a/packages/util/test/bytes.spec.ts +++ b/packages/util/test/bytes.spec.ts @@ -1,7 +1,6 @@ import { assert, describe, it } from 'vitest' import { - Address, addHexPrefix, bigIntToAddressBytes, bigIntToBytes, @@ -10,6 +9,7 @@ import { bytesToBigInt, bytesToHex, bytesToInt, + createAddressFromString, equalsBytes, fromSigned, hexToBytes, @@ -17,6 +17,7 @@ import { intToHex, intToUnpaddedBytes, isZeroAddress, + matchingBytesLength, setLengthLeft, setLengthRight, short, @@ -242,7 +243,7 @@ describe('toBytes', () => { return Uint8Array.from([1]) }, }), - Uint8Array.from([1]) + Uint8Array.from([1]), ) }) it('should fail', () => { @@ -260,9 +261,9 @@ describe('toBytes', () => { assert.throws(() => toBytes('0xR'), '0xR') }) - it('should convert a TransformabletoBytes like the Address class (i.e. provides a toBytes method)', () => { + it('should convert a TransformableToBytes like the Address class (i.e. 
provides a toBytes method)', () => { const str = '0x2f015c60e0be116b1f0cd534704db9c92118fb6a' - const address = Address.fromString(str) + const address = createAddressFromString(str) const addressBytes = toBytes(address) assert.deepEqual(addressBytes, address.toBytes()) }) @@ -287,7 +288,7 @@ describe('intToBytes', () => { () => intToBytes(Number.MAX_SAFE_INTEGER + 1), undefined, undefined, - 'throws on unsafe integers' + 'throws on unsafe integers', ) }) @@ -316,7 +317,7 @@ describe('intToHex', () => { () => intToHex(Number.MAX_SAFE_INTEGER + 1), undefined, undefined, - 'throws on unsafe integers' + 'throws on unsafe integers', ) }) it('should pass on correct input', () => { @@ -329,7 +330,7 @@ describe('validateNoLeadingZeroes', () => { const noLeadingZeroes = { a: toBytes('0x123'), } - const noleadingZeroBytes = { + const noLeadingZeroBytes = { a: toBytes('0x01'), } const leadingZeroBytes = { @@ -349,19 +350,19 @@ describe('validateNoLeadingZeroes', () => { it('should pass on correct input', () => { assert.doesNotThrow( () => validateNoLeadingZeroes(noLeadingZeroes), - 'does not throw when no leading zeroes' + 'does not throw when no leading zeroes', ) assert.doesNotThrow( () => validateNoLeadingZeroes(emptyBuffer), - 'does not throw with empty buffer' + 'does not throw with empty buffer', ) assert.doesNotThrow( () => validateNoLeadingZeroes(undefinedValue), - 'does not throw when undefined passed in' + 'does not throw when undefined passed in', ) assert.doesNotThrow( - () => validateNoLeadingZeroes(noleadingZeroBytes), - 'does not throw when value has leading zero bytes' + () => validateNoLeadingZeroes(noLeadingZeroBytes), + 'does not throw when value has leading zero bytes', ) }) @@ -370,13 +371,13 @@ describe('validateNoLeadingZeroes', () => { () => validateNoLeadingZeroes(leadingZeroBytes), undefined, undefined, - 'throws when value has leading zero bytes' + 'throws when value has leading zero bytes', ) assert.throws( () => validateNoLeadingZeroes(onlyZeroes), undefined, undefined, - 'throws when value has only zeroes' + 'throws when value has only zeroes', ) }) }) @@ -423,10 +424,10 @@ describe('bigIntToAddressBytes', () => { for (const [addressHex, addressBigInt, isSafe] of testCases) { it('should correctly convert', () => { - const addressHexFromBigInt = bytesToHex(bigIntToAddressBytes(addressBigInt, false)) + const addressHexFromBigInt = bytesToHex(bigIntToAddressBytes(addressBigInt as bigint, false)) assert.equal(addressHex, addressHexFromBigInt, `should correctly convert ${addressBigInt}`) if (isSafe === false) { - assert.throw(() => bigIntToAddressBytes(addressBigInt)) + assert.throw(() => bigIntToAddressBytes(addressBigInt as bigint)) } }) } @@ -454,7 +455,7 @@ describe('hexToBytes', () => { it('should throw on invalid hex', () => { assert.throws(() => { - hexToBytes('0xinvalidhexstring') + hexToBytes('0xInvalidHexString') }) assert.throws(() => { hexToBytes('0xfz') @@ -482,3 +483,47 @@ describe('unprefixedHexToBytes', () => { assert.deepEqual(converted, new Uint8Array([17])) }) }) + +describe('matchingBytesLength', () => { + it('should return 0 when both arrays are empty', () => { + const bytes1 = new Uint8Array([]) + const bytes2 = new Uint8Array([]) + assert.equal(matchingBytesLength(bytes1, bytes2), 0) + }) + + it('should return 0 when one of the arrays is empty', () => { + const bytes1 = new Uint8Array([1, 2, 3]) + const bytes2 = new Uint8Array([]) + assert.equal(matchingBytesLength(bytes1, bytes2), 0) + }) + + it('should return 0 when arrays have no matching elements', 
() => { + const bytes1 = new Uint8Array([1, 2, 3]) + const bytes2 = new Uint8Array([4, 5, 6]) + assert.equal(matchingBytesLength(bytes1, bytes2), 0) + }) + + it('should handle arrays with same elements but different lengths', () => { + const bytes1 = new Uint8Array([1, 2, 3]) + const bytes2 = new Uint8Array([1, 2, 3, 4]) + assert.equal(matchingBytesLength(bytes1, bytes2), 3) + }) + + it('should handle arrays with matching elements at end', () => { + const bytes1 = new Uint8Array([1, 2, 3]) + const bytes2 = new Uint8Array([0, 1, 2, 3]) + assert.equal(matchingBytesLength(bytes1, bytes2), 0) + }) + + it('should handle arrays with matching elements at start', () => { + const bytes1 = new Uint8Array([1, 2, 3]) + const bytes2 = new Uint8Array([1, 2, 3, 4, 5]) + assert.equal(matchingBytesLength(bytes1, bytes2), 3) + }) + + it('should handle arrays with large number of elements', () => { + const bytes1 = new Uint8Array(Array.from({ length: 1000000 }, (_, i) => i)) + const bytes2 = new Uint8Array(Array.from({ length: 1000000 }, (_, i) => i)) + assert.equal(matchingBytesLength(bytes1, bytes2), 1000000) + }) +}) diff --git a/packages/util/test/constants.spec.ts b/packages/util/test/constants.spec.ts index dc2c9f758e..8b6e56f969 100644 --- a/packages/util/test/constants.spec.ts +++ b/packages/util/test/constants.spec.ts @@ -16,47 +16,47 @@ describe('constants', () => { it('should match constants', () => { assert.equal( MAX_INTEGER.toString(16), - 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', ) assert.equal( TWO_POW256.toString(16), - '10000000000000000000000000000000000000000000000000000000000000000' + '10000000000000000000000000000000000000000000000000000000000000000', ) assert.equal( TWO_POW256.toString(16), - '10000000000000000000000000000000000000000000000000000000000000000' + '10000000000000000000000000000000000000000000000000000000000000000', ) assert.equal( KECCAK256_NULL_S, - '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) assert.equal( bytesToHex(KECCAK256_NULL), - '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' + '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470', ) assert.equal( KECCAK256_RLP_ARRAY_S, - '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347' + '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347', ) assert.equal( bytesToHex(KECCAK256_RLP_ARRAY), - '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347' + '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347', ) assert.equal( KECCAK256_RLP_S, - '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421' + '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', ) assert.equal( bytesToHex(KECCAK256_RLP), - '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421' + '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', ) }) }) diff --git a/packages/util/test/genesis.spec.ts b/packages/util/test/genesis.spec.ts index 053a998f0e..43f89947a0 100644 --- a/packages/util/test/genesis.spec.ts +++ b/packages/util/test/genesis.spec.ts @@ -12,10 +12,10 @@ describe('[Util/genesis]', () => { assert.equal( genesisState['0x4242424242424242424242424242424242424242'][1].includes( // sample data check - '0x60806040526004361061003' + '0x60806040526004361061003', ), true, - 'should have deposit contract' + 'should have 
deposit contract', ) }) }) diff --git a/packages/util/test/internal.spec.ts b/packages/util/test/internal.spec.ts index 29f71ef84a..c8c94faf49 100644 --- a/packages/util/test/internal.spec.ts +++ b/packages/util/test/internal.spec.ts @@ -51,9 +51,9 @@ describe('internal', () => { { a: '1', b: '2' }, { a: '3', b: '4' }, ], - 'a' + 'a', ), - ['1', '3'] + ['1', '3'], ) assert.deepEqual( getKeys( @@ -62,9 +62,9 @@ describe('internal', () => { { a: '3', b: '4' }, ], 'a', - true + true, ), - ['', '3'] + ['', '3'], ) }) diff --git a/packages/util/test/provider.spec.ts b/packages/util/test/provider.spec.ts index f8a40b5b7a..5a8eb08f84 100644 --- a/packages/util/test/provider.spec.ts +++ b/packages/util/test/provider.spec.ts @@ -18,13 +18,13 @@ describe('getProvider', () => { assert.equal( getProvider(fakeEthersProvider), fakeEthersProvider._getConnection().url, - 'returned correct provider url string' + 'returned correct provider url string', ) assert.throws( () => getProvider(1), 'Must provide valid provider URL or Web3Provider', undefined, - 'throws correct error' + 'throws correct error', ) }) }) @@ -86,7 +86,7 @@ describe('fetchFromProvider', () => { vi.unstubAllGlobals() }) - it('handles the corner case of res.text() failing because of a network error not recieving the full response', async () => { + it('handles the corner case of res.text() failing because of a network error not receiving the full response', async () => { vi.stubGlobal('fetch', async (_url: string, _req: any) => { return { text: async () => { diff --git a/packages/util/test/requests.spec.ts b/packages/util/test/requests.spec.ts index c2f51e7f47..96b4a49f57 100644 --- a/packages/util/test/requests.spec.ts +++ b/packages/util/test/requests.spec.ts @@ -49,7 +49,7 @@ describe('Requests', () => { for (const [requestName, requestData, requestType, RequestInstanceType] of testCases) { it(`${requestName}`, () => { const requestObject = RequestInstanceType.fromRequestData( - requestData + requestData, ) as CLRequest const requestJSON = requestObject.toJSON() const serialized = requestObject.serialize() diff --git a/packages/util/test/signature.spec.ts b/packages/util/test/signature.spec.ts index b7bc655ccf..4bd493ea24 100644 --- a/packages/util/test/signature.spec.ts +++ b/packages/util/test/signature.spec.ts @@ -15,46 +15,46 @@ import { utf8ToBytes, } from '../src/index.js' -const echash = hexToBytes('0x82ff40c0a986c6a5cfad4ddf4c3aa6996f1a7837f9c398e17e5de5cbd5a12b28') -const ecprivkey = hexToBytes('0x3c9229289a6125f7fdf1885a77bb12c37a8d3b4962d936f7e3084dece32a3ca1') +const ecHash = hexToBytes('0x82ff40c0a986c6a5cfad4ddf4c3aa6996f1a7837f9c398e17e5de5cbd5a12b28') +const ecPrivKey = hexToBytes('0x3c9229289a6125f7fdf1885a77bb12c37a8d3b4962d936f7e3084dece32a3ca1') const chainId = BigInt(3) // ropsten describe('ecsign', () => { it('should produce a signature', () => { - const sig = ecsign(echash, ecprivkey) + const sig = ecsign(ecHash, ecPrivKey) assert.deepEqual( sig.r, - hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') + hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9'), ) assert.deepEqual( sig.s, - hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') + hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66'), ) assert.equal(sig.v, BigInt(27)) }) it('should produce a signature for Ropsten testnet', () => { - const sig = ecsign(echash, ecprivkey, chainId) + const sig = ecsign(ecHash, ecPrivKey, chainId) assert.deepEqual( 
sig.r, - hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') + hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9'), ) assert.deepEqual( sig.s, - hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') + hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66'), ) assert.equal(sig.v, BigInt(41)) }) it('should produce a signature for chainId=150', () => { const expectedSigR = hexToBytes( - '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9' + '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9', ) const expectedSigS = hexToBytes( - '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66' + '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66', ) - const sig = ecsign(echash, ecprivkey, BigInt(150)) + const sig = ecsign(ecHash, ecPrivKey, BigInt(150)) assert.deepEqual(sig.r, expectedSigR) assert.deepEqual(sig.s, expectedSigS) assert.equal(sig.v, BigInt(150 * 2 + 35)) @@ -63,14 +63,14 @@ describe('ecsign', () => { it('should produce a signature for a high number chainId greater than MAX_SAFE_INTEGER', () => { const chainIDBuffer = hexToBytes('0x796f6c6f763378') const expectedSigR = hexToBytes( - '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9' + '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9', ) const expectedSigS = hexToBytes( - '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66' + '0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66', ) const expectedSigV = BigInt('68361967398315795') - const sigBuffer = ecsign(echash, ecprivkey, bytesToBigInt(chainIDBuffer)) + const sigBuffer = ecsign(ecHash, ecPrivKey, bytesToBigInt(chainIDBuffer)) assert.deepEqual(sigBuffer.r, expectedSigR) assert.deepEqual(sigBuffer.s, expectedSigS) assert.equal(sigBuffer.v, expectedSigV) @@ -82,50 +82,50 @@ describe('ecrecover', () => { const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') const v = BigInt(27) - const pubkey = ecrecover(echash, v, r, s) - assert.deepEqual(pubkey, privateToPublic(ecprivkey)) + const pubkey = ecrecover(ecHash, v, r, s) + assert.deepEqual(pubkey, privateToPublic(ecPrivKey)) }) it('should recover a public key (chainId = 3)', () => { const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') const v = BigInt(41) - const pubkey = ecrecover(echash, v, r, s, chainId) - assert.deepEqual(pubkey, privateToPublic(ecprivkey)) + const pubkey = ecrecover(ecHash, v, r, s, chainId) + assert.deepEqual(pubkey, privateToPublic(ecPrivKey)) }) it('should recover a public key (chainId = 150)', () => { const chainId = BigInt(150) const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') const v = BigInt(chainId * BigInt(2) + BigInt(35)) - const pubkey = ecrecover(echash, v, r, s, chainId) - assert.deepEqual(pubkey, privateToPublic(ecprivkey)) + const pubkey = ecrecover(ecHash, v, r, s, chainId) + assert.deepEqual(pubkey, privateToPublic(ecPrivKey)) }) it('should recover a public key (v = 0)', () => { const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = 
hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') const v = BigInt(0) - const pubkey = ecrecover(echash, v, r, s) - assert.deepEqual(pubkey, privateToPublic(ecprivkey)) + const pubkey = ecrecover(ecHash, v, r, s) + assert.deepEqual(pubkey, privateToPublic(ecPrivKey)) }) it('should fail on an invalid signature (v = 21)', () => { const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') assert.throws(function () { - ecrecover(echash, BigInt(21), r, s) + ecrecover(ecHash, BigInt(21), r, s) }) }) it('should fail on an invalid signature (v = 29)', () => { const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') assert.throws(function () { - ecrecover(echash, BigInt(29), r, s) + ecrecover(ecHash, BigInt(29), r, s) }) }) it('should fail on an invalid signature (swapped points)', () => { const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') const s = hexToBytes('0x129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca66') assert.throws(function () { - ecrecover(echash, BigInt(27), s, r) + ecrecover(ecHash, BigInt(27), s, r) }) }) it('should return the right sender when using very high chain id / v values', () => { @@ -144,7 +144,7 @@ describe('ecrecover', () => { } */ const senderPubKey = hexToBytes( - '0x78988201fbceed086cfca7b64e382d08d0bd776898731443d2907c097745b7324c54f522087f5964412cddba019f192de0fd57a0ffa63f098c2b200e53594b15' + '0x78988201fbceed086cfca7b64e382d08d0bd776898731443d2907c097745b7324c54f522087f5964412cddba019f192de0fd57a0ffa63f098c2b200e53594b15', ) const msgHash = hexToBytes('0x8ae8cb685a7a9f29494b07b287c3f6a103b73fa178419d10d1184861a40f6afe') @@ -163,7 +163,7 @@ describe('hashPersonalMessage', () => { const h = hashPersonalMessage(utf8ToBytes('Hello world')) assert.deepEqual( h, - hexToBytes('0x8144a6fa26be252b86456491fbcd43c1de7e022241845ffea1c3df066f7cfede') + hexToBytes('0x8144a6fa26be252b86456491fbcd43c1de7e022241845ffea1c3df066f7cfede'), ) }) it('should throw if input is not a Uint8Array', () => { @@ -198,7 +198,7 @@ describe('isValidSignature', () => { }) it('should fail when on homestead and s > secp256k1n/2', () => { const SECP256K1_N_DIV_2 = BigInt( - '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0' + '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0', ) const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') @@ -209,7 +209,7 @@ describe('isValidSignature', () => { }) it('should not fail when not on homestead but s > secp256k1n/2', () => { const SECP256K1_N_DIV_2 = BigInt( - '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0' + '0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0', ) const r = hexToBytes('0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9') @@ -336,7 +336,7 @@ describe('message sig', () => { }) assert.throws(function () { fromRpcSig( - '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca' + '0x99e71a99cb2270b8cac5254f9e99b6210c6c10224a1579cf389ef88b20a1abe9129ff05af364204442bdb53ab6f18a99ab48acc9326fa689f228040429e3ca', ) }) }) @@ -344,7 +344,7 @@ describe('message sig', () => { it('pad short r and s values', () => { assert.equal( 
toRpcSig(BigInt(27), r.slice(20), s.slice(20)), - '0x00000000000000000000000000000000000000004a1579cf389ef88b20a1abe90000000000000000000000000000000000000000326fa689f228040429e3ca661b' + '0x00000000000000000000000000000000000000004a1579cf389ef88b20a1abe90000000000000000000000000000000000000000326fa689f228040429e3ca661b', ) }) diff --git a/packages/trie/test/util/tasks.spec.ts b/packages/util/test/tasks.spec.ts similarity index 91% rename from packages/trie/test/util/tasks.spec.ts rename to packages/util/test/tasks.spec.ts index 0ef1cea504..6a56326ae6 100644 --- a/packages/trie/test/util/tasks.spec.ts +++ b/packages/util/test/tasks.spec.ts @@ -1,6 +1,6 @@ import { assert, describe, it } from 'vitest' -import { PrioritizedTaskExecutor } from '../../src/index.js' +import { PrioritizedTaskExecutor } from '../src/index.js' const taskExecutor = new PrioritizedTaskExecutor(2) diff --git a/packages/util/test/verkle.spec.ts b/packages/util/test/verkle.spec.ts index 99d08a195c..d8aa627ff2 100644 --- a/packages/util/test/verkle.spec.ts +++ b/packages/util/test/verkle.spec.ts @@ -3,12 +3,12 @@ import { assert, beforeAll, describe, it } from 'vitest' import * as verkleBlockJSON from '../../statemanager/test/testdata/verkleKaustinen6Block72.json' import { - Address, type VerkleCrypto, type VerkleExecutionWitness, VerkleLeafType, bytesToHex, concatBytes, + createAddressFromString, getVerkleKey, getVerkleStem, hexToBytes, @@ -27,24 +27,30 @@ describe('Verkle cryptographic helpers', () => { // Empty address assert.equal( bytesToHex( - getVerkleStem(verkle, Address.fromString('0x0000000000000000000000000000000000000000')) + getVerkleStem( + verkle, + createAddressFromString('0x0000000000000000000000000000000000000000'), + ), ), - '0x1a100684fd68185060405f3f160e4bb6e034194336b547bdae323f888d5332' + '0x1a100684fd68185060405f3f160e4bb6e034194336b547bdae323f888d5332', ) // Non-empty address assert.equal( bytesToHex( - getVerkleStem(verkle, Address.fromString('0x71562b71999873DB5b286dF957af199Ec94617f7')) + getVerkleStem( + verkle, + createAddressFromString('0x71562b71999873DB5b286dF957af199Ec94617f7'), + ), ), - '0x1540dfad7755b40be0768c6aa0a5096fbf0215e0e8cf354dd928a178346466' + '0x1540dfad7755b40be0768c6aa0a5096fbf0215e0e8cf354dd928a178346466', ) }) it('verifyVerkleProof(): should verify verkle proofs', () => { // Src: Kaustinen6 testnet, block 71 state root (parent of block 72) const prestateRoot = hexToBytes( - '0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510' + '0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510', ) const executionWitness = { ...verkleBlockJSON.default.executionWitness, diff --git a/packages/util/test/withdrawal.spec.ts b/packages/util/test/withdrawal.spec.ts index 502d654e1f..2aa7754164 100644 --- a/packages/util/test/withdrawal.spec.ts +++ b/packages/util/test/withdrawal.spec.ts @@ -70,7 +70,7 @@ describe('Withdrawal', () => { const gethWithdrawalsRlp = bytesToHex(encode(gethWithdrawalsBuffer)) it('fromWithdrawalData and toBytesArray', () => { const withdrawals = withdrawalsGethVector.map((withdrawal) => - Withdrawal.fromWithdrawalData(withdrawal as WithdrawalData) + Withdrawal.fromWithdrawalData(withdrawal as WithdrawalData), ) const withdrawalstoBytesArr = withdrawals.map((wt) => wt.raw()) const withdrawalsToRlp = bytesToHex(encode(withdrawalstoBytesArr)) @@ -78,10 +78,10 @@ describe('Withdrawal', () => { }) it('toBytesArray from withdrawalData', () => { - const withdrawalsDatatoBytesArr = withdrawalsGethVector.map((withdrawal) => - 
Withdrawal.toBytesArray(withdrawal as WithdrawalData) + const withdrawalsDataToBytesArr = withdrawalsGethVector.map((withdrawal) => + Withdrawal.toBytesArray(withdrawal as WithdrawalData), ) - const withdrawalsDataToRlp = bytesToHex(encode(withdrawalsDatatoBytesArr)) + const withdrawalsDataToRlp = bytesToHex(encode(withdrawalsDataToBytesArr)) assert.equal(gethWithdrawalsRlp, withdrawalsDataToRlp, 'The withdrawals to buffer should match') }) @@ -93,7 +93,7 @@ describe('Withdrawal', () => { const withdrawalsValue = withdrawals.map((wt) => wt.toValue()) assert.deepEqual( withdrawalsValue.map((wt) => bytesToHex(wt.address)), - withdrawalsJson.map((wt) => wt.address) + withdrawalsJson.map((wt) => wt.address), ) }) }) diff --git a/packages/util/tsconfig.lint.json b/packages/util/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/util/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/verkle/.eslintrc.cjs b/packages/verkle/.eslintrc.cjs index 1fa27a8fea..887e31b0e6 100644 --- a/packages/verkle/.eslintrc.cjs +++ b/packages/verkle/.eslintrc.cjs @@ -1,12 +1,12 @@ module.exports = { extends: '../../config/eslint.cjs', parserOptions: { - project: ['./tsconfig.json', './tsconfig.benchmarks.json'], + project: ['./tsconfig.lint.json'], }, ignorePatterns: ['src/rust-verkle-wasm/rust_verkle_wasm.js', '**/vendor/*.js'], overrides: [ { - files: ['benchmarks/*.ts'], + files: ['benchmarks/*.ts', 'examples/*.ts'], rules: { 'no-console': 'off', }, diff --git a/packages/verkle/CHANGELOG.md b/packages/verkle/CHANGELOG.md index 536f107d59..c52c3915fd 100644 --- a/packages/verkle/CHANGELOG.md +++ b/packages/verkle/CHANGELOG.md @@ -6,7 +6,22 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## 0.0.2 - 2024-03-05 +## 0.1.0 - 2024-08-15 + +This is the first (still experimental) Verkle library release with some basic `put()` and `get()` functionality working! 🎉 Many parts are still in flux and evolving, but early experiments and feedback are very welcome!
+ +- Kaustinen6 adjustments, `verkle-cryptography-wasm` migration, PRs [#3355](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3355) and [#3356](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3356) +- Move tree key computation to verkle and simplify, PR [#3420](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3420) +- Rename code keccak, PR [#3426](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3426) +- Add tests for verkle bytes helper, PR [#3441](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3441) +- Verkle decoupling, PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) +- Rename verkle utils and refactor, PR [#3468](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3468) +- Optimize storage of default values in VerkleNode, PR [#3476](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3476) +- Build out trie processing, PR [#3430](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3430) +- Implement `trie.put()`, PR [#3473](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3473) +- Add `trie.del()`, PR [#3486](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3486) + +## 0.0.2 - 2024-03-18 - Fix a type error related to the `lru-cache` dependency, PR [#3285](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3285) - Downstream dependency updates, see PR [#3297](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3297) diff --git a/packages/verkle/package.json b/packages/verkle/package.json index e97a1a9baf..243ad6544e 100644 --- a/packages/verkle/package.json +++ b/packages/verkle/package.json @@ -1,6 +1,6 @@ { "name": "@ethereumjs/verkle", - "version": "0.0.2", + "version": "0.1.0", "description": "Implementation of verkle trees as used in Ethereum.", "keywords": [ "verkle", @@ -56,9 +56,9 @@ "debug": "^4.3.4", "lru-cache": "10.1.0", "verkle-cryptography-wasm": "^0.4.5", - "@ethereumjs/block": "^5.2.0", + "@ethereumjs/block": "^5.3.0", "@ethereumjs/rlp": "^5.0.2", - "@ethereumjs/util": "^9.0.3" + "@ethereumjs/util": "^9.1.0" }, "engines": { "node": ">=18" diff --git a/packages/verkle/src/index.ts b/packages/verkle/src/index.ts index d7fde388d7..af73aaea16 100644 --- a/packages/verkle/src/index.ts +++ b/packages/verkle/src/index.ts @@ -1,5 +1,4 @@ export * from './db/index.js' export * from './node/index.js' export * from './types.js' -export * from './util/index.js' export * from './verkleTree.js' diff --git a/packages/verkle/src/node/internalNode.ts b/packages/verkle/src/node/internalNode.ts index 2a401dc8d0..86a16a500e 100644 --- a/packages/verkle/src/node/internalNode.ts +++ b/packages/verkle/src/node/internalNode.ts @@ -33,7 +33,7 @@ export class InternalNode extends BaseVerkleNode { childIndex, // The hashed child commitments are used when updating the internal node commitment this.verkleCrypto.hashCommitment(oldChildReference.commitment), - this.verkleCrypto.hashCommitment(child.commitment) + this.verkleCrypto.hashCommitment(child.commitment), ) } diff --git a/packages/verkle/src/node/leafNode.ts b/packages/verkle/src/node/leafNode.ts index 8592fa4d38..1ce9704681 100644 --- a/packages/verkle/src/node/leafNode.ts +++ b/packages/verkle/src/node/leafNode.ts @@ -38,7 +38,7 @@ export class LeafNode extends BaseVerkleNode { static async create( stem: Uint8Array, verkleCrypto: VerkleCrypto, - values?: (Uint8Array | VerkleLeafNodeValue)[] + values?: (Uint8Array | VerkleLeafNodeValue)[], ): Promise { // Generate the value arrays for c1 and c2 values = values !== undefined ? 
values : createDefaultLeafValues() @@ -66,13 +66,13 @@ export class LeafNode extends BaseVerkleNode { verkleCrypto.zeroCommitment, 0, new Uint8Array(32), - setLengthLeft(intToBytes(1), 32) + setLengthLeft(intToBytes(1), 32), ) commitment = verkleCrypto.updateCommitment( commitment, 1, new Uint8Array(32), - setLengthRight(stem, 32) + setLengthRight(stem, 32), ) commitment = verkleCrypto.updateCommitment( commitment, @@ -80,13 +80,13 @@ export class LeafNode extends BaseVerkleNode { new Uint8Array(32), // We hash the commitment when using in the leaf node commitment since c1 is 64 bytes long // and we need a 32 byte input for the scalar value in `updateCommitment` - verkleCrypto.hashCommitment(c1) + verkleCrypto.hashCommitment(c1), ) commitment = verkleCrypto.updateCommitment( commitment, 3, new Uint8Array(32), - verkleCrypto.hashCommitment(c2) + verkleCrypto.hashCommitment(c2), ) return new LeafNode({ stem, @@ -127,7 +127,7 @@ export class LeafNode extends BaseVerkleNode { case VerkleLeafNodeValue.Untouched: return undefined case VerkleLeafNodeValue.Deleted: - // Return zeroes if a value is "deleted" (i.e. overwitten with zeroes) + // Return zeroes if a value is "deleted" (i.e. overwritten with zeroes) return new Uint8Array(32) default: return value @@ -164,7 +164,7 @@ export class LeafNode extends BaseVerkleNode { commitmentIndex, cValues[commitmentIndex], // Right pad the value with zeroes since commitments require 32 byte scalars - setLengthRight(val.slice(0, 16), 32) + setLengthRight(val.slice(0, 16), 32), ) // Update the commitment for the second 16 bytes of the value cCommitment = this.verkleCrypto.updateCommitment( @@ -172,7 +172,7 @@ export class LeafNode extends BaseVerkleNode { commitmentIndex + 1, cValues[commitmentIndex + 1], // Right pad the value with zeroes since commitments require 32 byte scalars - setLengthRight(val.slice(16), 32) + setLengthRight(val.slice(16), 32), ) // Update the cCommitment corresponding to the index let oldCCommitment: Uint8Array | undefined @@ -191,7 +191,7 @@ export class LeafNode extends BaseVerkleNode { this.commitment, cIndex, this.verkleCrypto.hashCommitment(oldCCommitment!), - this.verkleCrypto.hashCommitment(cCommitment) + this.verkleCrypto.hashCommitment(cCommitment), ) } diff --git a/packages/verkle/src/node/util.ts b/packages/verkle/src/node/util.ts index 72c1953034..b441bdfa19 100644 --- a/packages/verkle/src/node/util.ts +++ b/packages/verkle/src/node/util.ts @@ -88,9 +88,9 @@ export const createCValues = (values: (Uint8Array | VerkleLeafNodeValue)[]) => { break } // We add 16 trailing zeros to each value since all commitments are padded to an array of 32 byte values - // TODO: Determine whether we need to apply the leaf marker (i.e. set 129th bit) for all written values - // regardless of whether the value stored is zero or not expandedValues[x * 2] = setLengthRight(val.slice(0, 16), 32) + // Apply leaf marker to all touched values (i.e. flip 129th bit) + if (retrievedValue !== VerkleLeafNodeValue.Untouched) expandedValues[x * 2][16] = 0x80 expandedValues[x * 2 + 1] = setLengthRight(val.slice(16), 32) } return expandedValues diff --git a/packages/verkle/src/util/bytes.ts b/packages/verkle/src/util/bytes.ts deleted file mode 100644 index 4fb0c236ac..0000000000 --- a/packages/verkle/src/util/bytes.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Compares two byte arrays and returns the count of consecutively matching items from the start. - * - * @function - * @param {Uint8Array} bytes1 - The first Uint8Array to compare. 
- * @param {Uint8Array} bytes2 - The second Uint8Array to compare. - * @returns {number} The count of consecutively matching items from the start. - */ -export function matchingBytesLength(bytes1: Uint8Array, bytes2: Uint8Array): number { - let count = 0 - const minLength = Math.min(bytes1.length, bytes2.length) - - for (let i = 0; i < minLength; i++) { - if (bytes1[i] === bytes2[i]) { - count++ - } else { - // Break early if a mismatch is found - break - } - } - return count -} diff --git a/packages/verkle/src/util/index.ts b/packages/verkle/src/util/index.ts deleted file mode 100644 index 2da7bbb112..0000000000 --- a/packages/verkle/src/util/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './bytes.js' -export * from './tasks.js' diff --git a/packages/verkle/src/util/tasks.ts b/packages/verkle/src/util/tasks.ts deleted file mode 100644 index 26dc56a96a..0000000000 --- a/packages/verkle/src/util/tasks.ts +++ /dev/null @@ -1,59 +0,0 @@ -interface Task { - priority: number - fn: Function -} - -export class PrioritizedTaskExecutor { - /** The maximum size of the pool */ - private maxPoolSize: number - /** The current size of the pool */ - private currentPoolSize: number - /** The task queue */ - private queue: Task[] - - /** - * Executes tasks up to maxPoolSize at a time, other items are put in a priority queue. - * @class PrioritizedTaskExecutor - * @private - * @param maxPoolSize The maximum size of the pool - */ - constructor(maxPoolSize: number) { - this.maxPoolSize = maxPoolSize - this.currentPoolSize = 0 - this.queue = [] - } - - /** - * Executes the task or queues it if no spots are available. - * When a task is added, check if there are spots left in the pool. - * If a spot is available, claim that spot and give back the spot once the asynchronous task has been resolved. - * When no spots are available, add the task to the task queue. The task will be executed at some point when another task has been resolved. - * @private - * @param priority The priority of the task - * @param fn The function that accepts the callback, which must be called upon the task completion. - */ - executeOrQueue(priority: number, fn: Function) { - if (this.currentPoolSize < this.maxPoolSize) { - this.currentPoolSize++ - fn(() => { - this.currentPoolSize-- - if (this.queue.length > 0) { - this.queue.sort((a, b) => b.priority - a.priority) - const item = this.queue.shift() - this.executeOrQueue(item!.priority, item!.fn) - } - }) - } else { - this.queue.push({ priority, fn }) - } - } - - /** - * Checks if the taskExecutor is finished. - * @private - * @returns Returns `true` if the taskExecutor is finished, otherwise returns `false`. - */ - finished(): boolean { - return this.currentPoolSize === 0 - } -} diff --git a/packages/verkle/src/verkleTree.ts b/packages/verkle/src/verkleTree.ts index efebeaee67..47c768c7f8 100644 --- a/packages/verkle/src/verkleTree.ts +++ b/packages/verkle/src/verkleTree.ts @@ -6,6 +6,7 @@ import { bytesToHex, equalsBytes, intToHex, + matchingBytesLength, zeros, } from '@ethereumjs/util' import debug from 'debug' @@ -22,7 +23,6 @@ import { type VerkleTreeOpts, type VerkleTreeOptsWithDefaults, } from './types.js' -import { matchingBytesLength } from './util/index.js' import type { DB, PutBatch, VerkleCrypto } from '@ethereumjs/util' import type { Debugger } from 'debug' @@ -86,7 +86,7 @@ export class VerkleTree { this.verkleCrypto = opts?.verkleCrypto this.DEBUG = - typeof window === 'undefined' ? process?.env?.DEBUG?.includes('ethjs') ?? 
false : false + typeof window === 'undefined' ? (process?.env?.DEBUG?.includes('ethjs') ?? false) : false this.debug = this.DEBUG ? (message: string, namespaces: string[] = []) => { let log = this._debug @@ -237,7 +237,7 @@ export class VerkleTree { // Sanity check to verify we have the right node type if (!isLeafNode(foundPath.node)) { throw new Error( - `expected leaf node found at ${bytesToHex(stem)}. Got internal node instead` + `expected leaf node found at ${bytesToHex(stem)}. Got internal node instead`, ) } leafNode = foundPath.node @@ -245,8 +245,8 @@ export class VerkleTree { if (!equalsBytes(leafNode.stem, stem)) { throw new Error( `invalid leaf node found. Expected stem: ${bytesToHex(stem)}; got ${bytesToHex( - foundPath.node.stem - )}` + foundPath.node.stem, + )}`, ) } } else { @@ -265,7 +265,7 @@ export class VerkleTree { this.DEBUG && this.debug( `Updating value for suffix: ${suffix} at leaf node with stem: ${bytesToHex(stem)}`, - ['PUT'] + ['PUT'], ) putStack.push([leafNode.hash(), leafNode]) @@ -296,9 +296,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Updating child reference for node with path: ${bytesToHex( - lastPath + lastPath, )} at index ${childIndex} in internal node at path ${bytesToHex(nextPath)}`, - ['PUT'] + ['PUT'], ) // Hold onto `path` to current node for updating next parent node child index lastPath = nextPath @@ -318,7 +318,7 @@ export class VerkleTree { `Updating child reference for node with path: ${bytesToHex(lastPath)} at index ${ lastPath[0] } in root node`, - ['PUT'] + ['PUT'], ) this.DEBUG && this.debug(`Updating root node hash to ${bytesToHex(this._root)}`, ['PUT']) putStack.push([this._root, rootNode]) @@ -342,7 +342,7 @@ export class VerkleTree { updateParent( leafNode: LeafNode, nearestNode: VerkleNode, - pathToNode: Uint8Array + pathToNode: Uint8Array, ): { node: InternalNode; lastPath: Uint8Array } { // Compute the portion of leafNode.stem and nearestNode.path that match (i.e. 
the partial path closest to leafNode.stem) const partialMatchingStemIndex = matchingBytesLength(leafNode.stem, pathToNode) @@ -375,13 +375,13 @@ export class VerkleTree { this.DEBUG && this.debug( `Updating child reference for leaf node with stem: ${bytesToHex( - leafNode.stem + leafNode.stem, )} at index ${ leafNode.stem[partialMatchingStemIndex] } in internal node at path ${bytesToHex( - leafNode.stem.slice(0, partialMatchingStemIndex) + leafNode.stem.slice(0, partialMatchingStemIndex), )}`, - ['PUT'] + ['PUT'], ) } return { node: internalNode, lastPath: pathToNode } @@ -440,9 +440,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Path ${bytesToHex(key)} - found full path to node ${bytesToHex( - decodedNode.hash() + decodedNode.hash(), )}.`, - ['FIND_PATH'] + ['FIND_PATH'], ) result.node = decodedNode result.remaining = new Uint8Array() @@ -455,9 +455,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Path ${bytesToHex(pathToNearestNode)} - found path to nearest node ${bytesToHex( - decodedNode.hash() + decodedNode.hash(), )} but target node not found.`, - ['FIND_PATH'] + ['FIND_PATH'], ) result.stack.push([decodedNode, pathToNearestNode]) return result @@ -467,9 +467,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Partial Path ${bytesToHex( - key.slice(0, matchingKeyLength) + key.slice(0, matchingKeyLength), )} - found next node in path ${bytesToHex(decodedNode.hash())}.`, - ['FIND_PATH'] + ['FIND_PATH'], ) // Get the next child node in the path const childIndex = key[matchingKeyLength] @@ -478,9 +478,9 @@ export class VerkleTree { this.DEBUG && this.debug( `Found partial path ${key.slice( - 31 - result.remaining.length + 31 - result.remaining.length, )} but sought node is not present in trie.`, - ['FIND_PATH'] + ['FIND_PATH'], ) return result } @@ -529,10 +529,10 @@ export class VerkleTree { } /** - * Creates a proof from a tree and key that can be verified using {@link VerkleTree.verifyProof}. + * Creates a proof from a tree and key that can be verified using {@link VerkleTree.verifyVerkleProof}. * @param key */ - async createProof(_key: Uint8Array): Promise { + async createVerkleProof(_key: Uint8Array): Promise { throw new Error('Not implemented') } @@ -544,10 +544,10 @@ export class VerkleTree { * @throws If proof is found to be invalid. * @returns The value from the key, or null if valid proof of non-existence. */ - async verifyProof( + async verifyVerkleProof( _rootHash: Uint8Array, _key: Uint8Array, - _proof: Proof + _proof: Proof, ): Promise { throw new Error('Not implemented') } diff --git a/packages/verkle/test/internalNode.spec.ts b/packages/verkle/test/internalNode.spec.ts index 328658d9f4..f8fd34102f 100644 --- a/packages/verkle/test/internalNode.spec.ts +++ b/packages/verkle/test/internalNode.spec.ts @@ -22,7 +22,7 @@ describe('verkle node - internal', () => { assert.equal(node.children.length, NODE_WIDTH, 'number of children should equal verkle width') assert.ok( node.children.every((child) => child === null), - 'every children should be null' + 'every children should be null', ) }) @@ -33,14 +33,14 @@ describe('verkle node - internal', () => { assert.deepEqual( node.commitment, verkleCrypto.zeroCommitment, - 'commitment should be set to point identity' + 'commitment should be set to point identity', ) // Children nodes should all default to null. 
assert.equal(node.children.length, NODE_WIDTH, 'number of children should equal verkle width') assert.ok( node.children.every((child) => child === null), - 'every children should be null' + 'every children should be null', ) }) it('should serialize and deserialize a node', async () => { diff --git a/packages/verkle/test/leafNode.spec.ts b/packages/verkle/test/leafNode.spec.ts index d23dab7162..b9addecde5 100644 --- a/packages/verkle/test/leafNode.spec.ts +++ b/packages/verkle/test/leafNode.spec.ts @@ -2,7 +2,13 @@ import { type VerkleCrypto, equalsBytes, randomBytes, setLengthLeft } from '@eth import { loadVerkleCrypto } from 'verkle-cryptography-wasm' import { assert, beforeAll, describe, it } from 'vitest' -import { VerkleLeafNodeValue, VerkleNodeType, decodeNode, isLeafNode } from '../src/node/index.js' +import { + VerkleLeafNodeValue, + VerkleNodeType, + createCValues, + decodeNode, + isLeafNode, +} from '../src/node/index.js' import { LeafNode } from '../src/node/leafNode.js' describe('verkle node - leaf', () => { @@ -29,14 +35,14 @@ describe('verkle node - leaf', () => { assert.equal(node.type, VerkleNodeType.Leaf, 'type should be set') assert.ok( equalsBytes(node.commitment as unknown as Uint8Array, commitment), - 'commitment should be set' + 'commitment should be set', ) assert.ok(equalsBytes(node.c1 as unknown as Uint8Array, c1), 'c1 should be set') assert.ok(equalsBytes(node.c2 as unknown as Uint8Array, c2), 'c2 should be set') assert.ok(equalsBytes(node.stem, stem), 'stem should be set') assert.ok( values.every((value, index) => equalsBytes(value, node.values[index] as Uint8Array)), - 'values should be set' + 'values should be set', ) }) @@ -61,11 +67,18 @@ describe('verkle node - leaf', () => { assert.deepEqual(node.getValue(0), new Uint8Array(32)) }) + it('should set the leaf marker on a touched value', async () => { + const key = randomBytes(32) + const node = await LeafNode.create(key.slice(0, 31), verkleCrypto) + node.setValue(0, VerkleLeafNodeValue.Deleted) + const c1Values = createCValues(node.values.slice(0, 128)) + assert.equal(c1Values[0][16], 0x80) + }) + it('should update a commitment when setting a value', async () => { const key = randomBytes(32) const stem = key.slice(0, 31) - const values = new Array(256).fill(new Uint8Array(32)) - const node = await LeafNode.create(stem, verkleCrypto, values) + const node = await LeafNode.create(stem, verkleCrypto) assert.deepEqual(node.c1, verkleCrypto.zeroCommitment) node.setValue(0, randomBytes(32)) assert.notDeepEqual(node.c1, verkleCrypto.zeroCommitment) diff --git a/packages/verkle/test/util/bytes.spec.ts b/packages/verkle/test/util/bytes.spec.ts deleted file mode 100644 index 850a2bdb14..0000000000 --- a/packages/verkle/test/util/bytes.spec.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { assert, describe, it } from 'vitest' - -import { matchingBytesLength } from '../../src/util/bytes.js' - -describe('matchingBytesLength', () => { - it('should return 0 when both arrays are empty', () => { - const bytes1 = new Uint8Array([]) - const bytes2 = new Uint8Array([]) - assert.equal(matchingBytesLength(bytes1, bytes2), 0) - }) - - it('should return 0 when one of the arrays is empty', () => { - const bytes1 = new Uint8Array([1, 2, 3]) - const bytes2 = new Uint8Array([]) - assert.equal(matchingBytesLength(bytes1, bytes2), 0) - }) - - it('should return 0 when arrays have no matching elements', () => { - const bytes1 = new Uint8Array([1, 2, 3]) - const bytes2 = new Uint8Array([4, 5, 6]) - assert.equal(matchingBytesLength(bytes1, bytes2), 
0) - }) - - it('should handle arrays with same elements but different lengths', () => { - const bytes1 = new Uint8Array([1, 2, 3]) - const bytes2 = new Uint8Array([1, 2, 3, 4]) - assert.equal(matchingBytesLength(bytes1, bytes2), 3) - }) - - it('should handle arrays with matching elements at end', () => { - const bytes1 = new Uint8Array([1, 2, 3]) - const bytes2 = new Uint8Array([0, 1, 2, 3]) - assert.equal(matchingBytesLength(bytes1, bytes2), 0) - }) - - it('should handle arrays with matching elements at start', () => { - const bytes1 = new Uint8Array([1, 2, 3]) - const bytes2 = new Uint8Array([1, 2, 3, 4, 5]) - assert.equal(matchingBytesLength(bytes1, bytes2), 3) - }) - - it('should handle arrays with large number of elements', () => { - const bytes1 = new Uint8Array(Array.from({ length: 1000000 }, (_, i) => i)) - const bytes2 = new Uint8Array(Array.from({ length: 1000000 }, (_, i) => i)) - assert.equal(matchingBytesLength(bytes1, bytes2), 1000000) - }) -}) diff --git a/packages/verkle/test/verkle.spec.ts b/packages/verkle/test/verkle.spec.ts index 9cbfb15fe6..17c11b5f69 100644 --- a/packages/verkle/test/verkle.spec.ts +++ b/packages/verkle/test/verkle.spec.ts @@ -1,4 +1,4 @@ -import { MapDB, equalsBytes, hexToBytes } from '@ethereumjs/util' +import { MapDB, equalsBytes, hexToBytes, matchingBytesLength } from '@ethereumjs/util' import { loadVerkleCrypto } from 'verkle-cryptography-wasm' import { assert, beforeAll, describe, it } from 'vitest' @@ -8,7 +8,6 @@ import { VerkleLeafNodeValue, VerkleNodeType, decodeNode, - matchingBytesLength, } from '../src/index.js' import { VerkleTree } from '../src/verkleTree.js' @@ -97,7 +96,7 @@ describe('Verkle tree', () => { assert.deepEqual( verkleCrypto.serializeCommitment(pathToNonExistentNode.stack[0][0].commitment), tree.root(), - 'contains the root node in the stack' + 'contains the root node in the stack', ) }) @@ -264,7 +263,7 @@ describe('Verkle tree', () => { assert.ok(res.node !== null) assert.deepEqual( (res.node as LeafNode).values[hexToBytes(keys[0])[31]], - VerkleLeafNodeValue.Deleted + VerkleLeafNodeValue.Deleted, ) }) }) diff --git a/packages/verkle/tsconfig.lint.json b/packages/verkle/tsconfig.lint.json new file mode 100644 index 0000000000..3698f4f0be --- /dev/null +++ b/packages/verkle/tsconfig.lint.json @@ -0,0 +1,3 @@ +{ + "extends": "../../config/tsconfig.lint.json" +} diff --git a/packages/vm/.eslintrc.cjs b/packages/vm/.eslintrc.cjs index 6aaac545e9..811a41ecc2 100644 --- a/packages/vm/.eslintrc.cjs +++ b/packages/vm/.eslintrc.cjs @@ -1,14 +1,16 @@ module.exports = { extends: '../../config/eslint.cjs', + parserOptions: { + project: ['./tsconfig.lint.json'], + }, rules: { '@typescript-eslint/no-use-before-define': 'off', 'no-invalid-this': 'off', 'no-restricted-syntax': 'off', - 'import/extensions': 'off', }, overrides: [ { - files: ['test/util.ts', 'test/tester/**/*.ts'], + files: ['test/util.ts', 'test/tester/**/*.ts', 'examples/**/*.ts'], rules: { 'no-console': 'off', }, diff --git a/packages/vm/CHANGELOG.md b/packages/vm/CHANGELOG.md index 6877c0d6aa..a95a47bc95 100644 --- a/packages/vm/CHANGELOG.md +++ b/packages/vm/CHANGELOG.md @@ -6,11 +6,58 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) (modification: no type change headlines) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
-## 8.0.0 - 2024-03-05 +## 8.1.0 - 2024-08-15 + +### EIP-7685 Requests: EIP-6110 (Deposits) / EIP-7002 (Withdrawals) / EIP-7251 (Consolidations) + +This library now supports `EIP-6110` deposit requests, see PR [#3390](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3390), `EIP-7002` withdrawal requests, see PR [#3385](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3385) and `EIP-7251` consolidation requests, see PR [#3477](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3477) as well as the underlying generic execution layer request logic introduced with `EIP-7685` (PR [#3372](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3372)). + +These new request types will be activated with the `Prague` hardfork, see [@ethereumjs/block](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/block) README for detailed documentation. + +### EIP-2935 Serve Historical Block Hashes from State (Prague) + +Starting with this release the VM supports [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) which stores the latest 8192 block hashes in the storage of a system contract, see PR [#3475](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3475) as the major integration PR (while work on this has already been done in previous PRs). + +This EIP will be activated along the Prague hardfork. Note that this EIP has no effect on the resolution of the `BLOCKHASH` opcode, which will be a separate activation taking place by the integration of [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709) in the following Osaka hardfork. + +### Verkle Dependency Decoupling + +We have relatively light-heartedly added a new `@ethereumjs/verkle` main dependency to the VM/EVM stack in the `v7.2.1` release, which added an additional burden to the bundle size by several hundred KB and additionally draws in unnecessary WASM code. Coupling with Verkle has been refactored in PR [#3462](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3462) and the direct dependency has been removed again. + +An update to this release is therefore strongly recommended even if other fixes or features are not that relevant for you right now. 
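For orientation on the new request types mentioned above: they are plain value objects shipped with `@ethereumjs/util` and follow the same `fromRequestData()` / `serialize()` / `toJSON()` pattern exercised in the `requests.spec.ts` changes earlier in this diff. A minimal sketch — the concrete `DepositRequest` class name and its field names are assumptions based on the EIP-6110 deposit layout and may not match the shipped API exactly:

```ts
import { DepositRequest, bytesToHex } from '@ethereumjs/util'

// Hypothetical field names following the EIP-6110 deposit event layout
const deposit = DepositRequest.fromRequestData({
  pubkey: new Uint8Array(48),
  withdrawalCredentials: new Uint8Array(32),
  amount: BigInt(32000000000),
  signature: new Uint8Array(96),
  index: BigInt(0),
})

console.log(deposit.toJSON()) // JSON view of the request fields
console.log(bytesToHex(deposit.serialize())) // type-prefixed serialization as defined by EIP-7685
```
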
+ +### Verkle Updates + +- Fixes for Kaustinen4 support, PR [#3269](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3269) +- Kaustinen5 related fixes, PR [#3343](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3343) +- Kaustinen6 adjustments, `verkle-cryptography-wasm` migration, PRs [#3355](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3355) and [#3356](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3356) +- Missing beaconroot account verkle fix, PR [#3421](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3421) +- Remove the hacks to prevent account cleanups of system contracts, PR [#3418](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3418) +- Updates EIP-2935 tests with the new proposed bytecode and corresponding config, PR [#3438](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3438) +- Fix EIP-2935 address conversion issues, PR [#3447](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3447) +- Remove backfill of block hashes on EIP-2935 activation, PR [#3478](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3478) + +### Other Features + +- Add `evmOpts` to the VM opts to allow for options chaining to the underlying EVM, PR [#3481](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3481) +- Stricter prefixed hex typing, PRs [#3348](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3348), [#3427](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3427) and [#3357](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3357) (some changes removed in PR [#3382](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3382) for backwards compatibility reasons, will be reintroduced along upcoming breaking releases) + +### Other Changes + +- Removes support for [EIP-2315](https://eips.ethereum.org/EIPS/eip-2315) simple subroutines for EVM (deprecated with an alternative version integrated into EOF), PR [#3342](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3342) +- Small clean-up to `VM._emit()`, PR [#3396](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3396) +- Update `mcl-wasm` Dependency (Esbuild Issue), PR [#3461](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3461) + +### Bugfixes + +- Fix block building with blocks including CL requests, PR [#3413](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3413) +- Ensure system address is not created if it is empty, PR [#3400](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3400) + +## 8.0.0 - 2024-03-18 ### New EVM.create() Async Static Constructor / Mandatory VM.create() Constructor -This is an in-between breaking release on both the EVM and VM packages due to a problematic top level await() discovery in the underlying `rustbn-wasm` library (see issue [#10](https://github.com/ethereumjs/rustbn-wasm/issues/10)) generally affecting the compatiblity of our libraries. +This is an in-between breaking release on both the EVM and VM packages due to a problematic top level await() discovery in the underlying `rustbn-wasm` library (see issue [#10](https://github.com/ethereumjs/rustbn-wasm/issues/10)) generally affecting the compatibility of our libraries. The `EVM` direct constructor initialization with `new EVM()` now has been deprecated and replaced by an async static `create()` constructor, as it is already done in various other libraries in the EthereumJS monorepo, see PRs [#3304](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3304/) and [#3315](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3315). 
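A minimal before/after sketch of this constructor change, shown for orientation only (the exact option names passed to `create()` are illustrative):

```ts
import { EVM } from '@ethereumjs/evm'
import { VM } from '@ethereumjs/vm'

// Before (now deprecated):
//   const evm = new EVM(evmOpts)
//   const vm = new VM({ evm })

// After: use the async static constructors
const evm = await EVM.create() // optionally pass EVM options here
const vm = await VM.create({ evm }) // pass the custom EVM along the VM initialization
```
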
If you pass in a custom `EVM` along your `VM` initialization you **need to update your EVM initialization**. @@ -69,7 +116,7 @@ Since this fits well also to be placed here relatively prominently for awareness ## 7.2.1 - 2024-02-08 -- Hotfix release moving the `@ethereumjs/verkle` dependency for `@ethereumjs/statemanager` from a peer dependency to the main dependencis (note that this decision might be temporary) +- Hotfix release moving the `@ethereumjs/verkle` dependency for `@ethereumjs/statemanager` from a peer dependency to the main dependencies (note that this decision might be temporary) ## 7.2.0 - 2023-02-01 @@ -176,7 +223,7 @@ While you could use our libraries in the browser libraries before, there had bee WE HAVE ELIMINATED ALL OF THEM. -The largest two undertakings: First: we have rewritten all (half) of our API and elimited the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went throuh our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. +The largest two undertakings: First: we have rewritten all (half) of our API and eliminated the usage of Node.js specific `Buffer` all over the place and have rewritten with using `Uint8Array` byte objects. Second: we went through our whole stack, rewrote imports and exports, replaced and updated dependencies all over and are now able to provide a hybrid CommonJS/ESM build, for all libraries. Both of these things are huge. Together with some few other modifications this now allows to run each (maybe adding an asterisk for client and devp2p) of our libraries directly in the browser - more or less without any modifications - see the `examples/browser.html` file in each package folder for an easy to set up example. @@ -282,7 +329,7 @@ The `0x44` (old `DIFFICULTY`) opcode - is now named `PREVRANDAO` - starting with Genesis state was huge and had previously been bundled with the `Blockchain` package with the burden going over to the VM, since `Blockchain` is a dependency. -With this release genesis state has been removed from `blockchain` and moved into its own auxiliary package [@ethereumjs/genesis](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/genesis), from which it can be included if needed (for most - especially VM - use cases it is not neceesary), see PR [#2844](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2844). +With this release genesis state has been removed from `blockchain` and moved into its own auxiliary package [@ethereumjs/genesis](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/genesis), from which it can be included if needed (for most - especially VM - use cases it is not necessary), see PR [#2844](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2844). This goes along with some changes in Blockchain and VM API: @@ -482,7 +529,7 @@ The VM library is now ready to work with hardforks triggered by timestamp, which ### Bug Fixes and Other Changes -- More correctly timed `nonce` updates in `VM.runTx()` to avoid certain consensus-critical `nonce`/`account` update constallations. PR [#2404](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2404) +- More correctly timed `nonce` updates in `VM.runTx()` to avoid certain consensus-critical `nonce`/`account` update constellations. 
PR [#2404](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2404) A reminder: This package does not contain the core EVM code any more. For EVM related bugfixes see the associated [@ethereumjs/evm](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/evm) `v1.2.3` release. @@ -547,7 +594,7 @@ This is the biggest VM change in this release. The inheritance structure of both This allows for an easier typing of the inner `EVM` and makes the core VM/EVM classes leaner and not overloaded with various other partly unused properties. The new `events` property is optional. -Usage code of events needs to be slighly adopted and updated from: +Usage code of events needs to be slightly adopted and updated from: ```ts vm.on('beforeBlock', (val) => { @@ -572,7 +619,7 @@ vm.evm.events!.on('step', (e) => { ### Other Changes - Made `touchAccount` of `VMState` public, PR [#2183](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2183) -- **Pontentially breaking:** Removed `common` option from underlying `StateManager`, PR [#2197](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2197) +- **Potentially breaking:** Removed `common` option from underlying `StateManager`, PR [#2197](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2197) - Reworked/adjusted underlying EVM `skipBalance` option semantics, PR [#2138](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2138) - Fixed an underlying EVM event signature typing bug, PR [#2184](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2184) @@ -618,7 +665,7 @@ Beta 2 release for the upcoming breaking release round on the [EthereumJS monore ### Removed Default Exports -The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all accross the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. +The change with the biggest effect on UX since the last Beta 1 releases is for sure that we have removed default exports all across the monorepo, see PR [#2018](https://github.com/ethereumjs/ethereumjs-monorepo/pull/2018), we even now added a new linting rule that completely disallows using. Default exports were a common source of error and confusion when using our libraries in a CommonJS context, leading to issues like Issue [#978](https://github.com/ethereumjs/ethereumjs-monorepo/issues/978). @@ -626,7 +673,7 @@ Now every import is a named import and we think the long term benefits will very #### Common Library Import Updates -Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all accross our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. +Since our [@ethereumjs/common](https://github.com/ethereumjs/ethereumjs-monorepo/tree/master/packages/common) library is used all across our libraries for chain and HF instantiation this will likely be the one being the most prevalent regarding the need for some import updates. So Common import and usage is changing from: @@ -874,7 +921,7 @@ Note that state in the VM is not activated by default (this also goes for accoun ### L2 Support: Custom Opcodes Option -There is now a new option `customOpcodes` for the VM which allows to add custom opcodes to the VM, see PR [#1705](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1705). 
This should be useful for L2s and other EVM based side chains if they come with a slighly different opcode set for bytecode execution. +There is now a new option `customOpcodes` for the VM which allows to add custom opcodes to the VM, see PR [#1705](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1705). This should be useful for L2s and other EVM based side chains if they come with a slightly different opcode set for bytecode execution. New opcodes can be passed in with its own logic function and an additional function for gas calculation. Additionally the new option allows for overwriting and/or deleting existing opcodes. @@ -961,7 +1008,7 @@ invalid receiptTrie (vm hf=berlin -> block number=1 hash=0x8e368301586b53e30c58d The extended errors give substantial more object and chain context and should ease debugging. -**Potentially breaking**: Attention! If you do react on errors in your code and do exact errror matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisions and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). +**Potentially breaking**: Attention! If you do react on errors in your code and do exact error matching (`error.message === 'invalid transaction trie'`) things will break. Please make sure to do error comparisons with something like `error.message.includes('invalid transaction trie')` instead. This should generally be the pattern used for all error message comparisons and is assured to be future proof on all error messages (we won't change the core text in non-breaking releases). ### Other Changes @@ -1080,7 +1127,7 @@ There is a new EVM Object Format (EOF) in preparation which will allow to valida ### StateManager: Preserve State History -This VM release bumps the `merkle-patricia-tree` dependeny to `v4.2.0`, which is used as a datastore for the default `StateManager` implementation. The new MPT version switches to a default behavior to not delete any trie nodes on checkpoint commits, which has implications on the `StateManager.commit()` function which internally calls the MPT commit. This allows to go back to older trie states by setting a new (old) state root with `StateManager.setStateRoot()`. The trie state is now guaranteed to still be consistent and complete, which has not been the case before and lead to erraneous behaviour in certain usage scenarios (e.g. reported by HardHat). +This VM release bumps the `merkle-patricia-tree` dependency to `v4.2.0`, which is used as a datastore for the default `StateManager` implementation. The new MPT version switches to a default behavior to not delete any trie nodes on checkpoint commits, which has implications on the `StateManager.commit()` function which internally calls the MPT commit. This allows to go back to older trie states by setting a new (old) state root with `StateManager.setStateRoot()`. The trie state is now guaranteed to still be consistent and complete, which has not been the case before and lead to erroneous behaviour in certain usage scenarios (e.g. reported by HardHat). 
See PR [#1262](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1262) @@ -1101,7 +1148,7 @@ See PR [#1168](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1168) ## 5.3.2 - 2021-04-12 -This is a hot-fix performance release, removing the `debug` functionality from PR [#1080](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1080) and follow-up PRs. While highly useful for debugging, this feature side-introduced a siginficant reduction in VM performance which went along unnoticed. For now we will remove since upstream dependencies are awaiting a new release before the `Belin` HF happening. We will try to re-introduce in a performance friendly manner in some subsequent release (we cannot promise on that though). +This is a hot-fix performance release, removing the `debug` functionality from PR [#1080](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1080) and follow-up PRs. While highly useful for debugging, this feature side-introduced a significant reduction in VM performance which went along unnoticed. For now we will remove since upstream dependencies are awaiting a new release before the `Berlin` HF happening. We will try to re-introduce in a performance friendly manner in some subsequent release (we cannot promise on that though). See PR [#1198](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1198). @@ -1215,7 +1262,7 @@ If you are using this library in conjunction with other EthereumJS libraries mak ### Other Features - `{ stateRoot, gasUsed, logsBloom, receiptRoot }` have been added to `RunBlockResult` and will be emitted with `afterBlock`, PR [#853](https://github.com/ethereumjs/ethereumjs-monorepo/pull/853) -- Added `vm:eei:gas` EEI gas debug looger, PR [#1124](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1124) +- Added `vm:eei:gas` EEI gas debug logger, PR [#1124](https://github.com/ethereumjs/ethereumjs-monorepo/pull/1124) ### Other Fixes @@ -1342,7 +1389,7 @@ const common = new Common({ chain: 'mainnet', hardfork: 'spuriousDragon' }) const vm = new VM({ common }) ``` -**Breaking**: The default HF from the VM has been updated from `petersburg` to `istanbul`. The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). +**Breaking**: The default HF from the VM has been updated from `petersburg` to `istanbul`. The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). **Breaking**: Please note that the options to directly pass in `chain` and `hardfork` strings have been removed to simplify the API. Providing a `Common` instance is now the only way to change the chain setup, see PR [#863](https://github.com/ethereumjs/ethereumjs-monorepo/pull/863) @@ -1395,7 +1442,7 @@ The Util package also introduces a new [Address class](https://github.com/ethere We significantly updated our internal tool and CI setup along the work on PR [#913](https://github.com/ethereumjs/ethereumjs-monorepo/pull/913) with an update to `ESLint` from `TSLint` for code linting and formatting and the introduction of a new build setup. -Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). 
This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1496,7 +1543,7 @@ const vm = new VM({ common }) ``` **Breaking**: The default HF from the VM has been updated from `petersburg` to `istanbul`. -The HF setting is now automatically taken from the HF set for `Common.DEAULT_HARDFORK`, +The HF setting is now automatically taken from the HF set for `Common.DEFAULT_HARDFORK`, see PR [#906](https://github.com/ethereumjs/ethereumjs-monorepo/pull/906). **Breaking**: Please note that the options to directly pass in @@ -1585,7 +1632,7 @@ for code linting and formatting and the introduction of a new build setup. Packages now target `ES2017` for Node.js builds (the `main` entrypoint from `package.json`) and introduce a separate `ES5` build distributed along using the `browser` directive as an entrypoint, see PR [#921](https://github.com/ethereumjs/ethereumjs-monorepo/pull/921). This will result -in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a releated discussion. +in performance benefits for Node.js consumers, see [here](https://github.com/ethereumjs/merkle-patricia-tree/pull/117) for a related discussion. ### Other Changes @@ -1878,7 +1925,7 @@ These will be the main release notes for the `v4` feature updates, subsequent `beta` releases and the final release will just publish the delta changes and point here for reference. -Breaking changes in the release notes are preeceeded with `[BREAKING]`, do a +Breaking changes in the release notes are preceded with `[BREAKING]`, do a search for an overview. The outstanding work of [@s1na](https://github.com/s1na) has to be mentioned @@ -2034,7 +2081,7 @@ vm.runTx( // Handle errors appropriately } // Do something with the result - } + }, ) ``` @@ -2284,7 +2331,7 @@ The `StateManager` (`lib/stateManager.js`) - providing a high-level interface to This comes along with larger refactoring work throughout more-or-less the whole code base and the `StateManager` now completely encapsulates the trie structure and the cache backend used, see issue [#268](https://github.com/ethereumjs/ethereumjs-monorepo/issues/268) and associated PRs for reference. This will make it much easier in the future to bring along an own state manager serving special needs (optimized for memory and performance, run on mobile,...) by e.g. using a different trie implementation, cache or underlying storage or database backend. -We plan to completely separate the currently still integrated state manager into its own repository in one of the next releases, this will then be a breaking `v3.0.0` release. Discussion around a finalized interface (we might e.g. drop all genesis-releated methods respectively methods implemented in the `DefaultStateManager`) is still ongoing and you are very much invited to jump in and articulate your needs, just take e.g. the issue mentioned above as an entry point. 
+We plan to completely separate the currently still integrated state manager into its own repository in one of the next releases, this will then be a breaking `v3.0.0` release. Discussion around a finalized interface (we might e.g. drop all genesis-related methods respectively methods implemented in the `DefaultStateManager`) is still ongoing and you are very much invited to jump in and articulate your needs, just take e.g. the issue mentioned above as an entry point. Change related to the new `StateManager` interface: @@ -2350,7 +2397,7 @@ making the start being introduced in the `v2.4.0` release. Since both the scope of the `Constantinople` hardfork as well as the state of at least some of the EIPs to be included are not yet finalized, this is only meant for `EXPERIMENTAL` purposes, e.g. for developer -tools to give users early access and make themself familiar with dedicated features. +tools to give users early access and make themselves familiar with dedicated features. Once scope and EIPs from `Constantinople` are final we will target a `v2.5.0` release which will officially introduce `Constantinople` support with all the changes bundled together. @@ -2367,7 +2414,7 @@ All the changes from this release: **FEATURES/FUNCTIONALITY** - Improved chain and fork support, see PR [#304](https://github.com/ethereumjs/ethereumjs-monorepo/pull/304) -- Support for the `Constantinople` bitwise shifiting instructions `SHL`, `SHR` and `SAR`, see PR [#251](https://github.com/ethereumjs/ethereumjs-monorepo/pull/251) +- Support for the `Constantinople` bitwise shifting instructions `SHL`, `SHR` and `SAR`, see PR [#251](https://github.com/ethereumjs/ethereumjs-monorepo/pull/251) - New `newContract` event which can be used to do interrupting tasks on contract/address creation, see PR [#306](https://github.com/ethereumjs/ethereumjs-monorepo/pull/306) - Alignment of behavior of bloom filter hashing to go along with mainnet compatible clients _BREAKING_, see PR [#295](https://github.com/ethereumjs/ethereumjs-monorepo/pull/295) diff --git a/packages/vm/README.md b/packages/vm/README.md index 814a0422c4..8809920c1e 100644 --- a/packages/vm/README.md +++ b/packages/vm/README.md @@ -85,7 +85,7 @@ const main = async () => { const parentBlock = Block.fromBlockData( { header: { number: 1n } }, - { skipConsensusFormatValidation: true } + { skipConsensusFormatValidation: true }, ) const headerData = { number: 2n, @@ -223,7 +223,7 @@ const main = async () => { const vm = await VM.create({ common, setHardfork: true }) const block = Block.fromRPC(goerliBlock2, undefined, { common }) - const result = await vm.runBlock({ block, generate: true, skipHeaderValidation: true }) // we skip header validaiton since we are running a block without the full Ethereum history available + const result = await vm.runBlock({ block, generate: true, skipHeaderValidation: true }) // we skip header validation since we are running a block without the full Ethereum history available console.log(`The state root for Goerli block 2 is ${bytesToHex(result.stateRoot)}`) } @@ -274,12 +274,12 @@ const main = async () => { const blockchain = await Blockchain.create({ genesisState }) const vm = await VM.create({ blockchain, genesisState }) const account = await vm.stateManager.getAccount( - Address.fromString('0x000d836201318ec6899a67540690382780743280') + Address.fromString('0x000d836201318ec6899a67540690382780743280'), ) console.log( `This balance for account 0x000d836201318ec6899a67540690382780743280 in this chain's genesis state is ${Number( - 
account?.balance - )}` + account?.balance, + )}`, ) } main()
@@ -318,6 +318,26 @@ For a list with supported EIPs see the [@ethereumjs/evm](https://github.com/ethe
 This library supports the blob transaction type introduced with [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844).
+### EIP-7702 EOA Code Transactions Support (outdated)
+
+This library supports the execution of [EIP-7702](https://eips.ethereum.org/EIPS/eip-7702) EOA code transactions (see tx library for full documentation) with `runTx()` or the wrapping `runBlock()` execution methods starting with `v3.1.0`, see [this test setup](https://github.com/ethereumjs/ethereumjs-monorepo/blob/master/packages/vm/test/api/EIPs/eip-7702.spec.ts) for a more complete example setup on how to run code from an EOA.
+
+Note: Things move fast with `EIP-7702` and the currently released implementation is based on [this](https://github.com/ethereum/EIPs/blob/14400434e1199c57d912082127b1d22643788d11/EIPS/eip-7702.md) commit and therefore already outdated. An up-to-date version will be released along our breaking release round planned for early September 2024.
+
+### EIP-7685 Requests Support
+
+This library supports blocks including the following [EIP-7685](https://eips.ethereum.org/EIPS/eip-7685) requests:
+
+- [EIP-6110](https://eips.ethereum.org/EIPS/eip-6110) - Deposit Requests (`v7.3.0`+)
+- [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) - Withdrawal Requests (`v7.3.0`+)
+- [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) - Consolidation Requests (`v7.3.0`+)
+
+### EIP-2935 Serve Historical Block Hashes from State (Prague)
+
+Starting with `v8.1.0` the VM supports [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) which stores the latest 8192 block hashes in the storage of a system contract, see PR [#3475](https://github.com/ethereumjs/ethereumjs-monorepo/pull/3475) as the major integration PR (while work on this has already been done in previous PRs).
+
+This EIP will be activated along the Prague hardfork. Note that this EIP has no effect on the resolution of the `BLOCKHASH` opcode, which will be a separate activation taking place by the integration of [EIP-7709](https://eips.ethereum.org/EIPS/eip-7709) in the following Osaka hardfork.
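+
+The following is a rough, non-normative sketch of how an explicit activation could look; the `eips: [2935]` option and the `Hardfork.Cancun` base HF are illustrative assumptions only, adapt them to the concrete `@ethereumjs/common` API of the release you are using:
+
+```ts
+// Hypothetical sketch: switch on EIP-2935 explicitly on top of a recent hardfork
+import { Chain, Common, Hardfork } from '@ethereumjs/common'
+import { VM } from '@ethereumjs/vm'
+
+const main = async () => {
+  // Assumption: EIP-2935 can be activated via the generic `eips` option
+  // (once Prague is fully supported it is expected to be part of that HF)
+  const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.Cancun, eips: [2935] })
+  console.log(common.isActivatedEIP(2935)) // true if your Common version knows the EIP
+
+  const vm = await VM.create({ common })
+  // During runBlock() the parent block hash is then written into the storage of the
+  // EIP-2935 system contract (BLOCKHASH resolution itself is unaffected, see note above)
+}
+
+main()
+```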
+ #### Initialization To run VM/EVM related EIP-4844 functionality you have to activate the EIP in the associated `@ethereumjs/common` library: diff --git a/packages/vm/benchmarks/mainnetBlocks.ts b/packages/vm/benchmarks/mainnetBlocks.ts index dc806da008..d5872a2a2a 100644 --- a/packages/vm/benchmarks/mainnetBlocks.ts +++ b/packages/vm/benchmarks/mainnetBlocks.ts @@ -1,16 +1,14 @@ -import { readFileSync } from 'fs' -import Benchmark from 'benchmark' -import { Chain, Common, Hardfork } from '@ethereumjs/common' import { Block, createBlockFromRPC } from '@ethereumjs/block' -import { VM } from '@ethereumjs/vm' -import { getPreState, getBlockchain, verifyResult } from './util.js' +import { Common, Hardfork, Mainnet } from '@ethereumjs/common' +import { runBlock as runBlockVM, VM } from '@ethereumjs/vm' +import Benchmark from 'benchmark' +import { readFileSync } from 'fs' +import { getBlockchain, getPreState, verifyResult } from './util.js' const BLOCK_FIXTURE = 'benchmarks/fixture/blocks-prestate.json' const runBlock = async (vm: VM, block: Block, receipts: any) => { - await ( - await vm.shallowCopy() - ).runBlock({ + await runBlockVM(await vm.shallowCopy(), { block, generate: true, skipBlockValidation: true, @@ -27,7 +25,7 @@ export async function mainnetBlocks(suite?: Benchmark.Suite, numSamples?: number console.log(`Number of blocks to sample: ${numSamples}`) data = data.slice(0, numSamples) - const common = new Common({ chain: Chain.Mainnet, hardfork: Hardfork.MuirGlacier }) + const common = new Common({ chain: Mainnet, hardfork: Hardfork.MuirGlacier }) for (const blockData of data) { const block = createBlockFromRPC(blockData.block, [], { common }) diff --git a/packages/vm/benchmarks/mockchain.ts b/packages/vm/benchmarks/mockchain.ts index fb413978d1..0318176c8a 100644 --- a/packages/vm/benchmarks/mockchain.ts +++ b/packages/vm/benchmarks/mockchain.ts @@ -9,10 +9,10 @@ export class Mockchain { async _init() {} getBlock(num: bigint): any { - const bhash = this._hashes[num.toString()] + const bHash = this._hashes[num.toString()] return { hash() { - return bhash + return bHash }, } } diff --git a/packages/vm/benchmarks/util.ts b/packages/vm/benchmarks/util.ts index 460f29ce30..f346134417 100644 --- a/packages/vm/benchmarks/util.ts +++ b/packages/vm/benchmarks/util.ts @@ -33,7 +33,7 @@ export async function getPreState( pre: { [k: string]: StateTestPreAccount }, - common: Common + common: Common, ): Promise { const state = new DefaultStateManager() await state.checkpoint() @@ -42,19 +42,19 @@ export async function getPreState( const { nonce, balance, code, storage } = pre[k] const account = new Account(BigInt(nonce), BigInt(balance)) await state.putAccount(address, account) - await state.putContractCode(address, toBytes(code)) + await state.putCode(address, toBytes(code)) for (const storageKey in storage) { const storageValue = storage[storageKey] const storageValueBytes = hexToBytes( - isHexString(storageValue) ? storageValue : `0x${storageValue}` + isHexString(storageValue) ? storageValue : `0x${storageValue}`, ) // verify if this value buffer is not a zero buffer. if so, we should not write it... const zeroBytesEquivalent = new Uint8Array(storageValueBytes.length) if (!equalsBytes(zeroBytesEquivalent, storageValueBytes)) { - await state.putContractStorage( + await state.putStorage( address, hexToBytes(isHexString(storageKey) ? 
storageKey : `0x${storageKey}`), - storageValueBytes + storageValueBytes, ) } } diff --git a/packages/vm/examples/browser.html b/packages/vm/examples/browser.html index 0173d56d77..dabeeedc98 100644 --- a/packages/vm/examples/browser.html +++ b/packages/vm/examples/browser.html @@ -4,7 +4,7 @@ EthereumJS Browser Examples