diff --git a/src/chains/ethereum/block/src/runtime-block.ts b/src/chains/ethereum/block/src/runtime-block.ts index d018b51b21..0816957c87 100644 --- a/src/chains/ethereum/block/src/runtime-block.ts +++ b/src/chains/ethereum/block/src/runtime-block.ts @@ -3,7 +3,8 @@ import { Quantity, BUFFER_EMPTY, BUFFER_32_ZERO, - BUFFER_8_ZERO + BUFFER_8_ZERO, + BUFFER_ZERO } from "@ganache/utils"; import { BN, KECCAK256_RLP_ARRAY } from "ethereumjs-util"; import { EthereumRawBlockHeader, serialize } from "./serialize"; @@ -128,7 +129,7 @@ export class RuntimeBlock { timestamp: new BnExtra(ts), baseFeePerGas: baseFeePerGas === undefined - ? undefined + ? new BnExtra(BUFFER_ZERO) : new BnExtra(Quantity.from(baseFeePerGas).toBuffer()) }; } diff --git a/src/chains/ethereum/ethereum/package-lock.json b/src/chains/ethereum/ethereum/package-lock.json index e2679e3ac1..185d6acc30 100644 --- a/src/chains/ethereum/ethereum/package-lock.json +++ b/src/chains/ethereum/ethereum/package-lock.json @@ -486,31 +486,42 @@ } }, "@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "dev": true, "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" }, "dependencies": { - "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "bufferutil": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" } }, - "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==", - "dev": true + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "dev": true, + "optional": true + }, + "utf-8-validate": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", + "dev": true, + "optional": true, + "requires": { + "node-gyp-build": "^4.3.0" + } } } }, @@ -1710,6 +1721,11 @@ "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", "dev": true }, + "env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": 
"sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==" + }, "errno": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", @@ -1956,6 +1972,15 @@ "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==" }, + "fast-check": { + "version": "2.17.0", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-2.17.0.tgz", + "integrity": "sha512-fNNKkxNEJP+27QMcEzF6nbpOYoSZIS0p+TyB+xh/jXqRBxRhLkiZSREly4ruyV8uJi7nwH1YWAhi7OOK5TubRw==", + "dev": true, + "requires": { + "pure-rand": "^5.0.0" + } + }, "fast-safe-stringify": { "version": "2.0.8", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.8.tgz", @@ -4397,6 +4422,12 @@ "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" }, + "pure-rand": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-5.0.0.tgz", + "integrity": "sha512-lD2/y78q+7HqBx2SaT6OT4UcwtvXNRfEpzYEzl0EQ+9gZq2Qi3fa0HDnYPeqQwhlHJFBUhT7AO3mLU3+8bynHA==", + "dev": true + }, "qrcode-terminal": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz", @@ -5571,9 +5602,9 @@ } }, "ws": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", - "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==" + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==" }, "xtend": { "version": "4.0.2", diff --git a/src/chains/ethereum/ethereum/package.json b/src/chains/ethereum/ethereum/package.json index 1b6c495021..fb5f777e59 100644 --- a/src/chains/ethereum/ethereum/package.json +++ b/src/chains/ethereum/ethereum/package.json @@ -30,7 +30,7 @@ "docs.preview": "ws --open --port 3010 --directory ../../../../docs", "tsc": "ttsc --build", "test": "nyc --reporter lcov npm run mocha", - "mocha": "cross-env TS_NODE_COMPILER=ttypescript TS_NODE_FILES=true mocha -s 0 -t 0 --exit --check-leaks --throw-deprecation --trace-warnings --require ts-node/register 'tests/**/*.test.ts'" + "mocha": "cross-env TS_NODE_COMPILER=ttypescript TS_NODE_FILES=true mocha -s 0 --exit --check-leaks --throw-deprecation --trace-warnings --require ts-node/register 'tests/**/*.test.ts'" }, "bugs": { "url": "https://github.com/trufflesuite/ganache/issues" @@ -67,6 +67,7 @@ "abort-controller": "3.0.0", "bip39": "3.0.4", "emittery": "0.7.2", + "env-paths": "2.2.1", "eth-sig-util": "2.5.3", "ethereumjs-abi": "0.6.8", "ethereumjs-util": "7.1.0", @@ -83,11 +84,11 @@ "semaphore": "1.1.0", "subleveldown": "5.0.1", "tmp-promise": "3.0.2", - "ws": "7.5.3" + "ws": "8.2.3" }, "devDependencies": { "@trufflesuite/typedoc-default-themes": "0.6.1", - "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "@types/encoding-down": "5.0.0", "@types/fs-extra": "9.0.2", "@types/keccak": "3.0.1", @@ -102,6 +103,7 @@ "abstract-leveldown": "6.3.0", "cheerio": "1.0.0-rc.3", "cross-env": "7.0.3", + "fast-check": "2.17.0", "fs-extra": "9.0.1", "local-web-server": "4.2.1", "mocha": "8.4.0", diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts 
b/src/chains/ethereum/ethereum/src/blockchain.ts index 1a86100833..2bc41414c9 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -53,7 +53,6 @@ import { TypedTransaction } from "@ganache/ethereum-transaction"; import { Block, RuntimeBlock, Snapshots } from "@ganache/ethereum-block"; -import { runTransactions } from "./helpers/run-transactions"; import { SimulationTransaction } from "./helpers/run-call"; import { ForkStateManager } from "./forking/state-manager"; import { @@ -673,7 +672,7 @@ export default class Blockchain extends Emittery.Typed< // commit accounts, but for forking. const stateManager = this.vm.stateManager; - stateManager.checkpoint(); + await stateManager.checkpoint(); initialAccounts.forEach(acc => { const a = { buf: acc.address.toBuffer() } as any; (stateManager as any)._cache.put(a, acc); @@ -798,8 +797,16 @@ export default class Blockchain extends Emittery.Typed< return (this.#timeAdjustment = timestamp - Date.now()); } - #deleteBlockData = (blocksToDelete: Block[]) => { - return this.#database.batch(() => { + #deleteBlockData = async (blocksToDelete: Block[]) => { + // if we are forking we need to make sure we clean up the forking-related + // metadata that isn't stored in the trie + if ("revertMetaData" in this.trie) { + await (this.trie as ForkTrie).revertMetaData( + blocksToDelete[blocksToDelete.length - 1].header.number, + blocksToDelete[0].header.number + ); + } + await this.#database.batch(() => { const { blocks, transactions, transactionReceipts, blockLogs } = this; blocksToDelete.forEach(block => { block.getTransactions().forEach(tx => { @@ -908,7 +915,7 @@ export default class Blockchain extends Emittery.Typed< if (!currentHash.equals(snapshotHash)) { // if we've added blocks since we snapshotted we need to delete them and put // some things back the way they were. - const blockPromises = []; + const blockPromises: Promise<Block>[] = []; let blockList = snapshots.blocks; while (blockList !== null) { if (blockList.current.equals(snapshotHash)) break; @@ -917,7 +924,8 @@ export default class Blockchain extends Emittery.Typed< } snapshots.blocks = blockList; - await Promise.all(blockPromises).then(this.#deleteBlockData); + const blockData = await Promise.all(blockPromises); + await this.#deleteBlockData(blockData); setStateRootSync( this.vm.stateManager, @@ -1091,6 +1099,7 @@ export default class Blockchain extends Emittery.Typed< } #traceTransaction = async ( + transaction: VmTransaction, trie: GanacheTrie, newBlock: RuntimeBlock & { transactions: VmTransaction[] }, options: TransactionTraceOptions, @@ -1123,7 +1132,6 @@ export default class Blockchain extends Emittery.Typed< }); const storage: StorageRecords = {}; - const transaction = newBlock.transactions[newBlock.transactions.length - 1]; // TODO: gas could theoretically go over Number.MAX_SAFE_INTEGER. 
// (Ganache v2 didn't handle this possibility either, so it hasn't been @@ -1173,7 +1181,7 @@ export default class Blockchain extends Emittery.Typed< } const structLog: StructLog = { - depth: event.depth, + depth: event.depth + 1, error: "", gas: gasLeft, gasCost: 0, @@ -1245,53 +1253,6 @@ export default class Blockchain extends Emittery.Typed< } }; - const afterTxListener = () => { - vm.removeListener("step", stepListener); - vm.removeListener("afterTransaction", afterTxListener); - this.emit("ganache:vm:tx:after", { - context: transactionEventContext - }); - }; - - const beforeTxListener = async (tx: VmTransaction) => { - if (tx === transaction) { - this.emit("ganache:vm:tx:before", { - context: transactionEventContext - }); - vm.on("step", stepListener); - vm.on("afterTx", afterTxListener); - if (keys && contractAddress) { - const database = this.#database; - return Promise.all( - keys.map(async key => { - // get the raw key using the hashed key - let rawKey = await database.storageKeys.get(key); - - const result = await vm.stateManager.getContractStorage( - { buf: Address.from(contractAddress).toBuffer() } as any, - rawKey - ); - - storage[Data.from(key, key.length).toString()] = { - key: Data.from(rawKey, rawKey.length), - value: Data.from(result, 32) - }; - }) - ); - } - } - }; - - const removeListeners = () => { - vm.removeListener("step", stepListener); - vm.removeListener("beforeTx", beforeTxListener); - vm.removeListener("afterTx", afterTxListener); - }; - - // Listen to beforeTx so we know when our target transaction - // is processing. This event will add the event listener for getting the trace data. - vm.on("beforeTx", beforeTxListener); - // Don't even let the vm try to flush the block's _cache to the stateTrie. // When forking some of the data that the traced function may request will // exist only on the main chain. Because we pretty much lie to the VM by @@ -1309,10 +1270,50 @@ export default class Blockchain extends Emittery.Typed< // The previous implementation had specific error handling. // It's possible we've removed handling specific cases in this implementation. 
// e.g., the previous incantation of RuntimeError - await runTransactions(vm, newBlock.transactions, newBlock); - - // Just to be safe - removeListeners(); + await vm.stateManager.checkpoint(); + try { + for (let i = 0, l = newBlock.transactions.length; i < l; i++) { + const tx = newBlock.transactions[i] as any; + if (tx === transaction) { + if (keys && contractAddress) { + const database = this.#database; + const ejsContractAddress = { buf: contractAddress } as any; + await Promise.all( + keys.map(async key => { + // get the raw key using the hashed key + const rawKey = await database.storageKeys.get(key); + + const result = await vm.stateManager.getContractStorage( + ejsContractAddress, + rawKey + ); + + storage[Data.from(key, key.length).toString()] = { + key: Data.from(rawKey, rawKey.length), + value: Data.from(result, 32) + }; + }) + ); + break; + } else { + vm.on("step", stepListener); + // force the loop to break after running this transaction by setting + // the current iteration past the end + i = l; + } + } + this.emit("ganache:vm:tx:before", { + context: transactionEventContext + }); + await vm.runTx({ tx, block: newBlock as any }); + this.emit("ganache:vm:tx:after", { + context: transactionEventContext + }); + } + vm.removeListener("step", stepListener); + } finally { + await vm.stateManager.revert(); + } // send state results back return { @@ -1391,8 +1392,8 @@ export default class Blockchain extends Emittery.Typed< throw new Error("Unknown transaction " + transactionHash); } - const targetBlock = await this.blocks.get( - transaction.blockNumber.toBuffer() + const targetBlock = await this.blocks.getByHash( + transaction.blockHash.toBuffer() ); const parentBlock = await this.blocks.getByHash( targetBlock.header.parentHash.toBuffer() @@ -1404,12 +1405,6 @@ export default class Blockchain extends Emittery.Typed< transactionHashBuffer ); - // only copy relevant transactions - newBlock.transactions = newBlock.transactions.slice( - 0, - 1 + transaction.index.toNumber() - ); - // #2 - Set state root of original block // // TODO: Forking needs the forked block number passed during this step: @@ -1427,7 +1422,12 @@ export default class Blockchain extends Emittery.Typed< structLogs, returnValue, storage - } = await this.#traceTransaction(trie, newBlock, options); + } = await this.#traceTransaction( + newBlock.transactions[transaction.index.toNumber()], + trie, + newBlock, + options + ); // #4 - Send results back return { gas, structLogs, returnValue, storage }; @@ -1556,6 +1556,7 @@ export default class Blockchain extends Emittery.Typed< }; const { storage } = await this.#traceTransaction( + newBlock.transactions[transaction.index.toNumber()], trie, newBlock, options, diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index 5dea86fcc6..c443a262f1 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -21,6 +21,7 @@ import { EthereumProviderOptions, EthereumLegacyProviderOptions } from "@ganache/ethereum-options"; +import { bufferify } from "./helpers/bufferify"; type ProviderOptions = EthereumProviderOptions | EthereumLegacyProviderOptions; export type Provider = EthereumProvider; @@ -45,6 +46,8 @@ export class Connector< implements IConnector { #provider: EthereumProvider; + static BUFFERIFY_THRESHOLD: number = 100000; + get provider() { return this.#provider; } @@ -55,6 +58,8 @@ export class Connector< this.#provider = new EthereumProvider(providerOptions, executor); } + 
public BUFFERIFY_THRESHOLD = Connector.BUFFERIFY_THRESHOLD; + async connect() { await this.#provider.initialize(); // no need to wait for #provider.once("connect") as the initialize() @@ -99,9 +104,16 @@ return this.#provider._requestRaw({ method, params }); }; + format( + result: any, + payload: R + ): RecognizedString | Generator; format(result: any, payload: R): RecognizedString; format(results: any[], payloads: R[]): RecognizedString; - format(results: any | any[], payload: R | R[]): RecognizedString { + format( + results: any | any[], + payload: R | R[] + ): RecognizedString | Generator { if (Array.isArray(payload)) { return JSON.stringify( payload.map((payload, i) => { @@ -115,7 +127,35 @@ ); } else { const json = makeResponse(payload.id, results); - return JSON.stringify(json); + if ( + payload.method === "debug_traceTransaction" && + typeof results === "object" && + Array.isArray(results.structLogs) && + // for "large" debug_traceTransaction results we convert the individual + // parts of the response to Buffers, yielded via a Generator function, + // instead of using JSON.stringify. This is necessary because we: + // * avoid V8's maximum string length limit of 1GB + // * avoid the max Buffer length limit of ~2GB (on 64bit + // architectures). + // * avoid heap allocation failures due to trying to hold too much + // data in memory (which can happen if we don't immediately consume + // the `format` result -- by buffering everything into one array, + // for example) + // + // We don't do this for everything because the bufferification is so very + // very slow. + // + // TODO(perf): an even better way of solving this would be to convert + // `debug_traceTransaction` to a generator that yields chunks (of + // Buffer) as soon as they're available. We could then `write` these + // individual chunks immediately and our memory use would stay + // relatively low and constant. + results.structLogs.length > this.BUFFERIFY_THRESHOLD + ) { + return bufferify(json, ""); + } else { + return JSON.stringify(json); + } } } diff --git a/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts b/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts index f05cc093c7..e6f285cade 100644 --- a/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts +++ b/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts @@ -120,7 +120,7 @@ export default class BlockManager extends Manager { let blockNumber: string; if (typeof tagOrBlockNumber === "string") { blockNumber = tagOrBlockNumber; - } else if (tagOrBlockNumber.toBigInt() > fallback.blockNumber.toBigInt()) { + } else if (!fallback.isValidForkBlockNumber(tagOrBlockNumber)) { // don't get the block if the requested block is _after_ our fallback's // blocknumber because it doesn't exist in our local chain. 
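The `format` branch above hands oversized `debug_traceTransaction` results to `bufferify(json, "")` instead of `JSON.stringify`. A minimal sketch of the idea, assuming a recursive, piecewise serializer; the real helper in `./helpers/bufferify` may chunk differently:

```typescript
// A simplified sketch only: the real helper is called as bufferify(json, "")
// above, and its actual signature and chunking strategy may differ. The point
// is that the JSON text is produced and yielded piecewise, so a single 1GB+
// string or ~2GB Buffer is never materialized.
function* bufferifySketch(value: any): Generator<Buffer> {
  if (Array.isArray(value)) {
    yield Buffer.from("[");
    for (let i = 0; i < value.length; i++) {
      if (i > 0) yield Buffer.from(",");
      yield* bufferifySketch(value[i]);
    }
    yield Buffer.from("]");
  } else if (value !== null && typeof value === "object") {
    const keys = Object.keys(value);
    yield Buffer.from("{");
    for (let i = 0; i < keys.length; i++) {
      if (i > 0) yield Buffer.from(",");
      yield Buffer.from(JSON.stringify(keys[i]) + ":");
      yield* bufferifySketch(value[keys[i]]);
    }
    yield Buffer.from("}");
  } else {
    // primitives (and null) are tiny; stringifying each one individually is safe
    yield Buffer.from(JSON.stringify(value === undefined ? null : value));
  }
}
```

A consumer can then write each yielded chunk to the socket immediately, keeping memory use roughly constant regardless of the trace size.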
return null; diff --git a/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts b/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts index 948bcb9c1b..0313ffefe1 100644 --- a/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts +++ b/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts @@ -48,10 +48,15 @@ export default class TransactionManager extends Manager { [Data.from(transactionHash).toString()] ); if (tx == null) return null; + const blockHash = Data.from((tx as any).blockHash, 32); const blockNumber = Quantity.from((tx as any).blockNumber); const index = Quantity.from((tx as any).transactionIndex); + // don't get the transaction if the requested transaction is _after_ our + // fallback's blocknumber because it doesn't exist in our local chain. + if (!fallback.isValidForkBlockNumber(blockNumber)) return null; + const extra: GanacheRawExtraTx = [ Data.from(tx.from, 20).toBuffer(), Data.from((tx as any).hash, 32).toBuffer(), diff --git a/src/chains/ethereum/ethereum/src/forking/cache.ts b/src/chains/ethereum/ethereum/src/forking/cache.ts index 51e6a005e1..2ca3da22a9 100644 --- a/src/chains/ethereum/ethereum/src/forking/cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/cache.ts @@ -14,6 +14,6 @@ export class ForkCache extends Cache { */ _lookupAccount = async (address: EJS_Address) => { const rlp = await (this._trie as ForkTrie).get(address.buf); - return Account.fromRlpSerializedAccount(rlp!); + return rlp ? Account.fromRlpSerializedAccount(rlp) : new Account(); }; } diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 967de1faff..e719bd8772 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -11,6 +11,7 @@ import { Address } from "@ganache/ethereum-address"; import { Account } from "@ganache/ethereum-utils"; import BlockManager from "../data-managers/block-manager"; import { ProviderHandler } from "./handlers/provider-handler"; +import { PersistentCache } from "./persistent-cache/persistent-cache"; async function fetchChainId(fork: Fork) { const chainIdHex = await fork.request("eth_chainId", []); @@ -21,16 +22,14 @@ async function fetchNetworkId(fork: Fork) { return parseInt(networkIdStr, 10); } function fetchBlockNumber(fork: Fork) { - return fork.request("eth_blockNumber", []); + // {disableCache: true} required so we never cache the blockNumber, as forking + // shouldn't ever cache a method that can change! 
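Both data managers above now delegate the fork-boundary check to `fallback.isValidForkBlockNumber`, and `selectValidForkBlockNumber` (defined in fork.ts below) changes its comparison from `<` to `<=`. A worked example with an illustrative fork block of 100:

```typescript
// Illustrative values only: suppose the fork block is 100. The old strict `<`
// excluded the fork block itself, while the new isValidForkBlockNumber-style
// `<=` includes it, matching the fact that the fork block does exist on the
// remote chain.
const forkBlockNumber = 100n;
const oldIsValid = (n: bigint) => n < forkBlockNumber; // before: block 100 rejected
const newIsValid = (n: bigint) => n <= forkBlockNumber; // after: block 100 accepted

console.log(oldIsValid(100n), newIsValid(100n)); // false true
```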
+ return fork.request("eth_blockNumber", [], { disableCache: true }); } function fetchBlock(fork: Fork, blockNumber: Quantity | Tag.LATEST) { return fork.request("eth_getBlockByNumber", [blockNumber, true]); } -async function fetchNonce( - fork: Fork, - address: Address, - blockNumber: Quantity | Tag.LATEST -) { +async function fetchNonce(fork: Fork, address: Address, blockNumber: Quantity) { const nonce = await fork.request("eth_getTransactionCount", [ address, blockNumber ]); @@ -83,9 +82,9 @@ export class Fork { } } - #setCommonFromChain = async () => { + #setCommonFromChain = async (chainIdPromise: Promise<number>) => { const [chainId, networkId] = await Promise.all([ - fetchChainId(this), + chainIdPromise, fetchNetworkId(this) ]); @@ -102,14 +101,27 @@ (this.common as any).on = () => {}; }; - #setBlockDataFromChainAndOptions = async () => { + #setBlockDataFromChainAndOptions = async ( + chainIdPromise: Promise<number> + ) => { const options = this.#options; if (options.blockNumber === Tag.LATEST) { - // if our block number option is "latest" override it with the original - // chain's current blockNumber - const block = await fetchBlock(this, Tag.LATEST); - options.blockNumber = parseInt(block.number, 16); - this.blockNumber = Quantity.from(options.blockNumber); + const [latestBlock, chainId] = await Promise.all([ + fetchBlock(this, Tag.LATEST), + chainIdPromise + ]); + let blockNumber = parseInt(latestBlock.number, 16); + const effectiveBlockNumber = KNOWN_CHAINIDS.has(chainId) + ? Math.max(blockNumber - options.preLatestConfirmations, 0) + : blockNumber; + let block; + if (effectiveBlockNumber !== blockNumber) { + block = await fetchBlock(this, Quantity.from(effectiveBlockNumber)); + } else { + block = latestBlock; + } + options.blockNumber = effectiveBlockNumber; + this.blockNumber = Quantity.from(effectiveBlockNumber); this.stateRoot = Data.from(block.stateRoot); await this.#syncAccounts(this.blockNumber); return block; @@ -152,18 +164,45 @@ }; public async initialize() { - const [block] = await Promise.all([ - this.#setBlockDataFromChainAndOptions(), - this.#setCommonFromChain() + let cacheProm: Promise<PersistentCache>; + const options = this.#options; + if (options.deleteCache) await PersistentCache.deleteDb(); + if (options.disableCache === false) { + // ignore cache startup errors as it is possible there is an `open` + // conflict if another ganache fork is running at the time this one is + // started. 
The cache isn't required (though performance will be + degraded without it) + cacheProm = PersistentCache.create().catch(_e => null); + } else { + cacheProm = null; + } + const chainIdPromise = fetchChainId(this); + const [block, cache] = await Promise.all([ + this.#setBlockDataFromChainAndOptions(chainIdPromise), + cacheProm, + this.#setCommonFromChain(chainIdPromise) ]); this.block = new Block( BlockManager.rawFromJSON(block, this.common), this.common ); + if (cache) await this.initCache(cache); + } + private async initCache(cache: PersistentCache) { + await cache.initialize( + this.block.header.number, + this.block.hash(), + this.request.bind(this) + ); + this.#handler.setCache(cache); } - public request<T = unknown>(method: string, params: unknown[]): Promise<T> { - return this.#handler.request<T>(method, params); + public request<T = unknown>( + method: string, + params: unknown[], + options = { disableCache: false } + ): Promise<T> { + return this.#handler.request<T>(method, params, options); } public abort() { @@ -174,8 +213,12 @@ return this.#handler.close(); } + public isValidForkBlockNumber(blockNumber: Quantity) { + return blockNumber.toBigInt() <= this.blockNumber.toBigInt(); + } + public selectValidForkBlockNumber(blockNumber: Quantity) { - return blockNumber.toBigInt() < this.blockNumber.toBigInt() + return this.isValidForkBlockNumber(blockNumber) ? blockNumber : this.blockNumber; } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts index 5556590c91..e5fd673788 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts @@ -5,6 +5,7 @@ import { OutgoingHttpHeaders } from "http"; import RateLimiter from "../rate-limiter/rate-limiter"; import LRU from "lru-cache"; import { AbortError, CodedError } from "@ganache/ethereum-utils"; +import { PersistentCache } from "../persistent-cache/persistent-cache"; const INVALID_RESPONSE = "Invalid response from fork provider: "; @@ -23,6 +24,7 @@ export class BaseHandler { protected limiter: RateLimiter; protected headers: Headers; protected abortSignal: AbortSignal; + private persistentCache: PersistentCache; constructor(options: EthereumInternalOptions, abortSignal: AbortSignal) { const forkingOptions = options.fork; @@ -65,6 +67,10 @@ } } + public setCache(cache: PersistentCache) { + this.persistentCache = cache; + } + /** * Adds Authorization headers from the given options to the provided `headers` * object. Overwrites an existing `Authorization` header value. 
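The `initialize()` changes above issue `eth_chainId` once and share the in-flight promise between `#setCommonFromChain` and `#setBlockDataFromChainAndOptions`, while the persistent cache opens concurrently. A self-contained sketch of that shared-promise pattern (names here are illustrative):

```typescript
// Sketch of the pattern initialize() uses: kick off the RPC call once, hand
// the same in-flight promise to every consumer, and let Promise.all run the
// independent setup steps concurrently.
async function initializeSketch(request: (method: string) => Promise<string>) {
  const chainIdPromise = request("eth_chainId").then(hex => parseInt(hex, 16));
  const [common, blockData] = await Promise.all([
    chainIdPromise.then(chainId => `common configured for chain ${chainId}`),
    chainIdPromise.then(chainId => `fork block chosen for chain ${chainId}`)
  ]);
  return { common, blockData }; // only one eth_chainId request was sent
}
```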
@@ -135,7 +141,7 @@ } } - getFromCache<T>(key: string) { + getFromMemCache<T>(key: string) { const cachedRequest = this.requestCache.get(key); if (cachedRequest !== undefined) return cachedRequest as Promise<T>; @@ -143,33 +149,88 @@ if (cachedValue !== undefined) return JSON.parse(cachedValue).result as T; } + async getFromSlowCache<T>(method: string, params: any[], key: string) { + if (!this.persistentCache) return; + const raw = await this.persistentCache.get(method, params, key).catch(e => { + if (e.notFound) return null; + // I/O or other error, throw as things are getting weird and the cache may + // have lost integrity + throw e; + }); + if (raw != null) return { result: JSON.parse(raw).result as T, raw }; + } + async queueRequest<T>( + method: string, + params: any[], key: string, send: ( ...args: unknown[] ) => Promise<{ response: { result: any } | { error: { message: string; code: number } }; raw: string | Buffer; - }> + }>, + options = { disableCache: false } ): Promise<T> { - const cached = this.getFromCache<T>(key); - if (cached !== undefined) return cached; - - const promise = this.limiter.handle(send).then(({ response, raw }) => { - if (this.abortSignal.aborted) return Promise.reject(new AbortError()); + if (!options.disableCache) { + const memCached = this.getFromMemCache<T>(key); + if (memCached !== undefined) return memCached; + + const diskCached = await this.getFromSlowCache<T>(method, params, key); + if (diskCached !== undefined) { + this.valueCache.set(key, Buffer.from(diskCached.raw)); + return diskCached.result; + } + } - if (hasOwn(response, "result")) { - // cache non-error responses only - this.valueCache.set(key, raw); + const promise = this.limiter + .handle(send) + .then(async ({ response, raw }) => { + if (this.abortSignal.aborted) return Promise.reject(new AbortError()); + + if (hasOwn(response, "result")) { + if (!options.disableCache) { + // cache non-error responses only + this.valueCache.set(key, raw); + + // swallow errors for the persistentCache, since it's not vital that + // it always works + if (this.persistentCache) { + const prom = this.persistentCache + .put( + method, + params, + key, + typeof raw === "string" ? Buffer.from(raw) : raw + ) + .catch(_ => { + // the cache.put may fail if the db is closed while a request + // is in flight. This is a "fire and forget" method. 
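These unawaited `persistentCache.put` calls are tracked so `close()` can drain stragglers before the store shuts down. A standalone sketch of the fire-and-forget bookkeeping (the names are illustrative, not the actual `BaseHandler` members):

```typescript
// Sketch of the "fire and forget" tracking: unawaited writes are kept in a
// Set so a later close() can wait for in-flight work before shutdown.
const fireForget = new Set<Promise<unknown>>();

function track(task: Promise<unknown>) {
  const prom = task.catch(() => {}); // failures are deliberately swallowed
  fireForget.add(prom);
  prom.finally(() => fireForget.delete(prom)); // clean up once complete
}

async function close() {
  // mirrors `await Promise.all(this.fireForget.keys())` in BaseHandler.close
  await Promise.all(fireForget);
}
```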
+ }); + + // track these unawaited `puts` + this.fireForget.add(prom); + + // clean up once complete + prom.finally(() => { + this.fireForget.delete(prom); + }); + } + } - return response.result as T; - } else if (hasOwn(response, "error") && response.error != null) { - const { error } = response as JsonRpcError; - throw new CodedError(error.message, error.code); - } - throw new Error(`${INVALID_RESPONSE}\`${JSON.stringify(response)}\``); - }); + return response.result as T; + } else if (hasOwn(response, "error") && response.error != null) { + const { error } = response as JsonRpcError; + throw new CodedError(error.message, error.code); + } + throw new Error(`${INVALID_RESPONSE}\`${JSON.stringify(response)}\``); + }); this.requestCache.set(key, promise); return await promise; } + private fireForget = new Set(); + async close() { + await Promise.all(this.fireForget.keys()); + this.persistentCache && (await this.persistentCache.close()); + } } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts index 07eee1850c..1932e7c3cc 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts @@ -85,7 +85,11 @@ export class HttpHandler extends BaseHandler implements Handler { }); } - public async request(method: string, params: unknown[]) { + public async request( + method: string, + params: unknown[], + options = { disableCache: false } + ) { const key = JSON.stringify({ method, params }); const { protocol, hostname: host, port, pathname, search } = this.url; const requestOptions = { @@ -168,11 +172,6 @@ export class HttpHandler extends BaseHandler implements Handler { return deferred.promise.finally(() => this.requestCache.delete(key)); }; - return await this.queueRequest(key, send); - } - - public close() { - // no op - return Promise.resolve(); + return await this.queueRequest(method, params, key, send, options); } } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts index ab46f2b2f0..0bc554cd49 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts @@ -61,17 +61,22 @@ export class ProviderHandler extends BaseHandler implements Handler { throw new Error("Forking `provider` must be EIP-1193 compatible"); } } - public async request(method: string, params: unknown[]) { + public async request( + method: string, + params: unknown[], + options = { disableCache: false } + ) { // format params via JSON stringification because the params might // be Quantity or Data, which aren't valid as `params` themselves, // but when JSON stringified they are const strParams = JSON.stringify(params); - return await this.queueRequest(`${method}:${strParams}`, () => - this._request(method, JSON.parse(strParams) as unknown[]) + return await this.queueRequest( + method, + params, + `${method}:${strParams}`, + () => this._request(method, JSON.parse(strParams) as unknown[]), + options ); } - public close() { - return Promise.resolve(); - } } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts index 2794b709f2..55a2ff5414 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts +++ 
b/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts @@ -53,7 +53,11 @@ export class WsHandler extends BaseHandler implements Handler { this.connection.onmessage = this.onMessage.bind(this); } - public async request(method: string, params: unknown[]) { + public async request( + method: string, + params: unknown[], + options = { disableCache: false } + ) { await this.open; if (this.abortSignal.aborted) return Promise.reject(new AbortError()); @@ -74,7 +78,7 @@ this.connection.send(`${JSONRPC_PREFIX}${messageId},${key.slice(1)}`); return deferred.promise.finally(() => this.requestCache.delete(key)); }; - return await this.queueRequest(key, send); + return await this.queueRequest(method, params, key, send, options); } public onMessage(event: WebSocket.MessageEvent) { @@ -111,8 +115,8 @@ return open; } - public close() { + public async close() { + await super.close(); this.connection.close(); - return Promise.resolve(); } } diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts new file mode 100644 index 0000000000..95f1210f29 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts @@ -0,0 +1,64 @@ +import { BUFFER_EMPTY } from "@ganache/utils"; +import { LevelUp } from "levelup"; +import { Tree } from "./tree"; + +export class Ancestry { + private db: LevelUp; + private next: Buffer; + private knownAncestors: Set<string>; + /** + * Prevents fetching the same key from the database simultaneously. + */ + private cacheLock: Map<string, Promise<void>> = new Map(); + constructor(db: LevelUp, parent: Tree) { + this.db = db; + if (parent == null) { + this.next = null; + this.knownAncestors = new Set(); + } else { + this.next = parent.closestKnownAncestor.equals(BUFFER_EMPTY) + ? null + : parent.closestKnownAncestor; + this.knownAncestors = new Set([parent.hash.toString("hex")]); + } + } + + private async loadNextAncestor(next: Buffer) { + const k = next.toString("hex"); + if (this.cacheLock.has(k)) { + throw new Error("couldn't load next ancestor as it is locked"); + } + let resolver: () => void; + this.cacheLock.set( + k, + new Promise<void>(resolve => { + resolver = resolve; + }) + ); + const value = await this.db.get(next); + const node = Tree.deserialize(next, value); + this.next = node.closestKnownAncestor.equals(BUFFER_EMPTY) + ? 
null : node.closestKnownAncestor; + this.knownAncestors.add(node.hash.toString("hex")); + this.cacheLock.delete(k); + resolver(); + } + + async has(key: Buffer) { + const strKey = key.toString("hex"); + if (this.knownAncestors.has(strKey)) { + return true; + } else if (this.next) { + const cacheLock = this.cacheLock.get(this.next.toString("hex")); + if (cacheLock) { + await cacheLock; + return this.has(key); + } + await this.loadNextAncestor(this.next); + return this.has(key); + } else { + return false; + } + } +} diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts new file mode 100644 index 0000000000..cfb158a7e7 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts @@ -0,0 +1,220 @@ +import { Tag } from "@ganache/ethereum-utils"; +import { BUFFER_EMPTY, Data, DATA_EMPTY, Quantity } from "@ganache/utils"; +import { LevelUp } from "levelup"; +import { Tree } from "./tree"; + +export type Request = (method: string, params: any[]) => Promise<any>; + +export type FindOptions = ( + | { + gte: Buffer; + lt?: Buffer; + } + | { + gt: Buffer; + lt?: Buffer; + } + | { + gt: Buffer; + lte?: Buffer; + } + | { + gte: Buffer; + lte?: Buffer; + } + | { + gte?: Buffer; + lt: Buffer; + } + | { + gt?: Buffer; + lt: Buffer; + } + | { + gt?: Buffer; + lte: Buffer; + } + | { + gte?: Buffer; + lte: Buffer; + } +) & { reverse?: boolean }; + +export function getBlockNumberFromParams(method: string, params: any[]) { + // get the request's block number + switch (method) { + case "eth_getBlockByNumber": + return params[0]; + case "eth_getTransactionCount": + case "eth_getCode": + case "eth_getBalance": + return params[1]; + case "eth_getStorageAt": + return params[2]; + default: + return null; + } +} + +export async function setDbVersion(db: LevelUp, version: Buffer) { + // set the version if the DB was just created, or error if we already have + // a version, but it isn't what we expected + try { + const recordedVersion = await db.get("version"); + if (!version.equals(recordedVersion)) { + // in the future this is where database migrations would go + throw new Error( + `Persistent cache version "${version.toString()}" is not understood.` + ); + } + } catch (e) { + if (!e.notFound) throw e; + + // if we didn't have a `version` key we need to set one + await db.put("version", version); + } } + +export async function resolveTargetAndClosestAncestor( + db: LevelUp, + request: Request, + targetHeight: Quantity, + targetHash: Data +) { + let targetBlock: Tree; + let closestAncestor: Tree; + let previousClosestAncestor: Tree; + try { + const key = Tree.encodeKey(targetHeight, targetHash); + targetBlock = Tree.deserialize(key, await db.get(key)); + + if (targetBlock.closestKnownAncestor.equals(BUFFER_EMPTY)) { + // we are the genesis/earliest block + closestAncestor = null; + previousClosestAncestor = null; + } else { + previousClosestAncestor = Tree.deserialize( + targetBlock.closestKnownAncestor, + await db.get(targetBlock.closestKnownAncestor) + ); + // check if we are still the closest known ancestor + closestAncestor = + (await findClosestAncestor( + db, + request, + targetHeight, + previousClosestAncestor.key + )) || previousClosestAncestor; + } + } catch (e) { + // something bad happened (I/O failure?), bail + if (!e.notFound) throw e; + + previousClosestAncestor = null; + + // we couldn't find our target block in the database so we need to figure + // out its relationships via the 
blockchain. + + // In order to avoid requesting the "earliest" block unnecessarily, we + // assume the "earliest" block can't be before block 0 (which seems like a + // reasonable assumption to me!). + // If our target is block `0` then we can't have a closest ancestor since + // we are the first block + if (targetHeight.toBigInt() === 0n) { + closestAncestor = null; + targetBlock = new Tree(targetHeight, targetHash); + } else { + const earliestBlock = await getBlockByNumber(request, Tag.EARLIEST); + if (!earliestBlock) throw new Error('Could not find "earliest" block.'); + + const { hash: earliestHash, number: earliestNumber } = earliestBlock; + const hash = Data.from(earliestHash, 32); + + const earliest = new Tree(Quantity.from(earliestNumber), hash); + + closestAncestor = + (await findClosestAncestor(db, request, targetHeight, earliest.key)) || + earliest; + targetBlock = new Tree(targetHeight, targetHash, closestAncestor.key); + } + } + + return { + targetBlock, + closestAncestor, + previousClosestAncestor + }; +} + +export async function* findRelated( + db: LevelUp, + request: Request, + options: FindOptions +) { + const readStream = db.createReadStream({ + keys: true, + values: true, + ...options + }); + + for await (const pair of readStream) { + const { key, value } = (pair as unknown) as { key: Buffer; value: Buffer }; + const node = Tree.deserialize(key, value); + const { height: candidateHeight } = node.decodeKey(); + const block = await getBlockByNumber(request, candidateHeight); + // if the chain has a block at this height, and the hash of the + // block is the same as the one in the db we've found our closest + // ancestor! + if (block != null && block.hash === Data.from(node.hash).toString()) { + yield node; + } + } +} + +/** + * + * @param height Search only before this block height (exclusive) + * @param upTo Search up to this key (inclusive) + * @returns the closest known ancestor, or `upTo` if we know of no ancestors + */ +export async function findClosestAncestor( + db: LevelUp, + request: Request, + height: Quantity, + upTo: Buffer +) { + const generator = findRelated(db, request, { + gte: upTo, + lt: Tree.encodeKey(height, DATA_EMPTY), + reverse: true + }); + const first = await generator.next(); + await generator.return(); + return first.value; +} + +/** + * + * @param height Search only after this block height (exclusive) + * @returns the closest known descendants, or null + */ +export async function* findClosestDescendants( + db: LevelUp, + request: Request, + height: Quantity +) { + const generator = findRelated(db, request, { + gte: Tree.encodeKey(Quantity.from(height.toBigInt() + 1n), DATA_EMPTY), + reverse: false + }); + for await (const node of generator) { + yield node; + } +} + +export async function getBlockByNumber( + request: Request, + blockNumber: Quantity | Tag +) { + return await request("eth_getBlockByNumber", [blockNumber.toString(), false]); } diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts new file mode 100644 index 0000000000..a33075fa91 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -0,0 +1,382 @@ +import { Tree } from "./tree"; +import { promises } from "fs"; +import envPaths from "env-paths"; +import levelup, { LevelUp } from "levelup"; +import leveldown from "leveldown"; +import sub from "subleveldown"; +import encode from "encoding-down"; +import * as lexico from 
"../lexicographic-key-codec"; +import { BUFFER_ZERO, Data, Quantity } from "@ganache/utils"; +import { Ancestry } from "./ancestry"; +import { + resolveTargetAndClosestAncestor, + getBlockByNumber, + getBlockNumberFromParams, + Request, + setDbVersion, + findClosestDescendants +} from "./helpers"; +import { AbstractIterator } from "abstract-leveldown"; +import { AbstractLevelDOWN } from "abstract-leveldown"; + +const { mkdir } = promises; + +const levelupOptions = { keyEncoding: "binary", valueEncoding: "binary" }; +const leveldownOpts = { prefix: "" }; + +/** + * A leveldb-backed cache that enables associating immutable data as it existed + * at a specific height on a blockchain. + * + * The design affords faster db reads (one read to get known closest ancestors + * and descendants) and fast db writes (one write per node in a relationship). + */ +export class PersistentCache { + public readonly version = BUFFER_ZERO; + protected db: LevelUp<AbstractLevelDOWN, AbstractIterator>; + protected cacheDb: LevelUp< + AbstractLevelDOWN, + AbstractIterator + >; + protected ancestorDb: LevelUp< + AbstractLevelDOWN, + AbstractIterator + >; + protected ancestry: Ancestry; + protected hash: Data; + protected request: Request; + constructor() {} + + static async deleteDb(dbSuffix?: string) { + return new Promise((resolve, reject) => { + const directory = PersistentCache.getDbDirectory(dbSuffix); + leveldown.destroy(directory, err => { + if (err) return void reject(err); + resolve(void 0); + }); + }); + } + /** + * Serializes the entire database world state into a JSON tree + */ + static async serializeDb(dbSuffix?: string) { + const cache = await PersistentCache.create(dbSuffix); + type Tree = Record<string, any>; + return await new Promise(async resolve => { + const rs = cache.ancestorDb.createReadStream({ + gte: BUFFER_ZERO, + keys: true, + values: true + }); + const tree: Tree = {}; + const collection = {}; + for await (const data of rs) { + const { key, value } = (data as any) as { key: Buffer; value: Buffer }; + + const node = Tree.deserialize(key, value); + (node as any).height = node.decodeKey().height.toNumber(); + const keyHex = key.toString("hex"); + const parentKeyHex = node.closestKnownAncestor.toString("hex"); + collection[keyHex] = node; + if (node.closestKnownAncestor.length === 0) { + tree[keyHex] = node as any; + } else { + const descendants = collection[parentKeyHex].descendants || {}; + descendants[keyHex] = node; + collection[parentKeyHex].descendants = descendants; + } + (node as any).hash = Data.from(node.hash).toString(); + (node as any).parent = + node.closestKnownAncestor.length > 0 + ? 
Data.from(collection[parentKeyHex].hash).toString() + : null; + delete node.key; + // delete node.hash; + delete node.closestKnownDescendants; + delete node.closestKnownAncestor; + } + await cache.close(); + resolve(JSON.parse(JSON.stringify(tree)) as Tree); + }); + } + + static getDbDirectory(suffix: string = "") { + const { data: directory } = envPaths("Ganache/db", { + suffix + }); + return directory; + } + + static async create(dbSuffix?: string) { + const cache = new PersistentCache(); + + const directory = PersistentCache.getDbDirectory(dbSuffix); + await mkdir(directory, { recursive: true }); + + const store = encode(leveldown(directory, leveldownOpts), levelupOptions); + const db = await new Promise<LevelUp>((resolve, reject) => { + const db = levelup(store, (err: Error) => { + if (err) return void reject(err); + resolve(db); + }); + }); + cache.db = db; + cache.cacheDb = sub(db, "c", levelupOptions); + cache.ancestorDb = sub(db, "a", levelupOptions); + await cache.cacheDb.open(); + await cache.ancestorDb.open(); + + await setDbVersion(cache.db, cache.version); + return cache; + } + + async initialize(height: Quantity, hash: Data, request: Request) { + this.hash = hash; + this.request = request; + + const { + targetBlock, + closestAncestor, + previousClosestAncestor + } = await resolveTargetAndClosestAncestor( + this.ancestorDb, + this.request, + height, + hash + ); + + this.ancestry = new Ancestry(this.ancestorDb, closestAncestor); + + const atomicBatch = this.ancestorDb.batch(); + + // if we changed closest ancestors remove our targetBlock from the previous + // ancestor so our target block doesn't appear in the database more than + // once, and update our targetBlock to point to this new ancestor + if ( + previousClosestAncestor && + !previousClosestAncestor.key.equals(closestAncestor.key) + ) { + targetBlock.closestKnownAncestor = closestAncestor.key; + + const index = previousClosestAncestor.closestKnownDescendants.findIndex( + buf => buf.equals(targetBlock.key) + ); + previousClosestAncestor.closestKnownDescendants.splice(index, 1); + atomicBatch.put( + previousClosestAncestor.key, + previousClosestAncestor.serialize() + ); + } + + let allKnownDescendants = [...targetBlock.closestKnownDescendants]; + // if we don't have a closestAncestor it is because the target block is block 0 + if (closestAncestor == null) { + atomicBatch.put(targetBlock.key, targetBlock.serialize()); + } else { + const ancestorsDescendants = [targetBlock.key]; + + await Promise.all( + closestAncestor.closestKnownDescendants.map(async descendantKey => { + // don't match ourself + if (descendantKey.equals(targetBlock.key)) return; + + const { height: descendantHeight } = Tree.decodeKey(descendantKey); + // if the block number is at or below our own it can't be our descendant + if (descendantHeight.toBigInt() <= height.toBigInt()) { + ancestorsDescendants.push(descendantKey); + return; + } + + const descendantValue = await this.ancestorDb.get(descendantKey); + const descendantNode = Tree.deserialize( + descendantKey, + descendantValue + ); + + const descendantRawBlock = await this.getBlock(descendantHeight); + // if the block doesn't exist on our chain, it can't be our child, + // keep it in the parent + if ( + descendantRawBlock == null || + descendantRawBlock.hash !== + Data.from(descendantNode.hash, 32).toString() + ) { + ancestorsDescendants.push(descendantKey); + } else { + targetBlock.closestKnownDescendants.push(descendantNode.key); + // keep track of *all* known descendants so we don't bother + // checking if they 
are a known closest descendant later on + allKnownDescendants.push(...descendantNode.closestKnownDescendants); + descendantNode.closestKnownAncestor = targetBlock.key; + // update the descendant node with its newly assigned + // closestKnownAncestor + atomicBatch.put(descendantNode.key, descendantNode.serialize()); + } + }) + ); + + closestAncestor.closestKnownDescendants = ancestorsDescendants; + atomicBatch.put(closestAncestor.key, closestAncestor.serialize()); + } + + // TODO(perf): we always re-save the targetBlock but could optimize to only + // resave if it is needed. + atomicBatch.put(targetBlock.key, targetBlock.serialize()); + + await atomicBatch.write(); + + // we DO want to re-balance the descendants, but we don't want to wait for + // it because it can't affect our current fork block's cache results since + // these caches will be for blocks higher than our own fork block + // Do not `await` this. + this._reBalancePromise = this.reBalanceDescendantTree( + height, + targetBlock, + allKnownDescendants + ) + // we don't care if it fails because this is an optimization that only + // matters for _future_ runs of ganache for blocks beyond our current fork + // block + .catch(_ => {}) + .finally(() => { + this._reBalancePromise = null; + }); + } + + /** + * `reBalancePromise` is used at shutdown to ensure we are done balancing the + * tree + * + */ + public _reBalancePromise: Promise<void> = null; + + async getBlock(height: Quantity) { + return await getBlockByNumber(this.request, height); + } + + async reBalanceDescendantTree( + height: Quantity, + targetBlock: Tree, + allKnownDescendants: Buffer[] + ) { + const atomicBatch = this.ancestorDb.batch(); + const closestKnownDescendants = targetBlock.closestKnownDescendants; + const startSize = closestKnownDescendants.length; + + for await (const maybeDescendant of findClosestDescendants( + this.ancestorDb, + this.request, + height + )) { + const key = maybeDescendant.key; + + // don't match with our own self + if (targetBlock.key.equals(key)) continue; + + // this possible descendant's descendants can't be our direct descendants + // because trees can't merge + allKnownDescendants.push(...maybeDescendant.closestKnownDescendants); + + // if this already is a descendant of ours we can skip it + if (closestKnownDescendants.some(d => d.equals(key))) continue; + + // if this already is a descendant of one of our descendants skip it + if (allKnownDescendants.some(d => d.equals(key))) continue; + + // move the descendant from the parent to the target + const parentTree = Tree.deserialize( + maybeDescendant.closestKnownAncestor, + await this.ancestorDb.get(maybeDescendant.closestKnownAncestor) + ); + parentTree.closestKnownDescendants.splice( + parentTree.closestKnownDescendants.findIndex(d => d.equals(key)), + 1 + ); + maybeDescendant.closestKnownAncestor = targetBlock.key; + closestKnownDescendants.push(maybeDescendant.key); + + atomicBatch.put(parentTree.key, parentTree.serialize()); + atomicBatch.put(maybeDescendant.key, maybeDescendant.serialize()); + + // if the cache has been closed stop doing work so we can flush what we + // have to the database; descendant resolution shouldn't prevent us from + // fully closing. + if (this.status === "closed") { + break; + } + } + + // only write if we have changes to write + if (startSize !== closestKnownDescendants.length) { + atomicBatch.put(targetBlock.key, targetBlock.serialize()); + + // check `this.ancestorDb.isOpen()` as we don't need to try to write if + // the db was shut down in the meantime. 
This can happen if ganache was + // closed while we were still updating the descendants + if (atomicBatch.length > 0 && this.ancestorDb.isOpen()) + await atomicBatch.write(); + } + } + + async get(method: string, params: any[], key: string) { + const blockNumber = getBlockNumberFromParams(method, params); + if (blockNumber == null) return; + + const height = Quantity.from(blockNumber); + const bufKey = Buffer.from(key); + const start = lexico.encode([height.toBuffer(), bufKey]); + const end = lexico.encode([ + Quantity.from(height.toBigInt() + 1n).toBuffer() + ]); + const readStream = this.cacheDb.createReadStream({ + gt: start, + lt: end, + keys: true, + values: true + }); + const hashBuf = this.hash.toBuffer(); + for await (const data of readStream) { + const { key: k, value } = (data as any) as { key: Buffer; value: Buffer }; + const [_height, _key, blockHash] = lexico.decode(k); + // if our key no longer matches make sure we don't keep searching + if (!_key.equals(bufKey)) return; + if (hashBuf.equals(blockHash) || (await this.ancestry.has(blockHash))) { + return value; + } + } + } + + async put(method: string, params: any[], key: string, value: Buffer) { + const blockNumber = getBlockNumberFromParams(method, params); + if (blockNumber == null) return false; + + const height = Quantity.from(blockNumber); + const dbKey = lexico.encode([ + height.toBuffer(), + Buffer.from(key), + this.hash.toBuffer() + ]); + await this.cacheDb.put(dbKey, value); + return true; + } + + private status: "closed" | "open" = "open"; + async close() { + if (this.status === "closed") return; + + this.status = "closed"; + if (this.cacheDb) { + await this.cacheDb.close(); + } + if (this.ancestorDb) { + await this._reBalancePromise; + await this.ancestorDb.close(); + } + if (this.db) { + await this.db.close(); + } + } +} diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts new file mode 100644 index 0000000000..789a4def28 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts @@ -0,0 +1,62 @@ +import * as lexico from "../lexicographic-key-codec"; +import { BUFFER_EMPTY, Data, Quantity } from "@ganache/utils"; +import * as rlp from "@ganache/rlp"; + +/** + * A tree: https://en.wikipedia.org/wiki/Rose_tree + * One parent, multiple children + */ +export class Tree { + public key: Buffer; + public hash: Buffer; + public closestKnownAncestor: Buffer; + public closestKnownDescendants: Buffer[] = []; + + constructor( + height: Quantity, + hash: Data, + closestKnownAncestor: Buffer = BUFFER_EMPTY + ) { + this.key = Tree.encodeKey(height, hash); + this.hash = hash.toBuffer(); + this.closestKnownAncestor = closestKnownAncestor; + } + + public serialize() { + return rlp.encode([ + this.hash, + this.closestKnownAncestor, + this.closestKnownDescendants + ]); + } + + decodeKey() { + return Tree.decodeKey(this.key); + } + + static decodeKey(key: Buffer) { + const [height, hash] = lexico.decode(key); + return { + height: Quantity.from(height), + hash: Data.from(hash) + }; + } + + static deserialize(key: Buffer, value: Buffer) { + const [hash, parent, children] = (rlp.decode(value) as unknown) as [ + Buffer, + Buffer, + Buffer[] + ]; + const tree = Object.create(Tree.prototype) as Tree; + tree.key = key; + tree.hash = hash; + tree.closestKnownAncestor = parent; + tree.closestKnownDescendants = children; + return tree; + } + + static encodeKey(height: Quantity, hash: Data) { + return 
lexico.encode([height.toBuffer(), hash.toBuffer()]); } } diff --git a/src/chains/ethereum/ethereum/src/forking/state-manager.ts b/src/chains/ethereum/ethereum/src/forking/state-manager.ts index d5178a0e83..cc5efa774e 100644 --- a/src/chains/ethereum/ethereum/src/forking/state-manager.ts +++ b/src/chains/ethereum/ethereum/src/forking/state-manager.ts @@ -5,7 +5,6 @@ import AccountManager from "../data-managers/account-manager"; import { ForkCache } from "./cache"; import Common from "@ethereumjs/common"; import { ForkTrie } from "./trie"; -import { SecureTrie as Trie } from "merkle-patricia-tree"; /** * Options for constructing a [[StateManager]]. */ @@ -58,24 +57,27 @@ export class ForkStateManager extends StateManager { async _lookupStorageTrie(address: EJS_Address) { // from state trie const account = await this.getAccount(address); - const storageTrie = this._trie.copy(false) as ForkTrie; + const storageTrie = this._trie.copy(true) as ForkTrie; storageTrie.setContext( account.stateRoot, address.buf, storageTrie.blockNumber ); + // we copy checkpoints over only for the metadata checkpoints, not the trie + // checkpoints. storageTrie.db.checkpoints = []; return storageTrie; } /** - * Gets the storage value associated with the provided `address` and `key`. This method returns - * the shortest representation of the stored value. - * @param address - Address of the account to get the storage for - * @param key - Key in the account's storage to get the value for. Must be 32 bytes long. + * Gets the storage value associated with the provided `address` and `key`. + * This method returns the shortest representation of the stored value. + * @param address - Address of the account to get the storage for + * @param key - Key in the account's storage to get the value for. Must be 32 + * bytes long. * @returns {Promise<Buffer>} - The storage value for the account - * corresponding to the provided address at the provided key. - * If this does not exist an empty `Buffer` is returned. + * corresponding to the provided address at the provided key. If this does not + * exist an empty `Buffer` is returned. */ async getContractStorage(address: EJS_Address, key: Buffer): Promise<Buffer> { const trie = (await this._getStorageTrie(address)) as ForkTrie; diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index e06ff84aa6..d4019dab31 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -2,7 +2,6 @@ import { Address } from "@ganache/ethereum-address"; import { keccak, BUFFER_EMPTY, - BUFFER_ZERO, RPCQUANTITY_EMPTY, Quantity, Data @@ -17,68 +16,48 @@ import * as lexico from "./lexicographic-key-codec"; import { encode } from "@ganache/rlp"; import { Account } from "@ganache/ethereum-utils"; import { KECCAK256_NULL } from "ethereumjs-util"; +type KVP = { key: Buffer; value: Buffer }; +const DELETED_VALUE = Buffer.allocUnsafe(1).fill(1); const GET_CODE = "eth_getCode"; const GET_NONCE = "eth_getTransactionCount"; const GET_BALANCE = "eth_getBalance"; const GET_STORAGE_AT = "eth_getStorageAt"; -const MetadataSingletons = new WeakMap(); +const MetadataSingletons = new WeakMap<LevelUp, LevelUp>(); + const LEVELDOWN_OPTIONS = { keyEncoding: "binary", valueEncoding: "binary" }; -/** - * Commits a checkpoint to disk, if current checkpoint is not nested. - * If nested, only sets the parent checkpoint as current checkpoint. 
- * @throws If not during a checkpoint phase - */ -async function commit(this: CheckpointDB) { - const { keyValueMap } = this.checkpoints.pop(); - if (!this.isCheckpoint) { - // This was the final checkpoint, we should now commit and flush everything to disk - const batchOp = []; - keyValueMap.forEach(function (value, key) { - if (value === null) { - batchOp.push({ - type: "del", - key: Buffer.from(key, "binary") - }); - } else { - batchOp.push({ - type: "put", - key: Buffer.from(key, "binary"), - value - }); - } - }); - await this.batch(batchOp); - } else { - // dump everything into the current (higher level) cache - const currentKeyValueMap = this.checkpoints[this.checkpoints.length - 1] - .keyValueMap; - keyValueMap.forEach((value, key) => currentKeyValueMap.set(key, value)); - } + +function isEqualKey(encodedKey: Buffer, address: Buffer, key: Buffer) { + const decodedKey = lexico.decode(encodedKey); + const [_, keyAddress, deletedKey] = decodedKey; + return keyAddress.equals(address) && deletedKey.equals(key); } export class ForkTrie extends GanacheTrie { private accounts: AccountManager; private address: Buffer | null = null; - public blockNumber: Quantity | null = null; - private metadata: LevelUp; + private isPreForkBlock = false; + private forkBlockNumber: bigint; + public blockNumber: Quantity; + private metadata: CheckpointDB; constructor(db: LevelUp | null, root: Buffer, blockchain: Blockchain) { super(db, root, blockchain); - this.db.commit = commit.bind(this.db); this.accounts = blockchain.accounts; this.blockNumber = this.blockchain.fallback.blockNumber; + this.forkBlockNumber = this.blockNumber.toBigInt(); if (MetadataSingletons.has(db)) { - this.metadata = MetadataSingletons.get(db); + this.metadata = new CheckpointDB(MetadataSingletons.get(db)); } else { - this.metadata = sub(db, "f", LEVELDOWN_OPTIONS); - MetadataSingletons.set(db, this.metadata); + const metadataDb = sub(db, "f", LEVELDOWN_OPTIONS); + MetadataSingletons.set(db, metadataDb); + this.metadata = new CheckpointDB(metadataDb); } } @@ -90,56 +69,116 @@ export class ForkTrie extends GanacheTrie { return (this as any)._root; } + checkpoint() { + super.checkpoint(); + this.metadata.checkpoint(this.root); + } + async commit() { + await Promise.all([super.commit(), this.metadata.commit()]); + } + async revert() { + await Promise.all([super.revert(), this.metadata.revert()]); + } + setContext(stateRoot: Buffer, address: Buffer, blockNumber: Quantity) { (this as any)._root = stateRoot; this.address = address; this.blockNumber = blockNumber; + this.isPreForkBlock = blockNumber.toBigInt() < this.forkBlockNumber; } async put(key: Buffer, val: Buffer): Promise { return super.put(key, val); } + /** + * Removes saved metadata from the given block range (inclusive) + * @param startBlockNumber (inclusive) + * @param endBlockNumber (inclusive) + */ + public async revertMetaData( + startBlockNumber: Quantity, + endBlockNumber: Quantity + ) { + const db = this.metadata._leveldb; + const stream = db.createKeyStream({ + gte: lexico.encode([startBlockNumber.toBuffer()]), + lt: lexico.encode([ + Quantity.from(endBlockNumber.toBigInt() + 1n).toBuffer() + ]) + }); + const batch = db.batch(); + for await (const key of stream) batch.del(key); + await batch.write(); + } + private createDelKey(key: Buffer) { const blockNum = this.blockNumber.toBuffer(); return lexico.encode([blockNum, this.address, key]); } + /** + * Checks if the key was deleted (locally -- not on the fork) + * @param key + */ private async keyWasDeleted(key: Buffer) { - 
return new Promise((resolve, reject) => { - const selfAddress = this.address === null ? BUFFER_EMPTY : this.address; - let wasDeleted = false; - const stream = this.metadata - .createKeyStream({ - lte: this.createDelKey(key), - reverse: true - }) - .on("data", data => { - const delKey = lexico.decode(data); - // const blockNumber = delKey[0]; - const address = delKey[1]; - const deletedKey = delKey[2]; - if (address.equals(selfAddress) && deletedKey.equals(key)) { - wasDeleted = true; - (stream as any).destroy(); - } - }) - .on("close", () => resolve(wasDeleted)) - .on("error", reject); + const selfAddress = this.address === null ? BUFFER_EMPTY : this.address; + // check the uncommitted checkpoints for deleted keys before + // checking the database itself + // TODO(perf): there is probably a better/faster way of doing this for the + // common case. + const checkpoints = this.metadata.checkpoints; + for (let i = checkpoints.length - 1; i >= 0; i--) { + for (let [encodedKeyStr, value] of checkpoints[i].keyValueMap.entries()) { + if (!value || !value.equals(DELETED_VALUE)) continue; + const encodedKey = Buffer.from(encodedKeyStr, "binary"); + if (isEqualKey(encodedKey, selfAddress, key)) return true; + } + } + + // since we didn't find proof of deletion in a checkpoint let's check the + // database for it. + // We start searching from our database key (blockNum + address + key) + // down to the earliest block we know about. + // TODO(perf): this is just going to be slow once we get lots of keys + // because it just checks every single key we've ever deleted (before this + // one). + const stream = this.metadata._leveldb.createReadStream({ + lte: this.createDelKey(key), + reverse: true }); + for await (const data of stream) { + const { key: encodedKey, value } = (data as unknown) as KVP; + if (!value || !value.equals(DELETED_VALUE)) continue; + if (isEqualKey(encodedKey, selfAddress, key)) return true; + } + + // we didn't find proof of deletion so we return `false` + return false; } async del(key: Buffer) { await this.lock.wait(); - const hash = keccak(key); - const delKey = this.createDelKey(key); - const metaDataPutPromise = this.metadata.put(delKey, BUFFER_ZERO); + // we only track if the key was deleted (locally) for state tries _after_ + // the fork block because we can't possibly delete keys _before_ the fork + // block, since those happened before ganache was even started + // This little optimization can cut debug_traceTransaction time _in half_. 
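+ // An illustrative example of the bookkeeping this enables (names as + // defined above): deleting storage key `k` of address `a` at block `n` + // writes DELETED_VALUE under lexico.encode([n, a, k]); `keyWasDeleted(k)` + // can then reverse-scan entries `lte` that encoding and match on address + // and key.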
+ if (!this.isPreForkBlock) { + const delKey = this.createDelKey(key); + const metaDataPutPromise = this.metadata.put(delKey, DELETED_VALUE); + + const hash = keccak(key); + const { node, stack } = await this.findPath(hash); + if (node) await this._deleteNode(hash, stack); - const { node, stack } = await this.findPath(hash); + await metaDataPutPromise; + } else { + const hash = keccak(key); + const { node, stack } = await this.findPath(hash); + if (node) await this._deleteNode(hash, stack); + } - if (node) await this._deleteNode(hash, stack); - await metaDataPutPromise; this.lock.signal(); } @@ -225,12 +264,15 @@ export class ForkTrie extends GanacheTrie { async get(key: Buffer): Promise<Buffer | null> { const value = await super.get(key); - if (value != null) { - return value; - } - if (await this.keyWasDeleted(key)) { - return null; - } + if (value != null) return value; + + // since we don't have this key in our local trie, check if we have + // deleted it (locally) + // we only check if the key was deleted (locally) for state tries _after_ + // the fork block because we can't possibly delete keys _before_ the fork + // block, since those happened before ganache was even started + // This little optimization can cut debug_traceTransaction time _in half_. + if (!this.isPreForkBlock && (await this.keyWasDeleted(key))) return null; if (this.address === null) { // if the trie context's address isn't set, our key represents an address: @@ -243,14 +285,30 @@ export class ForkTrie extends GanacheTrie { /** * Returns a copy of the underlying trie with the interface of ForkTrie. - * @param includeCheckpoints - If true and during a checkpoint, the copy will contain the checkpointing metadata and will use the same scratch as underlying db. + * @param includeCheckpoints - If true and during a checkpoint, the copy will + * contain the checkpointing metadata and will use the same scratch as + * underlying db. */ - copy() { - const db = this.db.copy(); + copy(includeCheckpoints: boolean = true) { + const db = this.db.copy() as CheckpointDB; const secureTrie = new ForkTrie(db._leveldb, this.root, this.blockchain); secureTrie.accounts = this.accounts; secureTrie.address = this.address; secureTrie.blockNumber = this.blockNumber; + if (includeCheckpoints && this.isCheckpoint) { + secureTrie.db.checkpoints = [...this.db.checkpoints]; + + // Our `metadata.checkpoints` needs to be the same reference as the + // parent's metadata.checkpoints so that we can continue to track these + // changes on this copy; otherwise deletions made to a contract's storage + // may not be tracked. + // Note: db.checkpoints don't need this same treatment because of the way + // the statemanager uses a contract's trie: it doesn't ever save to it. + // Instead, it saves to its own internal cache, which eventually gets + // reverted or committed (flushed). Our metadata doesn't utilize a central + // cache.
+ secureTrie.metadata.checkpoints = this.metadata.checkpoints; + } return secureTrie; } } diff --git a/src/chains/ethereum/ethereum/src/forking/types.ts b/src/chains/ethereum/ethereum/src/forking/types.ts index 70acf76f53..0679c70fd6 100644 --- a/src/chains/ethereum/ethereum/src/forking/types.ts +++ b/src/chains/ethereum/ethereum/src/forking/types.ts @@ -1,4 +1,11 @@ +import { PersistentCache } from "./persistent-cache/persistent-cache"; + export interface Handler { - request: (method: string, params: unknown[]) => Promise; + request: ( + method: string, + params: unknown[], + options: { disableCache: boolean } + ) => Promise; + setCache: (cache: PersistentCache) => void; close: () => Promise<void>; } diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts new file mode 100644 index 0000000000..21c620d19b --- /dev/null +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -0,0 +1,174 @@ +const PARTS = Buffer.from('[]{},"":null'); +const SQUARE_BRACKET_PAIR = PARTS.slice(0, 2); +const SQUARE_BRACKET_OPEN = SQUARE_BRACKET_PAIR.slice(0, 1); +const SQUARE_BRACKET_CLOSE = SQUARE_BRACKET_PAIR.slice(1, 2); +const CURLY_BRACKET_PAIR = PARTS.slice(2, 4); +const CURLY_BRACKET_OPEN = CURLY_BRACKET_PAIR.slice(0, 1); +const CURLY_BRACKET_CLOSE = CURLY_BRACKET_PAIR.slice(1, 2); +const COMMA_QUOTE = PARTS.slice(4, 6); +const COMMA = COMMA_QUOTE.slice(0, 1); +const QUOTE_PAIR = PARTS.slice(5, 7); +const QUOTE_COLON = PARTS.slice(6, 8); +const COLON = QUOTE_COLON.slice(1, 2); +const NULL = PARTS.slice(8, 12); +const _EMPTY = PARTS.slice(0, 0); + +const toStr = Object.prototype.toString; +const isObj = (val: any) => toStr.call(val) === "[object Object]"; + +function numberToBuffer(value: number) { + const str = value.toString(); + const { length } = str; + if (length > 0) { + const buf = Buffer.allocUnsafe(length); + (buf as any).utf8Write(str, 0, length); + return buf; + } else { + return _EMPTY; + } +} + +function stringToQuotedBuffer(value: string) { + const { length } = value; + if (length > 0) { + const buf = Buffer.allocUnsafe(length + 2); // + 2 for the quotation marks + buf[0] = 34; // DOUBLE QUOTE + buf[length + 1] = 34; // DOUBLE QUOTE + (buf as any).utf8Write(value, 1, length); + return buf; + } else { + return QUOTE_PAIR; + } +} + +function* arrayToBuffer(value: any[]) { + const { length } = value; + if (length === 0) { + yield SQUARE_BRACKET_PAIR; + return; + } else { + yield SQUARE_BRACKET_OPEN; + // yields the first array value: + for (const chunkified of bufferify(value[0], "0")) { + // if the value ends up being nothing (undefined), yield `null` + yield chunkified.length === 0 ? NULL : chunkified; + } + // yields the rest of the array values: + for (let i = 1; i < length; i++) { + yield COMMA; + for (const chunkified of bufferify(value[i], i.toString())) { + // if the value ends up being nothing (undefined), yield `null` + yield chunkified.length === 0 ?
NULL : chunkified; + } + } + yield SQUARE_BRACKET_CLOSE; + return; + } +} + +function bufferToQuotedBuffer(value: Buffer) { + const { length } = value; + const buf = Buffer.allocUnsafe(length + 2); + buf[0] = 34; + value.copy(buf, 1, 0, length); + buf[length + 1] = 34; + return buf; +} + +function* objectToBuffer(obj: any, nameOrIndex: string) { + if ("toJSON" in obj) { + yield* bufferify(obj.toJSON(nameOrIndex), nameOrIndex); + return; + } + + let yieldedOpen = false; + for (const key in obj) { + const value = obj[key]; + + let yieldPrefix = true; + for (const chunkified of bufferify(value, key)) { + // if the chunkified value ends up being nothing (undefined) ignore + // the property + const chunkLength = chunkified.length; + if (chunkLength === 0) continue; + + // only yield the prefix once per `key` + if (yieldPrefix) { + yieldPrefix = false; + const quotedKey = stringToQuotedBuffer(key); + if (!yieldedOpen) { + yield Buffer.concat([ + CURLY_BRACKET_OPEN, + quotedKey, + COLON, + chunkified + ]); + yieldedOpen = true; + } else { + yield Buffer.concat([COMMA, quotedKey, COLON, chunkified]); + } + } else { + yield chunkified; + } + } + } + + // if we yielded the opening curly bracket we need to close it; otherwise + // yield an empty object + if (yieldedOpen) { + yield CURLY_BRACKET_CLOSE; + return; + } else { + yield CURLY_BRACKET_PAIR; + return; + } +} + +/** + * Converts a JavaScript value to a JavaScript Object Notation (JSON) Buffer + * (utf-8 encoded). + * + * This is a hack. It: + * * Does not support circular references. + * * Does not support double quotes within Object keys; only alphanumerics are + * considered safe to use + * * Probably doesn't support non-ASCII characters + * * Is only tested on transaction traces + * + * Only useful if the `JSON.stringify`ed version would create a string larger + * than what the JavaScript engine can handle. + * + * What is the maximum string size in Node/V8? It depends on the version! Some + * versions are 256MB, some are ~1GB, and others are ~0.5GB. + * See: https://stackoverflow.com/a/47781288/160173 + * + * CAUTION: This method is approx 3 - 20 times slower than using: + * `Buffer.from(JSON.stringify(value), "utf-8")` + * + * @param value A JavaScript value, usually an object or array, to be converted. + * @param nameOrIndex JSON.stringify calls an object's toJSON method, and this + * property is used by internal recursive calls to bufferify.
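+ * (Top-level callers can pass an empty string for `nameOrIndex`; e.g., an + * illustrative use that collects every emitted chunk: + * `Buffer.concat([...bufferify(value, "")])`.)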
+ * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#tojson_behavior + */ +export function* bufferify(value: any, nameOrIndex: string): Generator<Buffer> { + const type = typeof value; + if (type === "number" || type === "boolean") { + yield numberToBuffer(value); + } else if (type === "string") { + yield stringToQuotedBuffer(value); + } else if (Buffer.isBuffer(value)) { + yield bufferToQuotedBuffer(value); + } else if (Array.isArray(value)) { + yield* arrayToBuffer(value); + } else if (isObj(value)) { + yield* objectToBuffer(value, nameOrIndex); + } else if (value === null) { + yield NULL; + } else if (type === "undefined") { + // nothing is returned for undefined + yield _EMPTY; + } else if ("toJSON" in value && typeof value.toJSON === "function") { + yield* bufferify(value.toJSON(), nameOrIndex); + } else { + throw new Error("unsupported value in bufferify"); + } +} diff --git a/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts b/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts deleted file mode 100644 index a9d8864c1c..0000000000 --- a/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts +++ /dev/null @@ -1,29 +0,0 @@ -import VM from "@ethereumjs/vm"; -import { RuntimeBlock } from "@ganache/ethereum-block"; -import { VmTransaction } from "@ganache/ethereum-transaction"; - -/** - * Runs the given transactions, unchecked, through the VM with the given block. - * - * The method does not create a `checkpoint` or `commit`/`revert`. - * - * @param vm - * @param transactions - * @param block - */ -export async function runTransactions( - vm: VM, - transactions: VmTransaction[], - block: RuntimeBlock -) { - for (let i = 0, l = transactions.length; i < l; i++) { - await vm - .runTx({ - tx: transactions[i] as any, - block: block as any - }) - // we ignore transactions that error because we just want to _run_ these, - // transactions just to update the blockchain's state - .catch(() => {}); - } -} diff --git a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts index 94918de187..7f6eea6268 100644 --- a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts +++ b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts @@ -47,11 +47,16 @@ describe("api", () => { const block = await provider.send("eth_getBlockByNumber", [ `0x${numberOfBlocksToMine.toString(16)}` ]); + assert( + block, + `\`block\` is \`null\`; didn't correctly mine ${numberOfBlocksToMine} blocks` + ); assert.strictEqual( block.totalDifficulty, `0x${((numberOfBlocksToMine + 1) * DEFAULT_DIFFICULTY).toString( 16 - )}` + )}`, + `Mined total difficulty, ${block.totalDifficulty}, differs from the sum of the preceding blocks' difficulties.` ); }); }); @@ -81,9 +86,14 @@ describe("api", () => { const block = await provider.send("eth_getBlockByNumber", [ `0x${numberOfBlocksToMine.toString(16)}` ]); + assert( + block, + `\`block\` is \`null\`; didn't correctly mine ${numberOfBlocksToMine} blocks` + ); assert.strictEqual( block.totalDifficulty, - `0x${((numberOfBlocksToMine + 1) * difficulty).toString(16)}` + `0x${((numberOfBlocksToMine + 1) * difficulty).toString(16)}`, + `Mined total difficulty, ${block.totalDifficulty}, differs from the sum of the preceding blocks' difficulties.` ); }); }); diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts new file mode 100644 index 0000000000..80ee62eb66 --- /dev/null
+++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -0,0 +1,83 @@ +import assert from "assert"; +import { Executor, RequestCoordinator } from "@ganache/utils"; +import { Connector } from "../"; + +describe("connector", () => { + const primitives = { + string: "string", + empty: "empty", + one: 1, + zero: 0, + true: true, + false: false, + null: null, + undefined: undefined + }; + const json = { + ...primitives, + // `structLogs` triggers an optimization in the connector + structLogs: [{ ...primitives }, ...Object.values(primitives)], + emptyArray: [], + // notDefined and alsoNotDefined should be removed when JSON stringified/bufferified + trickyObject: { + notDefined: undefined, + defined: true, + alsoNotDefined: undefined + }, + allUndefinedArray: [undefined, undefined, undefined], + allUndefinedObject: { uno: undefined, dos: undefined, tres: undefined }, + trickyArray: [...Object.values(primitives)], + object: { + ...primitives, + emptyObject: {}, + nested: { ...primitives }, + array: [{ ...primitives }, ...Object.values(primitives)] + }, + emptyObject: {} + }; + let connector: Connector; + // an arbitrary payload + // `debug_traceTransaction` triggers an optimization in the connector + const payload = { + jsonrpc: "2.0", + method: "debug_traceTransaction", + id: 1, + params: [] // params don't matter + }; + const expected = JSON.parse( + JSON.stringify({ + jsonrpc: payload.jsonrpc, + id: payload.id, + result: json + }) + ); + beforeEach(async () => { + const requestCoordinator = new RequestCoordinator(0); + const executor = new Executor(requestCoordinator); + connector = new Connector({}, executor); + await connector.connect(); + }); + it("formats results as a string as expected", async () => { + const strResult = connector.format(json, payload) as string; + assert.strictEqual(typeof strResult, "string"); + const result = JSON.parse(strResult); + assert.deepStrictEqual(result, expected); + }); + it("formats results as a Buffer as expected", async () => { + function isGeneratorIterator(arg) { + return arg.constructor === function* () {}.prototype.constructor; + } + // trigger the buffering optimization without having to actually parse + // the amount of data it usually takes + connector.BUFFERIFY_THRESHOLD = 1; + + const bufResult = connector.format(json, payload); + assert(isGeneratorIterator(bufResult)); + let str = ""; + for (const datum of bufResult as any) { + str += datum.toString("utf-8"); + } + const result = JSON.parse(str); + assert.deepStrictEqual(result, expected); + }); +}); diff --git a/src/chains/ethereum/ethereum/tests/forking/account.test.ts b/src/chains/ethereum/ethereum/tests/forking/account.test.ts index 48fd561512..f7938b903e 100644 --- a/src/chains/ethereum/ethereum/tests/forking/account.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/account.test.ts @@ -3,7 +3,9 @@ import getProvider from "../helpers/getProvider"; import EthereumProvider from "../../src/provider"; import request from "superagent"; -describe("forking", () => { +describe("forking", function () { + this.timeout(10000); + describe("accounts", function () { const accountAddress = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"; const blockNumber = 0xb77935; @@ -17,7 +19,8 @@ provider = await getProvider({ fork: { url: URL, - blockNumber + blockNumber, + disableCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/block.test.ts b/src/chains/ethereum/ethereum/tests/forking/block.test.ts index 53ad7c6bd9..1d2216abb7 100644 ---
a/src/chains/ethereum/ethereum/tests/forking/block.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/block.test.ts @@ -3,7 +3,9 @@ import getProvider from "../helpers/getProvider"; import EthereumProvider from "../../src/provider"; import request from "superagent"; -describe("forking", () => { +describe("forking", function () { + this.timeout(10000); + describe("blocks", () => { const blockNumber = 0xb77935; const blockNumHex = `0x${blockNumber.toString(16)}`; @@ -16,7 +18,8 @@ describe("forking", () => { provider = await getProvider({ fork: { url: URL, - blockNumber + blockNumber, + disableCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts new file mode 100644 index 0000000000..6c79a22fee --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -0,0 +1,111 @@ +import * as fc from "fast-check"; + +import * as Arbitrary from "./cache/arbitraries"; + +import { PersistentCache } from "../../src/forking/persistent-cache/persistent-cache"; + +import { Data, Quantity } from "@ganache/utils"; +import { Tree } from "../../src/forking/persistent-cache/tree"; +import assert from "assert"; +import { BatchManager, Ref } from "./cache/batch-manager"; + +describe("forking", () => { + describe("persistent cache", () => { + it("creates relationships between networks correctly", async () => { + const arb = Arbitrary.Networks().chain(model => + fc.record({ + model: fc.constant(model), + batches: Arbitrary.Batches(model) + }) + ); + + let counter = 0; + await fc.assert( + fc.asyncProperty(arb, async ({ model, batches }) => { + counter++; + const dbName = `-test-db-${counter}`; + // make sure this cache doesn't already exist + await PersistentCache.deleteDb(dbName); + try { + const batchManager = new BatchManager(model); + for (const batch of batches) { + const block = batch.input.historicBlock; + const network = model.networks[batch.descendantIndex]; + + const genesisRef = batchManager.getGenesis(network); + const ref = batchManager.getOwnRef(block); + + if (block.number > 0) { + const latestAncestor = batchManager.findLatestAncestor( + batch, + genesisRef + ); + latestAncestor.children.add(ref); + batchManager.fixDescendants( + ref, + network, + genesisRef, + batchManager.collectDescendants(ref) + ); + } + + const cache = await PersistentCache.create(dbName); + await cache.initialize( + Quantity.from(block.number), + Data.from(block.hash), + ((_method: string, params: any[]) => { + return Promise.resolve( + network.getBlockByNumber( + params[0] === "earliest" + ? 
"earliest" + : (parseInt(params[0], 16) as any) + ) + ); + }) as any + ); + + // wait for the descendant re-balance to complete before closing + cache._reBalancePromise && (await cache._reBalancePromise); + await cache.close(); + + const serialized = await PersistentCache.serializeDb(dbName); + + const cacheState: Set = new Set(); + function convertToRefs( + descendants: typeof serialized, + parent: Ref["children"] + ) { + Object.entries(descendants).map(([key, value]) => { + const { height, hash } = Tree.decodeKey( + Buffer.from(key, "hex") + ); + const ref: Ref = { + hash: hash.toString(), + block: { + number: height.toNumber(), + hash: hash.toString() + }, + children: new Set() + }; + parent.add(ref); + if (value.descendants) { + convertToRefs(value.descendants, ref.children); + } + }); + } + convertToRefs(serialized, cacheState); + + assert.deepStrictEqual(batchManager.worldState, cacheState); + } + } finally { + await PersistentCache.deleteDb(dbName); + } + }), + { + numRuns: 50, + endOnFailure: true + } + ); + }).timeout(30000); + }); +}); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts b/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts new file mode 100644 index 0000000000..c4ef7fea0c --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts @@ -0,0 +1,172 @@ +import * as fc from "fast-check"; + +export interface Network { + networkId: number; + getBlockByNumber?(height: number): any; + historicBlock: { + number: number; + hash: string; + }; +} + +export class Model { + private byDescendantIndexThenHeight: Network[][] = []; + + extendNetwork(descendantIndex: number, hash: string) { + const networks = this.byDescendantIndexThenHeight[descendantIndex]; + + const [latest] = networks.slice(-1); + + networks.push({ + ...latest, + historicBlock: { + number: latest.historicBlock.number + 1, + hash + } + }); + } + + addNetwork(network: Network) { + this.byDescendantIndexThenHeight.push([network]); + } + + forkNetwork(descendantIndex: number, leftHash: string, rightHash: string) { + const networks = this.byDescendantIndexThenHeight[descendantIndex]; + + const [latest] = networks.slice(-1); + + this.byDescendantIndexThenHeight.push([ + ...networks, + { + ...latest, + historicBlock: { + number: latest.historicBlock.number + 1, + hash: rightHash + } + } + ]); + + networks.push({ + ...latest, + historicBlock: { + number: latest.historicBlock.number + 1, + hash: leftHash + } + }); + } + + get networks() { + return this.byDescendantIndexThenHeight.map(networks => { + const [latest] = networks.slice(-1); + return { + ...latest, + getBlockByNumber: (height: number | "earliest") => + (height === "earliest" ? 
networks[0] : networks[height] || {}) + .historicBlock + }; + }); + } +} + +const Hash = (): fc.Arbitrary<string> => + fc + .hexaString({ + minLength: 64, + maxLength: 64 + }) + .map(hash => `0x${hash}`); + +const NetworkId = (): fc.Arbitrary<number> => fc.integer({ min: 1 }); + +namespace Commands { + type Command = (model: Model) => void; + + export const AddNetwork = (): fc.Arbitrary<Command> => + fc.tuple(Hash(), NetworkId()).map(([hash, networkId]) => (model: Model) => { + model.addNetwork({ + networkId, + historicBlock: { + number: 0, + hash + } + }); + }); + + export const ExtendNetwork = (): fc.Arbitrary<Command> => + fc.tuple(fc.nat(), Hash()).map(([num, hash]) => (model: Model) => { + const descendantIndex = num % model.networks.length; + model.extendNetwork(descendantIndex, hash); + }); + + export const ForkNetwork = (): fc.Arbitrary<Command> => + fc + .tuple(fc.nat(), Hash(), Hash()) + .map(([num, leftHash, rightHash]) => (model: Model) => { + const descendantIndex = num % model.networks.length; + model.forkNetwork(descendantIndex, leftHash, rightHash); + }); +} + +export const Networks = (): fc.Arbitrary<Model> => + fc + .tuple( + Commands.AddNetwork(), + fc.array( + fc.frequency( + { + arbitrary: Commands.AddNetwork(), + weight: 1 + }, + { + arbitrary: Commands.ExtendNetwork(), + weight: 3 + }, + { + arbitrary: Commands.ForkNetwork(), + weight: 1 + } + ), + { maxLength: 100 } + ) + ) + .map(([addNetwork, commands]) => { + const model = new Model(); + + addNetwork(model); + + for (const command of commands) { + command(model); + } + + return model; + }); + +export interface Batch { + descendantIndex: number; + input: Network; +} + +export const Batch = (model: Model): fc.Arbitrary<Batch> => { + const { networks } = model; + + return fc + .nat({ + max: networks.length * 1000 + }) + .chain(num => { + const descendantIndex = num % model.networks.length; + const network = networks[descendantIndex]; + const maxHeight = network.historicBlock.number; + + return fc.record({ + descendantIndex: fc.constant(descendantIndex), + input: fc.nat({ max: maxHeight }).map(height => ({ + networkId: network.networkId, + historicBlock: network.getBlockByNumber(height) + })) + }); + }); +}; + +export const Batches = (model: Model): fc.Arbitrary<Batch[]> => + fc.array(Batch(model), { maxLength: 10 }); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts b/src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts new file mode 100644 index 0000000000..a02e5a4b1d --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts @@ -0,0 +1,127 @@ +export type Ref = { + hash: string; + block: Network["historicBlock"]; + children: Set<Ref>; +}; + +import * as Arbitrary from "./arbitraries"; +import { Network, Model } from "./arbitraries"; + +export class BatchManager { + public model: Model; + public networkLookup = new Map<string, Ref>(); + public worldState = new Set<Ref>(); + constructor(model: Model) { + this.model = model; + } + getGenesis(network: Network) { + // Get the genesis block and add it to our world state, if needed.
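+ // (Illustrative: a `Ref` is this test's in-memory analogue of the + // persistent cache's `Tree` records, e.g. + // { hash, block: { number, hash }, children: new Set() }.)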
+ const genesis = network.getBlockByNumber(0) as Network["historicBlock"]; + if (!this.networkLookup.has(genesis.hash)) { + const genesisRef: Ref = { + hash: genesis.hash, + block: genesis, + children: new Set() + }; + this.networkLookup.set(genesis.hash, genesisRef); + this.worldState.add(genesisRef); + return genesisRef; + } else { + return this.networkLookup.get(genesis.hash); + } + } + getOwnRef(block: Network["historicBlock"]) { + if (!this.networkLookup.has(block.hash)) { + const ref: Ref = { + hash: block.hash, + block: block, + children: new Set() + }; + // if we don't yet know about this block, add it + this.networkLookup.set(block.hash, ref); + return ref; + } else { + return this.networkLookup.get(block.hash); + } + } + findLatestAncestors(batch: Arbitrary.Batch, parent: Ref): Ref[] { + const block = batch.input.historicBlock; + const network = this.model.networks[batch.descendantIndex]; + const candidates: Ref[] = [parent]; + for (const child of parent.children.values()) { + if (child.hash === block.hash) { + // if the child is the same block as us we must delete it + // because we are figuring this all out again anyway + parent.children.delete(child); + continue; + } + + const networkBlock = network.getBlockByNumber(child.block.number); + const isInNetwork = networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // if the child is in network and comes after us it is + // an eventual *descendant*. continue searching! + if (child.block.number >= block.number) continue; + + // otherwise, it might be our ancestor, keep checking more! + candidates.push(...this.findLatestAncestors(batch, child)); + } + return candidates; + } + + findLatestAncestor(batch: Arbitrary.Batch, parent: Ref) { + // find the ancestor with the highest block number + return this.findLatestAncestors(batch, parent).sort((a, b) => { + if (a.block.number < b.block.number) { + return 1; + } else if (a.block.number === b.block.number) { + return 0; + } else { + return -1; + } + })[0]; + } + + /** + * traverse through all descendants to fix their relationships + * @param block + * @param network + * @param parent + * @param allKnownDescendants + */ + fixDescendants( + block: Ref, + network: Network, + parent: Ref, + allKnownDescendants: Set<string> + ) { + const children = [...parent.children.values()]; + for (const child of children) { + const networkBlock = network.getBlockByNumber(child.block.number); + const isInNetwork = networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // we should move the child if it comes after us + if (child.block.number > block.block.number) { + parent.children.delete(child); + block.children.add(child); + allKnownDescendants.add(child.hash); + } else { + this.fixDescendants(block, network, child, allKnownDescendants); + } + } + } + + /** + * @param of collect descendants of this block + * @param acc an accumulator + */ + collectDescendants(of: Ref, acc = new Set<string>()) { + for (const child of of.children) { + acc.add(child.block.hash); + this.collectDescendants(child, acc); + } + return acc; + } +} diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index a4cae2ffa2..531c5e9392 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -15,7 +15,9 @@ import compile from "../helpers/compile"; import path from "path"; import { CodedError } from "@ganache/ethereum-utils";
-describe("forking", () => { +describe("forking", function () { + this.timeout(10000); + const PORT = 9999; const NETWORK_ID = 1234; const REMOTE_ACCOUNT_COUNT = 15; @@ -111,7 +113,8 @@ describe("forking", () => { }); it("handles invalid JSON-RPC responses", async () => { const { localProvider } = await startLocalChain(port, { - url: `http://0.0.0.0:${port}` + url: `http://0.0.0.0:${port}`, + disableCache: true }); // some bad values to test const junks = [ @@ -148,7 +151,8 @@ describe("forking", () => { () => startLocalChain(PORT, { url: null, - provider: { request: "not a function" } + provider: { request: "not a function" } as any, + disableCache: true }), { message: "Forking `provider` must be EIP-1193 compatible" } ); @@ -156,7 +160,8 @@ describe("forking", () => { () => startLocalChain(PORT, { url: null, - provider: { send: "also not a function" } + provider: { send: "also not a function" } as any, + disableCache: true }), { message: "Forking `provider` must be EIP-1193 compatible" } ); @@ -169,7 +174,8 @@ describe("forking", () => { async () => { const provider = await startLocalChain(PORT, { url: null, - provider: remoteProvider + provider: remoteProvider as any, + disableCache: true }); localProvider = provider.localProvider; } @@ -245,7 +251,8 @@ describe("forking", () => { const provider = await startLocalChain(PORT, { url: null, - provider: remoteProvider + provider: remoteProvider as any, + disableCache: true }); localProvider = provider.localProvider; @@ -314,7 +321,9 @@ describe("forking", () => { describe("initial state", () => { it("should get the Network ID of the forked chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const [remoteNetworkId, localNetworkId] = await Promise.all( [remoteProvider, localProvider].map(p => p.send("net_version", [])) @@ -336,7 +345,8 @@ describe("forking", () => { assert.strictEqual(remoteBlockNumber, 10); const localStartBlockNum = blocks / 2; const { localProvider } = await startLocalChain(PORT, { - blockNumber: localStartBlockNum + blockNumber: localStartBlockNum, + disableCache: true }); const localBlockNumber = parseInt( @@ -359,7 +369,9 @@ describe("forking", () => { describe("block number", () => { let localProvider: EthereumProvider; beforeEach("start local chain", async () => { - ({ localProvider } = await startLocalChain(PORT)); + ({ localProvider } = await startLocalChain(PORT, { + disableCache: true + })); }); it("local block number should be 1 after the remote block on start up", async () => { @@ -379,7 +391,9 @@ describe("forking", () => { }); beforeEach("start local chain", async () => { - ({ localProvider } = await startLocalChain(PORT)); + ({ localProvider } = await startLocalChain(PORT, { + disableCache: true + })); }); it("should return the nonce of each account", async () => { @@ -404,7 +418,9 @@ describe("forking", () => { }); beforeEach("start local chain", async () => { - ({ localProvider, localAccounts } = await startLocalChain(PORT)); + ({ localProvider, localAccounts } = await startLocalChain(PORT, { + disableCache: true + })); }); it("should use `defaultBalanceEther` for balance of the initial accounts on the local chain", async () => { @@ -468,9 +484,35 @@ describe("forking", () => { ]); } + function set(provider: EthereumProvider, key: number, value: number) { + const encodedKey = Quantity.from(key) + .toBuffer() + .toString("hex") + .padStart(64, "0"); + const encodedValue = Quantity.from(value) + 
.toBuffer() + .toString("hex") + .padStart(64, "0"); + + return provider.send("eth_sendTransaction", [ + { + from: remoteAccounts[0], + to: contractAddress, + data: `0x${ + methods[`setValueFor(uint8,uint256)`] + }${encodedKey}${encodedValue}`, + gas: `0x${(3141592).toString(16)}` + } + ]); + } + + async function getBlockNumber(provider: EthereumProvider) { + return parseInt(await provider.send("eth_blockNumber", []), 16); + } + async function getBlockRanges(provider: EthereumProvider) { // our local chain starts at `localBlockNumberStart`. - const blockNum = parseInt(await provider.send("eth_blockNumber", []), 16); + const blockNum = await getBlockNumber(provider); assert.strictEqual( contractBlockNum, 1, @@ -495,19 +537,39 @@ describe("forking", () => { return Promise.all( blockNumsWithCode.map(async blockNum => { const value0 = await get("value0", blockNum); - assert.strictEqual(parseInt(value0, 16), 0); + assert.strictEqual( + parseInt(value0, 16), + 0, + `check failed at value0 block ${blockNum}` + ); const value1 = await get("value1", blockNum); - assert.strictEqual(parseInt(value1, 16), 2); + assert.strictEqual( + parseInt(value1, 16), + 2, + `check failed at value1 block ${blockNum}` + ); const value2 = await get("value2", blockNum); - assert.strictEqual(parseInt(value2, 16), 1); + assert.strictEqual( + parseInt(value2, 16), + 1, + `check failed at value2 block ${blockNum}` + ); const value3 = await get("value3", blockNum); - assert.strictEqual(parseInt(value3, 16), 0); + assert.strictEqual( + parseInt(value3, 16), + 0, + `check failed at value3 block ${blockNum}` + ); const value4 = await get("value4", blockNum); - assert.strictEqual(parseInt(value4, 16), 1); + assert.strictEqual( + parseInt(value4, 16), + 1, + `check failed at value4 block ${blockNum}` + ); }) ); } @@ -586,7 +648,9 @@ describe("forking", () => { }); it("should fetch contract code from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const { blockNumbersWithCode, blockNumbersWithoutCode @@ -616,7 +680,9 @@ describe("forking", () => { }); it("should fetch initial contract data from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const { blockNum, blockNumbersWithCode, @@ -666,39 +732,23 @@ describe("forking", () => { }); it("should fetch changed contract data from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const { blockNum, blockNumbersWithCode, blockNumbersWithoutCode } = await getBlockRanges(localProvider); - function set(key: number, value: number) { - const encodedKey = Quantity.from(key) - .toBuffer() - .toString("hex") - .padStart(64, "0"); - const encodedValue = Quantity.from(value) - .toBuffer() - .toString("hex") - .padStart(64, "0"); - - return localProvider.send("eth_sendTransaction", [ - { - from: remoteAccounts[0], - to: contractAddress, - data: `0x${ - methods[`setValueFor(uint8,uint256)`] - }${encodedKey}${encodedValue}`, - gas: `0x${(3141592).toString(16)}` - } - ]); + function _set(key: number, value: number) { + return set(localProvider, key, value); } const _get = (value: string, blockNum: number) => get(localProvider, value, blockNum); - await 
setAllValuesTo(localProvider, 9, set); + await setAllValuesTo(localProvider, 9, _set); const postNineBlockNum = parseInt( await localProvider.send("eth_blockNumber", []), @@ -717,7 +767,7 @@ describe("forking", () => { await checkRangeForValue(blockNumsAfterNine, nine, _get); // set all values to 0 (the EVM treats this as a "delete") - await setAllValuesTo(localProvider, 0, set); + await setAllValuesTo(localProvider, 0, _set); const postZeroBlockNum = parseInt( await localProvider.send("eth_blockNumber", []), @@ -740,7 +790,7 @@ describe("forking", () => { await checkRangeForValue(blockNumsAfterZero, zero, _get); // set all values to 11 - await setAllValuesTo(localProvider, 11, set); + await setAllValuesTo(localProvider, 11, _set); const postElevenBlockNum = parseInt( await localProvider.send("eth_blockNumber", []), @@ -768,6 +818,119 @@ describe("forking", () => { "0x000000000000000000000000000000000000000000000000000000000000000b"; await checkRangeForValue(blockNumsAfterEleven, eleven, _get); }); + + describe("snapshot/revert", () => { + async function testPermutations( + localProvider: EthereumProvider, + initialValue: number, + snapshotValues: number[] + ) { + for await (const snapshotValue of snapshotValues) { + // set value1 to {snapshotValue} + await set(localProvider, 1, snapshotValue); + const message = await localProvider.once("message"); + const blockNumber = parseInt(message.data.result.number, 16); + const checkValue = await get(localProvider, "value1", blockNumber); + assert.strictEqual( + Quantity.from(checkValue).toNumber(), + snapshotValue, + `Value after snapshot not as expected. Conditions: ${initialValue}, ${JSON.stringify( + snapshotValues + )}. snapshotValue: ${snapshotValue}` + ); //sanity check + } + } + async function initializeSnapshotSetRevertThenTest( + initialValue: number, + snapshotValues: number[] + ) { + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); + const subId = await localProvider.send("eth_subscribe", ["newHeads"]); + + // set value1 to {initialValue} (delete it) + await set(localProvider, 1, initialValue); + const message = await localProvider.once("message"); + const initialBlockNumber = parseInt(message.data.result.number, 16); + assert.strictEqual( + Quantity.from( + await get(localProvider, "value1", initialBlockNumber) + ).toNumber(), + initialValue + ); // sanity check + + const snapId = await localProvider.send("evm_snapshot"); + await testPermutations(localProvider, initialValue, snapshotValues); + await localProvider.send("evm_revert", [snapId]); + + assert.strictEqual( + initialBlockNumber, + await getBlockNumber(localProvider) + ); // sanity check + + assert.strictEqual( + Quantity.from( + await get(localProvider, "value1", initialBlockNumber) + ).toNumber(), + initialValue, + "value was not reverted to `initialValue` after evm_revert" + ); + + // Finally, check all permutations outside of the snapshot/revert to + // make sure deleted state was properly reverted + await testPermutations(localProvider, initialValue, snapshotValues); + + await localProvider.send("eth_unsubscribe", [subId]); + } + + const initialValues = [0, 1]; + // test all permutations of values: 0, 1, 2 + const permutations = [ + [0], + [1], + [2], + [0, 1], + [0, 2], + [1, 0], + [1, 2], + [2, 0], + [2, 1], + [0, 1, 2], + [0, 2, 1], + [1, 0, 2], + [1, 2, 0], + [2, 0, 1], + [2, 1, 0] + ]; + for (const remoteInitialValue of initialValues) { + for (const initialValue of initialValues) { + for (const permutation of permutations) { + 
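+ // each generated case (see the helpers above): seed the remote chain's + // value1 with `remoteInitialValue`, then snapshot/set/revert locally and + // assert storage returns to `initialValue`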
it(`should revert to previous value after snapshot/{change}/revert, fork value: ${remoteInitialValue}, initialValue: ${initialValue}, permutation: ${JSON.stringify( + permutation + )}`, async () => { + const subId = await remoteProvider.send("eth_subscribe", [ + "newHeads" + ]); + // set the remoteProvider's value1 to {remoteInitialValue} + await set(remoteProvider, 1, remoteInitialValue); + const message = await remoteProvider.once("message"); + await remoteProvider.send("eth_unsubscribe", [subId]); + const blockNumber = parseInt(message.data.result.number, 16); + assert.strictEqual( + parseInt(await get(remoteProvider, "value1", blockNumber), 16), + remoteInitialValue + ); // sanity check to make sure our initial conditions are correct + + await initializeSnapshotSetRevertThenTest( + initialValue, + permutation + ); + }); + } + } + } + }); }); describe("blocks", () => { diff --git a/src/chains/ethereum/ethereum/tests/forking/helpers.ts b/src/chains/ethereum/ethereum/tests/forking/helpers.ts index 4a7952e972..b4123858c8 100644 --- a/src/chains/ethereum/ethereum/tests/forking/helpers.ts +++ b/src/chains/ethereum/ethereum/tests/forking/helpers.ts @@ -1,6 +1,6 @@ import getProvider from "../helpers/getProvider"; -import Server from "../../../../../packages/core/lib/src/server"; import EthereumProvider from "../../src/provider"; +import { EthereumProviderOptions } from "@ganache/ethereum-options/typings"; export const logging = { logger: { @@ -48,7 +48,10 @@ ); }; -export const startLocalChain = async (port: number, options?: any) => { +export const startLocalChain = async ( + port: number, + options?: EthereumProviderOptions["fork"] +) => { const localProvider = await getProvider({ logging, fork: { url: `ws://0.0.0.0:${port}`, ...options }, diff --git a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts index fe06d20d68..33edd8952a 100644 --- a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts @@ -4,7 +4,9 @@ import EthereumProvider from "../../src/provider"; import request from "superagent"; describe("forking", () => { - describe("transactions", () => { + describe("transactions", function () { + this.timeout(5000); + const blockNumber = 0xcb6169; const URL = "https://mainnet.infura.io/v3/" + process.env.INFURA_KEY; let provider: EthereumProvider; @@ -15,7 +17,8 @@ provider = await getProvider({ fork: { url: URL, - blockNumber + blockNumber, + disableCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/provider.test.ts b/src/chains/ethereum/ethereum/tests/provider.test.ts index f9559cdbd2..f88cb0ad91 100644 --- a/src/chains/ethereum/ethereum/tests/provider.test.ts +++ b/src/chains/ethereum/ethereum/tests/provider.test.ts @@ -178,49 +178,6 @@ describe("provider", () => { await provider.send("debug_traceTransaction", [hash]); }, controlEvents); }); - it("emits vm:tx:* events for debug_storageRangeAt", async () => { - // README - // This test is slightly different, as we actually send a transaction to the - // contract, and then measure those events, instead of the deployment - // transaction itself.
- - const { - contractAddress - } = await provider.send("eth_getTransactionReceipt", [deploymentHash]); - const initialValue = "0".repeat(62) + "19"; // 25 - // call the setValue method so we have some stuff to trace at the - // deployed contract - let receipt: any; - const controlEvents = await testEvents(async () => { - const subId = await provider.send("eth_subscribe", ["newHeads"]); - const hash = await provider.send("eth_sendTransaction", [ - { - from, - to: contractAddress, - gas: "0x2fefd8", - data: `0x${contract.contract.evm.methodIdentifiers["setValue(uint256)"]}${initialValue}` - } - ]); - await provider.once("message"); - await provider.send("eth_unsubscribe", [subId]); - receipt = await provider.send("eth_getTransactionReceipt", [hash]); - }); - assert(controlEvents.length > 2); - - await testEvents(async () => { - try { - await provider.send("debug_storageRangeAt", [ - receipt.blockHash, - 0, - contractAddress, - "0x00", - 2 - ]); - } catch (e) { - throw e; - } - }, controlEvents); - }); }); it("returns things via EIP-1193", async () => { diff --git a/src/chains/ethereum/ethereum/tests/tsconfig.json b/src/chains/ethereum/ethereum/tests/tsconfig.json index 8cbcfe608a..df4d02e4be 100644 --- a/src/chains/ethereum/ethereum/tests/tsconfig.json +++ b/src/chains/ethereum/ethereum/tests/tsconfig.json @@ -1,5 +1,10 @@ { "extends": "../tsconfig.json", - "include": ["./", "../src/**/*"], - "compilerOptions": { "rootDir": "../" } -} + "include": [ + "./**/*", + "../src/**/*" + ], + "compilerOptions": { + "rootDir": "../" + } +} \ No newline at end of file diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index b7a73c9502..33ddb20e77 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -1,8 +1,9 @@ import { normalize } from "./helpers"; import { Definitions } from "@ganache/options"; import { $INLINE_JSON } from "ts-transformer-inline-file"; -import { Tag } from "@ganache/ethereum-utils"; +import { QUANTITY, Tag } from "@ganache/ethereum-utils"; import { URL } from "url"; +import { Quantity } from "@ganache/utils"; const { version } = $INLINE_JSON("../../../../packages/ganache/package.json"); // we aren't going to treat block numbers as a bigint, so we don't want to @@ -71,6 +72,15 @@ export type ForkConfig = { }; }; + /** + * When the `fork.blockNumber` is set to "latest" (default), the number of + * blocks before the remote node's "latest" block to fork from. + */ + preLatestConfirmations: { + type: number; + hasDefault: true; + }; + /** * Username to use for Basic Authentication. Does not require setting `fork.password`. * @@ -160,6 +170,26 @@ export type ForkConfig = { type: number; hasDefault: true; }; + + /** + * Disables caching of all forking requests. + * + * @default false + */ + disableCache: { + type: boolean; + hasDefault: true; + }; + + /** + * Deletes the persistent cache on start up. 
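+ * + * For example (assuming ganache's namespaced fork flags), starting with + * `ganache --fork.url <url> --fork.deleteCache` would remove any previously + * persisted cache entries before forking.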
+   *
+   * @default false
+   */
+    deleteCache: {
+      type: boolean;
+      hasDefault: true;
+    };
   };
 };
@@ -270,9 +300,17 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.`
        return;
      }
    },
-    defaultDescription: `"${Tag.LATEST}"`
+    defaultDescription: `Latest block number`
    //implies: ["url"]
  },
+  preLatestConfirmations: {
+    normalize,
+    cliDescription:
+      'When the `fork.blockNumber` is set to "latest" (default), the number of blocks before the remote node\'s "latest" block to fork from.',
+    default: () => 5,
+    defaultDescription: "5",
+    cliType: "number"
+  },
  username: {
    normalize,
    cliDescription: `* Username to use for Basic Authentication. Does not require setting \`fork.password\`.
@@ -365,5 +403,17 @@ Defaults to: \`["User-Agent: Ganache/VERSION (https://www.trufflesuite.com/ganac
      "Restrict the number of requests per second sent to the fork provider. `0` means no limit is applied.",
    cliType: "number"
    //implies: ["url"]
+  },
+  disableCache: {
+    normalize,
+    default: () => false,
+    cliDescription: "Disables caching of all forking requests.",
+    cliType: "boolean"
+  },
+  deleteCache: {
+    normalize,
+    default: () => false,
+    cliDescription: "Deletes the persistent cache before starting.",
+    cliType: "boolean"
  }
};
diff --git a/src/chains/ethereum/transaction/tests/index.test.ts b/src/chains/ethereum/transaction/tests/index.test.ts
index db6622b397..e9fe2db8db 100644
--- a/src/chains/ethereum/transaction/tests/index.test.ts
+++ b/src/chains/ethereum/transaction/tests/index.test.ts
@@ -18,7 +18,6 @@ import Wallet from "../../ethereum/src/wallet";
 import { decode } from "@ganache/rlp";
 import { EthereumOptionsConfig } from "../../options";
 import { BUFFER_EMPTY, Quantity } from "@ganache/utils";
-import { Buffer } from "buffer";
 
 describe("@ganache/ethereum-transaction", async () => {
   const common = Common.forCustomChain(
diff --git a/src/chains/filecoin/filecoin/package-lock.json b/src/chains/filecoin/filecoin/package-lock.json
index bdeb1d0368..8df442ab2b 100644
--- a/src/chains/filecoin/filecoin/package-lock.json
+++ b/src/chains/filecoin/filecoin/package-lock.json
@@ -830,22 +830,14 @@
      "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="
    },
    "@trufflesuite/uws-js-unofficial": {
-      "version": "18.14.0-unofficial.12",
-      "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz",
-      "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==",
+      "version": "20.4.0-unofficial.2",
+      "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz",
+      "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==",
      "dev": true,
      "requires": {
-        "bufferutil": "4.0.3",
-        "utf-8-validate": "5.0.5",
-        "ws": "^8.2.1"
-      },
-      "dependencies": {
-        "ws": {
-          "version": "8.2.1",
-          "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz",
-          "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==",
-          "dev": true
-        }
+        "bufferutil": "4.0.5",
+        "utf-8-validate": "5.0.7",
+        "ws": "8.2.3"
      }
    },
    "@types/abstract-leveldown": {
@@ -1820,13 +1812,22 @@
      "dev": true
    },
    "bufferutil": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz",
-      "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==",
+      "version": "4.0.5",
+      "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz",
+      "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==",
      "dev": true,
      "optional": true,
      "requires": {
-        "node-gyp-build": "^4.2.0"
+        "node-gyp-build": "^4.3.0"
+      },
+      "dependencies": {
+        "node-gyp-build": {
+          "version": "4.3.0",
+          "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz",
+          "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==",
+          "dev": true,
+          "optional": true
+        }
      }
    },
    "bufio": {
@@ -6390,8 +6391,7 @@
      "requires": {
        "buffer": "^5.6.0",
        "event-iterator": "^2.0.0",
-        "relative-url": "^1.0.2",
-        "ws": "^7.3.1"
+        "relative-url": "^1.0.2"
      }
    },
    "iterable-ndjson": {
@@ -10656,13 +10656,22 @@
      }
    },
    "utf-8-validate": {
-      "version": "5.0.5",
-      "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz",
-      "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==",
+      "version": "5.0.7",
+      "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz",
+      "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==",
      "dev": true,
      "optional": true,
      "requires": {
-        "node-gyp-build": "^4.2.0"
+        "node-gyp-build": "^4.3.0"
+      },
+      "dependencies": {
+        "node-gyp-build": {
+          "version": "4.3.0",
+          "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz",
+          "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==",
+          "dev": true,
+          "optional": true
+        }
      }
    },
    "utf8-byte-length": {
@@ -11029,9 +11038,10 @@
      }
    },
    "ws": {
-      "version": "7.5.3",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
-      "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg=="
+      "version": "8.2.3",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz",
+      "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==",
+      "dev": true
    },
    "xdg-basedir": {
      "version": "4.0.0",
diff --git a/src/chains/filecoin/filecoin/package.json b/src/chains/filecoin/filecoin/package.json
index 453168eb67..b0a2eb14da 100644
--- a/src/chains/filecoin/filecoin/package.json
+++ b/src/chains/filecoin/filecoin/package.json
@@ -59,7 +59,7 @@
    "@filecoin-shipyard/lotus-client-schema": "2.0.0",
    "@ganache/filecoin-options": "0.1.1-alpha.1",
    "@ganache/utils": "0.1.1-alpha.1",
-    "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12",
+    "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2",
    "@types/bn.js": "5.1.0",
    "@types/deep-equal": "1.0.1",
    "@types/levelup": "4.3.0",
@@ -100,6 +100,6 @@
    "typescript": "4.1.3",
    "webpack": "5.21.2",
    "webpack-cli": "4.5.0",
-    "ws": "7.5.3"
+    "ws": "8.2.3"
  }
}
diff --git a/src/chains/tezos/tezos/package-lock.json b/src/chains/tezos/tezos/package-lock.json
index 130358bf25..0eae6d0cdc 100644
--- a/src/chains/tezos/tezos/package-lock.json
+++ b/src/chains/tezos/tezos/package-lock.json
@@ -32,14 +32,14 @@
      }
    },
    "@trufflesuite/uws-js-unofficial": {
-      "version": "18.14.0-unofficial.12",
-      "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz",
-      "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==",
+      "version": "20.4.0-unofficial.2",
+      "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz",
+      "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==",
      "dev": true,
      "requires": {
-        "bufferutil": "4.0.3",
-        "utf-8-validate": "5.0.5",
-        "ws": "^8.2.1"
+        "bufferutil": "4.0.5",
+        "utf-8-validate": "5.0.7",
+        "ws": "8.2.3"
      }
    },
    "@types/mocha": {
@@ -244,13 +244,13 @@
      "dev": true
    },
    "bufferutil": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz",
-      "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==",
+      "version": "4.0.5",
+      "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz",
+      "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==",
      "dev": true,
      "optional": true,
      "requires": {
-        "node-gyp-build": "^4.2.0"
+        "node-gyp-build": "^4.3.0"
      }
    },
    "byte-size": {
@@ -1892,9 +1892,9 @@
      "dev": true
    },
    "node-gyp-build": {
-      "version": "4.2.3",
-      "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz",
-      "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==",
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz",
+      "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==",
      "dev": true,
      "optional": true
    },
@@ -2558,13 +2558,13 @@
      "dev": true
    },
    "utf-8-validate": {
-      "version": "5.0.5",
-      "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz",
-      "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==",
+      "version": "5.0.7",
+      "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz",
+      "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==",
      "dev": true,
      "optional": true,
      "requires": {
-        "node-gyp-build": "^4.2.0"
+        "node-gyp-build": "^4.3.0"
      }
    },
    "util-deprecate": {
@@ -2715,9 +2715,9 @@
      "dev": true
    },
    "ws": {
-      "version": "8.2.1",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz",
-      "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==",
+      "version": "8.2.3",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz",
+      "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==",
      "dev": true
    },
    "y18n": {
diff --git a/src/chains/tezos/tezos/package.json b/src/chains/tezos/tezos/package.json
index fc45e5eccf..56944ce64d 100644
--- a/src/chains/tezos/tezos/package.json
+++ b/src/chains/tezos/tezos/package.json
@@ -48,7 +48,7 @@
  },
  "devDependencies": {
    "@trufflesuite/typedoc-default-themes": "0.6.1",
-    "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12",
+    "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2",
    "@types/mocha": "8.2.2",
    "cheerio": "1.0.0-rc.3",
    "cross-env": "7.0.3",
diff --git a/src/packages/core/package-lock.json b/src/packages/core/package-lock.json
index cca91ade70..1c82063d7f 100644
--- a/src/packages/core/package-lock.json
+++ b/src/packages/core/package-lock.json
@@ -421,20 +421,13 @@
      "dev": true
    },
"@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" - }, - "dependencies": { - "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==" - } + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" } }, "@types/cookiejar": { @@ -619,12 +612,12 @@ "dev": true }, "bufferutil": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz", - "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" } }, "caching-transform": { @@ -1684,9 +1677,9 @@ "dev": true }, "node-gyp-build": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", - "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", "optional": true }, "node-preload": { @@ -2462,12 +2455,12 @@ } }, "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" } }, "util-deprecate": { @@ -2588,10 +2581,9 @@ } }, "ws": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", - "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", - "dev": true + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==" }, "y18n": { "version": "5.0.8", diff --git a/src/packages/core/package.json b/src/packages/core/package.json index 1a4ad63ca5..409ff71a23 100644 --- a/src/packages/core/package.json +++ b/src/packages/core/package.json @@ -53,7 +53,7 @@ "@ganache/options": "0.1.1-alpha.1", "@ganache/tezos": "0.1.1-alpha.1", "@ganache/utils": "0.1.1-alpha.1", - 
"@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "aggregate-error": "3.1.0", "emittery": "0.8.1", "promise.allsettled": "1.0.4" @@ -69,6 +69,6 @@ "ts-node": "9.1.1", "ttypescript": "1.5.12", "typescript": "4.1.3", - "ws": "7.5.3" + "ws": "8.2.3" } } diff --git a/src/packages/core/src/server.ts b/src/packages/core/src/server.ts index e00d5b0418..43cb4d4280 100644 --- a/src/packages/core/src/server.ts +++ b/src/packages/core/src/server.ts @@ -16,12 +16,19 @@ import allSettled from "promise.allsettled"; allSettled.shim(); import AggregateError from "aggregate-error"; +import type { + TemplatedApp, + us_listen_socket +} from "@trufflesuite/uws-js-unofficial"; import { App, - TemplatedApp, - us_listen_socket, - us_listen_socket_close + us_listen_socket_close, + _cfg as setUwsGlobalConfig } from "@trufflesuite/uws-js-unofficial"; + +// Set the "silent" config option so we don't output the "uwebsockets" header +setUwsGlobalConfig(new Uint8Array([115, 105, 108, 101, 110, 116]) as any); + import { Connector, ConnectorsByName, diff --git a/src/packages/core/src/servers/http-server.ts b/src/packages/core/src/servers/http-server.ts index f34bef4664..5b7a361242 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -8,6 +8,7 @@ import ContentTypes from "./utils/content-types"; import HttpResponseCodes from "./utils/http-response-codes"; import { Connector } from "@ganache/flavors"; import { InternalOptions } from "../options"; +import { types } from "util"; type HttpMethods = "GET" | "OPTIONS" | "POST"; @@ -71,17 +72,21 @@ function prepareCORSResponseHeaders(method: HttpMethods, request: HttpRequest) { function sendResponse( response: HttpResponse, statusCode: HttpResponseCodes, - contentType?: RecognizedString, - data?: RecognizedString, + contentType: RecognizedString | null, + data: RecognizedString | null, writeHeaders: (response: HttpResponse) => void = noop ): void { response.cork(() => { response.writeStatus(statusCode); writeHeaders(response); - if (contentType) { + if (contentType != null) { response.writeHeader("Content-Type", contentType); } - response.end(data); + if (data != null) { + response.end(data); + } else { + response.end(); + } }); } @@ -124,7 +129,7 @@ export default class HttpServer { "400 Bad Request" ); } else { - // all other requests don't mean anything to us, so respond with `404 NOT FOUND`... + // all other requests don't mean anything to us, so respond with `404 Not Found`... 
        sendResponse(
          response,
          HttpResponseCodes.NOT_FOUND,
@@ -180,13 +185,26 @@
            return;
          }
          const data = connector.format(result, payload);
-          sendResponse(
-            response,
-            HttpResponseCodes.OK,
-            ContentTypes.JSON,
-            data,
-            writeHeaders
-          );
+          if (types.isGeneratorObject(data)) {
+            response.cork(() => {
+              response.writeStatus(HttpResponseCodes.OK);
+              writeHeaders(response);
+              response.writeHeader("Content-Type", ContentTypes.JSON);
+
+              for (const datum of data) {
+                response.write(datum as RecognizedString);
+              }
+              response.end();
+            });
+          } else {
+            sendResponse(
+              response,
+              HttpResponseCodes.OK,
+              ContentTypes.JSON,
+              data,
+              writeHeaders
+            );
+          }
        })
        .catch(error => {
          if (aborted) {
@@ -220,8 +238,8 @@
      sendResponse(
        response,
        HttpResponseCodes.NO_CONTENT,
-        void 0,
-        "",
+        null,
+        null,
        writeHeaders
      );
    };
diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts
index be99378f20..148bb9f693 100644
--- a/src/packages/core/src/servers/ws-server.ts
+++ b/src/packages/core/src/servers/ws-server.ts
@@ -6,7 +6,9 @@ import {
 import WebSocketCloseCodes from "./utils/websocket-close-codes";
 import { InternalOptions } from "../options";
 import * as Flavors from "@ganache/flavors";
-import { PromiEvent } from "@ganache/utils";
+import { hasOwn, PromiEvent } from "@ganache/utils";
+import { isGeneratorFunction, isGeneratorObject } from "util/types";
+import { types } from "util";
 
 type MergePromiseT<T> = Promise<T extends Promise<infer X> ? X : never>;
@@ -76,7 +78,7 @@
          return;
        }
 
-        let response: RecognizedString;
+        let data: RecognizedString | Generator;
        try {
          const { value } = await connector.handle(payload, ws);
@@ -89,7 +91,7 @@
          const result = await resultEmitter;
          if (ws.closed) return;
 
-          response = connector.format(result, payload);
+          data = connector.format(result, payload);
 
          // if the result is an emitter listen to its `"message"` event
          // We check if `on` is a function rather than check if
@@ -117,10 +119,44 @@
          // ensure the connector's `handle` fn doesn't throw outside of a Promise
          if (ws.closed) return;
-          response = connector.formatError(err, payload);
+          data = connector.formatError(err, payload);
        }
 
-        ws.send(response, useBinary);
+        if (types.isGeneratorObject(data)) {
+          const localData = data;
+          ws.cork(() => {
+            const { value: first } = localData.next();
+
+            // get the second fragment, if there is one
+            // Note: we lag behind by one fragment because the last fragment
+            // needs to be sent via the `sendLastFragment` method.
+            // This value acts as a lookahead so we know if we are at the last
+            // value or not.
+            let { value: next, done } = localData.next();
+
+            // if there wasn't a second fragment, just send it the usual way.
+            if (done) {
+              ws.send(first, useBinary);
+            } else {
+              // fragment send: https://github.com/uNetworking/uWebSockets.js/issues/635
+              const shouldCompress = false;
+
+              // send the first fragment
+              ws.sendFirstFragment(first, useBinary, shouldCompress);
+
+              // Now send the rest of the data piece by piece.
+              let prev = next;
+              for (next of localData) {
+                ws.sendFragment(prev, shouldCompress);
+                prev = next;
+              }
+              // finally, send the last fragment
+              ws.sendLastFragment(next, shouldCompress);
+            }
+          });
+        } else {
+          ws.send(data as RecognizedString, useBinary);
+        }
      },
 
      drain: (ws: WebSocket) => {
diff --git a/src/packages/core/tests/server.test.ts b/src/packages/core/tests/server.test.ts
index 59e9763a0b..10bc9cf303 100644
--- a/src/packages/core/tests/server.test.ts
+++ b/src/packages/core/tests/server.test.ts
@@ -17,7 +17,7 @@
 import intoStream = require("into-stream");
 import { PromiEvent } from "@ganache/utils";
 import { promisify } from "util";
 import { ServerOptions } from "../src/options";
-import { Provider as EthereumProvider } from "@ganache/ethereum";
+import { Connector, Provider as EthereumProvider } from "@ganache/ethereum";
 
 const IS_WINDOWS = process.platform === "win32";
@@ -66,6 +66,9 @@
        .send(jsonRpcJson);
      assert.strictEqual(response.status, 200);
 
+      // make sure we aren't including the uwebsockets header
+      assert.strictEqual("uwebsockets" in response.headers, false);
+
      const json = JSON.parse(response.text);
      assert.strictEqual(json.result, `${networkId}`);
      return response;
@@ -373,14 +376,14 @@
      }
    });
 
-    it("handles chunked requests (note: doesn't test `transfer-encoding: chunked`)", async () => {
+    it("handles chunked requests (note: doesn't test sending with `transfer-encoding: chunked`)", async () => {
      await setup();
      try {
        const req = request.post("http://localhost:" + port);
        const json = JSON.stringify(jsonRpcJson);
 
        // we have to set the content-length because we can't use
-        // `Transfer-Encoding: chunked` with uWebSockets.js as of v15.9.0
+        // `Transfer-Encoding: chunked` when sending to uWebSockets.js as of v15.9.0
        req.set("Content-Length", json.length.toString());
 
        await new Promise((resolve, reject) => {
@@ -403,6 +406,52 @@
      }
    });
 
+    it("responds with transfer-encoding: chunked responses when bufferification is triggered", async () => {
+      const originalThreshold = Connector.BUFFERIFY_THRESHOLD;
+      // This will trigger bufferification in the Ethereum connector
+      // for calls to debug_traceTransaction that return structLogs that have a
+      // length greater than BUFFERIFY_THRESHOLD
+      Connector.BUFFERIFY_THRESHOLD = 0;
+
+      try {
+        await setup();
+        const [from] = await s.provider.send("eth_accounts");
+        await s.provider.send("eth_subscribe", ["newHeads"]);
+
+        const ops = [
+          { op: "PUSH1", code: "60", data: "00" },
+          { op: "PUSH1", code: "60", data: "00" },
+          { op: "RETURN", code: "f3", data: "" }
+        ];
+        // a silly "contract" we can trace later: PUSH 0, PUSH 0, RETURN
+        const data = "0x" + ops.map(op => op.code + op.data).join("");
+        const hash = s.provider.send("eth_sendTransaction", [{ from, data }]);
+        await s.provider.once("message");
+
+        // send a `debug_traceTransaction` request to the *server* so we can
+        // test for `transfer-encoding: chunked` and bufferification.
+        const jsonRpcJson: any = {
+          jsonrpc: "2.0",
+          id: "1",
+          method: "debug_traceTransaction",
+          params: [await hash]
+        };
+
+        const { text, header, status } = await request
+          .post("http://localhost:" + port)
+          .send(jsonRpcJson);
+        const { result } = JSON.parse(text);
+
+        assert.strictEqual(header["transfer-encoding"], "chunked");
+        assert.strictEqual(header["content-type"], "application/json");
+        assert.strictEqual(status, 200);
+        assert.strictEqual(result.structLogs.length, ops.length);
+      } finally {
+        Connector.BUFFERIFY_THRESHOLD = originalThreshold;
+        await teardown();
+      }
+    });
+
    it("returns 200/OK for RPC errors over HTTP", async () => {
      await setup();
      const jsonRpcJson: any = {
@@ -641,7 +690,12 @@
        origin
      );
      assert.strictEqual(resp.header["access-control-max-age"], "600");
-      assert.strictEqual(resp.header["content-length"], "0");
+      // TODO: enable this check once https://github.com/uNetworking/uWebSockets/issues/1370 is fixed
+      // assert.strictEqual(
+      //   "content-length" in resp.header,
+      //   false,
+      //   "RFC 7230: A server MUST NOT send a Content-Length header field in any response with a status code of 1xx (Informational) or 204 (No Content)"
+      // );
      assert.strictEqual(
        resp.header["access-control-allow-credentials"],
        "true"
      );
@@ -678,18 +732,13 @@
    it("returns the net_version over a websocket", async () => {
      const ws = new WebSocket("ws://localhost:" + port);
 
-      const response: any = await new Promise(resolve => {
+      const { data }: any = await new Promise(resolve => {
        ws.on("open", () => {
          ws.send(JSON.stringify(jsonRpcJson));
        });
-        ws.on("message", resolve);
+        ws.on("message", (data, isBinary) => resolve({ data, isBinary }));
      });
-      assert.strictEqual(
-        typeof response,
-        "string",
-        "response doesn't seem to be a string as expected"
-      );
-      const json = JSON.parse(response);
+      const json = JSON.parse(data);
      assert.strictEqual(json.result, `${networkId}`);
    });
@@ -922,6 +971,60 @@
      });
    });
 
+    it("responds with transfer-encoding: chunked responses when bufferification is triggered", async () => {
+      // this test needs to set BUFFERIFY_THRESHOLD before starting the server
+      await teardown();
+
+      const originalThreshold = Connector.BUFFERIFY_THRESHOLD;
+      // This will trigger bufferification in the Ethereum connector
+      // for calls to debug_traceTransaction that return structLogs that have a
+      // length greater than BUFFERIFY_THRESHOLD
+      Connector.BUFFERIFY_THRESHOLD = 0;
+
+      try {
+        await setup();
+        const [from] = await s.provider.send("eth_accounts");
+        await s.provider.send("eth_subscribe", ["newHeads"]);
+
+        const ops = [
+          { op: "PUSH1", code: "60", data: "00" },
+          { op: "PUSH1", code: "60", data: "00" },
+          { op: "RETURN", code: "f3", data: "" }
+        ];
+        // a silly "contract" we can trace later: PUSH 0, PUSH 0, RETURN
+        const data = "0x" + ops.map(op => op.code + op.data).join("");
+        const hash = s.provider.send("eth_sendTransaction", [{ from, data }]);
+        await s.provider.once("message");
+
+        // send a `debug_traceTransaction` request to the *server* so we can
+        // test fragmented (chunked) websocket responses and bufferification.
+        const jsonRpcJson: any = {
+          jsonrpc: "2.0",
+          id: "1",
+          method: "debug_traceTransaction",
+          params: [await hash]
+        };
+
+        const ws = new WebSocket("ws://localhost:" + port);
+        ws.binaryType = "fragments";
+        const response: any = await new Promise(resolve => {
+          ws.on("open", () => {
+            ws.send(Buffer.from(JSON.stringify(jsonRpcJson)), {
+              binary: true
+            });
+          });
+          ws.on("message", resolve);
+        });
+
+        assert.strictEqual(Array.isArray(response), true);
+        const { result } = JSON.parse(Buffer.concat(response));
+        assert.strictEqual(result.structLogs.length, ops.length);
+      } finally {
+        Connector.BUFFERIFY_THRESHOLD = originalThreshold;
+        await teardown();
+      }
+    }).timeout(0);
+
    describe("max payload size", () => {
      let ws: WebSocket;
      beforeEach(() => {
diff --git a/src/packages/ganache/npm-shrinkwrap.json b/src/packages/ganache/npm-shrinkwrap.json
index 75b7de5922..1bf74f5fae 100644
--- a/src/packages/ganache/npm-shrinkwrap.json
+++ b/src/packages/ganache/npm-shrinkwrap.json
@@ -1,6 +1,6 @@
 {
  "name": "ganache",
-  "version": "7.0.0-alpha.0",
+  "version": "7.0.0-alpha.1",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
@@ -1510,6 +1510,12 @@
      "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=",
      "dev": true
    },
+    "isomorphic-ws": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz",
+      "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==",
+      "dev": true
+    },
    "jest-worker": {
      "version": "26.6.2",
      "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-26.6.2.tgz",
diff --git a/src/packages/ganache/package.json b/src/packages/ganache/package.json
index 0df1241824..62cd6f50dc 100644
--- a/src/packages/ganache/package.json
+++ b/src/packages/ganache/package.json
@@ -62,6 +62,7 @@
    "cross-env": "7.0.3",
    "crypto-browserify": "3.12.0",
    "events": "3.2.0",
+    "isomorphic-ws": "4.0.1",
    "level-js": "5.0.2",
    "mcl-wasm": "0.7.8",
    "mocha": "8.4.0",
diff --git a/src/packages/ganache/webpack/webpack.browser.config.ts b/src/packages/ganache/webpack/webpack.browser.config.ts
index 5c0e4ff9f5..ecaefbc2e5 100644
--- a/src/packages/ganache/webpack/webpack.browser.config.ts
+++ b/src/packages/ganache/webpack/webpack.browser.config.ts
@@ -24,7 +24,7 @@ const config: webpack.Configuration = merge({}, base, {
    alias: {
      "tmp-promise": require.resolve("./polyfills/browser-tmp-promise"),
      "bigint-buffer": require.resolve("./polyfills/browser-bigint-buffer"),
-      "crypto": require.resolve("./polyfills/browser-crypto"),
+      crypto: require.resolve("./polyfills/browser-crypto"),
      // replace leveldown with a browser version
      leveldown: require.resolve("level-js/"),
      // browser version can't start a server, so just remove the websocket server since it can't work anyway
@@ -33,7 +33,9 @@
      // `url` is already a global property in browser
      url: false,
      // mcl-wasm may be needed when creating a new @ethereumjs/vm and requires a browser version for browsers
-      "mcl-wasm": require.resolve("mcl-wasm/browser")
+      "mcl-wasm": require.resolve("mcl-wasm/browser"),
+      // ws doesn't work in the browser, isomorphic-ws does
+      ws: require.resolve("isomorphic-ws/")
    }
  },
  output: {
diff --git a/src/packages/utils/package-lock.json b/src/packages/utils/package-lock.json
index 417bdc575d..3f62c28a25 100644
--- a/src/packages/utils/package-lock.json
+++ b/src/packages/utils/package-lock.json
@@ -5,14 +5,14 @@
  "requires": true,
  "dependencies": {
    "@trufflesuite/uws-js-unofficial": {
-      "version": "18.14.0-unofficial.12",
-      "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz",
-      "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==",
+      "version": "20.4.0-unofficial.2",
+      "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz",
+      "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==",
      "dev": true,
      "requires": {
-        "bufferutil": "4.0.3",
-        "utf-8-validate": "5.0.5",
-        "ws": "^8.2.1"
+        "bufferutil": "4.0.5",
+        "utf-8-validate": "5.0.7",
+        "ws": "8.2.3"
      }
    },
    "@types/mocha": {
@@ -138,13 +138,22 @@
      "dev": true
    },
    "bufferutil": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz",
-      "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==",
+      "version": "4.0.5",
+      "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz",
+      "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==",
      "dev": true,
      "optional": true,
      "requires": {
-        "node-gyp-build": "^4.2.0"
+        "node-gyp-build": "^4.3.0"
+      },
+      "dependencies": {
+        "node-gyp-build": {
+          "version": "4.3.0",
+          "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz",
+          "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==",
+          "dev": true,
+          "optional": true
+        }
      }
    },
    "camelcase": {
@@ -850,13 +859,22 @@
      "dev": true
    },
    "utf-8-validate": {
-      "version": "5.0.5",
-      "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz",
-      "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==",
+      "version": "5.0.7",
+      "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz",
+      "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==",
      "dev": true,
      "optional": true,
      "requires": {
-        "node-gyp-build": "^4.2.0"
+        "node-gyp-build": "^4.3.0"
+      },
+      "dependencies": {
+        "node-gyp-build": {
+          "version": "4.3.0",
+          "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz",
+          "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==",
+          "dev": true,
+          "optional": true
+        }
      }
    },
    "which": {
@@ -935,9 +953,9 @@
      "dev": true
    },
    "ws": {
-      "version": "8.2.1",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz",
-      "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==",
+      "version": "8.2.3",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz",
+      "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==",
      "dev": true
    },
    "y18n": {
diff --git a/src/packages/utils/package.json b/src/packages/utils/package.json
index aae300f885..3af21a3fe5 100644
--- a/src/packages/utils/package.json
+++ b/src/packages/utils/package.json
@@ -51,7 +51,7 @@
    "seedrandom": "3.0.5"
  },
  "devDependencies": {
-    "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12",
+    "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2",
    "@types/mocha": "8.2.2",
    "@types/seedrandom": "3.0.1",
    "cross-env": "7.0.3",
diff --git a/src/packages/utils/src/types/connector.ts b/src/packages/utils/src/types/connector.ts
index cee85e97d0..aefeab3da6 100644
--- a/src/packages/utils/src/types/connector.ts
+++ b/src/packages/utils/src/types/connector.ts
@@ -58,6 +58,10 @@ export interface Connector<
    * @param response
    * @param payload
    */
+  format(
+    result: ResponseFormat,
+    payload: RequestFormat
+  ): RecognizedString | Generator;
  format(result: ResponseFormat, payload: RequestFormat): RecognizedString;
 
  /**
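With the `Connector.format` overload above, a formatted response may now be either a whole payload or a generator of fragments, and both `http-server.ts` and `ws-server.ts` branch on that with Node's `util.types.isGeneratorObject`. A minimal demonstration of the predicate they rely on (the fragment strings here are made up for illustration):

```ts
import { types } from "util";

// a toy stand-in for what a "bufferified" connector.format might yield
function* fragments() {
  yield '{"jsonrpc":"2.0",';
  yield '"result":{}}';
}

console.log(types.isGeneratorObject(fragments())); // true: a generator object
console.log(types.isGeneratorObject(fragments)); // false: a generator function
console.log(types.isGeneratorObject('{"x":1}')); // false: a plain string
```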
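The fragment loop in `ws-server.ts` uses a "lag by one" lookahead because uWebSockets.js requires the final piece to be sent through `sendLastFragment`. Stripped of the uWS-specific calls, the pattern reduces to the sketch below; the `send*` callbacks are hypothetical stand-ins for the real socket methods, and, as in the diff, the generator is assumed to yield at least one fragment:

```ts
// Lag-by-one iteration: `prev` always trails the iterator by one item, so by
// the time the loop ends we know the value left in `next` is the final one.
function sendFragmented<T>(
  gen: Generator<T>,
  sendWhole: (item: T) => void,
  sendFirst: (item: T) => void,
  sendMiddle: (item: T) => void,
  sendLast: (item: T) => void
) {
  const { value: first } = gen.next();
  // look ahead one item so we can tell whether `first` is also the last
  let { value: next, done } = gen.next();
  if (done) {
    // only one fragment: no fragmentation protocol needed
    sendWhole(first);
    return;
  }
  sendFirst(first);
  let prev = next;
  for (next of gen) {
    // `prev` is guaranteed not to be the final item here
    sendMiddle(prev);
    prev = next;
  }
  // the final item ended up in `next` (it equals `prev` when the generator
  // produced exactly two items and the loop never ran)
  sendLast(next);
}
```

This mirrors the diff exactly: `first` goes out via `sendFirstFragment`, `prev` trails through `sendFragment`, and the leftover `next` goes to `sendLastFragment`.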
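A note on the opaque `Uint8Array` passed to `_cfg` in `server.ts`: it is simply the ASCII bytes of the string "silent". A sketch of an equivalent, more self-documenting construction, assuming Node's `Buffer` is available:

```ts
// [115, 105, 108, 101, 110, 116] spells "silent" in ASCII/UTF-8
const silent = Buffer.from("silent");
console.log([...silent]); // [ 115, 105, 108, 101, 110, 116 ]
```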
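Finally, for context on the forking options introduced in this changeset (`preLatestConfirmations`, `disableCache`, `deleteCache`), the sketch below shows how they might be supplied programmatically. It is illustrative only: the `ganache` default export is assumed to be the published package's entry point, and the fork URL is a placeholder.

```ts
import ganache from "ganache";

const provider = ganache.provider({
  fork: {
    // placeholder endpoint; substitute a real node URL
    url: "https://mainnet.example.com",
    // fork from 5 blocks behind the remote node's "latest" block
    // (5 is the default supplied by the new `preLatestConfirmations` option)
    preLatestConfirmations: 5,
    // turn off caching of forking requests entirely...
    disableCache: true,
    // ...or start fresh by deleting the persistent cache before startup
    deleteCache: true
  }
});
```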