From 8666e8fb7ca3c27e93f82578d1d6ef6da5c578bb Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 24 Sep 2021 21:47:58 -0400 Subject: [PATCH 01/93] perf: add an disk cache for forking requests --- .../ethereum/ethereum/package-lock.json | 20 ++ src/chains/ethereum/ethereum/package.json | 2 + .../src/data-managers/block-manager.ts | 2 +- .../src/data-managers/transaction-manager.ts | 5 + .../ethereum/ethereum/src/forking/fork.ts | 49 ++- .../src/forking/handlers/base-handler.ts | 95 ++++- .../src/forking/handlers/http-handler.ts | 13 +- .../src/forking/handlers/provider-handler.ts | 17 +- .../src/forking/handlers/ws-handler.ts | 12 +- .../src/forking/persistent-cache/ancestry.ts | 61 ++++ .../src/forking/persistent-cache/helpers.ts | 211 +++++++++++ .../persistent-cache/persistent-cache.ts | 335 ++++++++++++++++++ .../src/forking/persistent-cache/tree.ts | 64 ++++ .../ethereum/ethereum/src/forking/types.ts | 9 +- src/chains/ethereum/ethereum/test.js | 57 +++ .../ethereum/tests/forking/cache.test.ts | 180 ++++++++++ .../tests/forking/cache/arbitraries.ts | 172 +++++++++ .../tests/forking/cache/mock-provider.ts | 55 +++ .../ethereum/tests/forking/cache/plan.ts | 112 ++++++ .../ethereum/tests/forking/forking.test.ts | 34 +- .../ethereum/tests/forking/helpers.ts | 7 +- .../ethereum/ethereum/tests/tsconfig.json | 11 +- .../ethereum/options/src/fork-options.ts | 16 + 23 files changed, 1475 insertions(+), 64 deletions(-) create mode 100644 src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts create mode 100644 src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts create mode 100644 src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts create mode 100644 src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts create mode 100644 src/chains/ethereum/ethereum/test.js create mode 100644 src/chains/ethereum/ethereum/tests/forking/cache.test.ts create mode 100644 
src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts create mode 100644 src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts create mode 100644 src/chains/ethereum/ethereum/tests/forking/cache/plan.ts diff --git a/src/chains/ethereum/ethereum/package-lock.json b/src/chains/ethereum/ethereum/package-lock.json index e2679e3ac1..086679e2ba 100644 --- a/src/chains/ethereum/ethereum/package-lock.json +++ b/src/chains/ethereum/ethereum/package-lock.json @@ -1710,6 +1710,11 @@ "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", "dev": true }, + "env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==" + }, "errno": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", @@ -1956,6 +1961,15 @@ "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==" }, + "fast-check": { + "version": "2.17.0", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-2.17.0.tgz", + "integrity": "sha512-fNNKkxNEJP+27QMcEzF6nbpOYoSZIS0p+TyB+xh/jXqRBxRhLkiZSREly4ruyV8uJi7nwH1YWAhi7OOK5TubRw==", + "dev": true, + "requires": { + "pure-rand": "^5.0.0" + } + }, "fast-safe-stringify": { "version": "2.0.8", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.8.tgz", @@ -4397,6 +4411,12 @@ "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" }, + "pure-rand": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-5.0.0.tgz", + "integrity": "sha512-lD2/y78q+7HqBx2SaT6OT4UcwtvXNRfEpzYEzl0EQ+9gZq2Qi3fa0HDnYPeqQwhlHJFBUhT7AO3mLU3+8bynHA==", + "dev": true + }, 
"qrcode-terminal": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz", diff --git a/src/chains/ethereum/ethereum/package.json b/src/chains/ethereum/ethereum/package.json index 1b6c495021..f206484729 100644 --- a/src/chains/ethereum/ethereum/package.json +++ b/src/chains/ethereum/ethereum/package.json @@ -67,6 +67,7 @@ "abort-controller": "3.0.0", "bip39": "3.0.4", "emittery": "0.7.2", + "env-paths": "2.2.1", "eth-sig-util": "2.5.3", "ethereumjs-abi": "0.6.8", "ethereumjs-util": "7.1.0", @@ -102,6 +103,7 @@ "abstract-leveldown": "6.3.0", "cheerio": "1.0.0-rc.3", "cross-env": "7.0.3", + "fast-check": "2.17.0", "fs-extra": "9.0.1", "local-web-server": "4.2.1", "mocha": "8.4.0", diff --git a/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts b/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts index f05cc093c7..e6f285cade 100644 --- a/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts +++ b/src/chains/ethereum/ethereum/src/data-managers/block-manager.ts @@ -120,7 +120,7 @@ export default class BlockManager extends Manager { let blockNumber: string; if (typeof tagOrBlockNumber === "string") { blockNumber = tagOrBlockNumber; - } else if (tagOrBlockNumber.toBigInt() > fallback.blockNumber.toBigInt()) { + } else if (!fallback.isValidForkBlockNumber(tagOrBlockNumber)) { // don't get the block if the requested block is _after_ our fallback's // blocknumber because it doesn't exist in our local chain. 
return null; diff --git a/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts b/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts index 948bcb9c1b..0313ffefe1 100644 --- a/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts +++ b/src/chains/ethereum/ethereum/src/data-managers/transaction-manager.ts @@ -48,10 +48,15 @@ export default class TransactionManager extends Manager { [Data.from(transactionHash).toString()] ); if (tx == null) return null; + const blockHash = Data.from((tx as any).blockHash, 32); const blockNumber = Quantity.from((tx as any).blockNumber); const index = Quantity.from((tx as any).transactionIndex); + // don't get the transaction if the requested transaction is _after_ our + // fallback's blocknumber because it doesn't exist in our local chain. + if (!fallback.isValidForkBlockNumber(blockNumber)) return null; + const extra: GanacheRawExtraTx = [ Data.from(tx.from, 20).toBuffer(), Data.from((tx as any).hash, 32).toBuffer(), diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 967de1faff..27a0ed909b 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -11,6 +11,8 @@ import { Address } from "@ganache/ethereum-address"; import { Account } from "@ganache/ethereum-utils"; import BlockManager from "../data-managers/block-manager"; import { ProviderHandler } from "./handlers/provider-handler"; +import { PersistentCache } from "./persistent-cache/persistent-cache"; +import BlockLogManager from "../data-managers/blocklog-manager"; async function fetchChainId(fork: Fork) { const chainIdHex = await fork.request("eth_chainId", []); @@ -21,16 +23,14 @@ async function fetchNetworkId(fork: Fork) { return parseInt(networkIdStr, 10); } function fetchBlockNumber(fork: Fork) { - return fork.request("eth_blockNumber", []); + // {noCache: true} required so we never cache the 
blockNumber, as forking + // shouldn't ever cache a method that can change! + return fork.request("eth_blockNumber", [], { noCache: true }); } function fetchBlock(fork: Fork, blockNumber: Quantity | Tag.LATEST) { return fork.request("eth_getBlockByNumber", [blockNumber, true]); } -async function fetchNonce( - fork: Fork, - address: Address, - blockNumber: Quantity | Tag.LATEST -) { +async function fetchNonce(fork: Fork, address: Address, blockNumber: Quantity) { const nonce = await fork.request("eth_getTransactionCount", [ address, blockNumber @@ -152,18 +152,43 @@ export class Fork { }; public async initialize() { - const [block] = await Promise.all([ + let cacheProm: Promise; + if (this.#options.noCache === false) { + // ignore cache start up errors as it is possible there is an open + // conflict if another ganache fork is running at the time this one is + // started. The cache isn't required (though performance will be + // degraded without it) + cacheProm = PersistentCache.create().catch(_e => null); + } else { + cacheProm = null; + } + + const [block, cache] = await Promise.all([ this.#setBlockDataFromChainAndOptions(), + cacheProm, this.#setCommonFromChain() ]); this.block = new Block( BlockManager.rawFromJSON(block, this.common), this.common ); + if (cache) await this.initCache(cache); + } + private async initCache(cache: PersistentCache) { + await cache.initialize( + this.block.header.number, + this.block.hash(), + this.request.bind(this) + ); + this.#handler.setCache(cache); } - public request(method: string, params: unknown[]): Promise { - return this.#handler.request(method, params); + public request( + method: string, + params: unknown[], + options = { noCache: false } + ): Promise { + return this.#handler.request(method, params, options); } public abort() { @@ -174,8 +199,12 @@ export class Fork { return this.#handler.close(); } + public isValidForkBlockNumber(blockNumber: Quantity) { + return blockNumber.toBigInt() <= this.blockNumber.toBigInt(); + } 
+ public selectValidForkBlockNumber(blockNumber: Quantity) { - return blockNumber.toBigInt() < this.blockNumber.toBigInt() + return this.isValidForkBlockNumber(blockNumber) ? blockNumber : this.blockNumber; } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts index 5556590c91..00a4f2f7a4 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts @@ -5,6 +5,7 @@ import { OutgoingHttpHeaders } from "http"; import RateLimiter from "../rate-limiter/rate-limiter"; import LRU from "lru-cache"; import { AbortError, CodedError } from "@ganache/ethereum-utils"; +import { PersistentCache } from "../persistent-cache/persistent-cache"; const INVALID_RESPONSE = "Invalid response from fork provider: "; @@ -23,6 +24,7 @@ export class BaseHandler { protected limiter: RateLimiter; protected headers: Headers; protected abortSignal: AbortSignal; + private persistentCache: PersistentCache; constructor(options: EthereumInternalOptions, abortSignal: AbortSignal) { const forkingOptions = options.fork; @@ -65,6 +67,10 @@ export class BaseHandler { } } + public setCache(cache: PersistentCache) { + this.persistentCache = cache; + } + /** * Adds Authorization headers from the given options to the provided `headers` * object. Overwrites an existing `Authorization` header value. 
@@ -135,7 +141,7 @@ export class BaseHandler { } } - getFromCache(key: string) { + getFromMemCache(key: string) { const cachedRequest = this.requestCache.get(key); if (cachedRequest !== undefined) return cachedRequest as Promise; @@ -143,33 +149,88 @@ export class BaseHandler { if (cachedValue !== undefined) return JSON.parse(cachedValue).result as T; } + async getFromSlowCache(method: string, params: any[], key: string) { + if (!this.persistentCache) return; + const raw = await this.persistentCache.get(method, params, key).catch(e => { + if (e.notFound) return null; + // I/O or other error, throw as things are getting weird and the cache may + // have lost integrity + throw e; + }); + if (raw !== undefined) return { result: JSON.parse(raw).result as T, raw }; + } + async queueRequest( + method: string, + params: any[], key: string, send: ( ...args: unknown[] ) => Promise<{ response: { result: any } | { error: { message: string; code: number } }; raw: string | Buffer; - }> + }>, + options = { noCache: false } ): Promise { - const cached = this.getFromCache(key); - if (cached !== undefined) return cached; - - const promise = this.limiter.handle(send).then(({ response, raw }) => { - if (this.abortSignal.aborted) return Promise.reject(new AbortError()); + if (!options.noCache) { + const memCached = this.getFromMemCache(key); + if (memCached !== undefined) return memCached; + + const diskCached = await this.getFromSlowCache(method, params, key); + if (diskCached !== undefined) { + this.valueCache.set(key, Buffer.from(diskCached.raw)); + return diskCached.result; + } + } - if (hasOwn(response, "result")) { - // cache non-error responses only - this.valueCache.set(key, raw); + const promise = this.limiter + .handle(send) + .then(async ({ response, raw }) => { + if (this.abortSignal.aborted) return Promise.reject(new AbortError()); + + if (hasOwn(response, "result")) { + if (!options.noCache) { + // cache non-error responses only + this.valueCache.set(key, raw); + + // 
swallow errors for the persistentCache, since it's not vital that + // it always works + if (this.persistentCache) { + const prom = this.persistentCache + .put( + method, + params, + key, + typeof raw === "string" ? Buffer.from(raw) : raw + ) + .catch(_ => { + // the cache.put may fail if the db is closed while a request + // is in flight. This is a "fire and forget" method. + }); + + // track these unawaited `puts` + this.fireForget.add(prom); + + // clean up once complete + prom.finally(() => { + this.fireForget.delete(prom); + }); + } + } - return response.result as T; - } else if (hasOwn(response, "error") && response.error != null) { - const { error } = response as JsonRpcError; - throw new CodedError(error.message, error.code); - } - throw new Error(`${INVALID_RESPONSE}\`${JSON.stringify(response)}\``); - }); + return response.result as T; + } else if (hasOwn(response, "error") && response.error != null) { + const { error } = response as JsonRpcError; + throw new CodedError(error.message, error.code); + } + throw new Error(`${INVALID_RESPONSE}\`${JSON.stringify(response)}\``); + }); this.requestCache.set(key, promise); return await promise; } + private fireForget = new Set(); + async close() { + await Promise.all(this.fireForget.keys()); + this.persistentCache && (await this.persistentCache.close()); + } } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts index 07eee1850c..f1d4735908 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts @@ -85,7 +85,11 @@ export class HttpHandler extends BaseHandler implements Handler { }); } - public async request(method: string, params: unknown[]) { + public async request( + method: string, + params: unknown[], + options = { noCache: false } + ) { const key = JSON.stringify({ method, params }); const { protocol, hostname: host, port, 
pathname, search } = this.url; const requestOptions = { @@ -168,11 +172,6 @@ export class HttpHandler extends BaseHandler implements Handler { return deferred.promise.finally(() => this.requestCache.delete(key)); }; - return await this.queueRequest(key, send); - } - - public close() { - // no op - return Promise.resolve(); + return await this.queueRequest(method, params, key, send, options); } } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts index ab46f2b2f0..5c67088f0b 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts @@ -61,17 +61,22 @@ export class ProviderHandler extends BaseHandler implements Handler { throw new Error("Forking `provider` must be EIP-1193 compatible"); } } - public async request(method: string, params: unknown[]) { + public async request( + method: string, + params: unknown[], + options = { noCache: false } + ) { // format params via JSON stringification because the params might // be Quantity or Data, which aren't valid as `params` themselves, // but when JSON stringified they are const strParams = JSON.stringify(params); - return await this.queueRequest(`${method}:${strParams}`, () => - this._request(method, JSON.parse(strParams) as unknown[]) + return await this.queueRequest( + method, + params, + `${method}:${strParams}`, + () => this._request(method, JSON.parse(strParams) as unknown[]), + options ); } - public close() { - return Promise.resolve(); - } } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts index 2794b709f2..c1e77a6edf 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts @@ -53,7 +53,11 @@ export class WsHandler extends BaseHandler 
implements Handler { this.connection.onmessage = this.onMessage.bind(this); } - public async request(method: string, params: unknown[]) { + public async request( + method: string, + params: unknown[], + options = { noCache: false } + ) { await this.open; if (this.abortSignal.aborted) return Promise.reject(new AbortError()); @@ -74,7 +78,7 @@ export class WsHandler extends BaseHandler implements Handler { this.connection.send(`${JSONRPC_PREFIX}${messageId},${key.slice(1)}`); return deferred.promise.finally(() => this.requestCache.delete(key)); }; - return await this.queueRequest(key, send); + return await this.queueRequest(method, params, key, send, options); } public onMessage(event: WebSocket.MessageEvent) { @@ -111,8 +115,8 @@ export class WsHandler extends BaseHandler implements Handler { return open; } - public close() { + public async close() { + await super.close(); this.connection.close(); - return Promise.resolve(); } } diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts new file mode 100644 index 0000000000..14a7946012 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts @@ -0,0 +1,61 @@ +import { BUFFER_EMPTY } from "@ganache/utils"; +import { LevelUp } from "levelup"; +import { Tree } from "./tree"; + +export class Ancestry { + private db: LevelUp; + private next: Buffer; + private knownAncestors: Set; + private lock: Map> = new Map(); + constructor(db: LevelUp, parent: Tree) { + this.db = db; + if (parent == null) { + this.next = null; + this.knownAncestors = new Set(); + } else { + this.next = parent.closestKnownAncestor.equals(BUFFER_EMPTY) + ? 
null + : parent.closestKnownAncestor; + this.knownAncestors = new Set([parent.key.toString("hex")]); + } + } + + private async loadNextAncestor(next: Buffer) { + const k = next.toString("hex"); + if (this.lock.has(k)) { + throw new Error("could not obtain lock"); + } + let resolver: () => void; + this.lock.set( + k, + new Promise(resolve => { + resolver = resolve; + }) + ); + const value = await this.db.get(next); + const node = Tree.deserialize(next, value); + this.next = node.closestKnownAncestor.equals(BUFFER_EMPTY) + ? null + : node.closestKnownAncestor; + this.knownAncestors.add(node.key.toString("hex")); + this.lock.delete(k); + resolver(); + } + + async has(key: Buffer) { + const strKey = key.toString("hex"); + if (this.knownAncestors.has(strKey)) { + return true; + } else if (this.next) { + const lock = this.lock.get(this.next.toString("hex")); + if (lock) { + await lock; + return this.has(key); + } + await this.loadNextAncestor(this.next); + return this.has(key); + } else { + return false; + } + } +} diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts new file mode 100644 index 0000000000..9eb0f63bf6 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts @@ -0,0 +1,211 @@ +import { Tag } from "@ganache/ethereum-utils"; +import { BUFFER_EMPTY, Data, DATA_EMPTY, Quantity } from "@ganache/utils"; +import { LevelUp } from "levelup"; +import { Tree } from "./tree"; + +export type Request = (method: string, params: any[]) => Promise; + +export type FindOptions = ( + | { + gte: Buffer; + lt?: Buffer; + } + | { + gt: Buffer; + lt?: Buffer; + } + | { + gt: Buffer; + lte?: Buffer; + } + | { + gte: Buffer; + lte?: Buffer; + } + | { + gte?: Buffer; + lt: Buffer; + } + | { + gt?: Buffer; + lt: Buffer; + } + | { + gt?: Buffer; + lte: Buffer; + } + | { + gte?: Buffer; + lte: Buffer; + } +) & { reverse?: boolean }; + +export function 
getBlockNumberFromParams(method: string, params: any[]) { + // get the request's block number + switch (method) { + case "eth_getBlockByNumber": + return params[0]; + case "eth_getTransactionCount": + case "eth_getCode": + case "eth_getBalance": + return params[1]; + case "eth_getStorageAt": + return params[2]; + default: + throw new Error(`Persistent cache does not support calls to "${method}.`); + } +} + +export async function setDbVersion(db: LevelUp, version: Buffer) { + // set the version if the DB was just created, or error if we already have + // a version, but it isn't what we expected + try { + const version = await db.get("version"); + if (!version.equals(version)) { + // in the future this is where database migrations would go + throw new Error( + `Persistent cache version "${version.toString()}"" is not understood.` + ); + } + } catch (e) { + if (!e.notFound) throw e; + + // if we didn't have a `version` key we need to set one + await db.put("version", version); + } +} + +export async function resolveTargetAndClosestAncestor( + db: LevelUp, + request: Request, + targetHeight: Quantity, + targetHash: Data +) { + let targetBlock: Tree; + let closestAncestor: Tree; + try { + const key = Tree.encodeKey(targetHeight, targetHash); + targetBlock = Tree.deserialize(key, await db.get(key)); + + if (targetBlock.closestKnownAncestor.equals(BUFFER_EMPTY)) { + // we are the genesis/earliest block + closestAncestor = null; + } else { + closestAncestor = Tree.deserialize( + targetBlock.closestKnownAncestor, + await db.get(targetBlock.closestKnownAncestor) + ); + } + } catch (e) { + // something bad happened (I/O failure?), bail + if (!e.notFound) throw e; + + // we couldn't find our target block in the database so we need to figure + // out it's relationships via the blockchain. + + // In order to avoid requesting the "earliest" block unnecessarily, we + // assume the "earliest" block can't be before block 0 (which seems like a + // reasonable assumption to me!). 
+ // If our target is block `0` then we can't have a closest ancestor since + // we are the first block + if (targetHeight.toBigInt() === 0n) { + closestAncestor = null; + targetBlock = new Tree(targetHeight, targetHash); + } else { + const earliestBlock = await getBlockByNumber(request, Tag.EARLIEST); + if (!earliestBlock) throw new Error('Could not find "earliest" block.'); + + const { hash: earliestHash, number: earliestNumber } = earliestBlock; + const hash = Data.from(earliestHash, 32); + + const earliest = new Tree(Quantity.from(earliestNumber), hash); + + closestAncestor = await findClosestAncestor( + db, + request, + targetHeight, + earliest + ); + targetBlock = new Tree(targetHeight, targetHash, closestAncestor.key); + } + } + + return { + targetBlock, + closestAncestor + }; +} + +export async function* findRelated( + db: LevelUp, + request: Request, + options: FindOptions +) { + const readStream = db.createReadStream({ + keys: true, + values: true, + ...options + }); + + for await (const pair of readStream) { + const { key, value } = (pair as unknown) as { key: Buffer; value: Buffer }; + const node = Tree.deserialize(key, value); + const { height: candidateHeight } = node.decodeKey(); + const block = await getBlockByNumber(request, candidateHeight); + // if the chain has a block at this height, and the hash of the + // block is the same as the one in the db we've found our closest + // ancestor! 
+ if (block != null && block.hash === Data.from(node.hash).toString()) { + const shouldContinue = yield node; + if (!shouldContinue) break; + } + } +} + +/** + * + * @param height Search only before this block height (exclusive) + * @param upTo Search up to this key (inclusive) + * @returns the closest known ancestor, or `upTo` if we know of no ancestors + */ +export async function findClosestAncestor( + db: LevelUp, + request: Request, + height: Quantity, + upTo: Tree +) { + const generator = findRelated(db, request, { + gte: upTo.key, + lt: Tree.encodeKey(height, DATA_EMPTY), + reverse: true + }); + const first = await generator.next(); + await generator.return(); + return first.value || upTo; +} + +/** + * + * @param height Search only after this block height (exclusive) + * @returns the closest known descendants, or null + */ +export async function* findClosestDescendants( + db: LevelUp, + request: Request, + height: Quantity +) { + const generator = findRelated(db, request, { + gte: Tree.encodeKey(Quantity.from(height.toBigInt() + 1n), DATA_EMPTY), + reverse: false + }); + for await (const node of generator) { + yield node; + } +} + +export async function getBlockByNumber( + request: Request, + blockNumber: Quantity | Tag +) { + return await request("eth_getBlockByNumber", [blockNumber.toString(), false]); +} diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts new file mode 100644 index 0000000000..5c9fd2cf4a --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -0,0 +1,335 @@ +import { Tree } from "./tree"; +import { promises } from "fs"; +import envPaths from "env-paths"; +import levelup, { LevelUp } from "levelup"; +import leveldown from "leveldown"; +import sub from "subleveldown"; +import encode from "encoding-down"; +import * as lexico from "../lexicographic-key-codec"; +import { 
BUFFER_ZERO, Data, Quantity } from "@ganache/utils"; +import { Ancestry } from "./ancestry"; +import { + resolveTargetAndClosestAncestor, + getBlockByNumber, + getBlockNumberFromParams, + Request, + setDbVersion, + findClosestDescendants +} from "./helpers"; +import { AbstractIterator } from "abstract-leveldown"; +import { AbstractLevelDOWN } from "abstract-leveldown"; + +const { mkdir } = promises; + +const levelupOptions = { + keyEncoding: "binary", + valueEncoding: "binary" +}; +const leveldownOpts = { prefix: "" }; + +/** + * A leveldb-backed cache that enables associating immutable data as it existed + * at a specific height on a blockchain. + * + * Note: + * + * The relationships between blocks are valid, but not stable. Race + * contention between multiple processes is possible; this may cause + * relationships between blocks to be lost if multiple writes to the same blocks + * occur nearly simultaneously. + * + * This will not cause a loss of data, but may result in increased cache misses. + * + * The design affords faster db reads (one read to get known closest ancestors + * and descendants) and fast db writes (one write per node in a relationship). 
+ */ +export class PersistentCache { + public readonly version = BUFFER_ZERO; + protected db: LevelUp>; + protected cacheDb: LevelUp< + AbstractLevelDOWN, + AbstractIterator + >; + protected ancestorDb: LevelUp< + AbstractLevelDOWN, + AbstractIterator + >; + protected ancestry: Ancestry; + protected hash: Data; + protected request: Request; + constructor() {} + + static async deleteDb(dbSuffix?: string) { + return new Promise((resolve, reject) => { + const directory = PersistentCache.getDbDirectory(dbSuffix); + leveldown.destroy(directory, err => { + if (err) return void reject(err); + resolve(void 0); + }); + }); + } + /** + * Serializes the entire database world state into a JSON tree + */ + static async serializeDb(dbSuffix?: string) { + const cache = await PersistentCache.create(dbSuffix); + type Tree = Record; + return await new Promise(async resolve => { + const rs = cache.ancestorDb.createReadStream({ + gte: BUFFER_ZERO, + keys: true, + values: true + }); + const tree: Tree = {}; + const collection = {}; + for await (const data of rs) { + const { key, value } = (data as any) as { key: Buffer; value: Buffer }; + + const node = Tree.deserialize(key, value); + (node as any).height = node.decodeKey().height.toNumber(); + const keyHex = key.toString("hex"); + const parentKeyHex = node.closestKnownAncestor.toString("hex"); + collection[keyHex] = node; + if (node.closestKnownAncestor.length === 0) { + tree[keyHex] = node as any; + } else { + const descendants = collection[parentKeyHex].descendants || {}; + descendants[keyHex] = node; + collection[parentKeyHex].descendants = descendants; + } + (node as any).hash = Data.from(node.hash).toString(); + (node as any).parent = + node.closestKnownAncestor.length > 0 + ? 
Data.from(collection[parentKeyHex].hash).toString() + : null; + delete node.key; + // delete node.hash; + delete node.closestKnownDescendants; + delete node.closestKnownAncestor; + } + await cache.close(); + resolve(JSON.parse(JSON.stringify(tree)) as Tree); + }); + } + + static getDbDirectory(suffix: string = "") { + const { data: directory } = envPaths("Ganache/db", { + suffix + }); + return directory; + } + + static async create(dbSuffix?: string) { + const cache = new PersistentCache(); + + const directory = PersistentCache.getDbDirectory(dbSuffix); + await mkdir(directory, { recursive: true }); + + const store = encode(leveldown(directory, leveldownOpts), levelupOptions); + const db = await new Promise((resolve, reject) => { + const db = levelup(store, (err: Error) => { + if (err) return void reject(err); + resolve(db); + }); + }); + console.log("opened!"); + cache.db = db; + cache.cacheDb = sub(db, "c", levelupOptions); + cache.ancestorDb = sub(db, "a", levelupOptions); + console.log("await cache.cacheDb.open();"); + await cache.cacheDb.open(); + console.log("await cache.ancestorDb.open();"); + await cache.ancestorDb.open(); + + await setDbVersion(cache.db, cache.version); + return cache; + } + + async initialize(height: Quantity, hash: Data, request: Request) { + this.hash = hash; + this.request = request; + + const { + targetBlock, + closestAncestor + } = await resolveTargetAndClosestAncestor( + this.ancestorDb, + this.request, + height, + hash + ); + + this.ancestry = new Ancestry(this.ancestorDb, closestAncestor); + + let allKnownDescendants = []; + // if we don't have a closestAncestor it because the target block is block 0 + if (closestAncestor == null) { + allKnownDescendants = targetBlock.closestKnownDescendants; + await this.ancestorDb.put(targetBlock.key, targetBlock.serialize()); + } else { + const atomicBatch = this.ancestorDb.batch(); + + const ancestorsDescendants = [targetBlock.key]; + const newNodeClosestKnownDescendants: Buffer[] = []; + + 
await Promise.all( + closestAncestor.closestKnownDescendants.map(async descendantKey => { + // don't match ourself + if (descendantKey.equals(targetBlock.key)) return; + + const { height: descendantHeight } = Tree.decodeKey(descendantKey); + // if the block number is less than our own it can't be our descendant + if (descendantHeight.toBigInt() <= height.toBigInt()) { + ancestorsDescendants.push(descendantKey); + return; + } + + const descendantValue = await this.ancestorDb.get(descendantKey); + const descendantNode = Tree.deserialize( + descendantKey, + descendantValue + ); + + const descendantRawBlock = await this.getBlock(descendantHeight); + // if the block doesn't exist on our chain, it can't be our child, keep + // it in the parent + if ( + descendantRawBlock == null || + descendantRawBlock.hash !== + Data.from(descendantNode.hash, 32).toString() + ) { + ancestorsDescendants.push(descendantKey); + } else { + newNodeClosestKnownDescendants.push(descendantNode.key); + // keep track of *all* known descendants do we don't bother + // checking if they are a known closest descendant later on + allKnownDescendants.push(...descendantNode.closestKnownDescendants); + descendantNode.closestKnownAncestor = targetBlock.key; + // update the descendant node with it's newly assigned + // closestKnownAncestor + atomicBatch.put(descendantNode.key, descendantNode.serialize()); + } + }) + ); + + closestAncestor.closestKnownDescendants = ancestorsDescendants; + targetBlock.closestKnownDescendants = newNodeClosestKnownDescendants; + + atomicBatch.put(closestAncestor.key, closestAncestor.serialize()); + atomicBatch.put(targetBlock.key, targetBlock.serialize()); + + await atomicBatch.write(); + } + + // we DO want to re-balance the descendants, but we don't want to wait for + // it because it can't effect our current fork block's cache results since + // these caches will be for blocks higher than our own fork block + // Do not `await` this. 
+ this.rebalanceDescendantTree( + height, + targetBlock, + allKnownDescendants + ).catch(_ => {}); // if it fails, it fails. + } + + async getBlock(height: Quantity) { + return await getBlockByNumber(this.request, height); + } + + async rebalanceDescendantTree( + height: Quantity, + targetBlock: Tree, + allKnownDescendants: Buffer[] + ) { + const atomicBatch = this.ancestorDb.batch(); + const newClosestKnownDescendants = targetBlock.closestKnownDescendants; + const startSize = newClosestKnownDescendants.length; + + for await (const maybeDescendant of findClosestDescendants( + this.ancestorDb, + this.request, + height + )) { + const key = maybeDescendant.key; + + // don't match with our own self + if (targetBlock.key.equals(key)) continue; + + // if this already is a descendent of ours we can skip it + if (newClosestKnownDescendants.some(d => d.equals(key))) continue; + + // this possibleDescendent's descendants can't be our direct descendants + // because trees can't merge + allKnownDescendants.push(...maybeDescendant.closestKnownDescendants); + + // if this already is a descendent of one of our descendants skip it + if (allKnownDescendants.some(d => d.equals(key))) continue; + + maybeDescendant.closestKnownAncestor = targetBlock.key; + newClosestKnownDescendants.push(maybeDescendant.key); + + atomicBatch.put(maybeDescendant.key, maybeDescendant.serialize()); + } + + // only write if we have changes to write + if (startSize !== newClosestKnownDescendants.length) { + targetBlock.closestKnownDescendants = newClosestKnownDescendants; + atomicBatch.put(targetBlock.key, targetBlock.serialize()); + + // check `this.ancestorDb.isOpen()` as we don't need to try to write if + // the db was shutdown in the meantime. 
This can happen if ganache was + // closed while we were still updating the descendants + if (atomicBatch.length > 0 && this.ancestorDb.isOpen()) + await atomicBatch.write(); + } + } + + async get(method: string, params: any[], key: string) { + const blockNumber = getBlockNumberFromParams(method, params); + const height = Quantity.from(blockNumber); + const start = lexico.encode([height.toBuffer(), Buffer.from(key)]); + const end = lexico.encode([ + Quantity.from(height.toBigInt() + 1n).toBuffer() + ]); + const readStream = this.cacheDb.createReadStream({ + gt: start, + lt: end, + keys: true, + values: true + }); + const hashBuf = this.hash.toBuffer(); + for await (const data of readStream) { + const { key, value } = (data as any) as { key: Buffer; value: Buffer }; + const [_height, _key, blockHash] = lexico.decode(key); + if (hashBuf.equals(blockHash) || (await this.ancestry.has(blockHash))) { + return value; + } + } + } + + put(method: string, params: any[], key: string, value: Buffer) { + const height = Quantity.from(getBlockNumberFromParams(method, params)); + const dbKey = lexico.encode([ + height.toBuffer(), + Buffer.from(key), + this.hash.toBuffer() + ]); + return this.cacheDb.put(dbKey, value); + } + + private status: "closed" | "open" = "open"; + async close() { + if (this.status === "closed") return; + + this.status = "closed"; + if (this.cacheDb) { + await this.cacheDb.close(); + } + if (this.ancestorDb) { + await this.ancestorDb.close(); + } + if (this.db) { + await this.db.close(); + } + } +} diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts new file mode 100644 index 0000000000..33393f464a --- /dev/null +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts @@ -0,0 +1,64 @@ +import * as lexico from "../lexicographic-key-codec"; +import { BUFFER_EMPTY, Data, Quantity } from "@ganache/utils"; +import * as rlp from "@ganache/rlp"; + +/** + * 
A tree: https://en.wikipedia.org/wiki/Rose_tree + * One parent, multiple children + */ +export class Tree { + public key: Buffer; + public hash: Buffer; + public closestKnownAncestor: Buffer; + public closestKnownDescendants: Buffer[]; + + constructor( + height: Quantity, + hash: Data, + closestKnownAncestor: Buffer = BUFFER_EMPTY, + closestKnownDescendants: Buffer[] = [] + ) { + this.key = Tree.encodeKey(height, hash); + this.hash = hash.toBuffer(); + this.closestKnownAncestor = closestKnownAncestor; + this.closestKnownDescendants = closestKnownDescendants; + } + + public serialize() { + return rlp.encode([ + this.hash, + this.closestKnownAncestor, + this.closestKnownDescendants + ]); + } + + decodeKey() { + return Tree.decodeKey(this.key); + } + + static decodeKey(key: Buffer) { + const [height, hash] = lexico.decode(key); + return { + height: Quantity.from(height), + hash: Data.from(hash) + }; + } + + static deserialize(key: Buffer, value: Buffer) { + const [hash, parent, children] = (rlp.decode(value) as unknown) as [ + Buffer, + Buffer, + Buffer[] + ]; + const tree = Object.create(Tree.prototype) as Tree; + tree.key = key; + tree.hash = hash; + tree.closestKnownAncestor = parent; + tree.closestKnownDescendants = children; + return tree; + } + + static encodeKey(height: Quantity, hash: Data) { + return lexico.encode([height.toBuffer(), hash.toBuffer()]); + } +} diff --git a/src/chains/ethereum/ethereum/src/forking/types.ts b/src/chains/ethereum/ethereum/src/forking/types.ts index 70acf76f53..9d7f4781cc 100644 --- a/src/chains/ethereum/ethereum/src/forking/types.ts +++ b/src/chains/ethereum/ethereum/src/forking/types.ts @@ -1,4 +1,11 @@ +import { PersistentCache } from "./persistent-cache/persistent-cache"; + export interface Handler { - request: (method: string, params: unknown[]) => Promise; + request: ( + method: string, + params: unknown[], + options: { noCache: boolean } + ) => Promise; + setCache: (cache: PersistentCache) => void; close: () => Promise; } 
diff --git a/src/chains/ethereum/ethereum/test.js b/src/chains/ethereum/ethereum/test.js new file mode 100644 index 0000000000..f7f86e8d35 --- /dev/null +++ b/src/chains/ethereum/ethereum/test.js @@ -0,0 +1,57 @@ +const { RequestCoordinator, Executor } = require("@ganache/utils"); +const EthereumProvider = require("./lib/src/provider").default; +const seedrandom = require("seedrandom"); + +const mnemonic = + "into trim cross then helmet popular suit hammer cart shrug oval student"; + +const getProvider = async ( + options = { + wallet: { mnemonic: mnemonic } + } +) => { + options.chain = options.chain || {}; + options.logging = options.logging || { logger: { log: () => {} } }; + + // set `asyncRequestProcessing` to `true` by default + let doAsync = options.chain.asyncRequestProcessing; + doAsync = options.chain.asyncRequestProcessing = + doAsync != null ? doAsync : true; + + // don't write to stdout in tests + if (!options.logging.logger) { + options.logging.logger = { log: () => {} }; + } + + const requestCoordinator = new RequestCoordinator(doAsync ? 
0 : 1); + const executor = new Executor(requestCoordinator); + const provider = new EthereumProvider(options, executor); + await provider.initialize(); + requestCoordinator.resume(); + return provider; +}; + +const rand = seedrandom("seed"); +function randomIntFromInterval(min, max) { + // min and max included + return Math.floor(rand() * (max - min + 1) + min); +} +(async () => { + const provider = await getProvider({ + wallet: { mnemonic }, + fork: { + url: + "https://mainnet.infura.io/v3/0e96090b2eb34ea293a23feec9594e20@13291115" + } + }); + const a = await provider.send("eth_accounts"); + + for (let j = 0; j < 60; j++) { + let address = "0x"; + for (let i = 0; i < 20; i++) { + address += randomIntFromInterval(0, 255).toString(16).padStart(2, "0"); + } + console.log(address, await provider.send("eth_getBalance", [address])); + } + //console.log(a); +})(); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts new file mode 100644 index 0000000000..6b905e1997 --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -0,0 +1,180 @@ +import * as fc from "fast-check"; + +import * as Arbitrary from "./cache/arbitraries"; + +import { PersistentCache } from "../../src/forking/persistent-cache/persistent-cache"; + +import { Data, Quantity } from "@ganache/utils"; +import { Tree } from "../../src/forking/persistent-cache/tree"; +import assert from "assert"; + +const testConfig = process.env["OVERKILL"] + ? 
{ + timeout: 5 * 60 * 1000, // 5 min + numRuns: 500 + } + : { + timeout: 30 * 1000, // 30 sec + numRuns: 50 + }; + +describe("forking", () => { + describe("persistent cache", () => { + it("works", async () => { + const arb = Arbitrary.Networks().chain(model => + fc.record({ + model: fc.constant(model), + batches: Arbitrary.Batches(model) + }) + ); + + let counter = 0; + await fc.assert( + fc.asyncProperty(arb, async ({ model, batches }) => { + counter++; + const dbName = `-test-db-${counter}`; + await PersistentCache.deleteDb(dbName); + try { + type Ref = { + hash: string; + block: { + number: number; + hash: string; + }; + parent: string; + children: Set; + }; + const networkLookup: Map = new Map(); + const worldState = new Set(); + for (const batch of batches) { + const block = batch.input.historicBlock; + const network = model.networks[batch.descendantIndex]; + + // if we aren't the genesis block get the genesis block and add it + // to our world state, if needed. + + let genesisRef: Ref; + const genesis = network.getBlockByNumber(0); + if (!networkLookup.has(genesis.hash)) { + genesisRef = { + hash: genesis.hash, + block: genesis, + parent: null, + children: new Set() + }; + networkLookup.set(genesis.hash, genesisRef); + worldState.add(genesisRef); + } else { + genesisRef = networkLookup.get(genesis.hash); + } + + // if we don't yet know about this block, add it + let ref: Ref; + if (!networkLookup.has(block.hash)) { + ref = { + hash: block.hash, + block: block, + parent: null, + children: new Set() + }; + networkLookup.set(block.hash, ref); + } else { + ref = networkLookup.get(block.hash); + } + + if (block.number > 0) { + function findLatestAncestorAndUpdateDescendants( + curRef: Ref + ): Ref { + for (const child of curRef.children.values()) { + // if the child is us don't do anything. 
+ if (child.hash == block.hash) continue; + + const networkBlock = network.getBlockByNumber( + child.block.number + ); + const isInNetwork = + networkBlock && networkBlock.hash === child.block.hash; + if (!isInNetwork) { + continue; + } + // if the child is after us it is our descendent + if (child.block.number > block.number) { + curRef.children.delete(child); + ref.children.add(child); + child.parent = ref.block.hash; + } else { + // otherwise, it might be our ancestor, keep checking! + return findLatestAncestorAndUpdateDescendants(child); + } + } + return curRef; + } + let latestAncestor = findLatestAncestorAndUpdateDescendants( + genesisRef + ); + latestAncestor.children.add(ref); + ref.parent = latestAncestor.block.hash; + } + + const cache = await PersistentCache.create(dbName); + await cache.initialize( + Quantity.from(block.number), + Data.from(block.hash), + ((_method: string, params: any[]) => { + return Promise.resolve( + network.getBlockByNumber( + params[0] === "earliest" + ? 
"earliest" + : (parseInt(params[0], 16) as any) + ) + ); + }) as any + ); + await cache.close(); + + const serialized = await PersistentCache.serializeDb(dbName); + const cacheState: Set = new Set(); + function convertToRefs( + parentHash: string, + descendants: typeof serialized, + parent: Ref["children"] + ) { + Object.entries(descendants).map(([key, value]) => { + const { height, hash } = Tree.decodeKey( + Buffer.from(key, "hex") + ); + const ref: Ref = { + hash: hash.toString(), + block: { + number: height.toNumber(), + hash: hash.toString() + }, + parent: parentHash, + children: new Set() + }; + parent.add(ref); + if (value.descendants) { + convertToRefs( + hash.toString(), + value.descendants, + ref.children + ); + } + }); + } + convertToRefs(null, serialized, cacheState); + + assert.deepStrictEqual(worldState, cacheState); + } + } finally { + await PersistentCache.deleteDb(dbName); + } + }), + { + numRuns: testConfig.numRuns + } + ); + }); + }); +}); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts b/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts new file mode 100644 index 0000000000..7b444b2723 --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts @@ -0,0 +1,172 @@ +import * as fc from "fast-check"; + +export interface Network { + networkId: number; + getBlockByNumber?(height: number): Promise; + historicBlock: { + number: number; + hash: string; + }; +} + +export class Model { + private byDescendantIndexThenHeight: Network[][] = []; + + extendNetwork(descendantIndex: number, hash: string) { + const networks = this.byDescendantIndexThenHeight[descendantIndex]; + + const [latest] = networks.slice(-1); + + networks.push({ + ...latest, + historicBlock: { + number: latest.historicBlock.number + 1, + hash + } + }); + } + + addNetwork(network: Network) { + this.byDescendantIndexThenHeight.push([network]); + } + + forkNetwork(descendantIndex: number, leftHash: string, rightHash: string) { + 
const networks = this.byDescendantIndexThenHeight[descendantIndex]; + + const [latest] = networks.slice(-1); + + this.byDescendantIndexThenHeight.push([ + ...networks, + { + ...latest, + historicBlock: { + number: latest.historicBlock.number + 1, + hash: rightHash + } + } + ]); + + networks.push({ + ...latest, + historicBlock: { + number: latest.historicBlock.number + 1, + hash: leftHash + } + }); + } + + get networks() { + return this.byDescendantIndexThenHeight.map(networks => { + const [latest] = networks.slice(-1); + return { + ...latest, + getBlockByNumber: (height: number | "earliest") => + (height === "earliest" ? networks[0] : networks[height] || {}) + .historicBlock + }; + }); + } +} + +const Hash = (): fc.Arbitrary => + fc + .hexaString({ + minLength: 64, + maxLength: 64 + }) + .map(hash => `0x${hash}`); + +const NetworkId = (): fc.Arbitrary => fc.integer({ min: 1 }); + +namespace Commands { + type Command = (model: Model) => void; + + export const AddNetwork = (): fc.Arbitrary => + fc.tuple(Hash(), NetworkId()).map(([hash, networkId]) => (model: Model) => { + model.addNetwork({ + networkId, + historicBlock: { + number: 0, + hash + } + }); + }); + + export const ExtendNetwork = (): fc.Arbitrary => + fc.tuple(fc.nat(), Hash()).map(([num, hash]) => (model: Model) => { + const descendantIndex = num % model.networks.length; + model.extendNetwork(descendantIndex, hash); + }); + + export const ForkNetwork = (): fc.Arbitrary => + fc + .tuple(fc.nat(), Hash(), Hash()) + .map(([num, leftHash, rightHash]) => (model: Model) => { + const descendantIndex = num % model.networks.length; + model.forkNetwork(descendantIndex, leftHash, rightHash); + }); +} + +export const Networks = (): fc.Arbitrary => + fc + .tuple( + Commands.AddNetwork(), + fc.array( + fc.frequency( + { + arbitrary: Commands.AddNetwork(), + weight: 1 + }, + { + arbitrary: Commands.ExtendNetwork(), + weight: 3 + }, + { + arbitrary: Commands.ForkNetwork(), + weight: 1 + } + ), + { maxLength: 100 } + ) + ) 
+ .map(([addNetwork, commands]) => { + const model = new Model(); + + addNetwork(model); + + for (const command of commands) { + command(model); + } + + return model; + }); + +export interface Batch { + descendantIndex: number; + input: Network; +} + +export const Batch = (model: Model): fc.Arbitrary => { + const { networks } = model; + + return fc + .nat({ + max: networks.length * 1000 + }) + .chain(num => { + const descendantIndex = num % model.networks.length; + const network = networks[descendantIndex]; + const maxHeight = network.historicBlock.number; + + return fc.record({ + descendantIndex: fc.constant(descendantIndex), + input: fc.nat({ max: maxHeight }).map(height => ({ + networkId: network.networkId, + historicBlock: network.getBlockByNumber(height) + })) + }); + }); +}; + +export const Batches = (model: Model): fc.Arbitrary => + fc.array(Batch(model), { maxLength: 10 }); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts b/src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts new file mode 100644 index 0000000000..06902337ac --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts @@ -0,0 +1,55 @@ +type Provider = { + send: (payload: any, callback: any) => void; +}; + +import { Batch, Model } from "./arbitraries"; + +export const mockProvider = (options: { + model: Model; + batch: Batch; +}): Provider => { + const { model, batch } = options; + + const { networkId, getBlockByNumber } = model.networks[batch.descendantIndex]; + + return { + send(payload, callback) { + const { jsonrpc, id, method, params } = payload; + + switch (method) { + case "eth_getBlockByNumber": { + let [blockNumber] = params; + if (blockNumber === "earliest") { + blockNumber = 0; + } + + const height = parseInt(blockNumber); + + (getBlockByNumber(height) as any).then(block => { + const result = block + ? 
{ + number: `0x${height.toString(16)}`, + hash: block.hash + } + : undefined; + + return callback(null, { + jsonrpc, + id, + result + }); + }); + } + case "net_version": { + const result = networkId; + + return callback(null, { + jsonrpc, + id, + result + }); + } + } + } + }; +}; diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/plan.ts b/src/chains/ethereum/ethereum/tests/forking/cache/plan.ts new file mode 100644 index 0000000000..07f2e97c10 --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache/plan.ts @@ -0,0 +1,112 @@ +import { Batch, Model } from "./arbitraries"; + +export const plan = (options: { model: Model; batches: Batch[] }) => { + const { model, batches } = options; + + // track latest for each descendant in the model + const latestByDescendantIndex: { + [descendantIndex: number]: { + network: { id: string }; + number: number; + }; + } = {}; + + // track any networks that have been superseded by later descendants, since + // it's impossible to know that a given network in our model has further + // descendants unless we tell it about those descendants. + const superseded = new Set(); + const worldView = {}; + + // for each batch + for (const batch of batches) { + const { input } = batch; + const { + networkId, + getBlockByNumber: getBatchBlockByNumber + } = model.networks[batch.descendantIndex]; + + // for each input in each batch + for (const { networkId, historicBlock } of [input]) { + const { number } = historicBlock; + + // for each descendant network in our model + for (const [ + descendantIndex, + { getBlockByNumber: getComparedBlockByNumber } + ] of model.networks.entries()) { + const { network: currentLatestNetwork, number: latestHeight = -1 } = + latestByDescendantIndex[descendantIndex] || {}; + + const inputComparison = + number === latestHeight + ? "equal" + : number < latestHeight + ? 
"earlier" + : "later"; + + const id = networkId + historicBlock.number + historicBlock.hash; + + switch (inputComparison) { + case "equal": { + // if input is the same height as the latest, don't update any + // records + break; + } + case "later": { + // if the input is later than current latest for compared network, + // check the compared network's equivalent block at input height + // + // if these match, then the current latest is ancestor to the + // input: mark current latest as superseded and update latest + const batchBlock = historicBlock; + const comparedBlock = getComparedBlockByNumber(number); + + if (comparedBlock && batchBlock.hash === comparedBlock.hash) { + // mark any previously known latest as superseded + if (currentLatestNetwork) { + superseded.add(currentLatestNetwork.id); + } + + // update known latest + latestByDescendantIndex[descendantIndex] = { + network: { id }, + number + }; + } + + break; + } + case "earlier": { + // if the input is earlier than the current latest, check that + // the current latest block for the compared network matches the + // equivalent block for the input batch network + // + // if these match, then the current latest block is a known + // descendant of the input: mark input as superseded + const batchBlock = getBatchBlockByNumber(latestHeight); + const comparedBlock = getComparedBlockByNumber(latestHeight); + + if (batchBlock && batchBlock.hash === comparedBlock.hash) { + // then mark immediately as superseded (we know this network will + // not come back as a latestDescendant) + + superseded.add(id); + } + + break; + } + } + } + } + + const ids = new Set( + Object.values(latestByDescendantIndex).map(({ network: { id } }) => id) + ); + + return { + expectedLatestDescendants: [...ids] + .filter(id => !superseded.has(id)) + .map(id => ({ id })) + }; + } +}; diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index 
a4cae2ffa2..4d941f31ab 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -111,7 +111,8 @@ describe("forking", () => { }); it("handles invalid JSON-RPC responses", async () => { const { localProvider } = await startLocalChain(port, { - url: `http://0.0.0.0:${port}` + url: `http://0.0.0.0:${port}`, + noCache: true }); // some bad values to test const junks = [ @@ -148,7 +149,8 @@ describe("forking", () => { () => startLocalChain(PORT, { url: null, - provider: { request: "not a function" } + provider: { request: "not a function" } as any, + noCache: true }), { message: "Forking `provider` must be EIP-1193 compatible" } ); @@ -156,7 +158,8 @@ describe("forking", () => { () => startLocalChain(PORT, { url: null, - provider: { send: "also not a function" } + provider: { send: "also not a function" } as any, + noCache: true }), { message: "Forking `provider` must be EIP-1193 compatible" } ); @@ -169,7 +172,8 @@ describe("forking", () => { async () => { const provider = await startLocalChain(PORT, { url: null, - provider: remoteProvider + provider: remoteProvider as any, + noCache: true }); localProvider = provider.localProvider; } @@ -245,7 +249,8 @@ describe("forking", () => { const provider = await startLocalChain(PORT, { url: null, - provider: remoteProvider + provider: remoteProvider as any, + noCache: true }); localProvider = provider.localProvider; @@ -314,7 +319,7 @@ describe("forking", () => { describe("initial state", () => { it("should get the Network ID of the forked chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { noCache: true }); const [remoteNetworkId, localNetworkId] = await Promise.all( [remoteProvider, localProvider].map(p => p.send("net_version", [])) @@ -336,7 +341,8 @@ describe("forking", () => { assert.strictEqual(remoteBlockNumber, 10); const localStartBlockNum = blocks / 2; 
const { localProvider } = await startLocalChain(PORT, { - blockNumber: localStartBlockNum + blockNumber: localStartBlockNum, + noCache: true }); const localBlockNumber = parseInt( @@ -359,7 +365,7 @@ describe("forking", () => { describe("block number", () => { let localProvider: EthereumProvider; beforeEach("start local chain", async () => { - ({ localProvider } = await startLocalChain(PORT)); + ({ localProvider } = await startLocalChain(PORT, { noCache: true })); }); it("local block number should be 1 after the remote block on start up", async () => { @@ -379,7 +385,7 @@ describe("forking", () => { }); beforeEach("start local chain", async () => { - ({ localProvider } = await startLocalChain(PORT)); + ({ localProvider } = await startLocalChain(PORT, { noCache: true })); }); it("should return the nonce of each account", async () => { @@ -404,7 +410,9 @@ describe("forking", () => { }); beforeEach("start local chain", async () => { - ({ localProvider, localAccounts } = await startLocalChain(PORT)); + ({ localProvider, localAccounts } = await startLocalChain(PORT, { + noCache: true + })); }); it("should use `defaultBalanceEther` for balance of the initial accounts on the local chain", async () => { @@ -586,7 +594,7 @@ describe("forking", () => { }); it("should fetch contract code from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { noCache: true }); const { blockNumbersWithCode, blockNumbersWithoutCode @@ -616,7 +624,7 @@ describe("forking", () => { }); it("should fetch initial contract data from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { noCache: true }); const { blockNum, blockNumbersWithCode, @@ -666,7 +674,7 @@ describe("forking", () => { }); it("should fetch changed contract data from the remote chain via the local chain", 
async () => { - const { localProvider } = await startLocalChain(PORT); + const { localProvider } = await startLocalChain(PORT, { noCache: true }); const { blockNum, blockNumbersWithCode, diff --git a/src/chains/ethereum/ethereum/tests/forking/helpers.ts b/src/chains/ethereum/ethereum/tests/forking/helpers.ts index 4a7952e972..b4123858c8 100644 --- a/src/chains/ethereum/ethereum/tests/forking/helpers.ts +++ b/src/chains/ethereum/ethereum/tests/forking/helpers.ts @@ -1,6 +1,6 @@ import getProvider from "../helpers/getProvider"; -import Server from "../../../../../packages/core/lib/src/server"; import EthereumProvider from "../../src/provider"; +import { EthereumProviderOptions } from "@ganache/ethereum-options/typings"; export const logging = { logger: { @@ -48,7 +48,10 @@ export const updateRemotesAccountNonces = async ( ); }; -export const startLocalChain = async (port: number, options?: any) => { +export const startLocalChain = async ( + port: number, + options?: EthereumProviderOptions["fork"] +) => { const localProvider = await getProvider({ logging, fork: { url: `ws://0.0.0.0:${port}`, ...options }, diff --git a/src/chains/ethereum/ethereum/tests/tsconfig.json b/src/chains/ethereum/ethereum/tests/tsconfig.json index 8cbcfe608a..df4d02e4be 100644 --- a/src/chains/ethereum/ethereum/tests/tsconfig.json +++ b/src/chains/ethereum/ethereum/tests/tsconfig.json @@ -1,5 +1,10 @@ { "extends": "../tsconfig.json", - "include": ["./", "../src/**/*"], - "compilerOptions": { "rootDir": "../" } -} + "include": [ + "./**/*", + "../src/**/*" + ], + "compilerOptions": { + "rootDir": "../" + } +} \ No newline at end of file diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index b7a73c9502..737f0a9cac 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -160,6 +160,16 @@ export type ForkConfig = { type: number; hasDefault: true; }; + + /** + * Don't cache 
forking requests in a persistent db. + * + * @default false + */ + noCache: { + type: boolean; + hasDefault: true; + }; }; }; @@ -365,5 +375,11 @@ Defaults to: \`["User-Agent: Ganache/VERSION (https://www.trufflesuite.com/ganac "Restrict the number of requests per second sent to the fork provider. `0` means no limit is applied.", cliType: "number" //implies: ["url"] + }, + noCache: { + normalize, + default: () => false, + cliDescription: "Don't cache forking requests in a persistent db.", + cliType: "boolean" + } }; From 866a9dcacdb1de89472501fe075d6d3d6340c4af Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 8 Oct 2021 18:27:47 -0400 Subject: [PATCH 02/93] some fixes --- .../src/forking/persistent-cache/helpers.ts | 32 ++++--- .../persistent-cache/persistent-cache.ts | 87 +++++++++++++------ .../ethereum/tests/forking/cache.test.ts | 48 +++++++--- 3 files changed, 117 insertions(+), 50 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts index 9eb0f63bf6..cf0a542531 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts @@ -83,6 +83,7 @@ export async function resolveTargetAndClosestAncestor( ) { let targetBlock: Tree; let closestAncestor: Tree; + let previousClosestAncestor: Tree; try { const key = Tree.encodeKey(targetHeight, targetHash); targetBlock = Tree.deserialize(key, await db.get(key)); @@ -90,16 +91,27 @@ export async function resolveTargetAndClosestAncestor( if (targetBlock.closestKnownAncestor.equals(BUFFER_EMPTY)) { // we are the genesis/earliest block closestAncestor = null; + previousClosestAncestor = null; } else { - closestAncestor = Tree.deserialize( + previousClosestAncestor = Tree.deserialize( targetBlock.closestKnownAncestor, await db.get(targetBlock.closestKnownAncestor) ); + // check if we are still the closest known ancestor
+ closestAncestor = + (await findClosestAncestor( + db, + request, + targetHeight, + previousClosestAncestor.key + )) || previousClosestAncestor; } } catch (e) { // something bad happened (I/O failure?), bail if (!e.notFound) throw e; + previousClosestAncestor = null; + // we couldn't find our target block in the database so we need to figure // out it's relationships via the blockchain. @@ -120,19 +132,17 @@ export async function resolveTargetAndClosestAncestor( const earliest = new Tree(Quantity.from(earliestNumber), hash); - closestAncestor = await findClosestAncestor( - db, - request, - targetHeight, - earliest - ); + closestAncestor = + (await findClosestAncestor(db, request, targetHeight, earliest.key)) || + earliest; targetBlock = new Tree(targetHeight, targetHash, closestAncestor.key); } } return { targetBlock, - closestAncestor + closestAncestor, + previousClosestAncestor }; } @@ -172,16 +182,16 @@ export async function findClosestAncestor( db: LevelUp, request: Request, height: Quantity, - upTo: Tree + upTo: Buffer ) { const generator = findRelated(db, request, { - gte: upTo.key, + gte: upTo, lt: Tree.encodeKey(height, DATA_EMPTY), reverse: true }); const first = await generator.next(); await generator.return(); - return first.value || upTo; + return first.value; } /** diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts index 5c9fd2cf4a..87c92fa0bf 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -132,13 +132,10 @@ export class PersistentCache { resolve(db); }); }); - console.log("opened!"); cache.db = db; cache.cacheDb = sub(db, "c", levelupOptions); cache.ancestorDb = sub(db, "a", levelupOptions); - console.log("await cache.cacheDb.open();"); await cache.cacheDb.open(); - console.log("await 
cache.ancestorDb.open();"); await cache.ancestorDb.open(); await setDbVersion(cache.db, cache.version); @@ -151,7 +148,8 @@ export class PersistentCache { const { targetBlock, - closestAncestor + closestAncestor, + previousClosestAncestor } = await resolveTargetAndClosestAncestor( this.ancestorDb, this.request, @@ -161,16 +159,34 @@ export class PersistentCache { this.ancestry = new Ancestry(this.ancestorDb, closestAncestor); + const atomicBatch = this.ancestorDb.batch(); + + // if we changed closest ancestors remove our targetBlock from the previous + // ancestor so our target block doesn't appear in the database more than + // once, and update our targetBlock to point to this new ancestor + if ( + previousClosestAncestor && + !previousClosestAncestor.key.equals(closestAncestor.key) + ) { + targetBlock.closestKnownAncestor = closestAncestor.key; + + const index = previousClosestAncestor.closestKnownDescendants.findIndex( + buf => buf.equals(targetBlock.key) + ); + previousClosestAncestor.closestKnownDescendants.splice(index, 1); + atomicBatch.put( + previousClosestAncestor.key, + previousClosestAncestor.serialize() + ); + } + let allKnownDescendants = []; // if we don't have a closestAncestor it because the target block is block 0 if (closestAncestor == null) { allKnownDescendants = targetBlock.closestKnownDescendants; - await this.ancestorDb.put(targetBlock.key, targetBlock.serialize()); + atomicBatch.put(targetBlock.key, targetBlock.serialize()); } else { - const atomicBatch = this.ancestorDb.batch(); - const ancestorsDescendants = [targetBlock.key]; - const newNodeClosestKnownDescendants: Buffer[] = []; await Promise.all( closestAncestor.closestKnownDescendants.map(async descendantKey => { @@ -191,8 +207,8 @@ export class PersistentCache { ); const descendantRawBlock = await this.getBlock(descendantHeight); - // if the block doesn't exist on our chain, it can't be our child, keep - // it in the parent + // if the block doesn't exist on our chain, it can't be 
our child, + // keep it in the parent if ( descendantRawBlock == null || descendantRawBlock.hash !== @@ -200,12 +216,12 @@ export class PersistentCache { ) { ancestorsDescendants.push(descendantKey); } else { - newNodeClosestKnownDescendants.push(descendantNode.key); + targetBlock.closestKnownDescendants.push(descendantNode.key); // keep track of *all* known descendants do we don't bother // checking if they are a known closest descendant later on allKnownDescendants.push(...descendantNode.closestKnownDescendants); descendantNode.closestKnownAncestor = targetBlock.key; - // update the descendant node with it's newly assigned + // update the descendant node with its newly assigned // closestKnownAncestor atomicBatch.put(descendantNode.key, descendantNode.serialize()); } @@ -213,37 +229,49 @@ export class PersistentCache { ); closestAncestor.closestKnownDescendants = ancestorsDescendants; - targetBlock.closestKnownDescendants = newNodeClosestKnownDescendants; - atomicBatch.put(closestAncestor.key, closestAncestor.serialize()); - atomicBatch.put(targetBlock.key, targetBlock.serialize()); - - await atomicBatch.write(); } + // TODO(perf): we always re-save the targetBlock but could optimize to only + // resave if it is needed. + atomicBatch.put(targetBlock.key, targetBlock.serialize()); + + await atomicBatch.write(); + // we DO want to re-balance the descendants, but we don't want to wait for // it because it can't effect our current fork block's cache results since // these caches will be for blocks higher than our own fork block // Do not `await` this. - this.rebalanceDescendantTree( + this._reBalancePromise = this.reBalanceDescendantTree( height, targetBlock, allKnownDescendants - ).catch(_ => {}); // if it fails, it fails. + ) + .catch(_ => {}) // if it fails, it fails. 
+ .finally(() => { + this._reBalancePromise = null; + }); } + /** + * `reBalancePromise` is used at shutdown to ensure we are done balancing the + * tree + * + */ + public _reBalancePromise: Promise = null; + async getBlock(height: Quantity) { return await getBlockByNumber(this.request, height); } - async rebalanceDescendantTree( + async reBalanceDescendantTree( height: Quantity, targetBlock: Tree, allKnownDescendants: Buffer[] ) { const atomicBatch = this.ancestorDb.batch(); - const newClosestKnownDescendants = targetBlock.closestKnownDescendants; - const startSize = newClosestKnownDescendants.length; + const closestKnownDescendants = targetBlock.closestKnownDescendants; + const startSize = closestKnownDescendants.length; for await (const maybeDescendant of findClosestDescendants( this.ancestorDb, @@ -256,7 +284,7 @@ export class PersistentCache { if (targetBlock.key.equals(key)) continue; // if this already is a descendent of ours we can skip it - if (newClosestKnownDescendants.some(d => d.equals(key))) continue; + if (closestKnownDescendants.some(d => d.equals(key))) continue; // this possibleDescendent's descendants can't be our direct descendants // because trees can't merge @@ -266,14 +294,20 @@ export class PersistentCache { if (allKnownDescendants.some(d => d.equals(key))) continue; maybeDescendant.closestKnownAncestor = targetBlock.key; - newClosestKnownDescendants.push(maybeDescendant.key); + closestKnownDescendants.push(maybeDescendant.key); atomicBatch.put(maybeDescendant.key, maybeDescendant.serialize()); + + // if the cache has been closed stop doing work so we can flush what we + // have to the database; descendant resolution shouldn't prevent us from + // fulling closing. 
+ if (this.status === "closed") { + break; + } } // only write if we have changes to write - if (startSize !== newClosestKnownDescendants.length) { - targetBlock.closestKnownDescendants = newClosestKnownDescendants; + if (startSize !== closestKnownDescendants.length) { atomicBatch.put(targetBlock.key, targetBlock.serialize()); // check `this.ancestorDb.isOpen()` as we don't need to try to write if @@ -326,6 +360,7 @@ export class PersistentCache { await this.cacheDb.close(); } if (this.ancestorDb) { + await this._reBalancePromise; await this.ancestorDb.close(); } if (this.db) { diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 6b905e1997..c74d00c6a9 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -20,7 +20,7 @@ const testConfig = process.env["OVERKILL"] describe("forking", () => { describe("persistent cache", () => { - it("works", async () => { + it.only("create relationships between networks correctly", async () => { const arb = Arbitrary.Networks().chain(model => fc.record({ model: fc.constant(model), @@ -41,7 +41,6 @@ describe("forking", () => { number: number; hash: string; }; - parent: string; children: Set; }; const networkLookup: Map = new Map(); @@ -59,7 +58,6 @@ describe("forking", () => { genesisRef = { hash: genesis.hash, block: genesis, - parent: null, children: new Set() }; networkLookup.set(genesis.hash, genesisRef); @@ -74,7 +72,6 @@ describe("forking", () => { ref = { hash: block.hash, block: block, - parent: null, children: new Set() }; networkLookup.set(block.hash, ref); @@ -86,9 +83,14 @@ describe("forking", () => { function findLatestAncestorAndUpdateDescendants( curRef: Ref ): Ref { + let candidate: Ref[] = []; for (const child of curRef.children.values()) { - // if the child is us don't do anything. 
- if (child.hash == block.hash) continue; + if (child.hash == block.hash) { + // if the child is the same block as us we can delete it + // because we are figuring this all out again anyway + curRef.children.delete(child); + continue; + } const networkBlock = network.getBlockByNumber( child.block.number @@ -102,19 +104,29 @@ describe("forking", () => { if (child.block.number > block.number) { curRef.children.delete(child); ref.children.add(child); - child.parent = ref.block.hash; } else { // otherwise, it might be our ancestor, keep checking! - return findLatestAncestorAndUpdateDescendants(child); + candidate.push( + findLatestAncestorAndUpdateDescendants(child) + ); } } - return curRef; + // take the highest ancestor + candidate.sort((a, b) => { + if (a.block.number < b.block.number) { + return 1; + } else if (a.block.number < b.block.number) { + return 0; + } else { + return -1; + } + }); + return candidate[0] || curRef; } let latestAncestor = findLatestAncestorAndUpdateDescendants( genesisRef ); latestAncestor.children.add(ref); - ref.parent = latestAncestor.block.hash; } const cache = await PersistentCache.create(dbName); @@ -131,9 +143,12 @@ describe("forking", () => { ); }) as any ); + + cache._reBalancePromise && (await cache._reBalancePromise); await cache.close(); const serialized = await PersistentCache.serializeDb(dbName); + console.log(JSON.stringify(serialized, null, 2)); const cacheState: Set = new Set(); function convertToRefs( parentHash: string, @@ -150,7 +165,6 @@ describe("forking", () => { number: height.toNumber(), hash: hash.toString() }, - parent: parentHash, children: new Set() }; parent.add(ref); @@ -165,14 +179,22 @@ describe("forking", () => { } convertToRefs(null, serialized, cacheState); - assert.deepStrictEqual(worldState, cacheState); + try { + assert.deepStrictEqual(worldState, cacheState); + } catch (e) { + console.log(e); + throw e; + } } } finally { await PersistentCache.deleteDb(dbName); } }), { - numRuns: testConfig.numRuns + 
numRuns: 1000, + endOnFailure: true, + seed: -981409496, + path: "493" } ); }); From b6dadaa8015a99b7d9f2dfed73aa245ead8b815a Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 8 Oct 2021 21:52:42 -0400 Subject: [PATCH 03/93] test fixes --- .../ethereum/tests/forking/cache.test.ts | 144 ++++++++++-------- 1 file changed, 83 insertions(+), 61 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index c74d00c6a9..c4a5434b30 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -48,45 +48,48 @@ describe("forking", () => { for (const batch of batches) { const block = batch.input.historicBlock; const network = model.networks[batch.descendantIndex]; - - // if we aren't the genesis block get the genesis block and add it - // to our world state, if needed. - - let genesisRef: Ref; - const genesis = network.getBlockByNumber(0); - if (!networkLookup.has(genesis.hash)) { - genesisRef = { - hash: genesis.hash, - block: genesis, - children: new Set() - }; - networkLookup.set(genesis.hash, genesisRef); - worldState.add(genesisRef); - } else { - genesisRef = networkLookup.get(genesis.hash); + function getGenesis() { + // Get the genesis block and add it to our world state, if needed. 
+ const genesis = network.getBlockByNumber(0); + if (!networkLookup.has(genesis.hash)) { + const genesisRef: Ref = { + hash: genesis.hash, + block: genesis, + children: new Set() + }; + networkLookup.set(genesis.hash, genesisRef); + worldState.add(genesisRef); + return genesisRef; + } else { + return networkLookup.get(genesis.hash); + } } - - // if we don't yet know about this block, add it - let ref: Ref; - if (!networkLookup.has(block.hash)) { - ref = { - hash: block.hash, - block: block, - children: new Set() - }; - networkLookup.set(block.hash, ref); - } else { - ref = networkLookup.get(block.hash); + function getOwnRef() { + if (!networkLookup.has(block.hash)) { + const ref: Ref = { + hash: block.hash, + block: block, + children: new Set() + }; + // if we don't yet know about this block, add it + networkLookup.set(block.hash, ref); + return ref; + } else { + return networkLookup.get(block.hash); + } } + const genesisRef = getGenesis(); + const ref = getOwnRef(); + if (block.number > 0) { function findLatestAncestorAndUpdateDescendants( curRef: Ref - ): Ref { - let candidate: Ref[] = []; + ): Ref[] { + const candidates: Ref[] = [curRef]; for (const child of curRef.children.values()) { - if (child.hash == block.hash) { - // if the child is the same block as us we can delete it + if (child.hash === block.hash) { + // if the child is the same block as us we must delete it // because we are figuring this all out again anyway curRef.children.delete(child); continue; @@ -96,36 +99,55 @@ describe("forking", () => { child.block.number ); const isInNetwork = - networkBlock && networkBlock.hash === child.block.hash; - if (!isInNetwork) { - continue; - } - // if the child is after us it is our descendent - if (child.block.number > block.number) { - curRef.children.delete(child); - ref.children.add(child); - } else { - // otherwise, it might be our ancestor, keep checking! 
- candidate.push( - findLatestAncestorAndUpdateDescendants(child) - ); - } + networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // if the child is in network and comes after us it is + // an eventual descendant. continue searching! + if (child.block.number >= block.number) continue; + + // otherwise, it might be our ancestor, keep checking more! + candidates.push( + ...findLatestAncestorAndUpdateDescendants(child) + ); } - // take the highest ancestor - candidate.sort((a, b) => { - if (a.block.number < b.block.number) { - return 1; - } else if (a.block.number < b.block.number) { - return 0; - } else { - return -1; - } - }); - return candidate[0] || curRef; + return candidates; } - let latestAncestor = findLatestAncestorAndUpdateDescendants( + const candidates = findLatestAncestorAndUpdateDescendants( genesisRef ); + const [latestAncestor] = candidates.sort((a, b) => { + if (a.block.number < b.block.number) { + return 1; + } else if (a.block.number === b.block.number) { + return 0; + } else { + return -1; + } + }); + if (candidates.length === 2) { + console.log(candidates); + } + + // move any of our latestAncestor's children that are in our network + // and come after us to our children. + // note: we _could_ figure out some other ancestry relationships + // by looking at _all potential_ ancestors children, but we + // don't because the look ups are costly. 
+ for (const child of latestAncestor.children.values()) { + const networkBlock = network.getBlockByNumber( + child.block.number + ); + const isInNetwork = + networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + if (child.block.number > block.number) { + latestAncestor.children.delete(child); + ref.children.add(child); + } + } + //} + latestAncestor.children.add(ref); } @@ -192,9 +214,9 @@ describe("forking", () => { }), { numRuns: 1000, - endOnFailure: true, - seed: -981409496, - path: "493" + seed: -693367450, + path: "245", + endOnFailure: true } ); }); From 7121a6a9787ec6436de055def4b8bafc810490e6 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 8 Oct 2021 21:53:04 -0400 Subject: [PATCH 04/93] test fix --- src/chains/ethereum/ethereum/tests/forking/cache.test.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index c4a5434b30..87f2ff9d18 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -213,10 +213,7 @@ describe("forking", () => { } }), { - numRuns: 1000, - seed: -693367450, - path: "245", - endOnFailure: true + numRuns: 100 } ); }); From 8ccb894edba44d37ceb9cb06b4d1123f56b97014 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 8 Oct 2021 23:17:04 -0400 Subject: [PATCH 05/93] remove .only --- src/chains/ethereum/ethereum/tests/forking/cache.test.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 87f2ff9d18..63b603700f 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -20,7 +20,7 @@ const testConfig = process.env["OVERKILL"] describe("forking", () => { describe("persistent 
cache", () => { - it.only("create relationships between networks correctly", async () => { + it("create relationships between networks correctly", async () => { const arb = Arbitrary.Networks().chain(model => fc.record({ model: fc.constant(model), @@ -125,9 +125,6 @@ describe("forking", () => { return -1; } }); - if (candidates.length === 2) { - console.log(candidates); - } // move any of our latestAncestor's children that are in our network // and come after us to our children. @@ -146,7 +143,6 @@ describe("forking", () => { ref.children.add(child); } } - //} latestAncestor.children.add(ref); } From 046bd016dee2faefdeac7c741fb77a9aaf8ddb82 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Sat, 9 Oct 2021 18:39:11 -0400 Subject: [PATCH 06/93] fix test --- .../ethereum/tests/forking/cache.test.ts | 35 ++++++++++--------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 63b603700f..85f424f0bf 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -126,23 +126,26 @@ describe("forking", () => { } }); - // move any of our latestAncestor's children that are in our network - // and come after us to our children. - // note: we _could_ figure out some other ancestry relationships - // by looking at _all potential_ ancestors children, but we - // don't because the look ups are costly. 
- for (const child of latestAncestor.children.values()) { - const networkBlock = network.getBlockByNumber( - child.block.number - ); - const isInNetwork = - networkBlock && networkBlock.hash === child.hash; - if (!isInNetwork) continue; - if (child.block.number > block.number) { - latestAncestor.children.delete(child); - ref.children.add(child); + // traverse up all descendants to fix those relationships + const fixDescendants = (parent: Ref) => { + const children = [...parent.children.values()]; + for (const child of children) { + const networkBlock = network.getBlockByNumber( + child.block.number + ); + const isInNetwork = + networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + if (child.block.number > block.number) { + parent.children.delete(child); + ref.children.add(child); + } else { + fixDescendants(child); + } } - } + }; + fixDescendants(genesisRef); latestAncestor.children.add(ref); } From 6eedfb3ecae72cb54f37224334e2749126177e0c Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 11 Oct 2021 15:24:24 -0400 Subject: [PATCH 07/93] fix: make sure we update the parent after moving descendant --- .../persistent-cache/persistent-cache.ts | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts index 87c92fa0bf..d54c311310 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -180,10 +180,9 @@ export class PersistentCache { ); } - let allKnownDescendants = []; + let allKnownDescendants = [...targetBlock.closestKnownDescendants]; // if we don't have a closestAncestor it because the target block is block 0 if (closestAncestor == null) { - allKnownDescendants = targetBlock.closestKnownDescendants; 
atomicBatch.put(targetBlock.key, targetBlock.serialize()); } else { const ancestorsDescendants = [targetBlock.key]; @@ -283,19 +282,29 @@ export class PersistentCache { // don't match with our own self if (targetBlock.key.equals(key)) continue; - // if this already is a descendent of ours we can skip it - if (closestKnownDescendants.some(d => d.equals(key))) continue; - // this possibleDescendent's descendants can't be our direct descendants // because trees can't merge allKnownDescendants.push(...maybeDescendant.closestKnownDescendants); + // if this already is a descendent of ours we can skip it + if (closestKnownDescendants.some(d => d.equals(key))) continue; + // if this already is a descendent of one of our descendants skip it if (allKnownDescendants.some(d => d.equals(key))) continue; + // move the descendant from the parent to the target + const parentTree = Tree.deserialize( + maybeDescendant.closestKnownAncestor, + await this.ancestorDb.get(maybeDescendant.closestKnownAncestor) + ); + parentTree.closestKnownDescendants.splice( + parentTree.closestKnownDescendants.findIndex(d => d.equals(key)), + 1 + ); maybeDescendant.closestKnownAncestor = targetBlock.key; closestKnownDescendants.push(maybeDescendant.key); + atomicBatch.put(parentTree.key, parentTree.serialize()); atomicBatch.put(maybeDescendant.key, maybeDescendant.serialize()); // if the cache has been closed stop doing work so we can flush what we From 06c29fae6a603f0220b7ce6af24476d3a1fb6a14 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 10:58:38 -0400 Subject: [PATCH 08/93] don't filter found related nodes --- .../ethereum/ethereum/src/forking/persistent-cache/helpers.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts index cf0a542531..1905d2e284 100644 --- 
a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts @@ -166,8 +166,7 @@ export async function* findRelated( // block is the same as the one in the db we've found our closest // ancestor! if (block != null && block.hash === Data.from(node.hash).toString()) { - const shouldContinue = yield node; - if (!shouldContinue) break; + yield node; } } } From 3e065f83f4a57617094d8bcd8136b522f95f96dc Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 10:59:01 -0400 Subject: [PATCH 09/93] remove irrelevant comment --- .../src/forking/persistent-cache/persistent-cache.ts | 9 --------- 1 file changed, 9 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts index d54c311310..67f532b4b1 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -31,15 +31,6 @@ const leveldownOpts = { prefix: "" }; * A leveldb-backed cache that enables associating immutable data as it existed * at a specific height on a blockchain. * - * Note: - * - * The relationships between blocks are valid, but not stable. Race - * contention between multiple processes is possible; this may cause - * relationships between blocks to be lost if multiple writes to the same blocks - * occur nearly simultaneously. - * - * This will not cause a loss of data, but may result in increased cache misses. - * * The design affords faster db reads (one read to get known closest ancestors * and descendants) and fast db writes (one write per node in a relationship). 
*/ From 41d130b6e98b50aa854e1517ebf6eb98ee4efb08 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 11:01:10 -0400 Subject: [PATCH 10/93] fix comment typo --- .../ethereum/src/forking/persistent-cache/persistent-cache.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts index 67f532b4b1..d37841fd00 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -207,7 +207,7 @@ export class PersistentCache { ancestorsDescendants.push(descendantKey); } else { targetBlock.closestKnownDescendants.push(descendantNode.key); - // keep track of *all* known descendants do we don't bother + // keep track of *all* known descendants so we don't bother // checking if they are a known closest descendant later on allKnownDescendants.push(...descendantNode.closestKnownDescendants); descendantNode.closestKnownAncestor = targetBlock.key; From 918add7ca3ed4ab8ba897b0165087c6f51e21e5b Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 11:59:46 -0400 Subject: [PATCH 11/93] clean up --- .../ethereum/tests/forking/cache.test.ts | 275 ++++++++++-------- .../tests/forking/cache/arbitraries.ts | 2 +- 2 files changed, 153 insertions(+), 124 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 85f424f0bf..e91a8694a5 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -1,22 +1,142 @@ import * as fc from "fast-check"; import * as Arbitrary from "./cache/arbitraries"; +import { Network, Model } from "./cache/arbitraries"; import { PersistentCache } from 
"../../src/forking/persistent-cache/persistent-cache"; import { Data, Quantity } from "@ganache/utils"; import { Tree } from "../../src/forking/persistent-cache/tree"; import assert from "assert"; - -const testConfig = process.env["OVERKILL"] - ? { - timeout: 5 * 60 * 1000, // 5 min - numRuns: 500 +import Block from "ethereumjs-block"; + +type Ref = { + hash: string; + block: Network["historicBlock"]; + children: Set; +}; + +class BatchManager { + public networkLookup: Map; + public worldState: Set; + constructor() {} + getGenesis(network: Network) { + // Get the genesis block and add it to our world state, if needed. + const genesis = network.getBlockByNumber(0) as Network["historicBlock"]; + if (!this.networkLookup.has(genesis.hash)) { + const genesisRef: Ref = { + hash: genesis.hash, + block: genesis, + children: new Set() + }; + this.networkLookup.set(genesis.hash, genesisRef); + this.worldState.add(genesisRef); + return genesisRef; + } else { + return this.networkLookup.get(genesis.hash); + } + } + getOwnRef(block: Network["historicBlock"]) { + if (!this.networkLookup.has(block.hash)) { + const ref: Ref = { + hash: block.hash, + block: block, + children: new Set() + }; + // if we don't yet know about this block, add it + this.networkLookup.set(block.hash, ref); + return ref; + } else { + return this.networkLookup.get(block.hash); + } + } + findLatestAncestors( + block: Network["historicBlock"], + network: Network, + parent: Ref + ): Ref[] { + const candidates: Ref[] = [parent]; + for (const child of parent.children.values()) { + if (child.hash === block.hash) { + // if the child is the same block as us we must delete it + // because we are figuring this all out again anyway + parent.children.delete(child); + continue; + } + + const networkBlock = network.getBlockByNumber(child.block.number); + const isInNetwork = networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // if the child is in network and comes after us it is + // an 
eventual *descendant*. continue searching! + if (child.block.number >= block.number) continue; + + // otherwise, it might be our ancestor, keep checking more! + candidates.push(...this.findLatestAncestors(block, network, child)); + } + return candidates; + } + + findLatestAncestor( + block: Network["historicBlock"], + network: Network, + parent: Ref + ) { + // find the ancestor with the high block number + return this.findLatestAncestors(block, network, parent).sort((a, b) => { + if (a.block.number < b.block.number) { + return 1; + } else if (a.block.number === b.block.number) { + return 0; + } else { + return -1; + } + })[0]; + } + + /** + * traverse up all descendants to fix those relationships + * @param block + * @param network + * @param parent + * @param allKnownDescendants + */ + fixDescendants( + block: Ref, + network: Network, + parent: Ref, + allKnownDescendants: Set + ) { + const children = [...parent.children.values()]; + for (const child of children) { + const networkBlock = network.getBlockByNumber(child.block.number); + const isInNetwork = networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // we should move the child if it comes after us + if (child.block.number > block.block.number) { + parent.children.delete(child); + block.children.add(child); + allKnownDescendants.add(child.hash); + } else { + this.fixDescendants(block, network, child, allKnownDescendants); + } + } + } + + /** + * @param of collect descendants of this block + * @param acc an accumulator + */ + collectDescendants(of: Ref, acc = new Set()) { + for (const child of of.children) { + acc.add(child.block.hash); + this.collectDescendants(child, acc); } - : { - timeout: 30 * 1000, // 30 sec - numRuns: 50 - }; + return acc; + } +} describe("forking", () => { describe("persistent cache", () => { @@ -33,121 +153,30 @@ describe("forking", () => { fc.asyncProperty(arb, async ({ model, batches }) => { counter++; const dbName = `-test-db-${counter}`; + // make 
sure this cache doesn't already exist await PersistentCache.deleteDb(dbName); try { - type Ref = { - hash: string; - block: { - number: number; - hash: string; - }; - children: Set; - }; - const networkLookup: Map = new Map(); - const worldState = new Set(); + const batchManager = new BatchManager(); for (const batch of batches) { const block = batch.input.historicBlock; const network = model.networks[batch.descendantIndex]; - function getGenesis() { - // Get the genesis block and add it to our world state, if needed. - const genesis = network.getBlockByNumber(0); - if (!networkLookup.has(genesis.hash)) { - const genesisRef: Ref = { - hash: genesis.hash, - block: genesis, - children: new Set() - }; - networkLookup.set(genesis.hash, genesisRef); - worldState.add(genesisRef); - return genesisRef; - } else { - return networkLookup.get(genesis.hash); - } - } - function getOwnRef() { - if (!networkLookup.has(block.hash)) { - const ref: Ref = { - hash: block.hash, - block: block, - children: new Set() - }; - // if we don't yet know about this block, add it - networkLookup.set(block.hash, ref); - return ref; - } else { - return networkLookup.get(block.hash); - } - } - const genesisRef = getGenesis(); - const ref = getOwnRef(); + const genesisRef = batchManager.getGenesis(network); + const ref = batchManager.getOwnRef(block); if (block.number > 0) { - function findLatestAncestorAndUpdateDescendants( - curRef: Ref - ): Ref[] { - const candidates: Ref[] = [curRef]; - for (const child of curRef.children.values()) { - if (child.hash === block.hash) { - // if the child is the same block as us we must delete it - // because we are figuring this all out again anyway - curRef.children.delete(child); - continue; - } - - const networkBlock = network.getBlockByNumber( - child.block.number - ); - const isInNetwork = - networkBlock && networkBlock.hash === child.hash; - if (!isInNetwork) continue; - - // if the child is in network and comes after us it is - // an eventual descendant. 
continue searching! - if (child.block.number >= block.number) continue; - - // otherwise, it might be our ancestor, keep checking more! - candidates.push( - ...findLatestAncestorAndUpdateDescendants(child) - ); - } - return candidates; - } - const candidates = findLatestAncestorAndUpdateDescendants( + const latestAncestor = batchManager.findLatestAncestor( + block, + network, genesisRef ); - const [latestAncestor] = candidates.sort((a, b) => { - if (a.block.number < b.block.number) { - return 1; - } else if (a.block.number === b.block.number) { - return 0; - } else { - return -1; - } - }); - - // traverse up all descendants to fix those relationships - const fixDescendants = (parent: Ref) => { - const children = [...parent.children.values()]; - for (const child of children) { - const networkBlock = network.getBlockByNumber( - child.block.number - ); - const isInNetwork = - networkBlock && networkBlock.hash === child.hash; - if (!isInNetwork) continue; - - if (child.block.number > block.number) { - parent.children.delete(child); - ref.children.add(child); - } else { - fixDescendants(child); - } - } - }; - fixDescendants(genesisRef); - latestAncestor.children.add(ref); + batchManager.fixDescendants( + ref, + network, + genesisRef, + batchManager.collectDescendants(ref) + ); } const cache = await PersistentCache.create(dbName); @@ -165,14 +194,14 @@ describe("forking", () => { }) as any ); + // wait for the descendant re-balance to complete before closing cache._reBalancePromise && (await cache._reBalancePromise); await cache.close(); const serialized = await PersistentCache.serializeDb(dbName); - console.log(JSON.stringify(serialized, null, 2)); + const cacheState: Set = new Set(); function convertToRefs( - parentHash: string, descendants: typeof serialized, parent: Ref["children"] ) { @@ -190,18 +219,14 @@ describe("forking", () => { }; parent.add(ref); if (value.descendants) { - convertToRefs( - hash.toString(), - value.descendants, - ref.children - ); + 
convertToRefs(value.descendants, ref.children); } }); } - convertToRefs(null, serialized, cacheState); + convertToRefs(serialized, cacheState); try { - assert.deepStrictEqual(worldState, cacheState); + assert.deepStrictEqual(batchManager.worldState, cacheState); } catch (e) { console.log(e); throw e; @@ -212,7 +237,11 @@ describe("forking", () => { } }), { - numRuns: 100 + numRuns: 10000, + endOnFailure: true + // seed: -1336914165, + // path: + // "492:3332:23:26:25:25:28:27:34:28:20:19:12:21:9:9:20:11:9:9:12:9:11:12:9:23" } ); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts b/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts index 7b444b2723..c4ef7fea0c 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache/arbitraries.ts @@ -2,7 +2,7 @@ import * as fc from "fast-check"; export interface Network { networkId: number; - getBlockByNumber?(height: number): Promise; + getBlockByNumber?(height: number): any; historicBlock: { number: number; hash: string; From d99647d020be7467f0e17b632aa5156cbece81dc Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 13:00:17 -0400 Subject: [PATCH 12/93] more cache-test cleanup --- .../ethereum/tests/forking/cache.test.ts | 145 +----------------- .../tests/forking/cache/batch-manager.ts | 127 +++++++++++++++ 2 files changed, 132 insertions(+), 140 deletions(-) create mode 100644 src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index e91a8694a5..80d90dcec7 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -1,142 +1,13 @@ import * as fc from "fast-check"; import * as Arbitrary from "./cache/arbitraries"; -import { Network, Model } from "./cache/arbitraries"; import { 
PersistentCache } from "../../src/forking/persistent-cache/persistent-cache"; import { Data, Quantity } from "@ganache/utils"; import { Tree } from "../../src/forking/persistent-cache/tree"; import assert from "assert"; -import Block from "ethereumjs-block"; - -type Ref = { - hash: string; - block: Network["historicBlock"]; - children: Set; -}; - -class BatchManager { - public networkLookup: Map; - public worldState: Set; - constructor() {} - getGenesis(network: Network) { - // Get the genesis block and add it to our world state, if needed. - const genesis = network.getBlockByNumber(0) as Network["historicBlock"]; - if (!this.networkLookup.has(genesis.hash)) { - const genesisRef: Ref = { - hash: genesis.hash, - block: genesis, - children: new Set() - }; - this.networkLookup.set(genesis.hash, genesisRef); - this.worldState.add(genesisRef); - return genesisRef; - } else { - return this.networkLookup.get(genesis.hash); - } - } - getOwnRef(block: Network["historicBlock"]) { - if (!this.networkLookup.has(block.hash)) { - const ref: Ref = { - hash: block.hash, - block: block, - children: new Set() - }; - // if we don't yet know about this block, add it - this.networkLookup.set(block.hash, ref); - return ref; - } else { - return this.networkLookup.get(block.hash); - } - } - findLatestAncestors( - block: Network["historicBlock"], - network: Network, - parent: Ref - ): Ref[] { - const candidates: Ref[] = [parent]; - for (const child of parent.children.values()) { - if (child.hash === block.hash) { - // if the child is the same block as us we must delete it - // because we are figuring this all out again anyway - parent.children.delete(child); - continue; - } - - const networkBlock = network.getBlockByNumber(child.block.number); - const isInNetwork = networkBlock && networkBlock.hash === child.hash; - if (!isInNetwork) continue; - - // if the child is in network and comes after us it is - // an eventual *descendant*. continue searching! 
- if (child.block.number >= block.number) continue; - - // otherwise, it might be our ancestor, keep checking more! - candidates.push(...this.findLatestAncestors(block, network, child)); - } - return candidates; - } - - findLatestAncestor( - block: Network["historicBlock"], - network: Network, - parent: Ref - ) { - // find the ancestor with the high block number - return this.findLatestAncestors(block, network, parent).sort((a, b) => { - if (a.block.number < b.block.number) { - return 1; - } else if (a.block.number === b.block.number) { - return 0; - } else { - return -1; - } - })[0]; - } - - /** - * traverse up all descendants to fix those relationships - * @param block - * @param network - * @param parent - * @param allKnownDescendants - */ - fixDescendants( - block: Ref, - network: Network, - parent: Ref, - allKnownDescendants: Set - ) { - const children = [...parent.children.values()]; - for (const child of children) { - const networkBlock = network.getBlockByNumber(child.block.number); - const isInNetwork = networkBlock && networkBlock.hash === child.hash; - if (!isInNetwork) continue; - - // we should move the child if it comes after us - if (child.block.number > block.block.number) { - parent.children.delete(child); - block.children.add(child); - allKnownDescendants.add(child.hash); - } else { - this.fixDescendants(block, network, child, allKnownDescendants); - } - } - } - - /** - * @param of collect descendants of this block - * @param acc an accumulator - */ - collectDescendants(of: Ref, acc = new Set()) { - for (const child of of.children) { - acc.add(child.block.hash); - this.collectDescendants(child, acc); - } - return acc; - } -} +import { BatchManager, Ref } from "./cache/batch-manager"; describe("forking", () => { describe("persistent cache", () => { @@ -156,7 +27,7 @@ describe("forking", () => { // make sure this cache doesn't already exist await PersistentCache.deleteDb(dbName); try { - const batchManager = new BatchManager(); + const batchManager 
= new BatchManager(model); for (const batch of batches) { const block = batch.input.historicBlock; const network = model.networks[batch.descendantIndex]; @@ -166,8 +37,7 @@ describe("forking", () => { if (block.number > 0) { const latestAncestor = batchManager.findLatestAncestor( - block, - network, + batch, genesisRef ); latestAncestor.children.add(ref); @@ -225,19 +95,14 @@ describe("forking", () => { } convertToRefs(serialized, cacheState); - try { - assert.deepStrictEqual(batchManager.worldState, cacheState); - } catch (e) { - console.log(e); - throw e; - } + assert.deepStrictEqual(batchManager.worldState, cacheState); } } finally { await PersistentCache.deleteDb(dbName); } }), { - numRuns: 10000, + numRuns: 50, endOnFailure: true // seed: -1336914165, // path: diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts b/src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts new file mode 100644 index 0000000000..a02e5a4b1d --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/forking/cache/batch-manager.ts @@ -0,0 +1,127 @@ +export type Ref = { + hash: string; + block: Network["historicBlock"]; + children: Set; +}; + +import * as Arbitrary from "./arbitraries"; +import { Network, Model } from "./arbitraries"; + +export class BatchManager { + public model: Model; + public networkLookup = new Map(); + public worldState = new Set(); + constructor(model: Model) { + this.model = model; + } + getGenesis(network: Network) { + // Get the genesis block and add it to our world state, if needed. 
+ const genesis = network.getBlockByNumber(0) as Network["historicBlock"]; + if (!this.networkLookup.has(genesis.hash)) { + const genesisRef: Ref = { + hash: genesis.hash, + block: genesis, + children: new Set() + }; + this.networkLookup.set(genesis.hash, genesisRef); + this.worldState.add(genesisRef); + return genesisRef; + } else { + return this.networkLookup.get(genesis.hash); + } + } + getOwnRef(block: Network["historicBlock"]) { + if (!this.networkLookup.has(block.hash)) { + const ref: Ref = { + hash: block.hash, + block: block, + children: new Set() + }; + // if we don't yet know about this block, add it + this.networkLookup.set(block.hash, ref); + return ref; + } else { + return this.networkLookup.get(block.hash); + } + } + findLatestAncestors(batch: Arbitrary.Batch, parent: Ref): Ref[] { + const block = batch.input.historicBlock; + const network = this.model.networks[batch.descendantIndex]; + const candidates: Ref[] = [parent]; + for (const child of parent.children.values()) { + if (child.hash === block.hash) { + // if the child is the same block as us we must delete it + // because we are figuring this all out again anyway + parent.children.delete(child); + continue; + } + + const networkBlock = network.getBlockByNumber(child.block.number); + const isInNetwork = networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // if the child is in network and comes after us it is + // an eventual *descendant*. continue searching! + if (child.block.number >= block.number) continue; + + // otherwise, it might be our ancestor, keep checking more! 
+ candidates.push(...this.findLatestAncestors(batch, child)); + } + return candidates; + } + + findLatestAncestor(batch: Arbitrary.Batch, parent: Ref) { + // find the ancestor with the high block number + return this.findLatestAncestors(batch, parent).sort((a, b) => { + if (a.block.number < b.block.number) { + return 1; + } else if (a.block.number === b.block.number) { + return 0; + } else { + return -1; + } + })[0]; + } + + /** + * traverse up all descendants to fix those relationships + * @param block + * @param network + * @param parent + * @param allKnownDescendants + */ + fixDescendants( + block: Ref, + network: Network, + parent: Ref, + allKnownDescendants: Set + ) { + const children = [...parent.children.values()]; + for (const child of children) { + const networkBlock = network.getBlockByNumber(child.block.number); + const isInNetwork = networkBlock && networkBlock.hash === child.hash; + if (!isInNetwork) continue; + + // we should move the child if it comes after us + if (child.block.number > block.block.number) { + parent.children.delete(child); + block.children.add(child); + allKnownDescendants.add(child.hash); + } else { + this.fixDescendants(block, network, child, allKnownDescendants); + } + } + } + + /** + * @param of collect descendants of this block + * @param acc an accumulator + */ + collectDescendants(of: Ref, acc = new Set()) { + for (const child of of.children) { + acc.add(child.block.hash); + this.collectDescendants(child, acc); + } + return acc; + } +} From dc602c39563a79df075c489f77cf1b511fff048d Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 13:12:43 -0400 Subject: [PATCH 13/93] fix typeo --- src/chains/ethereum/options/src/fork-options.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 737f0a9cac..758951c6d7 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ 
b/src/chains/ethereum/options/src/fork-options.ts @@ -379,7 +379,7 @@ Defaults to: \`["User-Agent: Ganache/VERSION (https://www.trufflesuite.com/ganac noCache: { normalize, default: () => false, - cliDescription: "DOn't cache forking requests in a persistent db.", + cliDescription: "Don't cache forking requests in a persistent db.", cliType: "boolean" } }; From 482eea3c32047240c40fcb608e604581a4629093 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 13:20:52 -0400 Subject: [PATCH 14/93] remove unused and fix typo --- .../ethereum/tests/forking/cache.test.ts | 2 +- .../ethereum/tests/forking/cache/plan.ts | 112 ------------------ 2 files changed, 1 insertion(+), 113 deletions(-) delete mode 100644 src/chains/ethereum/ethereum/tests/forking/cache/plan.ts diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 80d90dcec7..9613512f18 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -11,7 +11,7 @@ import { BatchManager, Ref } from "./cache/batch-manager"; describe("forking", () => { describe("persistent cache", () => { - it("create relationships between networks correctly", async () => { + it("creates relationships between networks correctly", async () => { const arb = Arbitrary.Networks().chain(model => fc.record({ model: fc.constant(model), diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/plan.ts b/src/chains/ethereum/ethereum/tests/forking/cache/plan.ts deleted file mode 100644 index 07f2e97c10..0000000000 --- a/src/chains/ethereum/ethereum/tests/forking/cache/plan.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { Batch, Model } from "./arbitraries"; - -export const plan = (options: { model: Model; batches: Batch[] }) => { - const { model, batches } = options; - - // track latest for each descendant in the model - const latestByDescendantIndex: { - [descendantIndex: number]: { 
- network: { id: string }; - number: number; - }; - } = {}; - - // track any networks that have been superseded by later descendants, since - // it's impossible to know that a given network in our model has further - // descendants unless we tell it about those descendants. - const superseded = new Set(); - const worldView = {}; - - // for each batch - for (const batch of batches) { - const { input } = batch; - const { - networkId, - getBlockByNumber: getBatchBlockByNumber - } = model.networks[batch.descendantIndex]; - - // for each input in each batch - for (const { networkId, historicBlock } of [input]) { - const { number } = historicBlock; - - // for each descendant network in our model - for (const [ - descendantIndex, - { getBlockByNumber: getComparedBlockByNumber } - ] of model.networks.entries()) { - const { network: currentLatestNetwork, number: latestHeight = -1 } = - latestByDescendantIndex[descendantIndex] || {}; - - const inputComparison = - number === latestHeight - ? "equal" - : number < latestHeight - ? 
"earlier" - : "later"; - - const id = networkId + historicBlock.number + historicBlock.hash; - - switch (inputComparison) { - case "equal": { - // if input is the same height as the latest, don't update any - // records - break; - } - case "later": { - // if the input is later than current latest for compared network, - // check the compared network's equivalent block at input height - // - // if these match, then the current latest is ancestor to the - // input: mark current latest as superseded and update latest - const batchBlock = historicBlock; - const comparedBlock = getComparedBlockByNumber(number); - - if (comparedBlock && batchBlock.hash === comparedBlock.hash) { - // mark any previously known latest as superseded - if (currentLatestNetwork) { - superseded.add(currentLatestNetwork.id); - } - - // update known latest - latestByDescendantIndex[descendantIndex] = { - network: { id }, - number - }; - } - - break; - } - case "earlier": { - // if the input is earlier than the current latest, check that - // the current latest block for the compared network matches the - // equivalent block for the input batch network - // - // if these match, then the current latest block is a known - // descendant of the input: mark input as superseded - const batchBlock = getBatchBlockByNumber(latestHeight); - const comparedBlock = getComparedBlockByNumber(latestHeight); - - if (batchBlock && batchBlock.hash === comparedBlock.hash) { - // then mark immediately as superseded (we know this network will - // not come back as a latestDescendant) - - superseded.add(id); - } - - break; - } - } - } - } - - const ids = new Set( - Object.values(latestByDescendantIndex).map(({ network: { id } }) => id) - ); - - return { - expectedLatestDescendants: [...ids] - .filter(id => !superseded.has(id)) - .map(id => ({ id })) - }; - } -}; From f30e83d2a67a4d69222ea54473da642be74c9c07 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 13:21:37 -0400 Subject: [PATCH 15/93] remove 
old comment --- src/chains/ethereum/ethereum/tests/forking/cache.test.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 9613512f18..f7b431095c 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -104,9 +104,6 @@ describe("forking", () => { { numRuns: 50, endOnFailure: true - // seed: -1336914165, - // path: - // "492:3332:23:26:25:25:28:27:34:28:20:19:12:21:9:9:20:11:9:9:12:9:11:12:9:23" } ); }); From 33117eb817b4139cd65c5ac99bf6125cb9c07c90 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 13:23:39 -0400 Subject: [PATCH 16/93] remove timeout limit in ethereum tests --- src/chains/ethereum/ethereum/package.json | 2 +- src/chains/ethereum/ethereum/tests/forking/cache.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/package.json b/src/chains/ethereum/ethereum/package.json index f206484729..6103c95327 100644 --- a/src/chains/ethereum/ethereum/package.json +++ b/src/chains/ethereum/ethereum/package.json @@ -30,7 +30,7 @@ "docs.preview": "ws --open --port 3010 --directory ../../../../docs", "tsc": "ttsc --build", "test": "nyc --reporter lcov npm run mocha", - "mocha": "cross-env TS_NODE_COMPILER=ttypescript TS_NODE_FILES=true mocha -s 0 -t 0 --exit --check-leaks --throw-deprecation --trace-warnings --require ts-node/register 'tests/**/*.test.ts'" + "mocha": "cross-env TS_NODE_COMPILER=ttypescript TS_NODE_FILES=true mocha -s 0 --exit --check-leaks --throw-deprecation --trace-warnings --require ts-node/register 'tests/**/*.test.ts'" }, "bugs": { "url": "https://github.com/trufflesuite/ganache/issues" diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index f7b431095c..1010f409ba 100644 --- 
a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -106,6 +106,6 @@ describe("forking", () => { endOnFailure: true } ); - }); + }).timeout(20000); }); }); From ea06d78aa183263beb80629d74e13c227255c89b Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 14:45:50 -0400 Subject: [PATCH 17/93] fix cache issue and increase test timeouts --- .../ethereum/ethereum/src/forking/fork.ts | 23 +++++++++++++++---- .../src/forking/persistent-cache/ancestry.ts | 4 ++-- .../ethereum/tests/forking/block.test.ts | 4 +++- .../ethereum/tests/forking/forking.test.ts | 4 +++- .../ethereum/options/src/fork-options.ts | 18 ++++++++++++++- 5 files changed, 43 insertions(+), 10 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 27a0ed909b..8bf5b60c81 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -12,7 +12,8 @@ import { Account } from "@ganache/ethereum-utils"; import BlockManager from "../data-managers/block-manager"; import { ProviderHandler } from "./handlers/provider-handler"; import { PersistentCache } from "./persistent-cache/persistent-cache"; -import BlockLogManager from "../data-managers/blocklog-manager"; + +const CONFIRMATIONS = 5n; async function fetchChainId(fork: Fork) { const chainIdHex = await fork.request("eth_chainId", []); @@ -105,9 +106,19 @@ export class Fork { #setBlockDataFromChainAndOptions = async () => { const options = this.#options; if (options.blockNumber === Tag.LATEST) { - // if our block number option is "latest" override it with the original - // chain's current blockNumber - const block = await fetchBlock(this, Tag.LATEST); + // if our block number option is "latest" use `latest - CONFIRMATIONS` + // as the block number to ensure the block is fully synced. 
Then override + // the `options.blockNumber` with the original chain's + // `"latest" - CONFIRMATIONS` block number. + // One reason for this is because providers often know about blocks before + // they've fully synced the state. so a eth_getBlockByNumber(latest) + // followed by calls that use that number (like getting an account's + // transaction count) may result in a "header not found" error. + const latestBlockNumber = BigInt(await fetchBlockNumber(this)); + const block = await fetchBlock( + this, + Quantity.from(latestBlockNumber - CONFIRMATIONS) + ); options.blockNumber = parseInt(block.number, 16); this.blockNumber = Quantity.from(options.blockNumber); this.stateRoot = Data.from(block.stateRoot); @@ -153,7 +164,9 @@ export class Fork { public async initialize() { let cacheProm: Promise; - if (this.#options.noCache === false) { + const options = this.#options; + if (options.deleteCache) await PersistentCache.deleteDb(); + if (options.noCache === false) { // ignore cache start up errors as it is possible there is an open // conflict if another ganache fork is running at the time this one is // started. The cache isn't required (though performance will be diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts index 14a7946012..a75f85210f 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts @@ -16,7 +16,7 @@ export class Ancestry { this.next = parent.closestKnownAncestor.equals(BUFFER_EMPTY) ? null : parent.closestKnownAncestor; - this.knownAncestors = new Set([parent.key.toString("hex")]); + this.knownAncestors = new Set([parent.hash.toString("hex")]); } } @@ -37,7 +37,7 @@ export class Ancestry { this.next = node.closestKnownAncestor.equals(BUFFER_EMPTY) ? 
null : node.closestKnownAncestor; - this.knownAncestors.add(node.key.toString("hex")); + this.knownAncestors.add(node.hash.toString("hex")); this.lock.delete(k); resolver(); } diff --git a/src/chains/ethereum/ethereum/tests/forking/block.test.ts b/src/chains/ethereum/ethereum/tests/forking/block.test.ts index 53ad7c6bd9..9403ecc5a5 100644 --- a/src/chains/ethereum/ethereum/tests/forking/block.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/block.test.ts @@ -3,7 +3,9 @@ import getProvider from "../helpers/getProvider"; import EthereumProvider from "../../src/provider"; import request from "superagent"; -describe("forking", () => { +describe("forking", function () { + this.timeout(10000); + describe("blocks", () => { const blockNumber = 0xb77935; const blockNumHex = `0x${blockNumber.toString(16)}`; diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index 4d941f31ab..f785e817bf 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -15,7 +15,9 @@ import compile from "../helpers/compile"; import path from "path"; import { CodedError } from "@ganache/ethereum-utils"; -describe("forking", () => { +describe("forking", function () { + this.timeout(10000); + const PORT = 9999; const NETWORK_ID = 1234; const REMOTE_ACCOUNT_COUNT = 15; diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 758951c6d7..c7090b68ec 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -170,6 +170,16 @@ export type ForkConfig = { type: boolean; hasDefault: true; }; + + /** + * Deletes the persistent cache on start up. 
+ * + * @default false + */ + deleteCache: { + type: boolean; + hasDefault: true; + }; }; }; @@ -280,7 +290,7 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` return; } }, - defaultDescription: `"${Tag.LATEST}"` + defaultDescription: `Latest block number - 5` //implies: ["url"] }, username: { @@ -381,5 +391,11 @@ Defaults to: \`["User-Agent: Ganache/VERSION (https://www.trufflesuite.com/ganac default: () => false, cliDescription: "Don't cache forking requests in a persistent db.", cliType: "boolean" + }, + deleteCache: { + normalize, + default: () => false, + cliDescription: "Deletes the persistent cache before starting.", + cliType: "boolean" } }; From 5839621f7ad1a7976742aa45ff4cb33c95c795de Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 15:51:05 -0400 Subject: [PATCH 18/93] fix tests --- .../ethereum/ethereum/src/forking/fork.ts | 31 ++++++++++--------- .../ethereum/tests/forking/account.test.ts | 3 +- .../ethereum/tests/forking/block.test.ts | 3 +- .../ethereum/tests/forking/helpers.ts | 2 +- .../tests/forking/transaction.test.ts | 3 +- .../ethereum/options/src/fork-options.ts | 20 +++++++++++- 6 files changed, 43 insertions(+), 19 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 8bf5b60c81..4304ed942e 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -106,21 +106,24 @@ export class Fork { #setBlockDataFromChainAndOptions = async () => { const options = this.#options; if (options.blockNumber === Tag.LATEST) { - // if our block number option is "latest" use `latest - CONFIRMATIONS` - // as the block number to ensure the block is fully synced. Then override - // the `options.blockNumber` with the original chain's - // `"latest" - CONFIRMATIONS` block number. 
- // One reason for this is because providers often know about blocks before - // they've fully synced the state. so a eth_getBlockByNumber(latest) - // followed by calls that use that number (like getting an account's - // transaction count) may result in a "header not found" error. - const latestBlockNumber = BigInt(await fetchBlockNumber(this)); - const block = await fetchBlock( - this, - Quantity.from(latestBlockNumber - CONFIRMATIONS) - ); + const latestBlock = await fetchBlock(this, Tag.LATEST); + let blockNumber = parseInt(latestBlock.number, 16); + const currentTime = BigInt((Date.now() / 1000) | 0); // current time in seconds + // if the "latest" block is less than `blockAge` seconds old we don't use it + // because it is possible that the node we connected to hasn't fully synced its + // state, so successive calls to this block + const useOlderBlock = + blockNumber > 0 && + currentTime - BigInt(latestBlock.timestamp) < options.blockAge; + let block; + if (useOlderBlock) { + blockNumber -= 1; + block = await fetchBlock(this, Quantity.from(blockNumber)); + } else { + block = latestBlock; + } options.blockNumber = parseInt(block.number, 16); - this.blockNumber = Quantity.from(options.blockNumber); + this.blockNumber = Quantity.from(blockNumber); this.stateRoot = Data.from(block.stateRoot); await this.#syncAccounts(this.blockNumber); return block; diff --git a/src/chains/ethereum/ethereum/tests/forking/account.test.ts b/src/chains/ethereum/ethereum/tests/forking/account.test.ts index 48fd561512..08e0ee9c1f 100644 --- a/src/chains/ethereum/ethereum/tests/forking/account.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/account.test.ts @@ -17,7 +17,8 @@ describe("forking", () => { provider = await getProvider({ fork: { url: URL, - blockNumber + blockNumber, + noCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/block.test.ts b/src/chains/ethereum/ethereum/tests/forking/block.test.ts index 9403ecc5a5..b0db24b0fd 100644 --- 
a/src/chains/ethereum/ethereum/tests/forking/block.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/block.test.ts @@ -18,7 +18,8 @@ describe("forking", function () { provider = await getProvider({ fork: { url: URL, - blockNumber + blockNumber, + noCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/helpers.ts b/src/chains/ethereum/ethereum/tests/forking/helpers.ts index b4123858c8..019a281944 100644 --- a/src/chains/ethereum/ethereum/tests/forking/helpers.ts +++ b/src/chains/ethereum/ethereum/tests/forking/helpers.ts @@ -54,7 +54,7 @@ export const startLocalChain = async ( ) => { const localProvider = await getProvider({ logging, - fork: { url: `ws://0.0.0.0:${port}`, ...options }, + fork: { blockAge: 0, url: `ws://0.0.0.0:${port}`, ...options }, wallet: { deterministic: true } }); return { diff --git a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts index fe06d20d68..521bd2d3eb 100644 --- a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts @@ -15,7 +15,8 @@ describe("forking", () => { provider = await getProvider({ fork: { url: URL, - blockNumber + blockNumber, + noCache: true } }); }); diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index c7090b68ec..f493e694c3 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -1,8 +1,9 @@ import { normalize } from "./helpers"; import { Definitions } from "@ganache/options"; import { $INLINE_JSON } from "ts-transformer-inline-file"; -import { Tag } from "@ganache/ethereum-utils"; +import { QUANTITY, Tag } from "@ganache/ethereum-utils"; import { URL } from "url"; +import { Quantity } from "@ganache/utils"; const { version } = $INLINE_JSON("../../../../packages/ganache/package.json"); // we aren't going to 
treat block numbers as a bigint, so we don't want to @@ -71,6 +72,17 @@ export type ForkConfig = { }; }; + /** + * Minimum age in seconds of the "latest" block. If the "latest" block is + * younger than this amount the block immediately preceding the latest block + * will be used instead. + */ + blockAge: { + type: bigint; + rawType: number; + hasDefault: true; + }; + /** * Username to use for Basic Authentication. Does not require setting `fork.password`. * @@ -293,6 +305,12 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` defaultDescription: `Latest block number - 5` //implies: ["url"] }, + blockAge: { + normalize: rawInput => BigInt(rawInput), + cliDescription: `Minimum age in seconds of the "latest" block. If the "latest" block is younger than this amount the block immediately preceding the latest block will be used instead.`, + default: () => 1n, + cliType: "number" + }, username: { normalize, cliDescription: `* Username to use for Basic Authentication. Does not require setting \`fork.password\`. 
From 968d648c6b31646c2d0e5240bd6c0741a62dd5e3 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 17:33:35 -0400 Subject: [PATCH 19/93] extend timeout --- src/chains/ethereum/ethereum/tests/forking/account.test.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/account.test.ts b/src/chains/ethereum/ethereum/tests/forking/account.test.ts index 08e0ee9c1f..8690d4bab3 100644 --- a/src/chains/ethereum/ethereum/tests/forking/account.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/account.test.ts @@ -3,7 +3,9 @@ import getProvider from "../helpers/getProvider"; import EthereumProvider from "../../src/provider"; import request from "superagent"; -describe("forking", () => { +describe("forking", function () { + this.timeout(10000); + describe("accounts", function () { const accountAddress = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"; const blockNumber = 0xb77935; From 702505142980873a97155dcfc193001b39fbc0a7 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 22:02:42 -0400 Subject: [PATCH 20/93] don't return the wrong data --- .../src/forking/persistent-cache/helpers.ts | 2 +- .../persistent-cache/persistent-cache.ts | 21 +++++++++++++------ 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts index 1905d2e284..1f150a1c2b 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts @@ -52,7 +52,7 @@ export function getBlockNumberFromParams(method: string, params: any[]) { case "eth_getStorageAt": return params[2]; default: - throw new Error(`Persistent cache does not support calls to "${method}.`); + return null; } } diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts 
b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts index d37841fd00..9b09b74ec3 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -320,8 +320,11 @@ export class PersistentCache { async get(method: string, params: any[], key: string) { const blockNumber = getBlockNumberFromParams(method, params); + if (blockNumber == null) return; + const height = Quantity.from(blockNumber); - const start = lexico.encode([height.toBuffer(), Buffer.from(key)]); + const bufKey = Buffer.from(key); + const start = lexico.encode([height.toBuffer(), bufKey]); const end = lexico.encode([ Quantity.from(height.toBigInt() + 1n).toBuffer() ]); @@ -333,22 +336,28 @@ export class PersistentCache { }); const hashBuf = this.hash.toBuffer(); for await (const data of readStream) { - const { key, value } = (data as any) as { key: Buffer; value: Buffer }; - const [_height, _key, blockHash] = lexico.decode(key); + const { key: k, value } = (data as any) as { key: Buffer; value: Buffer }; + const [_height, _key, blockHash] = lexico.decode(k); + // if our key no longer matches make sure we don't keep searching + if (!_key.equals(bufKey)) return; if (hashBuf.equals(blockHash) || (await this.ancestry.has(blockHash))) { return value; } } } - put(method: string, params: any[], key: string, value: Buffer) { - const height = Quantity.from(getBlockNumberFromParams(method, params)); + async put(method: string, params: any[], key: string, value: Buffer) { + const blockNumber = getBlockNumberFromParams(method, params); + if (blockNumber == null) return false; + + const height = Quantity.from(blockNumber); const dbKey = lexico.encode([ height.toBuffer(), Buffer.from(key), this.hash.toBuffer() ]); - return this.cacheDb.put(dbKey, value); + await this.cacheDb.put(dbKey, value); + return true; } private status: "closed" | "open" = "open"; From 
26b42f960eaeae0cbb18e9600e00a18f5f52dc7b Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 22:24:26 -0400 Subject: [PATCH 21/93] extend timeout even more --- src/chains/ethereum/ethereum/tests/forking/cache.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts index 1010f409ba..6c79a22fee 100644 --- a/src/chains/ethereum/ethereum/tests/forking/cache.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/cache.test.ts @@ -106,6 +106,6 @@ describe("forking", () => { endOnFailure: true } ); - }).timeout(20000); + }).timeout(30000); }); }); From c4e3d8b83b7fa06e2f04acf53df1f175e0206d68 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 22:25:06 -0400 Subject: [PATCH 22/93] extend timeout --- src/chains/ethereum/ethereum/tests/forking/transaction.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts index 521bd2d3eb..462babb665 100644 --- a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts @@ -45,6 +45,6 @@ describe("forking", () => { provider.send("eth_getTransactionByHash", [txHash]) ]); assert.deepStrictEqual(tx, originalTx); - }); + }).timeout(5000); }); }); From ccecf6de461afde0f0787848215b8f6d09c9dd56 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 23:04:03 -0400 Subject: [PATCH 23/93] extend all tests in forking->transactions --- .../ethereum/ethereum/tests/forking/transaction.test.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts index 462babb665..3f94e355be 100644 --- 
a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts @@ -4,7 +4,9 @@ import EthereumProvider from "../../src/provider"; import request from "superagent"; describe("forking", () => { - describe("transactions", () => { + describe("transactions", function () { + this.timeout(5000); + const blockNumber = 0xcb6169; const URL = "https://mainnet.infura.io/v3/" + process.env.INFURA_KEY; let provider: EthereumProvider; @@ -45,6 +47,6 @@ provider.send("eth_getTransactionByHash", [txHash]) ]); assert.deepStrictEqual(tx, originalTx); - }).timeout(5000); + }); }); }); From 5d11997e435ac613e9b4f1c043c5019267f5ee10 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 12 Oct 2021 23:07:43 -0400 Subject: [PATCH 24/93] add more info to a failing test --- .../ethereum/tests/api/eth/getBlockByNumber.test.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts index 94918de187..945ba3f045 100644 --- a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts +++ b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts @@ -51,7 +51,8 @@ describe("api", () => { block.totalDifficulty, `0x${((numberOfBlocksToMine + 1) * DEFAULT_DIFFICULTY).toString( 16 - )}` + )}`, + `Didn't correctly mine ${numberOfBlocksToMine} blocks` ); }); }); @@ -83,7 +84,8 @@ describe("api", () => { ]); assert.strictEqual( block.totalDifficulty, - `0x${((numberOfBlocksToMine + 1) * difficulty).toString(16)}` + `0x${((numberOfBlocksToMine + 1) * difficulty).toString(16)}`, + `Didn't correctly mine ${numberOfBlocksToMine} blocks` ); }); }); From 9e17e2428d8669022a1bd9e73851ac07fd6e645f Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 26 Oct 2021 07:57:58 -0400 Subject: [PATCH 25/93] fixes ---
.../ethereum/block/src/runtime-block.ts | 5 +- .../ethereum/ethereum/src/blockchain.ts | 8 +- src/chains/ethereum/ethereum/src/connector.ts | 84 ++++++++++++++- .../ethereum/src/forking/state-manager.ts | 14 +-- .../ethereum/ethereum/src/forking/trie.ts | 101 ++++++++++-------- .../ethereum/src/helpers/run-transactions.ts | 12 +-- 6 files changed, 158 insertions(+), 66 deletions(-) diff --git a/src/chains/ethereum/block/src/runtime-block.ts b/src/chains/ethereum/block/src/runtime-block.ts index d018b51b21..0816957c87 100644 --- a/src/chains/ethereum/block/src/runtime-block.ts +++ b/src/chains/ethereum/block/src/runtime-block.ts @@ -3,7 +3,8 @@ import { Quantity, BUFFER_EMPTY, BUFFER_32_ZERO, - BUFFER_8_ZERO + BUFFER_8_ZERO, + BUFFER_ZERO } from "@ganache/utils"; import { BN, KECCAK256_RLP_ARRAY } from "ethereumjs-util"; import { EthereumRawBlockHeader, serialize } from "./serialize"; @@ -128,7 +129,7 @@ export class RuntimeBlock { timestamp: new BnExtra(ts), baseFeePerGas: baseFeePerGas === undefined - ? undefined + ? new BnExtra(BUFFER_ZERO) : new BnExtra(Quantity.from(baseFeePerGas).toBuffer()) }; } diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts b/src/chains/ethereum/ethereum/src/blockchain.ts index 1a86100833..cbebde4a86 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -673,7 +673,7 @@ export default class Blockchain extends Emittery.Typed< // commit accounts, but for forking. 
const stateManager = this.vm.stateManager; - stateManager.checkpoint(); + await stateManager.checkpoint(); initialAccounts.forEach(acc => { const a = { buf: acc.address.toBuffer() } as any; (stateManager as any)._cache.put(a, acc); @@ -1173,7 +1173,7 @@ export default class Blockchain extends Emittery.Typed< } const structLog: StructLog = { - depth: event.depth, + depth: event.depth + 1, error: "", gas: gasLeft, gasCost: 0, @@ -1391,8 +1391,8 @@ export default class Blockchain extends Emittery.Typed< throw new Error("Unknown transaction " + transactionHash); } - const targetBlock = await this.blocks.get( - transaction.blockNumber.toBuffer() + const targetBlock = await this.blocks.getByHash( + transaction.blockHash.toBuffer() ); const parentBlock = await this.blocks.getByHash( targetBlock.header.parentHash.toBuffer() diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index 5dea86fcc6..e65b416fb5 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -35,6 +35,88 @@ function isHttp( ); } +function chunkify(val: any, nameOrIndex: string) { + if (Array.isArray(val)) { + const l = val.length; + if (l === 0) { + return Buffer.from("[]"); + } else { + const chunkified = chunkify(val[0], "0"); + // if the value ends up being nothing (undefined), return null + const bufs = [ + Buffer.from("["), + chunkified.length === 0 ? Buffer.from("null") : chunkified + ]; + if (l > 1) { + for (let i = 1; i < l; i++) { + const v = val[i]; + bufs.push(Buffer.from(",")); + const chunkified = chunkify(v, i.toString()); + // if the value ends up being nothing (undefined), return null + bufs.push(chunkified.length === 0 ? 
Buffer.from("null") : chunkified); + } + } + bufs.push(Buffer.from("]")); + return Buffer.concat(bufs); + } + } else if (Object.prototype.toString.call(val) === "[object Object]") { + if ("toJSON" in val) return chunkify(val.toJSON(nameOrIndex), "") as Buffer; + + const entries = Object.entries(val); + const l = entries.length; + if (l === 0) { + return Buffer.from("{}"); + } else { + const [key, value] = entries[0]; + let i = 0; + let bufs = [Buffer.from("{")]; + + // find the first non-null property to start the object + while (i < l) { + const chunkified = chunkify(value, key); + // if the chunkified value ends up being nothing (undefined) ignore + // the property + if (chunkified.length === 0) { + i++; + continue; + } + + bufs.push( + ...[Buffer.from(JSON.stringify(key)), Buffer.from(":"), chunkified] + ); + break; + } + if (l > 1) { + for (let i = 1; i < l; i++) { + const [key, value] = entries[i]; + const chunkified = chunkify(value, key); + // if the chunkified value ends up being nothing (undefined) ignore + // the property + if (chunkified.length === 0) continue; + + bufs.push( + ...[ + Buffer.from(","), + Buffer.from(JSON.stringify(key)), + Buffer.from(":"), + chunkified + ] + ); + } + } + bufs.push(Buffer.from("}")); + return Buffer.concat(bufs); + } + } else if (val === null) { + return Buffer.from("null"); + } else if (val === undefined) { + // nothing is returned for undefined + return Buffer.allocUnsafe(0); + } else { + return Buffer.from(JSON.stringify(val)); + } +} + export class Connector< R extends JsonRpcRequest< EthereumApi, @@ -115,7 +197,7 @@ export class Connector< ); } else { const json = makeResponse(payload.id, results); - return JSON.stringify(json); + return chunkify(json, ""); } } diff --git a/src/chains/ethereum/ethereum/src/forking/state-manager.ts b/src/chains/ethereum/ethereum/src/forking/state-manager.ts index d5178a0e83..abde45e5dd 100644 --- a/src/chains/ethereum/ethereum/src/forking/state-manager.ts +++ 
b/src/chains/ethereum/ethereum/src/forking/state-manager.ts @@ -5,7 +5,6 @@ import AccountManager from "../data-managers/account-manager"; import { ForkCache } from "./cache"; import Common from "@ethereumjs/common"; import { ForkTrie } from "./trie"; -import { SecureTrie as Trie } from "merkle-patricia-tree"; /** * Options for constructing a [[StateManager]]. @@ -69,13 +68,14 @@ export class ForkStateManager extends StateManager { } /** - * Gets the storage value associated with the provided `address` and `key`. This method returns - * the shortest representation of the stored value. - * @param address - Address of the account to get the storage for - * @param key - Key in the account's storage to get the value for. Must be 32 bytes long. + * Gets the storage value associated with the provided `address` and `key`. + * This method returns the shortest representation of the stored value. + * @param address - Address of the account to get the storage for + * @param key - Key in the account's storage to get the value for. Must be 32 + * bytes long. * @returns {Promise} - The storage value for the account - * corresponding to the provided address at the provided key. - * If this does not exist an empty `Buffer` is returned. + * corresponding to the provided address at the provided key. If this does not + * exist an empty `Buffer` is returned. 
*/ async getContractStorage(address: EJS_Address, key: Buffer): Promise { const trie = (await this._getStorageTrie(address)) as ForkTrie; diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index e06ff84aa6..7f7735923b 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -2,7 +2,7 @@ import { Address } from "@ganache/ethereum-address"; import { keccak, BUFFER_EMPTY, - BUFFER_ZERO, + RPCQUANTITY_ONE, RPCQUANTITY_EMPTY, Quantity, Data @@ -23,53 +23,20 @@ const GET_NONCE = "eth_getTransactionCount"; const GET_BALANCE = "eth_getBalance"; const GET_STORAGE_AT = "eth_getStorageAt"; -const MetadataSingletons = new WeakMap(); +const MetadataSingletons = new WeakMap(); const LEVELDOWN_OPTIONS = { keyEncoding: "binary", valueEncoding: "binary" }; -/** - * Commits a checkpoint to disk, if current checkpoint is not nested. - * If nested, only sets the parent checkpoint as current checkpoint. 
- * @throws If not during a checkpoint phase - */ -async function commit(this: CheckpointDB) { - const { keyValueMap } = this.checkpoints.pop(); - if (!this.isCheckpoint) { - // This was the final checkpoint, we should now commit and flush everything to disk - const batchOp = []; - keyValueMap.forEach(function (value, key) { - if (value === null) { - batchOp.push({ - type: "del", - key: Buffer.from(key, "binary") - }); - } else { - batchOp.push({ - type: "put", - key: Buffer.from(key, "binary"), - value - }); - } - }); - await this.batch(batchOp); - } else { - // dump everything into the current (higher level) cache - const currentKeyValueMap = this.checkpoints[this.checkpoints.length - 1] - .keyValueMap; - keyValueMap.forEach((value, key) => currentKeyValueMap.set(key, value)); - } -} export class ForkTrie extends GanacheTrie { private accounts: AccountManager; private address: Buffer | null = null; - public blockNumber: Quantity | null = null; - private metadata: LevelUp; + public blockNumber: Quantity; + private metadata: CheckpointDB; constructor(db: LevelUp | null, root: Buffer, blockchain: Blockchain) { super(db, root, blockchain); - this.db.commit = commit.bind(this.db); this.accounts = blockchain.accounts; this.blockNumber = this.blockchain.fallback.blockNumber; @@ -77,7 +44,7 @@ export class ForkTrie extends GanacheTrie { if (MetadataSingletons.has(db)) { this.metadata = MetadataSingletons.get(db); } else { - this.metadata = sub(db, "f", LEVELDOWN_OPTIONS); + this.metadata = new CheckpointDB(sub(db, "f", LEVELDOWN_OPTIONS)); MetadataSingletons.set(db, this.metadata); } } @@ -90,6 +57,17 @@ export class ForkTrie extends GanacheTrie { return (this as any)._root; } + checkpoint() { + super.checkpoint(); + this.metadata.checkpoint(this.root); + } + async commit() { + await Promise.all([super.commit(), this.metadata.commit()]); + } + async revert() { + await Promise.all([super.revert(), this.metadata.revert()]); + } + setContext(stateRoot: Buffer, address: 
Buffer, blockNumber: Quantity) { (this as any)._root = stateRoot; this.address = address; @@ -105,18 +83,47 @@ export class ForkTrie extends GanacheTrie { return lexico.encode([blockNum, this.address, key]); } + /** + * Checks if the key was deleted (locally -- not on the fork) + * @param key + */ private async keyWasDeleted(key: Buffer) { + const selfAddress = this.address === null ? BUFFER_EMPTY : this.address; + // check the uncommitted checkpoints for deleted keys before + // checking the database itself + // TODO(perf): there is probably a better/faster way of doing this for the + // common case. + const checkpoints = this.metadata.checkpoints; + for (let i = checkpoints.length - 1; i >= 0; i--) { + for (let [data, value] of checkpoints[i].keyValueMap.entries()) { + if (!value || value[0] !== 1) { + continue; + } + + const delKey = lexico.decode(Buffer.from(data, "binary")); + //const blockNumber = delKey[0]; + const address = delKey[1]; + const deletedKey = delKey[2]; + if (address.equals(selfAddress) && deletedKey.equals(key)) { + return true; + } + } + } + return new Promise((resolve, reject) => { - const selfAddress = this.address === null ? 
BUFFER_EMPTY : this.address; let wasDeleted = false; - const stream = this.metadata - .createKeyStream({ + const stream = this.metadata._leveldb + .createReadStream({ lte: this.createDelKey(key), reverse: true }) .on("data", data => { - const delKey = lexico.decode(data); - // const blockNumber = delKey[0]; + const { key, value } = data; + if (!value || value[0] !== 1) { + return; + } + const delKey = lexico.decode(key); + //const blockNumber = delKey[0]; const address = delKey[1]; const deletedKey = delKey[2]; if (address.equals(selfAddress) && deletedKey.equals(key)) { @@ -134,7 +141,11 @@ export class ForkTrie extends GanacheTrie { const hash = keccak(key); const delKey = this.createDelKey(key); - const metaDataPutPromise = this.metadata.put(delKey, BUFFER_ZERO); + + const metaDataPutPromise = this.metadata.put( + delKey, + RPCQUANTITY_ONE.toBuffer() + ); const { node, stack } = await this.findPath(hash); @@ -228,6 +239,8 @@ export class ForkTrie extends GanacheTrie { if (value != null) { return value; } + // since we don't have this key in our local trie check if we've have + // deleted it (locally) if (await this.keyWasDeleted(key)) { return null; } diff --git a/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts b/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts index a9d8864c1c..16250d9b2e 100644 --- a/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts +++ b/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts @@ -17,13 +17,9 @@ export async function runTransactions( block: RuntimeBlock ) { for (let i = 0, l = transactions.length; i < l; i++) { - await vm - .runTx({ - tx: transactions[i] as any, - block: block as any - }) - // we ignore transactions that error because we just want to _run_ these, - // transactions just to update the blockchain's state - .catch(() => {}); + await vm.runTx({ + tx: transactions[i] as any, + block: block as any + }); } } From 8c478531928ff169abeef7d9e7ed0c71e8bf25e7 Mon Sep 17 00:00:00 2001 
From: David Murdoch Date: Tue, 26 Oct 2021 18:54:22 -0400 Subject: [PATCH 26/93] fix fork deletions --- .../ethereum/ethereum/src/blockchain.ts | 17 ++- .../ethereum/ethereum/src/forking/cache.ts | 2 +- .../ethereum/ethereum/src/forking/trie.ts | 124 +++++++++--------- 3 files changed, 79 insertions(+), 64 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts b/src/chains/ethereum/ethereum/src/blockchain.ts index cbebde4a86..a7e21c6022 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -798,8 +798,16 @@ export default class Blockchain extends Emittery.Typed< return (this.#timeAdjustment = timestamp - Date.now()); } - #deleteBlockData = (blocksToDelete: Block[]) => { - return this.#database.batch(() => { + #deleteBlockData = async (blocksToDelete: Block[]) => { + // if we are forking we need to make sure we clean up the forking related + // metadata that isn't stored in the trie + if ("revertMetaData" in this.trie) { + await (this.trie as ForkTrie).revertMetaData( + blocksToDelete[blocksToDelete.length - 1].header.number, + blocksToDelete[0].header.number + ); + } + await this.#database.batch(() => { const { blocks, transactions, transactionReceipts, blockLogs } = this; blocksToDelete.forEach(block => { block.getTransactions().forEach(tx => { @@ -908,7 +916,7 @@ export default class Blockchain extends Emittery.Typed< if (!currentHash.equals(snapshotHash)) { // if we've added blocks since we snapshotted we need to delete them and put // some things back the way they were. 
- const blockPromises = []; + const blockPromises: Promise[] = []; let blockList = snapshots.blocks; while (blockList !== null) { if (blockList.current.equals(snapshotHash)) break; @@ -917,7 +925,8 @@ export default class Blockchain extends Emittery.Typed< } snapshots.blocks = blockList; - await Promise.all(blockPromises).then(this.#deleteBlockData); + const blockData = await Promise.all(blockPromises); + await this.#deleteBlockData(blockData); setStateRootSync( this.vm.stateManager, diff --git a/src/chains/ethereum/ethereum/src/forking/cache.ts b/src/chains/ethereum/ethereum/src/forking/cache.ts index 51e6a005e1..2ca3da22a9 100644 --- a/src/chains/ethereum/ethereum/src/forking/cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/cache.ts @@ -14,6 +14,6 @@ export class ForkCache extends Cache { */ _lookupAccount = async (address: EJS_Address) => { const rlp = await (this._trie as ForkTrie).get(address.buf); - return Account.fromRlpSerializedAccount(rlp!); + return rlp ? Account.fromRlpSerializedAccount(rlp) : new Account(); }; } diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 7f7735923b..a7d62c446a 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -2,7 +2,6 @@ import { Address } from "@ganache/ethereum-address"; import { keccak, BUFFER_EMPTY, - RPCQUANTITY_ONE, RPCQUANTITY_EMPTY, Quantity, Data @@ -17,18 +16,25 @@ import * as lexico from "./lexicographic-key-codec"; import { encode } from "@ganache/rlp"; import { Account } from "@ganache/ethereum-utils"; import { KECCAK256_NULL } from "ethereumjs-util"; +type KVP = { key: Buffer; value: Buffer }; +const DELETED_VALUE = Buffer.allocUnsafe(1).fill(1); const GET_CODE = "eth_getCode"; const GET_NONCE = "eth_getTransactionCount"; const GET_BALANCE = "eth_getBalance"; const GET_STORAGE_AT = "eth_getStorageAt"; -const MetadataSingletons = new WeakMap(); const LEVELDOWN_OPTIONS = { 
keyEncoding: "binary", valueEncoding: "binary" }; +function isEqualKey(encodedKey: Buffer, address: Buffer, key: Buffer) { + const decodedKey = lexico.decode(encodedKey); + const [, keyAddress, deletedKey] = decodedKey; + return keyAddress.equals(address) && deletedKey.equals(key); +} + export class ForkTrie extends GanacheTrie { private accounts: AccountManager; private address: Buffer | null = null; @@ -40,13 +46,7 @@ export class ForkTrie extends GanacheTrie { this.accounts = blockchain.accounts; this.blockNumber = this.blockchain.fallback.blockNumber; - - if (MetadataSingletons.has(db)) { - this.metadata = MetadataSingletons.get(db); - } else { - this.metadata = new CheckpointDB(sub(db, "f", LEVELDOWN_OPTIONS)); - MetadataSingletons.set(db, this.metadata); - } + this.metadata = new CheckpointDB(sub(db, "f", LEVELDOWN_OPTIONS)); } set root(value: Buffer) { @@ -78,6 +78,27 @@ export class ForkTrie extends GanacheTrie { return super.put(key, val); } + /** + * Removes saved metadata from the given block range (inclusive) + * @param startBlockNumber (inclusive) + * @param endBlockNumber (inclusive) + */ + public async revertMetaData( + startBlockNumber: Quantity, + endBlockNumber: Quantity + ) { + const db = this.metadata._leveldb; + const stream = db.createKeyStream({ + gte: lexico.encode([startBlockNumber.toBuffer()]), + lt: lexico.encode([ + Quantity.from(endBlockNumber.toBigInt() + 1n).toBuffer() + ]) + }); + const batch = db.batch(); + for await (const key of stream) batch.del(key); + await batch.write(); + } + private createDelKey(key: Buffer) { const blockNum = this.blockNumber.toBuffer(); return lexico.encode([blockNum, this.address, key]); @@ -95,62 +116,46 @@ export class ForkTrie extends GanacheTrie { // common case. 
const checkpoints = this.metadata.checkpoints; for (let i = checkpoints.length - 1; i >= 0; i--) { - for (let [data, value] of checkpoints[i].keyValueMap.entries()) { - if (!value || value[0] !== 1) { - continue; - } - - const delKey = lexico.decode(Buffer.from(data, "binary")); - //const blockNumber = delKey[0]; - const address = delKey[1]; - const deletedKey = delKey[2]; - if (address.equals(selfAddress) && deletedKey.equals(key)) { - return true; - } + for (let [encodedKeyStr, value] of checkpoints[i].keyValueMap.entries()) { + if (!value || !value.equals(DELETED_VALUE)) continue; + const encodedKey = Buffer.from(encodedKeyStr, "binary"); + if (isEqualKey(encodedKey, selfAddress, key)) return true; } } - return new Promise((resolve, reject) => { - let wasDeleted = false; - const stream = this.metadata._leveldb - .createReadStream({ - lte: this.createDelKey(key), - reverse: true - }) - .on("data", data => { - const { key, value } = data; - if (!value || value[0] !== 1) { - return; - } - const delKey = lexico.decode(key); - //const blockNumber = delKey[0]; - const address = delKey[1]; - const deletedKey = delKey[2]; - if (address.equals(selfAddress) && deletedKey.equals(key)) { - wasDeleted = true; - (stream as any).destroy(); - } - }) - .on("close", () => resolve(wasDeleted)) - .on("error", reject); + // since we didn't find proof of deletion in a checkpoint let's check the + // database for it. + // We start searching from our database key (blockNum + address + key) + // down to the earliest block we know about. + // TODO(perf): this is just going to be slow once we get lots of keys + // because it just checks every single key we've ever deleted (before this + // one). 
+ const stream = this.metadata._leveldb.createReadStream({ + lte: this.createDelKey(key), + reverse: true }); + for await (const data of stream) { + const { key: encodedKey, value } = (data as unknown) as KVP; + if (!value || !value.equals(DELETED_VALUE)) continue; + if (isEqualKey(encodedKey, selfAddress, key)) return true; + } + + // we didn't find proof of deletion so we return `false` + return false; } async del(key: Buffer) { await this.lock.wait(); - const hash = keccak(key); const delKey = this.createDelKey(key); + const metaDataPutPromise = this.metadata.put(delKey, DELETED_VALUE); - const metaDataPutPromise = this.metadata.put( - delKey, - RPCQUANTITY_ONE.toBuffer() - ); - + const hash = keccak(key); const { node, stack } = await this.findPath(hash); - if (node) await this._deleteNode(hash, stack); + await metaDataPutPromise; + this.lock.signal(); } @@ -236,14 +241,11 @@ export class ForkTrie extends GanacheTrie { async get(key: Buffer): Promise { const value = await super.get(key); - if (value != null) { - return value; - } + if (value != null) return value; + // since we don't have this key in our local trie check if we've have // deleted it (locally) - if (await this.keyWasDeleted(key)) { - return null; - } + if (await this.keyWasDeleted(key)) return null; if (this.address === null) { // if the trie context's address isn't set, our key represents an address: @@ -258,12 +260,16 @@ export class ForkTrie extends GanacheTrie { * Returns a copy of the underlying trie with the interface of ForkTrie. * @param includeCheckpoints - If true and during a checkpoint, the copy will contain the checkpointing metadata and will use the same scratch as underlying db. 
*/ - copy() { - const db = this.db.copy(); + copy(includeCheckpoints: boolean = true) { + const db = this.db.copy() as CheckpointDB; const secureTrie = new ForkTrie(db._leveldb, this.root, this.blockchain); secureTrie.accounts = this.accounts; secureTrie.address = this.address; secureTrie.blockNumber = this.blockNumber; + if (includeCheckpoints && this.isCheckpoint) { + db.checkpoints = [...this.db.checkpoints]; + secureTrie.metadata.checkpoints = this.metadata.checkpoints.slice(0); + } return secureTrie; } } From 8cdeeaa56fa406af5a4fd629b9b67bc21ea828c0 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 27 Oct 2021 17:47:28 -0400 Subject: [PATCH 27/93] refactor bufferify and add tests --- src/chains/ethereum/ethereum/src/connector.ts | 106 +++-------- .../ethereum/src/helpers/bufferify.ts | 173 ++++++++++++++++++ .../ethereum/ethereum/tests/connector.test.ts | 66 +++++++ .../ethereum/tests/forking/forking.test.ts | 169 ++++++++++++++--- 4 files changed, 406 insertions(+), 108 deletions(-) create mode 100644 src/chains/ethereum/ethereum/src/helpers/bufferify.ts create mode 100644 src/chains/ethereum/ethereum/tests/connector.test.ts diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index e65b416fb5..ab42a584e0 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -21,11 +21,14 @@ import { EthereumProviderOptions, EthereumLegacyProviderOptions } from "@ganache/ethereum-options"; +import { bufferify } from "./helpers/bufferify"; type ProviderOptions = EthereumProviderOptions | EthereumLegacyProviderOptions; export type Provider = EthereumProvider; export const Provider = EthereumProvider; +const BUFFERIFY_THRESHOLD = 100000; + function isHttp( connection: HttpRequest | WebSocket ): connection is HttpRequest { @@ -35,88 +38,6 @@ function isHttp( ); } -function chunkify(val: any, nameOrIndex: string) { - if (Array.isArray(val)) { - const l = 
val.length; - if (l === 0) { - return Buffer.from("[]"); - } else { - const chunkified = chunkify(val[0], "0"); - // if the value ends up being nothing (undefined), return null - const bufs = [ - Buffer.from("["), - chunkified.length === 0 ? Buffer.from("null") : chunkified - ]; - if (l > 1) { - for (let i = 1; i < l; i++) { - const v = val[i]; - bufs.push(Buffer.from(",")); - const chunkified = chunkify(v, i.toString()); - // if the value ends up being nothing (undefined), return null - bufs.push(chunkified.length === 0 ? Buffer.from("null") : chunkified); - } - } - bufs.push(Buffer.from("]")); - return Buffer.concat(bufs); - } - } else if (Object.prototype.toString.call(val) === "[object Object]") { - if ("toJSON" in val) return chunkify(val.toJSON(nameOrIndex), "") as Buffer; - - const entries = Object.entries(val); - const l = entries.length; - if (l === 0) { - return Buffer.from("{}"); - } else { - const [key, value] = entries[0]; - let i = 0; - let bufs = [Buffer.from("{")]; - - // find the first non-null property to start the object - while (i < l) { - const chunkified = chunkify(value, key); - // if the chunkified value ends up being nothing (undefined) ignore - // the property - if (chunkified.length === 0) { - i++; - continue; - } - - bufs.push( - ...[Buffer.from(JSON.stringify(key)), Buffer.from(":"), chunkified] - ); - break; - } - if (l > 1) { - for (let i = 1; i < l; i++) { - const [key, value] = entries[i]; - const chunkified = chunkify(value, key); - // if the chunkified value ends up being nothing (undefined) ignore - // the property - if (chunkified.length === 0) continue; - - bufs.push( - ...[ - Buffer.from(","), - Buffer.from(JSON.stringify(key)), - Buffer.from(":"), - chunkified - ] - ); - } - } - bufs.push(Buffer.from("}")); - return Buffer.concat(bufs); - } - } else if (val === null) { - return Buffer.from("null"); - } else if (val === undefined) { - // nothing is returned for undefined - return Buffer.allocUnsafe(0); - } else { - return 
Buffer.from(JSON.stringify(val)); - } -} - export class Connector< R extends JsonRpcRequest< EthereumApi, @@ -137,6 +58,8 @@ export class Connector< this.#provider = new EthereumProvider(providerOptions, executor); } + public BUFFERIFY_THRESHOLD = BUFFERIFY_THRESHOLD; + async connect() { await this.#provider.initialize(); // no need to wait for #provider.once("connect") as the initialize() @@ -191,13 +114,28 @@ export class Connector< if (result instanceof Error) { return makeError(payload.id, result as any); } else { - return makeResponse(payload.id, result); + return this.format(result, payload); } }) ); } else { const json = makeResponse(payload.id, results); - return chunkify(json, ""); + if ( + payload.method === "debug_traceTransaction" && + // for "large" debug_traceTransaction results convert directly to + // a Buffer instead of JSON.stringify so we don't hit V8's maximum + // string length limit of 1GB. We don't do this for everything + // because the bufferfication is so very very slow + // TODO(perf): an even better way of solving this would be to convert + // `debug_traceTransaction` to a generator that yields chunks (of + // Buffer) as soon as they're available. We could then `write` these + // individual chunks immediately. 
+ results.structLogs.length > this.BUFFERIFY_THRESHOLD + ) { + return bufferify(json, ""); + } else { + return JSON.stringify(json); + } } } diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts new file mode 100644 index 0000000000..11f6ae51a4 --- /dev/null +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -0,0 +1,173 @@ +const PARTS = Buffer.from('[]{},"":null'); +const SQUARE_BRACKET_PAIR = PARTS.slice(0, 2); +const SQUARE_BRACKET_OPEN = SQUARE_BRACKET_PAIR.slice(0, 1); +const SQUARE_BRACKET_CLOSE = SQUARE_BRACKET_PAIR.slice(1, 2); +const CURLY_BRACKET_PAIR = PARTS.slice(2, 4); +const CURLY_BRACKET_OPEN = CURLY_BRACKET_PAIR.slice(0, 1); +const CURLY_BRACKET_CLOSE = CURLY_BRACKET_PAIR.slice(1, 2); +const COMMA_QUOTE = PARTS.slice(4, 6); +const COMMA = COMMA_QUOTE.slice(0, 1); +const QUOTE_PAIR = PARTS.slice(5, 7); +const QUOTE_COLON = PARTS.slice(6, 8); +const COLON = QUOTE_COLON.slice(1, 2); +const NULL = PARTS.slice(8, 12); +const _EMPTY = PARTS.slice(0, 0); + +const toStr = Object.prototype.toString; +const isObj = (val: any) => toStr.call(val) === "[object Object]"; + +function numberToBuffer(value: number) { + const str = value.toString(); + const l = str.length; + if (l > 0) { + const buf = Buffer.allocUnsafe(l); + (buf as any).utf8Write(str, 0, l); + return buf; + } else { + return _EMPTY; + } +} + +function stringToQuotedBuffer(value: string) { + const length = value.length; + if (length > 0) { + const l = length + 2; + const buf = Buffer.allocUnsafe(l); + buf[0] = 34; // QUOTE + buf[length + 1] = 34; // QUOTE + (buf as any).utf8Write(value, 1, length); + return buf; + } else { + return QUOTE_PAIR; + } +} +function arrayToBuffer(value: any[]) { + const l = value.length; + if (l === 0) { + return SQUARE_BRACKET_PAIR; + } else { + const chunkified = bufferify(value[0], "0"); + const jsonVal = chunkified.length === 0 ? 
NULL : chunkified; + // if the value ends up being nothing (undefined), return null + const bufs = [SQUARE_BRACKET_OPEN, jsonVal]; + let length = 2 + jsonVal.length; // 2 = `[` and `]` + if (l > 1) { + for (let i = 1; i < l; i++) { + const chunkified = bufferify(value[i], i.toString()); + const chunkLength = chunkified.length; + if (chunkLength === 0) { + // if the value ends up being nothing (undefined), return null + bufs.push(COMMA, NULL); + length += 5; + } else { + bufs.push(COMMA, chunkified); + length += chunkLength + 1; + } + } + } + bufs.push(SQUARE_BRACKET_CLOSE); + return Buffer.concat(bufs, length); + } +} +function bufferToQuotedBuffer(value: Buffer) { + const length = value.length; + const buf = Buffer.allocUnsafe(length + 2); + buf[0] = 34; + value.copy(buf, 1, 0, length); + buf[length + 1] = 34; + return buf; +} + +function objectToBuffer(value: any, nameOrIndex: string) { + if ("toJSON" in value) + return bufferify(value.toJSON(nameOrIndex), nameOrIndex) as Buffer; + + const entries = Object.entries(value); + const l = entries.length; + if (l === 0) { + return CURLY_BRACKET_PAIR; + } else { + let i = 0; + let length = 2; // 2 == `{` and `}` + const bufs = [CURLY_BRACKET_OPEN]; + + // find the first non-null property to start the object + while (i < l) { + const [key, value] = entries[i]; + i++; + const chunkified = bufferify(value, key); + // if the chunkified value ends up being nothing (undefined) ignore + // the property + const chunkLength = chunkified.length; + if (chunkLength === 0) { + continue; + } + + bufs.push(stringToQuotedBuffer(key), COLON, chunkified); + length += key.length + 2 + 1 + chunkLength; + break; + } + if (l > 1) { + for (; i < l; i++) { + const [key, value] = entries[i]; + const chunkified = bufferify(value, key); + // if the chunkified value ends up being nothing (undefined) ignore + // the property + const chunkLength = chunkified.length; + if (chunkLength === 0) continue; + + bufs.push(COMMA, stringToQuotedBuffer(key), 
COLON, chunkified); + length += 2 + key.length + 2 + chunkLength; + } + } + bufs.push(CURLY_BRACKET_CLOSE); + return Buffer.concat(bufs, length); + } +} + +/** + * Converts a JavaScript value to a JavaScript Object Notation (JSON) Buffer + * (utf-8 encoded). + * + * This is a hack. It: + * * Does not support circular references. + * * Does not support double quotes within Object keys; just stick with ascii + * * Probably doesn't support non-ASCII characters + * * Is only tested on transaction traces + * + * Only useful if the `JSON.stringify`ed version would create a string larger + * than what the JavaScript engine can handle. + * + * What is the maximum string size in Node/V8? It depends on the version! Some + * versions are 256MB, some are ~1GB, and others are ~0.5GB. + * See: https://stackoverflow.com/a/47781288/160173 + * + * CAUTION: This method is approx 3 - 20 times slower than using: + * `Buffer.from(JSON.stringify(value), "utf-8")` + * + * @param value A JavaScript value, usually an object or array, to be converted. + * @param nameOrIndex JSON.stringify calls an object's toJSON method, and this + * property is used by internal recursive calls to bufferify. 
+ * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#tojson_behavior + */ +export function bufferify(value: any, nameOrIndex: string) { + const type = typeof value; + if (type === "number" || type === "boolean") { + return numberToBuffer(value); + } else if (type === "string") { + return stringToQuotedBuffer(value); + } else if (Buffer.isBuffer(value)) { + return bufferToQuotedBuffer(value); + } else if (Array.isArray(value)) { + return arrayToBuffer(value); + } else if (isObj(value)) { + return objectToBuffer(value, nameOrIndex); + } else if (value === null) { + return NULL; + } else if (type === "undefined") { + // nothing is returned for undefined + return _EMPTY; + } else { + throw new Error("unsupported value in bufferify"); + } +} diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts new file mode 100644 index 0000000000..17b34c53c5 --- /dev/null +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -0,0 +1,66 @@ +import assert from "assert"; +import { Executor, RequestCoordinator } from "@ganache/utils"; +import { Connector } from "../"; + +describe("connector", () => { + const primitives = { + string: "string", + empty: "empty", + one: 1, + zero: 1, + true: true, + false: false, + null: null, + undefined: undefined + }; + const json = { + ...primitives, + // `structLogs` triggers an optimization in the connector + structLogs: [{ ...primitives }, ...Object.values(primitives)], + emptyArray: [], + object: { + ...primitives, + emptyObject: {}, + nested: { ...primitives }, + array: [{ ...primitives }, ...Object.values(primitives)] + }, + emptyObject: {} + }; + let connector: Connector; + // an arbitrary payload + // `debug_traceTransaction` is triggers an optimization in the connector + const payload = { + jsonrpc: "2.0", + method: "debug_traceTransaction", + id: 1, + params: [] // params don't matter + }; + const expected = JSON.parse( 
+ JSON.stringify({ + jsonrpc: payload.jsonrpc, + id: payload.id, + result: json + }) + ); + beforeEach(async () => { + const requestCoordinator = new RequestCoordinator(0); + const executor = new Executor(requestCoordinator); + connector = new Connector({}, executor); + await connector.connect(); + }); + it("formats results as a string as expected", async () => { + const strResult = connector.format(json, payload) as string; + assert.strictEqual(typeof strResult, "string"); + const result = JSON.parse(strResult); + assert.deepStrictEqual(result, expected); + }); + it("formats results as a Buffer as expected", async () => { + // trigger the buffering optimization without having to actually parse + // that much data + connector.BUFFERIFY_THRESHOLD = 1; + const bufResult = connector.format(json, payload) as string; + assert(Buffer.isBuffer(bufResult)); + const result = JSON.parse(bufResult.toString("utf-8")); + assert.deepStrictEqual(result, expected); + }); +}); diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index f785e817bf..94b7e294a9 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -478,9 +478,35 @@ describe("forking", function () { ]); } + function set(provider: EthereumProvider, key: number, value: number) { + const encodedKey = Quantity.from(key) + .toBuffer() + .toString("hex") + .padStart(64, "0"); + const encodedValue = Quantity.from(value) + .toBuffer() + .toString("hex") + .padStart(64, "0"); + + return provider.send("eth_sendTransaction", [ + { + from: remoteAccounts[0], + to: contractAddress, + data: `0x${ + methods[`setValueFor(uint8,uint256)`] + }${encodedKey}${encodedValue}`, + gas: `0x${(3141592).toString(16)}` + } + ]); + } + + async function getBlockNumber(provider: EthereumProvider) { + return parseInt(await provider.send("eth_blockNumber", []), 16); + } + async function 
getBlockRanges(provider: EthereumProvider) { // our local chain starts at `localBlockNumberStart`. - const blockNum = parseInt(await provider.send("eth_blockNumber", []), 16); + const blockNum = await getBlockNumber(provider); assert.strictEqual( contractBlockNum, 1, @@ -683,32 +709,14 @@ describe("forking", function () { blockNumbersWithoutCode } = await getBlockRanges(localProvider); - function set(key: number, value: number) { - const encodedKey = Quantity.from(key) - .toBuffer() - .toString("hex") - .padStart(64, "0"); - const encodedValue = Quantity.from(value) - .toBuffer() - .toString("hex") - .padStart(64, "0"); - - return localProvider.send("eth_sendTransaction", [ - { - from: remoteAccounts[0], - to: contractAddress, - data: `0x${ - methods[`setValueFor(uint8,uint256)`] - }${encodedKey}${encodedValue}`, - gas: `0x${(3141592).toString(16)}` - } - ]); + function _set(key: number, value: number) { + return set(localProvider, key, value); } const _get = (value: string, blockNum: number) => get(localProvider, value, blockNum); - await setAllValuesTo(localProvider, 9, set); + await setAllValuesTo(localProvider, 9, _set); const postNineBlockNum = parseInt( await localProvider.send("eth_blockNumber", []), @@ -727,7 +735,7 @@ describe("forking", function () { await checkRangeForValue(blockNumsAfterNine, nine, _get); // set all values to 0 (the EVM treats this as a "delete") - await setAllValuesTo(localProvider, 0, set); + await setAllValuesTo(localProvider, 0, _set); const postZeroBlockNum = parseInt( await localProvider.send("eth_blockNumber", []), @@ -750,7 +758,7 @@ describe("forking", function () { await checkRangeForValue(blockNumsAfterZero, zero, _get); // set all values to 11 - await setAllValuesTo(localProvider, 11, set); + await setAllValuesTo(localProvider, 11, _set); const postElevenBlockNum = parseInt( await localProvider.send("eth_blockNumber", []), @@ -778,6 +786,119 @@ describe("forking", function () { 
"0x000000000000000000000000000000000000000000000000000000000000000b"; await checkRangeForValue(blockNumsAfterEleven, eleven, _get); }); + + describe("snapshot/revert", () => { + async function testPermutations( + localProvider: EthereumProvider, + initialValue: number, + snapshotValues: number[] + ) { + for await (const snapshotValue of snapshotValues) { + // set value0 to {snapshotValue} + await set(localProvider, 0, snapshotValue); + const message = await localProvider.once("message"); + const blockNumber = parseInt(message.data.result.number, 16); + const checkValue = await get(localProvider, "value0", blockNumber); + assert.strictEqual( + Quantity.from(checkValue).toNumber(), + snapshotValue, + `Value after snapshot not as expected. Conditions: ${initialValue}, ${JSON.stringify( + snapshotValues + )}. snapshotValue: ${snapshotValue}` + ); //sanity check + } + } + async function initializeSnapshotSetRevertThenTest( + initialValue: number, + snapshotValues: number[] + ) { + const { localProvider } = await startLocalChain(PORT, { + noCache: true + }); + const subId = await localProvider.send("eth_subscribe", ["newHeads"]); + + // set value0 to {initialValue} (delete it) + await set(localProvider, 0, initialValue); + const message = await localProvider.once("message"); + const initialBlockNumber = parseInt(message.data.result.number, 16); + assert.strictEqual( + Quantity.from( + await get(localProvider, "value0", initialBlockNumber) + ).toNumber(), + initialValue + ); // sanity check + + const snapId = await localProvider.send("evm_snapshot"); + await testPermutations(localProvider, initialValue, snapshotValues); + await localProvider.send("evm_revert", [snapId]); + + assert.strictEqual( + initialBlockNumber, + await getBlockNumber(localProvider) + ); // sanity check + + assert.strictEqual( + Quantity.from( + await get(localProvider, "value0", initialBlockNumber) + ).toNumber(), + initialValue, + "value was not reverted to `initialValue` after evm_revert" + ); + + 
// Finally, check all permutations outside of the snapshot/revert to + // make sure deleted state was properly reverted + await testPermutations(localProvider, initialValue, snapshotValues); + + await localProvider.send("eth_unsubscribe", [subId]); + } + + const initialValues = [0, 1]; + // test all permutations of values: 0, 1, 2 + const permutations = [ + [0], + [1], + [2], + [0, 1], + [0, 2], + [1, 0], + [1, 2], + [2, 0], + [2, 1], + [0, 1, 2], + [0, 2, 1], + [1, 0, 2], + [1, 2, 0], + [2, 0, 1], + [2, 1, 0] + ]; + for (const remoteInitialValue of initialValues) { + for (const initialValue of initialValues) { + for (const permutation of permutations) { + it(`should revert to previous value after snapshot/{change}/revert, fork value: ${remoteInitialValue}, initialValue, ${initialValue}, permutation: ${JSON.stringify( + permutation + )}`, async () => { + const subId = await remoteProvider.send("eth_subscribe", [ + "newHeads" + ]); + // set the remoteProvider's initialValue to {remoteInitialValue} + await set(remoteProvider, 0, remoteInitialValue); + const message = await remoteProvider.once("message"); + await remoteProvider.send("eth_unsubscribe", [subId]); + const blockNumber = parseInt(message.data.result.number, 16); + assert.strictEqual( + parseInt(await get(remoteProvider, "value0", blockNumber), 16), + remoteInitialValue + ); // sanity check to make sure our initial conditions are correct + + await initializeSnapshotSetRevertThenTest( + initialValue, + permutation + ); + }); + } + } + } + }); }); describe("blocks", () => { From dbbc50041b1b31ee4c87e1c6d73938eca92ce158 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 27 Oct 2021 20:43:59 -0400 Subject: [PATCH 28/93] fix checkpoint issue --- src/chains/ethereum/ethereum/src/blockchain.ts | 1 + src/chains/ethereum/ethereum/src/forking/trie.ts | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts 
b/src/chains/ethereum/ethereum/src/blockchain.ts index a7e21c6022..a8dbf1b871 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -1318,6 +1318,7 @@ export default class Blockchain extends Emittery.Typed< // The previous implementation had specific error handling. // It's possible we've removed handling specific cases in this implementation. // e.g., the previous incantation of RuntimeError + await vm.stateManager.checkpoint(); await runTransactions(vm, newBlock.transactions, newBlock); // Just to be safe diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index a7d62c446a..577f622bfb 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -24,6 +24,8 @@ const GET_NONCE = "eth_getTransactionCount"; const GET_BALANCE = "eth_getBalance"; const GET_STORAGE_AT = "eth_getStorageAt"; +const MetadataSingletons = new WeakMap(); + const LEVELDOWN_OPTIONS = { keyEncoding: "binary", valueEncoding: "binary" @@ -46,7 +48,13 @@ export class ForkTrie extends GanacheTrie { this.accounts = blockchain.accounts; this.blockNumber = this.blockchain.fallback.blockNumber; - this.metadata = new CheckpointDB(sub(db, "f", LEVELDOWN_OPTIONS)); + + if (MetadataSingletons.has(db)) { + this.metadata = MetadataSingletons.get(db); + } else { + this.metadata = new CheckpointDB(sub(db, "f", LEVELDOWN_OPTIONS)); + MetadataSingletons.set(db, this.metadata); + } } set root(value: Buffer) { From 358ee62ec2907239ea29accb02458c99acb4674d Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 28 Oct 2021 18:42:05 -0400 Subject: [PATCH 29/93] stream results from bufferify as they are parsed --- .../ethereum/ethereum/src/blockchain.ts | 6 +- src/chains/ethereum/ethereum/src/connector.ts | 31 ++++- .../ethereum/src/helpers/bufferify.ts | 125 +++++++++++------- .../ethereum/ethereum/tests/connector.test.ts | 16 ++- 
src/packages/core/src/servers/http-server.ts | 25 +++- src/packages/core/src/servers/ws-server.ts | 16 ++- src/packages/utils/src/types/connector.ts | 5 +- 7 files changed, 152 insertions(+), 72 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts b/src/chains/ethereum/ethereum/src/blockchain.ts index a8dbf1b871..a5df2320cb 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -1319,7 +1319,11 @@ export default class Blockchain extends Emittery.Typed< // It's possible we've removed handling specific cases in this implementation. // e.g., the previous incantation of RuntimeError await vm.stateManager.checkpoint(); - await runTransactions(vm, newBlock.transactions, newBlock); + try { + await runTransactions(vm, newBlock.transactions, newBlock); + } finally { + await vm.stateManager.revert(); + } // Just to be safe removeListeners(); diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index ab42a584e0..74ae4f213d 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -104,9 +104,15 @@ export class Connector< return this.#provider._requestRaw({ method, params }); }; - format(result: any, payload: R): RecognizedString; + format( + result: any, + payload: R + ): RecognizedString | Generator; format(results: any[], payloads: R[]): RecognizedString; - format(results: any | any[], payload: R | R[]): RecognizedString { + format( + results: any | any[], + payload: R | R[] + ): RecognizedString | Generator { if (Array.isArray(payload)) { return JSON.stringify( payload.map((payload, i) => { @@ -122,14 +128,25 @@ export class Connector< const json = makeResponse(payload.id, results); if ( payload.method === "debug_traceTransaction" && - // for "large" debug_traceTransaction results convert directly to - // a Buffer instead of JSON.stringify so we don't hit V8's maximum - // string 
length limit of 1GB. We don't do this for everything - // because the bufferfication is so very very slow + // for "large" debug_traceTransaction results we convert to individual + // parts of the response to Buffers, yielded via a Generator function, + // instead of using JSON.stringify. This is necessary because we: + // * avoid V8's maximum string length limit of 1GB + // * avoid and the max Buffer length limit of ~2GB (on 64bit + // architectures). + // * avoid heap allocation failures due to trying to hold too much + // data in memory (which can happen if we don't immediately consume + // the `format` result -- by buffering everything into one array, + // for example) + // + // We don't do this for everything because the bufferfication is so very + // very slow. + // // TODO(perf): an even better way of solving this would be to convert // `debug_traceTransaction` to a generator that yields chunks (of // Buffer) as soon as they're available. We could then `write` these - // individual chunks immediately. + // individual chunks immediately and our memory use would stay + // relatively low and constant. results.structLogs.length > this.BUFFERIFY_THRESHOLD ) { return bufferify(json, ""); diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 11f6ae51a4..7c5ad29fb1 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -41,32 +41,42 @@ function stringToQuotedBuffer(value: string) { return QUOTE_PAIR; } } -function arrayToBuffer(value: any[]) { +function* arrayToBuffer(value: any[]) { const l = value.length; if (l === 0) { - return SQUARE_BRACKET_PAIR; + yield SQUARE_BRACKET_PAIR; + return; } else { - const chunkified = bufferify(value[0], "0"); - const jsonVal = chunkified.length === 0 ? 
NULL : chunkified; - // if the value ends up being nothing (undefined), return null - const bufs = [SQUARE_BRACKET_OPEN, jsonVal]; - let length = 2 + jsonVal.length; // 2 = `[` and `]` + let yieldPrefix = true; + for (const chunkified of bufferify(value[0], "0")) { + // if the value ends up being nothing (undefined), return null + const jsonVal = chunkified.length === 0 ? NULL : chunkified; + if (yieldPrefix) { + yield SQUARE_BRACKET_OPEN; + yieldPrefix = false; + } + yield jsonVal; + } if (l > 1) { for (let i = 1; i < l; i++) { - const chunkified = bufferify(value[i], i.toString()); - const chunkLength = chunkified.length; - if (chunkLength === 0) { - // if the value ends up being nothing (undefined), return null - bufs.push(COMMA, NULL); - length += 5; - } else { - bufs.push(COMMA, chunkified); - length += chunkLength + 1; + let yieldPrefix = true; + for (const chunkified of bufferify(value[i], i.toString())) { + const chunkLength = chunkified.length; + if (yieldPrefix) { + yield COMMA; + yieldPrefix = false; + } + if (chunkLength === 0) { + // if the value ends up being nothing (undefined), return null + yield NULL; + } else { + yield chunkified; + } } } } - bufs.push(SQUARE_BRACKET_CLOSE); - return Buffer.concat(bufs, length); + yield SQUARE_BRACKET_CLOSE; + return; } } function bufferToQuotedBuffer(value: Buffer) { @@ -78,50 +88,63 @@ function bufferToQuotedBuffer(value: Buffer) { return buf; } -function objectToBuffer(value: any, nameOrIndex: string) { - if ("toJSON" in value) - return bufferify(value.toJSON(nameOrIndex), nameOrIndex) as Buffer; +function* objectToBuffer(value: any, nameOrIndex: string) { + if ("toJSON" in value) { + yield* bufferify(value.toJSON(nameOrIndex), nameOrIndex); + return; + } const entries = Object.entries(value); const l = entries.length; if (l === 0) { - return CURLY_BRACKET_PAIR; + yield CURLY_BRACKET_PAIR; + return; } else { let i = 0; - let length = 2; // 2 == `{` and `}` - const bufs = [CURLY_BRACKET_OPEN]; + yield 
CURLY_BRACKET_OPEN; // find the first non-null property to start the object while (i < l) { const [key, value] = entries[i]; i++; - const chunkified = bufferify(value, key); - // if the chunkified value ends up being nothing (undefined) ignore - // the property - const chunkLength = chunkified.length; - if (chunkLength === 0) { - continue; - } - bufs.push(stringToQuotedBuffer(key), COLON, chunkified); - length += key.length + 2 + 1 + chunkLength; + let yieldPrefix = true; + for (const chunkified of bufferify(value, key)) { + // if the chunkified value ends up being nothing (undefined) ignore + // the property + const chunkLength = chunkified.length; + if (chunkLength === 0) { + continue; + } + + if (yieldPrefix) { + yield Buffer.concat([stringToQuotedBuffer(key), COLON]); + yieldPrefix = null; + } + yield chunkified; + } break; } if (l > 1) { for (; i < l; i++) { const [key, value] = entries[i]; - const chunkified = bufferify(value, key); - // if the chunkified value ends up being nothing (undefined) ignore - // the property - const chunkLength = chunkified.length; - if (chunkLength === 0) continue; + let yieldPrefix = true; + for (const chunkified of bufferify(value, key)) { + // if the chunkified value ends up being nothing (undefined) ignore + // the property + const chunkLength = chunkified.length; + if (chunkLength === 0) continue; - bufs.push(COMMA, stringToQuotedBuffer(key), COLON, chunkified); - length += 2 + key.length + 2 + chunkLength; + if (yieldPrefix) { + yield Buffer.concat([COMMA, stringToQuotedBuffer(key), COLON]); + yieldPrefix = false; + } + yield chunkified; + } } } - bufs.push(CURLY_BRACKET_CLOSE); - return Buffer.concat(bufs, length); + yield CURLY_BRACKET_CLOSE; + return; } } @@ -150,23 +173,25 @@ function objectToBuffer(value: any, nameOrIndex: string) { * property is used by internal recursive calls to bufferify. 
* See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#tojson_behavior */ -export function bufferify(value: any, nameOrIndex: string) { +export function* bufferify(value: any, nameOrIndex: string): Generator { const type = typeof value; if (type === "number" || type === "boolean") { - return numberToBuffer(value); + yield numberToBuffer(value); } else if (type === "string") { - return stringToQuotedBuffer(value); + yield stringToQuotedBuffer(value); } else if (Buffer.isBuffer(value)) { - return bufferToQuotedBuffer(value); + yield bufferToQuotedBuffer(value); } else if (Array.isArray(value)) { - return arrayToBuffer(value); + yield* arrayToBuffer(value); } else if (isObj(value)) { - return objectToBuffer(value, nameOrIndex); + yield* objectToBuffer(value, nameOrIndex); } else if (value === null) { - return NULL; + yield NULL; } else if (type === "undefined") { // nothing is returned for undefined - return _EMPTY; + yield _EMPTY; + } else if ("toJSON" in value && typeof value.toJSON === "function") { + yield* bufferify(value.toJSON(), nameOrIndex); } else { throw new Error("unsupported value in bufferify"); } diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index 17b34c53c5..1e41e58ef0 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -55,12 +55,20 @@ describe("connector", () => { assert.deepStrictEqual(result, expected); }); it("formats results as a Buffer as expected", async () => { + function isGeneratorIterator(arg) { + return arg.constructor === function* () {}.prototype.constructor; + } // trigger the buffering optimization without having to actually parse - // that much data + // the amount of data it usually takes connector.BUFFERIFY_THRESHOLD = 1; - const bufResult = connector.format(json, payload) as string; - assert(Buffer.isBuffer(bufResult)); - const result = 
JSON.parse(bufResult.toString("utf-8")); + + const bufResult = connector.format(json, payload); + assert(isGeneratorIterator(bufResult)); + let str = ""; + for (const datum of bufResult as any) { + str += datum.toString("utf-8"); + } + const result = JSON.parse(str); assert.deepStrictEqual(result, expected); }); }); diff --git a/src/packages/core/src/servers/http-server.ts b/src/packages/core/src/servers/http-server.ts index f34bef4664..3448d67ba1 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -180,13 +180,24 @@ export default class HttpServer { return; } const data = connector.format(result, payload); - sendResponse( - response, - HttpResponseCodes.OK, - ContentTypes.JSON, - data, - writeHeaders - ); + if (typeof data.next === "function") { + response.cork(() => { + response.writeStatus(HttpResponseCodes.OK); + writeHeaders(response); + response.writeHeader("Content-Type", ContentTypes.JSON); + + for (const datum of data) response.write(datum); + response.end(); + }); + } else { + sendResponse( + response, + HttpResponseCodes.OK, + ContentTypes.JSON, + data, + writeHeaders + ); + } }) .catch(error => { if (aborted) { diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index be99378f20..a7d32efd6f 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -76,7 +76,7 @@ export default class WebsocketServer { return; } - let response: RecognizedString; + let response: RecognizedString | Generator; try { const { value } = await connector.handle(payload, ws); @@ -120,7 +120,19 @@ export default class WebsocketServer { response = connector.formatError(err, payload); } - ws.send(response, useBinary); + if ( + typeof response === "object" && + Symbol.iterator in response && + typeof response[Symbol.iterator] === "function" + ) { + ws.cork(() => { + // as any because typescript STILL thinks response doesn't have 
a + // `Symbol.iterator` function. + for (const chunk of response as any) ws.send(chunk, useBinary); + }); + } else { + ws.send(response as RecognizedString, useBinary); + } }, drain: (ws: WebSocket) => { diff --git a/src/packages/utils/src/types/connector.ts b/src/packages/utils/src/types/connector.ts index cee85e97d0..48d0e079e6 100644 --- a/src/packages/utils/src/types/connector.ts +++ b/src/packages/utils/src/types/connector.ts @@ -58,7 +58,10 @@ export interface Connector< * @param response * @param payload */ - format(result: ResponseFormat, payload: RequestFormat): RecognizedString; + format( + result: ResponseFormat, + payload: RequestFormat + ): RecognizedString | Generator; /** * Formats the error response From 66a885f96efc4e98fbf889793c59c24aef234caa Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 29 Oct 2021 15:53:12 -0400 Subject: [PATCH 30/93] fix batch --- src/chains/ethereum/ethereum/src/connector.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index 74ae4f213d..02a06df910 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -120,7 +120,7 @@ export class Connector< if (result instanceof Error) { return makeError(payload.id, result as any); } else { - return this.format(result, payload); + return makeResponse(result, payload); } }) ); From 53b011f9784a15e36ab6cda9e8facd1c6247a2ed Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 29 Oct 2021 15:54:55 -0400 Subject: [PATCH 31/93] remove unused variable --- src/chains/ethereum/ethereum/src/forking/fork.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 4304ed942e..58369f6f4d 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -13,8 +13,6 @@ 
import BlockManager from "../data-managers/block-manager"; import { ProviderHandler } from "./handlers/provider-handler"; import { PersistentCache } from "./persistent-cache/persistent-cache"; -const CONFIRMATIONS = 5n; - async function fetchChainId(fork: Fork) { const chainIdHex = await fork.request("eth_chainId", []); return parseInt(chainIdHex, 16); From 46b2139f9ca65a63a1b2c7539ec55a2c88dc6f1b Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Fri, 29 Oct 2021 15:58:15 -0400 Subject: [PATCH 32/93] delete test.js --- src/chains/ethereum/ethereum/test.js | 57 ---------------------------- 1 file changed, 57 deletions(-) delete mode 100644 src/chains/ethereum/ethereum/test.js diff --git a/src/chains/ethereum/ethereum/test.js b/src/chains/ethereum/ethereum/test.js deleted file mode 100644 index f7f86e8d35..0000000000 --- a/src/chains/ethereum/ethereum/test.js +++ /dev/null @@ -1,57 +0,0 @@ -const { RequestCoordinator, Executor } = require("@ganache/utils"); -const EthereumProvider = require("./lib/src/provider").default; -const seedrandom = require("seedrandom"); - -const mnemonic = - "into trim cross then helmet popular suit hammer cart shrug oval student"; - -const getProvider = async ( - options = { - wallet: { mnemonic: mnemonic } - } -) => { - options.chain = options.chain || {}; - options.logging = options.logging || { logger: { log: () => {} } }; - - // set `asyncRequestProcessing` to `true` by default - let doAsync = options.chain.asyncRequestProcessing; - doAsync = options.chain.asyncRequestProcessing = - doAsync != null ? doAsync : true; - - // don't write to stdout in tests - if (!options.logging.logger) { - options.logging.logger = { log: () => {} }; - } - - const requestCoordinator = new RequestCoordinator(doAsync ? 
0 : 1); - const executor = new Executor(requestCoordinator); - const provider = new EthereumProvider(options, executor); - await provider.initialize(); - requestCoordinator.resume(); - return provider; -}; - -const rand = seedrandom("seed"); -function randomIntFromInterval(min, max) { - // min and max included - return Math.floor(rand() * (max - min + 1) + min); -} -(async () => { - const provider = await getProvider({ - wallet: { mnemonic }, - fork: { - url: - "https://mainnet.infura.io/v3/0e96090b2eb34ea293a23feec9594e20@13291115" - } - }); - const a = await provider.send("eth_accounts"); - - for (let j = 0; j < 60; j++) { - let address = "0x"; - for (let i = 0; i < 20; i++) { - address += randomIntFromInterval(0, 255).toString(16).padStart(2, "0"); - } - console.log(address, await provider.send("eth_getBalance", [address])); - } - //console.log(a); -})(); From 87f5d426d735f112bf32a287e590717a2cbaa909 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 29 Oct 2021 21:54:11 -0400 Subject: [PATCH 33/93] remove chunk streaming from ws-server --- src/chains/ethereum/ethereum/src/connector.ts | 14 +++++++++++--- .../ethereum/ethereum/tests/connector.test.ts | 8 ++++++-- src/chains/filecoin/filecoin/src/connector.ts | 6 +++++- src/packages/core/src/servers/ws-server.ts | 18 +++--------------- src/packages/utils/src/types/connector.ts | 8 +++++++- 5 files changed, 32 insertions(+), 22 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index 02a06df910..aa45007cd5 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -106,12 +106,19 @@ export class Connector< format( result: any, - payload: R + payload: R, + connection: HttpRequest ): RecognizedString | Generator; - format(results: any[], payloads: R[]): RecognizedString; + format(result: any, payload: R, connection: WebSocket): RecognizedString; + format( + results: any[], + payloads: R[], 
+ connection: HttpRequest | WebSocket + ): RecognizedString; format( results: any | any[], - payload: R | R[] + payload: R | R[], + connection: HttpRequest | WebSocket ): RecognizedString | Generator { if (Array.isArray(payload)) { return JSON.stringify( @@ -127,6 +134,7 @@ export class Connector< } else { const json = makeResponse(payload.id, results); if ( + isHttp(connection) && payload.method === "debug_traceTransaction" && // for "large" debug_traceTransaction results we convert to individual // parts of the response to Buffers, yielded via a Generator function, diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index 1e41e58ef0..0354578aca 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -49,7 +49,9 @@ describe("connector", () => { await connector.connect(); }); it("formats results as a string as expected", async () => { - const strResult = connector.format(json, payload) as string; + const strResult = connector.format(json, payload, { + constructor: { name: "HttpRequest" } + } as any) as string; assert.strictEqual(typeof strResult, "string"); const result = JSON.parse(strResult); assert.deepStrictEqual(result, expected); @@ -62,7 +64,9 @@ describe("connector", () => { // the amount of data it usually takes connector.BUFFERIFY_THRESHOLD = 1; - const bufResult = connector.format(json, payload); + const bufResult = connector.format(json, payload, { + constructor: { name: "HttpRequest" } + } as any); assert(isGeneratorIterator(bufResult)); let str = ""; for (const datum of bufResult as any) { diff --git a/src/chains/filecoin/filecoin/src/connector.ts b/src/chains/filecoin/filecoin/src/connector.ts index 70d9b3dde7..0571b8b385 100644 --- a/src/chains/filecoin/filecoin/src/connector.ts +++ b/src/chains/filecoin/filecoin/src/connector.ts @@ -59,7 +59,11 @@ export class Connector< return 
this.#provider._requestRaw(payload); } - format(result: any, payload: R): RecognizedString { + format( + result: any, + payload: R, + _connection: HttpRequest | WebSocket + ): RecognizedString { const json = makeResponse(payload.id, result); return JSON.stringify(json); } diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index a7d32efd6f..64c5b80799 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -76,7 +76,7 @@ export default class WebsocketServer { return; } - let response: RecognizedString | Generator; + let response: RecognizedString; try { const { value } = await connector.handle(payload, ws); @@ -89,7 +89,7 @@ export default class WebsocketServer { const result = await resultEmitter; if (ws.closed) return; - response = connector.format(result, payload); + response = connector.format(result, payload, ws); // if the result is an emitter listen to its `"message"` event // We check if `on` is a function rather than check if @@ -120,19 +120,7 @@ export default class WebsocketServer { response = connector.formatError(err, payload); } - if ( - typeof response === "object" && - Symbol.iterator in response && - typeof response[Symbol.iterator] === "function" - ) { - ws.cork(() => { - // as any because typescript STILL thinks response doesn't have a - // `Symbol.iterator` function. 
- for (const chunk of response as any) ws.send(chunk, useBinary); - }); - } else { - ws.send(response as RecognizedString, useBinary); - } + ws.send(response as RecognizedString, useBinary); }, drain: (ws: WebSocket) => { diff --git a/src/packages/utils/src/types/connector.ts b/src/packages/utils/src/types/connector.ts index 48d0e079e6..d5b9b6cfcb 100644 --- a/src/packages/utils/src/types/connector.ts +++ b/src/packages/utils/src/types/connector.ts @@ -60,8 +60,14 @@ export interface Connector< */ format( result: ResponseFormat, - payload: RequestFormat + payload: RequestFormat, + connection: HttpRequest ): RecognizedString | Generator; + format( + result: ResponseFormat, + payload: RequestFormat, + connection: WebSocket + ): RecognizedString; /** * Formats the error response From d300bbca2e19f1ffbca9ef0110e4d4295700c8b8 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 15:09:35 -0400 Subject: [PATCH 34/93] track metadata dbs, not checkpoints! --- .../ethereum/ethereum/src/forking/trie.ts | 9 +++--- .../ethereum/tests/forking/forking.test.ts | 30 +++++++++++++++---- 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 577f622bfb..065fd3f50d 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -24,7 +24,7 @@ const GET_NONCE = "eth_getTransactionCount"; const GET_BALANCE = "eth_getBalance"; const GET_STORAGE_AT = "eth_getStorageAt"; -const MetadataSingletons = new WeakMap(); +const MetadataSingletons = new WeakMap(); const LEVELDOWN_OPTIONS = { keyEncoding: "binary", @@ -50,10 +50,11 @@ export class ForkTrie extends GanacheTrie { this.blockNumber = this.blockchain.fallback.blockNumber; if (MetadataSingletons.has(db)) { - this.metadata = MetadataSingletons.get(db); + this.metadata = new CheckpointDB(MetadataSingletons.get(db)); } else { - this.metadata = new 
CheckpointDB(sub(db, "f", LEVELDOWN_OPTIONS)); - MetadataSingletons.set(db, this.metadata); + const metadataDb = sub(db, "f", LEVELDOWN_OPTIONS); + MetadataSingletons.set(db, metadataDb); + this.metadata = new CheckpointDB(metadataDb); } } diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index 94b7e294a9..a382dc70d7 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -531,19 +531,39 @@ describe("forking", function () { return Promise.all( blockNumsWithCode.map(async blockNum => { const value0 = await get("value0", blockNum); - assert.strictEqual(parseInt(value0, 16), 0); + assert.strictEqual( + parseInt(value0, 16), + 0, + `check failed at value0 block ${blockNum}` + ); const value1 = await get("value1", blockNum); - assert.strictEqual(parseInt(value1, 16), 2); + assert.strictEqual( + parseInt(value1, 16), + 2, + `check failed at value1 block ${blockNum}` + ); const value2 = await get("value2", blockNum); - assert.strictEqual(parseInt(value2, 16), 1); + assert.strictEqual( + parseInt(value2, 16), + 1, + `check failed at value2 block ${blockNum}` + ); const value3 = await get("value3", blockNum); - assert.strictEqual(parseInt(value3, 16), 0); + assert.strictEqual( + parseInt(value3, 16), + 0, + `check failed at value3 block ${blockNum}` + ); const value4 = await get("value4", blockNum); - assert.strictEqual(parseInt(value4, 16), 1); + assert.strictEqual( + parseInt(value4, 16), + 1, + `check failed at value4 block ${blockNum}` + ); }) ); } From bcb3e295293be21bcc3370817e5247c2cf6cb570 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 16:48:36 -0400 Subject: [PATCH 35/93] remove this test as the transaction traced for `debug_storageRangeAt` isn't actually run --- .../ethereum/ethereum/tests/provider.test.ts | 43 ------------------- 1 file changed, 43 deletions(-) diff --git 
a/src/chains/ethereum/ethereum/tests/provider.test.ts b/src/chains/ethereum/ethereum/tests/provider.test.ts index f9559cdbd2..f88cb0ad91 100644 --- a/src/chains/ethereum/ethereum/tests/provider.test.ts +++ b/src/chains/ethereum/ethereum/tests/provider.test.ts @@ -178,49 +178,6 @@ describe("provider", () => { await provider.send("debug_traceTransaction", [hash]); }, controlEvents); }); - it("emits vm:tx:* events for debug_storageRangeAt", async () => { - // README - // This test is slightly different, as we actually send a transaction to the - // contract, and then measure those events, instead of the deployment - // transaction itself. - - const { - contractAddress - } = await provider.send("eth_getTransactionReceipt", [deploymentHash]); - const initialValue = "0".repeat(62) + "19"; // 25 - // call the setValue method so we have some stuff to trace at the - // deployed contract - let receipt: any; - const controlEvents = await testEvents(async () => { - const subId = await provider.send("eth_subscribe", ["newHeads"]); - const hash = await provider.send("eth_sendTransaction", [ - { - from, - to: contractAddress, - gas: "0x2fefd8", - data: `0x${contract.contract.evm.methodIdentifiers["setValue(uint256)"]}${initialValue}` - } - ]); - await provider.once("message"); - await provider.send("eth_unsubscribe", [subId]); - receipt = await provider.send("eth_getTransactionReceipt", [hash]); - }); - assert(controlEvents.length > 2); - - await testEvents(async () => { - try { - await provider.send("debug_storageRangeAt", [ - receipt.blockHash, - 0, - contractAddress, - "0x00", - 2 - ]); - } catch (e) { - throw e; - } - }, controlEvents); - }); }); it("returns things via EIP-1193", async () => { From 764682d620bb48337d84c945cee48aabef764f98 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 16:49:54 -0400 Subject: [PATCH 36/93] fix traceTransaction and debug_traceTransaction events --- .../ethereum/ethereum/src/blockchain.ts | 106 ++++++++---------- 
.../ethereum/src/helpers/run-transactions.ts | 25 ----- 2 files changed, 47 insertions(+), 84 deletions(-) delete mode 100644 src/chains/ethereum/ethereum/src/helpers/run-transactions.ts diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts b/src/chains/ethereum/ethereum/src/blockchain.ts index a5df2320cb..c55f7c8014 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -1100,6 +1100,7 @@ export default class Blockchain extends Emittery.Typed< } #traceTransaction = async ( + transaction: VmTransaction, trie: GanacheTrie, newBlock: RuntimeBlock & { transactions: VmTransaction[] }, options: TransactionTraceOptions, @@ -1132,7 +1133,6 @@ export default class Blockchain extends Emittery.Typed< }); const storage: StorageRecords = {}; - const transaction = newBlock.transactions[newBlock.transactions.length - 1]; // TODO: gas could go theoretically go over Number.MAX_SAFE_INTEGER. // (Ganache v2 didn't handle this possibility either, so it hasn't been @@ -1254,53 +1254,6 @@ export default class Blockchain extends Emittery.Typed< } }; - const afterTxListener = () => { - vm.removeListener("step", stepListener); - vm.removeListener("afterTransaction", afterTxListener); - this.emit("ganache:vm:tx:after", { - context: transactionEventContext - }); - }; - - const beforeTxListener = async (tx: VmTransaction) => { - if (tx === transaction) { - this.emit("ganache:vm:tx:before", { - context: transactionEventContext - }); - vm.on("step", stepListener); - vm.on("afterTx", afterTxListener); - if (keys && contractAddress) { - const database = this.#database; - return Promise.all( - keys.map(async key => { - // get the raw key using the hashed key - let rawKey = await database.storageKeys.get(key); - - const result = await vm.stateManager.getContractStorage( - { buf: Address.from(contractAddress).toBuffer() } as any, - rawKey - ); - - storage[Data.from(key, key.length).toString()] = { - key: Data.from(rawKey, 
rawKey.length), - value: Data.from(result, 32) - }; - }) - ); - } - } - }; - - const removeListeners = () => { - vm.removeListener("step", stepListener); - vm.removeListener("beforeTx", beforeTxListener); - vm.removeListener("afterTx", afterTxListener); - }; - - // Listen to beforeTx so we know when our target transaction - // is processing. This event will add the event listener for getting the trace data. - vm.on("beforeTx", beforeTxListener); - // Don't even let the vm try to flush the block's _cache to the stateTrie. // When forking some of the data that the traced function may request will // exist only on the main chain. Because we pretty much lie to the VM by @@ -1320,14 +1273,49 @@ export default class Blockchain extends Emittery.Typed< // e.g., the previous incantation of RuntimeError await vm.stateManager.checkpoint(); try { - await runTransactions(vm, newBlock.transactions, newBlock); + for (let i = 0, l = newBlock.transactions.length; i < l; i++) { + const tx = newBlock.transactions[i] as any; + if (tx === transaction) { + if (keys && contractAddress) { + const database = this.#database; + const ejsContractAddress = { buf: contractAddress } as any; + await Promise.all( + keys.map(async key => { + // get the raw key using the hashed key + const rawKey = await database.storageKeys.get(key); + + const result = await vm.stateManager.getContractStorage( + ejsContractAddress, + rawKey + ); + + storage[Data.from(key, key.length).toString()] = { + key: Data.from(rawKey, rawKey.length), + value: Data.from(result, 32) + }; + }) + ); + break; + } else { + vm.on("step", stepListener); + // force the loop to break after running this transaction by setting + // the current iteration past the end + i = l; + } + } + this.emit("ganache:vm:tx:before", { + context: transactionEventContext + }); + await vm.runTx({ tx, block: newBlock as any }); + this.emit("ganache:vm:tx:after", { + context: transactionEventContext + }); + } + vm.removeListener("step", stepListener); } 
finally { await vm.stateManager.revert(); } - // Just to be safe - removeListeners(); - // send state results back return { gas, @@ -1418,12 +1406,6 @@ export default class Blockchain extends Emittery.Typed< transactionHashBuffer ); - // only copy relevant transactions - newBlock.transactions = newBlock.transactions.slice( - 0, - 1 + transaction.index.toNumber() - ); - // #2 - Set state root of original block // // TODO: Forking needs the forked block number passed during this step: @@ -1441,7 +1423,12 @@ export default class Blockchain extends Emittery.Typed< structLogs, returnValue, storage - } = await this.#traceTransaction(trie, newBlock, options); + } = await this.#traceTransaction( + newBlock.transactions[transaction.index.toNumber()], + trie, + newBlock, + options + ); // #4 - Send results back return { gas, structLogs, returnValue, storage }; @@ -1570,6 +1557,7 @@ export default class Blockchain extends Emittery.Typed< }; const { storage } = await this.#traceTransaction( + newBlock.transactions[transaction.index.toNumber()], trie, newBlock, options, diff --git a/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts b/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts deleted file mode 100644 index 16250d9b2e..0000000000 --- a/src/chains/ethereum/ethereum/src/helpers/run-transactions.ts +++ /dev/null @@ -1,25 +0,0 @@ -import VM from "@ethereumjs/vm"; -import { RuntimeBlock } from "@ganache/ethereum-block"; -import { VmTransaction } from "@ganache/ethereum-transaction"; - -/** - * Runs the given transactions, unchecked, through the VM with the given block. - * - * The method does not create a `checkpoint` or `commit`/`revert`. 
- * - * @param vm - * @param transactions - * @param block - */ -export async function runTransactions( - vm: VM, - transactions: VmTransaction[], - block: RuntimeBlock -) { - for (let i = 0, l = transactions.length; i < l; i++) { - await vm.runTx({ - tx: transactions[i] as any, - block: block as any - }); - } -} From 52973dc0ebd573306364d8d8ed139f2322120cf2 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 17:05:24 -0400 Subject: [PATCH 37/93] delete old file import --- src/chains/ethereum/ethereum/src/blockchain.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/blockchain.ts b/src/chains/ethereum/ethereum/src/blockchain.ts index c55f7c8014..2bc41414c9 100644 --- a/src/chains/ethereum/ethereum/src/blockchain.ts +++ b/src/chains/ethereum/ethereum/src/blockchain.ts @@ -53,7 +53,6 @@ import { TypedTransaction } from "@ganache/ethereum-transaction"; import { Block, RuntimeBlock, Snapshots } from "@ganache/ethereum-block"; -import { runTransactions } from "./helpers/run-transactions"; import { SimulationTransaction } from "./helpers/run-call"; import { ForkStateManager } from "./forking/state-manager"; import { From 9da11d387f27c1a55a201574f45e94ac20851a22 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 17:33:53 -0400 Subject: [PATCH 38/93] fix typo and omission --- src/chains/ethereum/ethereum/src/connector.ts | 2 +- src/packages/core/src/servers/http-server.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index aa45007cd5..021bf2796b 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -127,7 +127,7 @@ export class Connector< if (result instanceof Error) { return makeError(payload.id, result as any); } else { - return makeResponse(result, payload); + return makeResponse(payload.id, payload); } }) ); diff --git 
a/src/packages/core/src/servers/http-server.ts b/src/packages/core/src/servers/http-server.ts index 3448d67ba1..1572358b7d 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -179,7 +179,7 @@ export default class HttpServer { // cause an `Unhandled promise rejection` if we try) return; } - const data = connector.format(result, payload); + const data = connector.format(result, payload, this); if (typeof data.next === "function") { response.cork(() => { response.writeStatus(HttpResponseCodes.OK); From 3d1caca56584da99c796d5979e40a8648935cba5 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 18:13:28 -0400 Subject: [PATCH 39/93] . --- src/chains/ethereum/ethereum/src/connector.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index 021bf2796b..2d7c153270 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -127,7 +127,7 @@ export class Connector< if (result instanceof Error) { return makeError(payload.id, result as any); } else { - return makeResponse(payload.id, payload); + return makeResponse(payload.id, result); } }) ); From bbc0ec324f347e9e0f5c94d9d465010a23878fac Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 1 Nov 2021 18:27:30 -0400 Subject: [PATCH 40/93] add info to flaky test --- .../ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts index 945ba3f045..fcc3c8e45f 100644 --- a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts +++ b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts @@ -82,6 +82,10 @@ describe("api", () => { const block = await 
provider.send("eth_getBlockByNumber", [ `0x${numberOfBlocksToMine.toString(16)}` ]); + assert( + block, + `\`block\` is \`null\`; didn't correctly mine ${numberOfBlocksToMine} blocks` + ); assert.strictEqual( block.totalDifficulty, `0x${((numberOfBlocksToMine + 1) * difficulty).toString(16)}`, From 3c9e0cf9cf7ae02de68edfeac1e5295e51b1ad6d Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 2 Nov 2021 18:58:32 -0400 Subject: [PATCH 41/93] add streaming support to websockets --- .../ethereum/ethereum/package-lock.json | 49 ++++--- src/chains/ethereum/ethereum/package.json | 4 +- src/chains/ethereum/ethereum/src/connector.ts | 23 ++-- .../ethereum/ethereum/tests/connector.test.ts | 8 +- .../filecoin/filecoin/package-lock.json | 64 +++++---- src/chains/filecoin/filecoin/package.json | 4 +- src/chains/filecoin/filecoin/src/connector.ts | 6 +- src/chains/tezos/tezos/package-lock.json | 40 +++--- src/chains/tezos/tezos/package.json | 2 +- src/packages/core/package-lock.json | 48 +++---- src/packages/core/package.json | 4 +- src/packages/core/src/server.ts | 10 +- src/packages/core/src/servers/http-server.ts | 8 +- src/packages/core/src/servers/ws-server.ts | 42 +++++- src/packages/core/tests/server.test.ts | 121 ++++++++++++++++-- src/packages/ganache/npm-shrinkwrap.json | 2 +- src/packages/utils/package-lock.json | 52 +++++--- src/packages/utils/package.json | 2 +- src/packages/utils/src/types/connector.ts | 9 +- 19 files changed, 324 insertions(+), 174 deletions(-) diff --git a/src/chains/ethereum/ethereum/package-lock.json b/src/chains/ethereum/ethereum/package-lock.json index 086679e2ba..2f98e506ff 100644 --- a/src/chains/ethereum/ethereum/package-lock.json +++ b/src/chains/ethereum/ethereum/package-lock.json @@ -486,31 +486,42 @@ } }, "@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": 
"sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.1", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", + "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", "dev": true, "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" }, "dependencies": { - "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "bufferutil": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" } }, - "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==", - "dev": true + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "dev": true, + "optional": true + }, + "utf-8-validate": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", + "dev": true, + "optional": true, + "requires": { + "node-gyp-build": "^4.3.0" + } } } }, @@ -5591,9 
+5602,9 @@ } }, "ws": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", - "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==" + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==" }, "xtend": { "version": "4.0.2", diff --git a/src/chains/ethereum/ethereum/package.json b/src/chains/ethereum/ethereum/package.json index 6103c95327..ec029243bc 100644 --- a/src/chains/ethereum/ethereum/package.json +++ b/src/chains/ethereum/ethereum/package.json @@ -84,11 +84,11 @@ "semaphore": "1.1.0", "subleveldown": "5.0.1", "tmp-promise": "3.0.2", - "ws": "7.5.3" + "ws": "8.2.3" }, "devDependencies": { "@trufflesuite/typedoc-default-themes": "0.6.1", - "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", "@types/encoding-down": "5.0.0", "@types/fs-extra": "9.0.2", "@types/keccak": "3.0.1", diff --git a/src/chains/ethereum/ethereum/src/connector.ts b/src/chains/ethereum/ethereum/src/connector.ts index 2d7c153270..c443a262f1 100644 --- a/src/chains/ethereum/ethereum/src/connector.ts +++ b/src/chains/ethereum/ethereum/src/connector.ts @@ -27,8 +27,6 @@ type ProviderOptions = EthereumProviderOptions | EthereumLegacyProviderOptions; export type Provider = EthereumProvider; export const Provider = EthereumProvider; -const BUFFERIFY_THRESHOLD = 100000; - function isHttp( connection: HttpRequest | WebSocket ): connection is HttpRequest { @@ -48,6 +46,8 @@ export class Connector< implements IConnector { #provider: EthereumProvider; + static BUFFERIFY_THRESHOLD: number = 100000; + get provider() { return this.#provider; } @@ -58,7 +58,7 @@ export class Connector< this.#provider = new EthereumProvider(providerOptions, executor); } - public BUFFERIFY_THRESHOLD = BUFFERIFY_THRESHOLD; + public 
BUFFERIFY_THRESHOLD = Connector.BUFFERIFY_THRESHOLD; async connect() { await this.#provider.initialize(); @@ -106,19 +106,13 @@ export class Connector< format( result: any, - payload: R, - connection: HttpRequest + payload: R ): RecognizedString | Generator; - format(result: any, payload: R, connection: WebSocket): RecognizedString; - format( - results: any[], - payloads: R[], - connection: HttpRequest | WebSocket - ): RecognizedString; + format(result: any, payload: R): RecognizedString; + format(results: any[], payloads: R[]): RecognizedString; format( results: any | any[], - payload: R | R[], - connection: HttpRequest | WebSocket + payload: R | R[] ): RecognizedString | Generator { if (Array.isArray(payload)) { return JSON.stringify( @@ -134,8 +128,9 @@ export class Connector< } else { const json = makeResponse(payload.id, results); if ( - isHttp(connection) && payload.method === "debug_traceTransaction" && + typeof results === "object" && + Array.isArray(results.structLogs) && // for "large" debug_traceTransaction results we convert to individual // parts of the response to Buffers, yielded via a Generator function, // instead of using JSON.stringify. 
This is necessary because we: diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index 0354578aca..1e41e58ef0 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -49,9 +49,7 @@ describe("connector", () => { await connector.connect(); }); it("formats results as a string as expected", async () => { - const strResult = connector.format(json, payload, { - constructor: { name: "HttpRequest" } - } as any) as string; + const strResult = connector.format(json, payload) as string; assert.strictEqual(typeof strResult, "string"); const result = JSON.parse(strResult); assert.deepStrictEqual(result, expected); @@ -64,9 +62,7 @@ describe("connector", () => { // the amount of data it usually takes connector.BUFFERIFY_THRESHOLD = 1; - const bufResult = connector.format(json, payload, { - constructor: { name: "HttpRequest" } - } as any); + const bufResult = connector.format(json, payload); assert(isGeneratorIterator(bufResult)); let str = ""; for (const datum of bufResult as any) { diff --git a/src/chains/filecoin/filecoin/package-lock.json b/src/chains/filecoin/filecoin/package-lock.json index bdeb1d0368..5d60353f4a 100644 --- a/src/chains/filecoin/filecoin/package-lock.json +++ b/src/chains/filecoin/filecoin/package-lock.json @@ -830,22 +830,14 @@ "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==" }, "@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.1", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", + "integrity": 
"sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", "dev": true, "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" - }, - "dependencies": { - "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==", - "dev": true - } + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" } }, "@types/abstract-leveldown": { @@ -1820,13 +1812,22 @@ "dev": true }, "bufferutil": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz", - "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" + }, + "dependencies": { + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "dev": true, + "optional": true + } } }, "bufio": { @@ -6390,8 +6391,7 @@ "requires": { "buffer": "^5.6.0", "event-iterator": "^2.0.0", - "relative-url": "^1.0.2", - "ws": "^7.3.1" + "relative-url": "^1.0.2" } }, "iterable-ndjson": { @@ -10656,13 +10656,22 @@ } }, "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "version": "5.0.7", + "resolved": 
"https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" + }, + "dependencies": { + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "dev": true, + "optional": true + } } }, "utf8-byte-length": { @@ -11029,9 +11038,10 @@ } }, "ws": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", - "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==" + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", + "dev": true }, "xdg-basedir": { "version": "4.0.0", diff --git a/src/chains/filecoin/filecoin/package.json b/src/chains/filecoin/filecoin/package.json index 453168eb67..49d1caf5eb 100644 --- a/src/chains/filecoin/filecoin/package.json +++ b/src/chains/filecoin/filecoin/package.json @@ -59,7 +59,7 @@ "@filecoin-shipyard/lotus-client-schema": "2.0.0", "@ganache/filecoin-options": "0.1.1-alpha.1", "@ganache/utils": "0.1.1-alpha.1", - "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", "@types/bn.js": "5.1.0", "@types/deep-equal": "1.0.1", "@types/levelup": "4.3.0", @@ -100,6 +100,6 @@ "typescript": "4.1.3", "webpack": "5.21.2", "webpack-cli": "4.5.0", - "ws": "7.5.3" + "ws": "8.2.3" } } diff --git a/src/chains/filecoin/filecoin/src/connector.ts b/src/chains/filecoin/filecoin/src/connector.ts index 0571b8b385..70d9b3dde7 100644 --- 
a/src/chains/filecoin/filecoin/src/connector.ts +++ b/src/chains/filecoin/filecoin/src/connector.ts @@ -59,11 +59,7 @@ export class Connector< return this.#provider._requestRaw(payload); } - format( - result: any, - payload: R, - _connection: HttpRequest | WebSocket - ): RecognizedString { + format(result: any, payload: R): RecognizedString { const json = makeResponse(payload.id, result); return JSON.stringify(json); } diff --git a/src/chains/tezos/tezos/package-lock.json b/src/chains/tezos/tezos/package-lock.json index 130358bf25..93b2163528 100644 --- a/src/chains/tezos/tezos/package-lock.json +++ b/src/chains/tezos/tezos/package-lock.json @@ -32,14 +32,14 @@ } }, "@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.1", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", + "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", "dev": true, "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" } }, "@types/mocha": { @@ -244,13 +244,13 @@ "dev": true }, "bufferutil": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz", - "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "dev": true, "optional": true, "requires": { - "node-gyp-build": 
"^4.2.0" + "node-gyp-build": "^4.3.0" } }, "byte-size": { @@ -1892,9 +1892,9 @@ "dev": true }, "node-gyp-build": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", - "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", "dev": true, "optional": true }, @@ -2558,13 +2558,13 @@ "dev": true }, "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" } }, "util-deprecate": { @@ -2715,9 +2715,9 @@ "dev": true }, "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==", + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", "dev": true }, "y18n": { diff --git a/src/chains/tezos/tezos/package.json b/src/chains/tezos/tezos/package.json index fc45e5eccf..8fad58c019 100644 --- a/src/chains/tezos/tezos/package.json +++ b/src/chains/tezos/tezos/package.json @@ -48,7 +48,7 @@ }, "devDependencies": { "@trufflesuite/typedoc-default-themes": "0.6.1", - 
"@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", "@types/mocha": "8.2.2", "cheerio": "1.0.0-rc.3", "cross-env": "7.0.3", diff --git a/src/packages/core/package-lock.json b/src/packages/core/package-lock.json index cca91ade70..64a60d17d9 100644 --- a/src/packages/core/package-lock.json +++ b/src/packages/core/package-lock.json @@ -421,20 +421,13 @@ "dev": true }, "@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.1", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", + "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" - }, - "dependencies": { - "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==" - } + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" } }, "@types/cookiejar": { @@ -619,12 +612,12 @@ "dev": true }, "bufferutil": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz", - "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": 
"^4.3.0" } }, "caching-transform": { @@ -1684,9 +1677,9 @@ "dev": true }, "node-gyp-build": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", - "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", "optional": true }, "node-preload": { @@ -2462,12 +2455,12 @@ } }, "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" } }, "util-deprecate": { @@ -2588,10 +2581,9 @@ } }, "ws": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", - "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", - "dev": true + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==" }, "y18n": { "version": "5.0.8", diff --git a/src/packages/core/package.json b/src/packages/core/package.json index 1a4ad63ca5..7e0bbfddc8 100644 --- a/src/packages/core/package.json +++ b/src/packages/core/package.json @@ -53,7 +53,7 @@ "@ganache/options": "0.1.1-alpha.1", "@ganache/tezos": "0.1.1-alpha.1", "@ganache/utils": "0.1.1-alpha.1", - 
"@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", "aggregate-error": "3.1.0", "emittery": "0.8.1", "promise.allsettled": "1.0.4" @@ -69,6 +69,6 @@ "ts-node": "9.1.1", "ttypescript": "1.5.12", "typescript": "4.1.3", - "ws": "7.5.3" + "ws": "8.2.3" } } diff --git a/src/packages/core/src/server.ts b/src/packages/core/src/server.ts index e00d5b0418..096d1eb5f5 100644 --- a/src/packages/core/src/server.ts +++ b/src/packages/core/src/server.ts @@ -16,12 +16,16 @@ import allSettled from "promise.allsettled"; allSettled.shim(); import AggregateError from "aggregate-error"; +import type {TemplatedApp, us_listen_socket} from "@trufflesuite/uws-js-unofficial"; import { App, - TemplatedApp, - us_listen_socket, - us_listen_socket_close + us_listen_socket_close, + _cfg as setUwsGlobalConfig } from "@trufflesuite/uws-js-unofficial"; + +// Set the "silent" config option so we don't output the "uwebsockets" header +setUwsGlobalConfig(new Uint8Array([115, 105, 108, 101, 110, 116]) as any); + import { Connector, ConnectorsByName, diff --git a/src/packages/core/src/servers/http-server.ts b/src/packages/core/src/servers/http-server.ts index 1572358b7d..828aab447b 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -8,6 +8,7 @@ import ContentTypes from "./utils/content-types"; import HttpResponseCodes from "./utils/http-response-codes"; import { Connector } from "@ganache/flavors"; import { InternalOptions } from "../options"; +import { types } from "util"; type HttpMethods = "GET" | "OPTIONS" | "POST"; @@ -179,14 +180,15 @@ export default class HttpServer { // cause an `Unhandled promise rejection` if we try) return; } - const data = connector.format(result, payload, this); - if (typeof data.next === "function") { + const data = connector.format(result, payload); + if (types.isGeneratorObject(data)) { response.cork(() => { 
response.writeStatus(HttpResponseCodes.OK); writeHeaders(response); response.writeHeader("Content-Type", ContentTypes.JSON); - for (const datum of data) response.write(datum); + for (const datum of data) + response.write(datum as RecognizedString); response.end(); }); } else { diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index 64c5b80799..2578fdb8f0 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -6,7 +6,9 @@ import { import WebSocketCloseCodes from "./utils/websocket-close-codes"; import { InternalOptions } from "../options"; import * as Flavors from "@ganache/flavors"; -import { PromiEvent } from "@ganache/utils"; +import { hasOwn, PromiEvent } from "@ganache/utils"; +import { isGeneratorFunction, isGeneratorObject } from "util/types"; +import { types } from "util"; type MergePromiseT = Promise ? X : never>; @@ -76,7 +78,7 @@ export default class WebsocketServer { return; } - let response: RecognizedString; + let data: RecognizedString | Generator; try { const { value } = await connector.handle(payload, ws); @@ -89,7 +91,7 @@ export default class WebsocketServer { const result = await resultEmitter; if (ws.closed) return; - response = connector.format(result, payload, ws); + data = connector.format(result, payload); // if the result is an emitter listen to its `"message"` event // We check if `on` is a function rather than check if @@ -117,10 +119,40 @@ export default class WebsocketServer { // ensure the connector's `handle` fn doesn't throw outside of a Promise if (ws.closed) return; - response = connector.formatError(err, payload); + data = connector.formatError(err, payload); } - ws.send(response as RecognizedString, useBinary); + if (types.isGeneratorObject(data)) { + const localData = data; + ws.cork(() => { + const { value: first } = localData.next(); + const COMPRESS = false; + + // get the second fragment, if there is one + let { value: 
next, done } = localData.next(); + + // if there wasn't a second fragment, just send it the usual way. + if (done) { + ws.send(first, useBinary); + } else { + // send the first fragment + ws.sendFirstFragment(first, useBinary, COMPRESS); + + // Now send the rest of the data piece by piece. + // We lag behind by one fragment because the last fragment needs + // to be sent via the `sendLastFragment` method + let prev = next; + for (next of localData) { + ws.sendFragment(prev, COMPRESS); + prev = next; + } + // finally, send the last fragment + ws.sendLastFragment(next, COMPRESS); + } + }); + } else { + ws.send(data as RecognizedString, useBinary); + } }, drain: (ws: WebSocket) => { diff --git a/src/packages/core/tests/server.test.ts b/src/packages/core/tests/server.test.ts index 59e9763a0b..8df656ce1f 100644 --- a/src/packages/core/tests/server.test.ts +++ b/src/packages/core/tests/server.test.ts @@ -17,7 +17,8 @@ import intoStream = require("into-stream"); import { PromiEvent } from "@ganache/utils"; import { promisify } from "util"; import { ServerOptions } from "../src/options"; -import { Provider as EthereumProvider } from "@ganache/ethereum"; +import { Connector, Provider as EthereumProvider } from "@ganache/ethereum"; +import { Buffer } from "buffer"; const IS_WINDOWS = process.platform === "win32"; @@ -66,6 +67,9 @@ describe("server", () => { .send(jsonRpcJson); assert.strictEqual(response.status, 200); + // make sure we aren't including the uwebsockets header + assert.strictEqual("uwebsockets" in response.headers, false); + const json = JSON.parse(response.text); assert.strictEqual(json.result, `${networkId}`); return response; @@ -373,14 +377,14 @@ describe("server", () => { } }); - it("handles chunked requests (note: doesn't test `transfer-encoding: chunked`)", async () => { + it("handles chunked requests (note: doesn't test sending with `transfer-encoding: chunked`)", async () => { await setup(); try { const req = request.post("http://localhost:" + port); 
const json = JSON.stringify(jsonRpcJson); // we have to set the content-length because we can't use - // `Transfer-Encoding: chunked` with uWebSockets.js as of v15.9.0 + // `Transfer-Encoding: chunked` to uWebSockets.js as of v15.9.0 req.set("Content-Length", json.length.toString()); await new Promise((resolve, reject) => { @@ -403,6 +407,52 @@ describe("server", () => { } }); + it.only("responds with transfer-encoding: chunked responses when bufferification is triggered", async () => { + const originalThreshold = Connector.BUFFERIFY_THRESHOLD; + // This will trigger bufferication in the Ethereum connector + // for calls to debug_traceTransaction that return structLogs that have a + // length greater than BUFFERIFY_THRESHOLD + Connector.BUFFERIFY_THRESHOLD = 0; + + try { + await setup(); + const [from] = await s.provider.send("eth_accounts"); + await s.provider.send("eth_subscribe", ["newHeads"]); + + const ops = [ + { op: "PUSH1", code: "60", data: "00" }, + { op: "PUSH1", code: "60", data: "00" }, + { op: "RETURN", code: "f3", data: "" } + ]; + // a silly "contract" we can trace later: PUSH 0, PUSH, 0, RETURN + const data = "0x" + ops.map(op => op.code + op.data).join(""); + const hash = s.provider.send("eth_sendTransaction", [{ from, data }]); + await s.provider.once("message"); + + // send a `debug_traceTransaction` request to the *server* so we can + // test for `transfer-encoding: chunked` and bufferfication. 
+ const jsonRpcJson: any = { + jsonrpc: "2.0", + id: "1", + method: "debug_traceTransaction", + params: [await hash] + }; + + const { text, header, status } = await request + .post("http://localhost:" + port) + .send(jsonRpcJson); + const { result } = JSON.parse(text); + + assert.strictEqual(header["transfer-encoding"], "chunked"); + assert.strictEqual(header["content-type"], "application/json"); + assert.strictEqual(status, 200); + assert.strictEqual(result.structLogs.length, ops.length); + } finally { + Connector.BUFFERIFY_THRESHOLD = originalThreshold; + await teardown(); + } + }); + it("returns 200/OK for RPC errors over HTTP", async () => { await setup(); const jsonRpcJson: any = { @@ -678,18 +728,13 @@ describe("server", () => { it("returns the net_version over a websocket", async () => { const ws = new WebSocket("ws://localhost:" + port); - const response: any = await new Promise(resolve => { + const { data }: any = await new Promise(resolve => { ws.on("open", () => { ws.send(JSON.stringify(jsonRpcJson)); }); - ws.on("message", resolve); + ws.on("message", (data, isBinary) => resolve({ data, isBinary })); }); - assert.strictEqual( - typeof response, - "string", - "response doesn't seem to be a string as expected" - ); - const json = JSON.parse(response); + const json = JSON.parse(data); assert.strictEqual(json.result, `${networkId}`); }); @@ -922,6 +967,60 @@ describe("server", () => { }); }); + it("responds with transfer-encoding: chunked responses when bufferification is triggered", async () => { + // this test needs to set BUFFERIFY_THRESHOLD before starting the server + await teardown(); + + const originalThreshold = Connector.BUFFERIFY_THRESHOLD; + // This will trigger bufferication in the Ethereum connector + // for calls to debug_traceTransaction that return structLogs that have a + // length greater than BUFFERIFY_THRESHOLD + Connector.BUFFERIFY_THRESHOLD = 0; + + try { + await setup(); + const [from] = await s.provider.send("eth_accounts"); + await 
s.provider.send("eth_subscribe", ["newHeads"]); + + const ops = [ + { op: "PUSH1", code: "60", data: "00" }, + { op: "PUSH1", code: "60", data: "00" }, + { op: "RETURN", code: "f3", data: "" } + ]; + // a silly "contract" we can trace later: PUSH 0, PUSH, 0, RETURN + const data = "0x" + ops.map(op => op.code + op.data).join(""); + const hash = s.provider.send("eth_sendTransaction", [{ from, data }]); + await s.provider.once("message"); + + // send a `debug_traceTransaction` request to the *server* so we can + // test for `transfer-encoding: chunked` and bufferfication. + const jsonRpcJson: any = { + jsonrpc: "2.0", + id: "1", + method: "debug_traceTransaction", + params: [await hash] + }; + + const ws = new WebSocket("ws://localhost:" + port); + ws.binaryType = "fragments"; + const response: any = await new Promise(resolve => { + ws.on("open", () => { + ws.send(Buffer.from(JSON.stringify(jsonRpcJson)), { + binary: true + }); + }); + ws.on("message", resolve); + }); + + assert.strictEqual(Array.isArray(response), true); + const { result } = JSON.parse(Buffer.concat(response)); + assert.strictEqual(result.structLogs.length, ops.length); + } finally { + Connector.BUFFERIFY_THRESHOLD = originalThreshold; + await teardown(); + } + }).timeout(0); + describe("max payload size", () => { let ws: WebSocket; beforeEach(() => { diff --git a/src/packages/ganache/npm-shrinkwrap.json b/src/packages/ganache/npm-shrinkwrap.json index 75b7de5922..3a6dbb0fa7 100644 --- a/src/packages/ganache/npm-shrinkwrap.json +++ b/src/packages/ganache/npm-shrinkwrap.json @@ -1,6 +1,6 @@ { "name": "ganache", - "version": "7.0.0-alpha.0", + "version": "7.0.0-alpha.1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/src/packages/utils/package-lock.json b/src/packages/utils/package-lock.json index 417bdc575d..415105bb0d 100644 --- a/src/packages/utils/package-lock.json +++ b/src/packages/utils/package-lock.json @@ -5,14 +5,14 @@ "requires": true, "dependencies": { 
"@trufflesuite/uws-js-unofficial": { - "version": "18.14.0-unofficial.12", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-18.14.0-unofficial.12.tgz", - "integrity": "sha512-fxM+mskpKW/Wd7Dy9pDJ/jaChCQUq+Ab1X8ZMi9dmC9FGDo7CKacQCNoXh8vHiy+ZFY7n7xvWy8A0I7eFQ0iqQ==", + "version": "20.4.0-unofficial.1", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", + "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", "dev": true, "requires": { - "bufferutil": "4.0.3", - "utf-8-validate": "5.0.5", - "ws": "^8.2.1" + "bufferutil": "4.0.5", + "utf-8-validate": "5.0.7", + "ws": "8.2.3" } }, "@types/mocha": { @@ -138,13 +138,22 @@ "dev": true }, "bufferutil": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz", - "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.5.tgz", + "integrity": "sha512-HTm14iMQKK2FjFLRTM5lAVcyaUzOnqbPtesFIvREgXpJHdQm8bWS+GkQgIkfaBYRHuCnea7w8UVNfwiAQhlr9A==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" + }, + "dependencies": { + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "dev": true, + "optional": true + } } }, "camelcase": { @@ -850,13 +859,22 @@ "dev": true }, "utf-8-validate": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.5.tgz", - "integrity": "sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ==", + "version": "5.0.7", + "resolved": 
"https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.7.tgz", + "integrity": "sha512-vLt1O5Pp+flcArHGIyKEQq883nBt8nN8tVBcoL0qUXj2XT1n7p70yGIq2VK98I5FdZ1YHc0wk/koOnHjnXWk1Q==", "dev": true, "optional": true, "requires": { - "node-gyp-build": "^4.2.0" + "node-gyp-build": "^4.3.0" + }, + "dependencies": { + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "dev": true, + "optional": true + } } }, "which": { @@ -935,9 +953,9 @@ "dev": true }, "ws": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.1.tgz", - "integrity": "sha512-XkgWpJU3sHU7gX8f13NqTn6KQ85bd1WU7noBHTT8fSohx7OS1TPY8k+cyRPCzFkia7C4mM229yeHr1qK9sM4JQ==", + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", "dev": true }, "y18n": { diff --git a/src/packages/utils/package.json b/src/packages/utils/package.json index aae300f885..d1360b5f21 100644 --- a/src/packages/utils/package.json +++ b/src/packages/utils/package.json @@ -51,7 +51,7 @@ "seedrandom": "3.0.5" }, "devDependencies": { - "@trufflesuite/uws-js-unofficial": "18.14.0-unofficial.12", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", "@types/mocha": "8.2.2", "@types/seedrandom": "3.0.1", "cross-env": "7.0.3", diff --git a/src/packages/utils/src/types/connector.ts b/src/packages/utils/src/types/connector.ts index d5b9b6cfcb..aefeab3da6 100644 --- a/src/packages/utils/src/types/connector.ts +++ b/src/packages/utils/src/types/connector.ts @@ -60,14 +60,9 @@ export interface Connector< */ format( result: ResponseFormat, - payload: RequestFormat, - connection: HttpRequest + payload: RequestFormat ): RecognizedString | Generator; - format( - result: ResponseFormat, - payload: 
RequestFormat, - connection: WebSocket - ): RecognizedString; + format(result: ResponseFormat, payload: RequestFormat): RecognizedString; /** * Formats the error response From 55e3ad3d7ca585d2f2141ac8516f16a9c37e8ea1 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 2 Nov 2021 21:45:56 -0400 Subject: [PATCH 42/93] update uws --- src/chains/ethereum/ethereum/package-lock.json | 6 +++--- src/chains/ethereum/ethereum/package.json | 2 +- src/chains/filecoin/filecoin/package-lock.json | 6 +++--- src/chains/filecoin/filecoin/package.json | 2 +- src/chains/tezos/tezos/package-lock.json | 6 +++--- src/chains/tezos/tezos/package.json | 2 +- src/packages/core/package-lock.json | 6 +++--- src/packages/core/package.json | 2 +- src/packages/utils/package-lock.json | 6 +++--- src/packages/utils/package.json | 2 +- 10 files changed, 20 insertions(+), 20 deletions(-) diff --git a/src/chains/ethereum/ethereum/package-lock.json b/src/chains/ethereum/ethereum/package-lock.json index 2f98e506ff..185d6acc30 100644 --- a/src/chains/ethereum/ethereum/package-lock.json +++ b/src/chains/ethereum/ethereum/package-lock.json @@ -486,9 +486,9 @@ } }, "@trufflesuite/uws-js-unofficial": { - "version": "20.4.0-unofficial.1", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", - "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", + "version": "20.4.0-unofficial.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "dev": true, "requires": { "bufferutil": "4.0.5", diff --git a/src/chains/ethereum/ethereum/package.json b/src/chains/ethereum/ethereum/package.json index ec029243bc..fb5f777e59 100644 --- a/src/chains/ethereum/ethereum/package.json +++ 
b/src/chains/ethereum/ethereum/package.json @@ -88,7 +88,7 @@ }, "devDependencies": { "@trufflesuite/typedoc-default-themes": "0.6.1", - "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "@types/encoding-down": "5.0.0", "@types/fs-extra": "9.0.2", "@types/keccak": "3.0.1", diff --git a/src/chains/filecoin/filecoin/package-lock.json b/src/chains/filecoin/filecoin/package-lock.json index 5d60353f4a..8df442ab2b 100644 --- a/src/chains/filecoin/filecoin/package-lock.json +++ b/src/chains/filecoin/filecoin/package-lock.json @@ -830,9 +830,9 @@ "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==" }, "@trufflesuite/uws-js-unofficial": { - "version": "20.4.0-unofficial.1", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", - "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", + "version": "20.4.0-unofficial.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "dev": true, "requires": { "bufferutil": "4.0.5", diff --git a/src/chains/filecoin/filecoin/package.json b/src/chains/filecoin/filecoin/package.json index 49d1caf5eb..b0a2eb14da 100644 --- a/src/chains/filecoin/filecoin/package.json +++ b/src/chains/filecoin/filecoin/package.json @@ -59,7 +59,7 @@ "@filecoin-shipyard/lotus-client-schema": "2.0.0", "@ganache/filecoin-options": "0.1.1-alpha.1", "@ganache/utils": "0.1.1-alpha.1", - "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "@types/bn.js": "5.1.0", "@types/deep-equal": "1.0.1", "@types/levelup": "4.3.0", diff --git a/src/chains/tezos/tezos/package-lock.json 
b/src/chains/tezos/tezos/package-lock.json index 93b2163528..0eae6d0cdc 100644 --- a/src/chains/tezos/tezos/package-lock.json +++ b/src/chains/tezos/tezos/package-lock.json @@ -32,9 +32,9 @@ } }, "@trufflesuite/uws-js-unofficial": { - "version": "20.4.0-unofficial.1", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", - "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", + "version": "20.4.0-unofficial.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "dev": true, "requires": { "bufferutil": "4.0.5", diff --git a/src/chains/tezos/tezos/package.json b/src/chains/tezos/tezos/package.json index 8fad58c019..56944ce64d 100644 --- a/src/chains/tezos/tezos/package.json +++ b/src/chains/tezos/tezos/package.json @@ -48,7 +48,7 @@ }, "devDependencies": { "@trufflesuite/typedoc-default-themes": "0.6.1", - "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "@types/mocha": "8.2.2", "cheerio": "1.0.0-rc.3", "cross-env": "7.0.3", diff --git a/src/packages/core/package-lock.json b/src/packages/core/package-lock.json index 64a60d17d9..1c82063d7f 100644 --- a/src/packages/core/package-lock.json +++ b/src/packages/core/package-lock.json @@ -421,9 +421,9 @@ "dev": true }, "@trufflesuite/uws-js-unofficial": { - "version": "20.4.0-unofficial.1", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", - "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", + "version": "20.4.0-unofficial.2", + "resolved": 
"https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "requires": { "bufferutil": "4.0.5", "utf-8-validate": "5.0.7", diff --git a/src/packages/core/package.json b/src/packages/core/package.json index 7e0bbfddc8..409ff71a23 100644 --- a/src/packages/core/package.json +++ b/src/packages/core/package.json @@ -53,7 +53,7 @@ "@ganache/options": "0.1.1-alpha.1", "@ganache/tezos": "0.1.1-alpha.1", "@ganache/utils": "0.1.1-alpha.1", - "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", + "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "aggregate-error": "3.1.0", "emittery": "0.8.1", "promise.allsettled": "1.0.4" diff --git a/src/packages/utils/package-lock.json b/src/packages/utils/package-lock.json index 415105bb0d..3f62c28a25 100644 --- a/src/packages/utils/package-lock.json +++ b/src/packages/utils/package-lock.json @@ -5,9 +5,9 @@ "requires": true, "dependencies": { "@trufflesuite/uws-js-unofficial": { - "version": "20.4.0-unofficial.1", - "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.1.tgz", - "integrity": "sha512-24XLW1mp1bbr91mymCxRdPhtkEYnt+cKhpIZJZgXRBPjkJh8Pg0ypY+dNYN0yUdTslol/W0YidiljYjAiPpkhw==", + "version": "20.4.0-unofficial.2", + "resolved": "https://registry.npmjs.org/@trufflesuite/uws-js-unofficial/-/uws-js-unofficial-20.4.0-unofficial.2.tgz", + "integrity": "sha512-sDPJI/n1BGvby76iDT5CHfosVGYes0GydONgLpxkU6KPUM24z0E/D7JaPO7DkNmM/to3DhDit02FnMvasSXUdw==", "dev": true, "requires": { "bufferutil": "4.0.5", diff --git a/src/packages/utils/package.json b/src/packages/utils/package.json index d1360b5f21..3af21a3fe5 100644 --- a/src/packages/utils/package.json +++ b/src/packages/utils/package.json @@ -51,7 +51,7 @@ "seedrandom": "3.0.5" }, "devDependencies": { - "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.1", 
+ "@trufflesuite/uws-js-unofficial": "20.4.0-unofficial.2", "@types/mocha": "8.2.2", "@types/seedrandom": "3.0.1", "cross-env": "7.0.3", From 27332abad2f6846923b8aca32e91a14ceb8151e4 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 2 Nov 2021 22:46:26 -0400 Subject: [PATCH 43/93] remove .only --- src/packages/core/tests/server.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/packages/core/tests/server.test.ts b/src/packages/core/tests/server.test.ts index 8df656ce1f..32d61a3542 100644 --- a/src/packages/core/tests/server.test.ts +++ b/src/packages/core/tests/server.test.ts @@ -407,7 +407,7 @@ describe("server", () => { } }); - it.only("responds with transfer-encoding: chunked responses when bufferification is triggered", async () => { + it("responds with transfer-encoding: chunked responses when bufferification is triggered", async () => { const originalThreshold = Connector.BUFFERIFY_THRESHOLD; // This will trigger bufferication in the Ethereum connector // for calls to debug_traceTransaction that return structLogs that have a From 77b104d732fe2940f1b44dd51bb73bda746a649a Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 3 Nov 2021 12:31:12 -0400 Subject: [PATCH 44/93] disable a test --- src/packages/core/src/server.ts | 5 ++++- src/packages/core/src/servers/http-server.ts | 16 +++++++++------- src/packages/core/tests/server.test.ts | 7 ++++++- 3 files changed, 19 insertions(+), 9 deletions(-) diff --git a/src/packages/core/src/server.ts b/src/packages/core/src/server.ts index 096d1eb5f5..43cb4d4280 100644 --- a/src/packages/core/src/server.ts +++ b/src/packages/core/src/server.ts @@ -16,7 +16,10 @@ import allSettled from "promise.allsettled"; allSettled.shim(); import AggregateError from "aggregate-error"; -import type {TemplatedApp, us_listen_socket} from "@trufflesuite/uws-js-unofficial"; +import type { + TemplatedApp, + us_listen_socket +} from 
"@trufflesuite/uws-js-unofficial"; import { App, us_listen_socket_close, diff --git a/src/packages/core/src/servers/http-server.ts b/src/packages/core/src/servers/http-server.ts index 828aab447b..892f3f1a27 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -72,17 +72,19 @@ function prepareCORSResponseHeaders(method: HttpMethods, request: HttpRequest) { function sendResponse( response: HttpResponse, statusCode: HttpResponseCodes, - contentType?: RecognizedString, - data?: RecognizedString, + contentType: RecognizedString | null, + data: RecognizedString | null, writeHeaders: (response: HttpResponse) => void = noop ): void { response.cork(() => { response.writeStatus(statusCode); writeHeaders(response); - if (contentType) { + if (contentType != null) { response.writeHeader("Content-Type", contentType); } - response.end(data); + if (data != null) { + response.end(data); + } }); } @@ -125,7 +127,7 @@ export default class HttpServer { "400 Bad Request" ); } else { - // all other requests don't mean anything to us, so respond with `404 NOT FOUND`... + // all other requests don't mean anything to us, so respond with `404 Not Found`... 
sendResponse( response, HttpResponseCodes.NOT_FOUND, @@ -233,8 +235,8 @@ export default class HttpServer { sendResponse( response, HttpResponseCodes.NO_CONTENT, - void 0, - "", + null, + null, writeHeaders ); }; diff --git a/src/packages/core/tests/server.test.ts b/src/packages/core/tests/server.test.ts index 32d61a3542..6c8fcc9124 100644 --- a/src/packages/core/tests/server.test.ts +++ b/src/packages/core/tests/server.test.ts @@ -691,7 +691,12 @@ describe("server", () => { origin ); assert.strictEqual(resp.header["access-control-max-age"], "600"); - assert.strictEqual(resp.header["content-length"], "0"); + // TODO: enable this check once https://github.com/uNetworking/uWebSockets/issues/1370 is fixed + // assert.strictEqual( + // "content-length" in resp.header, + // false, + // "RFC 7230: A server MUST NOT send a Content-Length header field in any response with a status code of 1xx (Informational) or 204 (No Content)" + // ); assert.strictEqual( resp.header["access-control-allow-credentials"], "true" From b43df570917a1a17c99b33eec9fb109bad4dc57c Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 3 Nov 2021 13:37:53 -0400 Subject: [PATCH 45/93] oops --- src/packages/core/src/servers/http-server.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/packages/core/src/servers/http-server.ts b/src/packages/core/src/servers/http-server.ts index 892f3f1a27..0c113c0cda 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -84,6 +84,8 @@ function sendResponse( } if (data != null) { response.end(data); + } else { + response.end(); } }); } From 458f1c56037f863100a018b758c93ca42b180fb0 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 14:07:21 -0400 Subject: [PATCH 46/93] fix --- .../ethereum/src/forking/state-manager.ts | 2 +- .../ethereum/ethereum/src/forking/trie.ts | 4 ++-- .../ethereum/tests/forking/forking.test.ts | 20 +++++++++---------- 3 files changed, 13 insertions(+), 13 
deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/state-manager.ts b/src/chains/ethereum/ethereum/src/forking/state-manager.ts index abde45e5dd..049d309dc1 100644 --- a/src/chains/ethereum/ethereum/src/forking/state-manager.ts +++ b/src/chains/ethereum/ethereum/src/forking/state-manager.ts @@ -57,7 +57,7 @@ export class ForkStateManager extends StateManager { async _lookupStorageTrie(address: EJS_Address) { // from state trie const account = await this.getAccount(address); - const storageTrie = this._trie.copy(false) as ForkTrie; + const storageTrie = this._trie.copy(true) as ForkTrie; storageTrie.setContext( account.stateRoot, address.buf, diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 065fd3f50d..ad125f3e96 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -276,8 +276,8 @@ export class ForkTrie extends GanacheTrie { secureTrie.address = this.address; secureTrie.blockNumber = this.blockNumber; if (includeCheckpoints && this.isCheckpoint) { - db.checkpoints = [...this.db.checkpoints]; - secureTrie.metadata.checkpoints = this.metadata.checkpoints.slice(0); + secureTrie.db.checkpoints = [...this.db.checkpoints]; + secureTrie.metadata.checkpoints = this.metadata.checkpoints; } return secureTrie; } diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index a382dc70d7..1e380647c8 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -814,11 +814,11 @@ describe("forking", function () { snapshotValues: number[] ) { for await (const snapshotValue of snapshotValues) { - // set value0 to {snapshotValue} - await set(localProvider, 0, snapshotValue); + // set value1 to {snapshotValue} + await set(localProvider, 1, snapshotValue); const message = await 
localProvider.once("message"); const blockNumber = parseInt(message.data.result.number, 16); - const checkValue = await get(localProvider, "value0", blockNumber); + const checkValue = await get(localProvider, "value1", blockNumber); assert.strictEqual( Quantity.from(checkValue).toNumber(), snapshotValue, @@ -837,13 +837,13 @@ describe("forking", function () { }); const subId = await localProvider.send("eth_subscribe", ["newHeads"]); - // set value0 to {initialValue} (delete it) - await set(localProvider, 0, initialValue); + // set value1 to {initialValue} (delete it) + await set(localProvider, 1, initialValue); const message = await localProvider.once("message"); const initialBlockNumber = parseInt(message.data.result.number, 16); assert.strictEqual( Quantity.from( - await get(localProvider, "value0", initialBlockNumber) + await get(localProvider, "value1", initialBlockNumber) ).toNumber(), initialValue ); // sanity check @@ -859,7 +859,7 @@ describe("forking", function () { assert.strictEqual( Quantity.from( - await get(localProvider, "value0", initialBlockNumber) + await get(localProvider, "value1", initialBlockNumber) ).toNumber(), initialValue, "value was not reverted to `initialValue` after evm_revert" @@ -900,13 +900,13 @@ describe("forking", function () { const subId = await remoteProvider.send("eth_subscribe", [ "newHeads" ]); - // set the remoteProvider's initialValue to {remoteInitialValue} - await set(remoteProvider, 0, remoteInitialValue); + // set the remoteProvider's value1 initialValue to {remoteInitialValue} + await set(remoteProvider, 1, remoteInitialValue); const message = await remoteProvider.once("message"); await remoteProvider.send("eth_unsubscribe", [subId]); const blockNumber = parseInt(message.data.result.number, 16); assert.strictEqual( - parseInt(await get(remoteProvider, "value0", blockNumber), 16), + parseInt(await get(remoteProvider, "value1", blockNumber), 16), remoteInitialValue ); // sanity check to make sure our initial conditions 
are correct From 18afb3ec2cdf432a5ad845051320d5eed31b5ee0 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 14:09:31 -0400 Subject: [PATCH 47/93] add clarifying comment --- .../src/forking/persistent-cache/persistent-cache.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts index 9b09b74ec3..a33075fa91 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/persistent-cache.ts @@ -237,7 +237,10 @@ export class PersistentCache { targetBlock, allKnownDescendants ) - .catch(_ => {}) // if it fails, it fails. + // we don't care if it fails because this is an optimization that only + // matters for _future_ runs of ganache for blocks beyond our current fork + // block + .catch(_ => {}) .finally(() => { this._reBalancePromise = null; }); From b984af7ab07693c157e4afa2d71c619f118fdf45 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 14:30:28 -0400 Subject: [PATCH 48/93] fix options --- src/chains/ethereum/options/src/fork-options.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index f493e694c3..55de676d65 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -302,13 +302,14 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` return; } }, - defaultDescription: `Latest block number - 5` + defaultDescription: `Latest block number` //implies: ["url"] }, blockAge: { normalize: rawInput => BigInt(rawInput), cliDescription: `Minimum age in seconds of the "latest" block. 
If the "latest" block is younger than this amount the block immediately preceding the latest block will be used instead.`, - default: () => 1n, + default: () => 5n, + defaultDescription: "5", cliType: "number" }, username: { From 7099e9c2043156d09c228c1f9310cd599eb24d04 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 14:38:41 -0400 Subject: [PATCH 49/93] clarifying comment --- src/chains/ethereum/ethereum/src/forking/fork.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 58369f6f4d..db55f3e6f3 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -168,7 +168,7 @@ export class Fork { const options = this.#options; if (options.deleteCache) await PersistentCache.deleteDb(); if (options.noCache === false) { - // ignore cache start up errors as it is possible there is an open + // ignore cache start up errors as it is possible there is an `open` // conflict if another ganache fork is running at the time this one is // started. 
The cache isn't required (though performance will be // degraded without it) From f9ad18151bd9a60bd083e615eafea77121067ae4 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 16:54:07 -0400 Subject: [PATCH 50/93] make sure bufferify creates valid JSON --- .../ethereum/ethereum/src/helpers/bufferify.ts | 16 ++++++++++------ .../ethereum/ethereum/tests/connector.test.ts | 9 ++++++++- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 7c5ad29fb1..1b34d7afb4 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -48,6 +48,7 @@ function* arrayToBuffer(value: any[]) { return; } else { let yieldPrefix = true; + // sends the first array value: for (const chunkified of bufferify(value[0], "0")) { // if the value ends up being nothing (undefined), return null const jsonVal = chunkified.length === 0 ? 
NULL : chunkified; @@ -57,6 +58,7 @@ function* arrayToBuffer(value: any[]) { } yield jsonVal; } + // sends the rest of the array values: if (l > 1) { for (let i = 1; i < l; i++) { let yieldPrefix = true; @@ -103,7 +105,9 @@ function* objectToBuffer(value: any, nameOrIndex: string) { let i = 0; yield CURLY_BRACKET_OPEN; - // find the first non-null property to start the object + // Find the first non-null property to start the object + // The difference betwwen the first property and the rest is is that the + // first property is *not* preceded by a comma while (i < l) { const [key, value] = entries[i]; i++; @@ -113,18 +117,18 @@ function* objectToBuffer(value: any, nameOrIndex: string) { // if the chunkified value ends up being nothing (undefined) ignore // the property const chunkLength = chunkified.length; - if (chunkLength === 0) { - continue; - } + if (chunkLength === 0) continue; if (yieldPrefix) { yield Buffer.concat([stringToQuotedBuffer(key), COLON]); - yieldPrefix = null; + yieldPrefix = false; } yield chunkified; } - break; + // if we sent the prefix we found a non-undefined entry and should break + if (yieldPrefix === false) break; } + // sends the rest of the object fields if (l > 1) { for (; i < l; i++) { const [key, value] = entries[i]; diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index 1e41e58ef0..e153df55e1 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -2,7 +2,7 @@ import assert from "assert"; import { Executor, RequestCoordinator } from "@ganache/utils"; import { Connector } from "../"; -describe("connector", () => { +describe.only("connector", () => { const primitives = { string: "string", empty: "empty", @@ -18,6 +18,13 @@ describe("connector", () => { // `structLogs` triggers an optimization in the connector structLogs: [{ ...primitives }, ...Object.values(primitives)], emptyArray: [], + // 
notDefined and alsoNotDefined should be removed when JSON stringified/bufferified + trickyObject: { + notDefined: undefined, + defined: true, + alsoNotDefined: undefined + }, + trickyArray: [...Object.values(primitives)], object: { ...primitives, emptyObject: {}, From 7834d71a58589a34b826529ab6779b5b1073d731 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 16:54:25 -0400 Subject: [PATCH 51/93] only --- src/chains/ethereum/ethereum/tests/connector.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index e153df55e1..d1b89ff905 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -2,7 +2,7 @@ import assert from "assert"; import { Executor, RequestCoordinator } from "@ganache/utils"; import { Connector } from "../"; -describe.only("connector", () => { +describe("connector", () => { const primitives = { string: "string", empty: "empty", From c34672d420b11814c992a47eb4a13cccd3e8a4de Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 18:35:51 -0400 Subject: [PATCH 52/93] make build work --- src/packages/ganache/npm-shrinkwrap.json | 6 ++++++ src/packages/ganache/package.json | 3 ++- src/packages/ganache/webpack/webpack.browser.config.ts | 6 ++++-- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/packages/ganache/npm-shrinkwrap.json b/src/packages/ganache/npm-shrinkwrap.json index 3a6dbb0fa7..1bf74f5fae 100644 --- a/src/packages/ganache/npm-shrinkwrap.json +++ b/src/packages/ganache/npm-shrinkwrap.json @@ -1510,6 +1510,12 @@ "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", "dev": true }, + "isomorphic-ws": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", + "integrity": 
"sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", + "dev": true + }, "jest-worker": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-26.6.2.tgz", diff --git a/src/packages/ganache/package.json b/src/packages/ganache/package.json index 0df1241824..a11e2ab984 100644 --- a/src/packages/ganache/package.json +++ b/src/packages/ganache/package.json @@ -31,7 +31,7 @@ "tsc": "ttsc --build", "test": "nyc npm run mocha", "mocha": "cross-env TS_NODE_COMPILER=ttypescript TS_NODE_FILES=true mocha --exit --check-leaks --throw-deprecation --trace-warnings --require ts-node/register 'tests/**/*.test.ts'", - "start": "cross-env TS_NODE_COMPILER=ttypescript node --require ts-node/register --inspect src/cli.ts" + "start": "cross-env TS_NODE_COMPILER=ttypescript node --require ts-node/register src/cli.ts" }, "bugs": { "url": "https://github.com/trufflesuite/ganache/issues" @@ -62,6 +62,7 @@ "cross-env": "7.0.3", "crypto-browserify": "3.12.0", "events": "3.2.0", + "isomorphic-ws": "4.0.1", "level-js": "5.0.2", "mcl-wasm": "0.7.8", "mocha": "8.4.0", diff --git a/src/packages/ganache/webpack/webpack.browser.config.ts b/src/packages/ganache/webpack/webpack.browser.config.ts index 5c0e4ff9f5..ecaefbc2e5 100644 --- a/src/packages/ganache/webpack/webpack.browser.config.ts +++ b/src/packages/ganache/webpack/webpack.browser.config.ts @@ -24,7 +24,7 @@ const config: webpack.Configuration = merge({}, base, { alias: { "tmp-promise": require.resolve("./polyfills/browser-tmp-promise"), "bigint-buffer": require.resolve("./polyfills/browser-bigint-buffer"), - "crypto": require.resolve("./polyfills/browser-crypto"), + crypto: require.resolve("./polyfills/browser-crypto"), // replace leveldown with a browser version leveldown: require.resolve("level-js/"), // browser version can't start a server, so just remove the websocket server since it can't work anyway @@ -33,7 +33,9 @@ const config: webpack.Configuration = 
merge({}, base, { // `url` is already a global property in browser url: false, // mcl-wasm may be needed when creating a new @ethereumjs/vm and requires a browser version for browsers - "mcl-wasm": require.resolve("mcl-wasm/browser") + "mcl-wasm": require.resolve("mcl-wasm/browser"), + // ws doesn't work in the browser, isomorphic-ws does + ws: require.resolve("isomorphic-ws/") } }, output: { From 348ec576896eaa7410391db49cc4aef46ac92262 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Thu, 4 Nov 2021 18:36:32 -0400 Subject: [PATCH 53/93] add pre-fork block trie optimization --- .../ethereum/ethereum/src/forking/trie.ts | 32 +++++++++++++++---- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index ad125f3e96..fa8fbcf3b7 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -40,6 +40,8 @@ function isEqualKey(encodedKey: Buffer, address: Buffer, key: Buffer) { export class ForkTrie extends GanacheTrie { private accounts: AccountManager; private address: Buffer | null = null; + private preForkBlock = false; + private forkBlockNumber: bigint; public blockNumber: Quantity; private metadata: CheckpointDB; @@ -48,6 +50,7 @@ export class ForkTrie extends GanacheTrie { this.accounts = blockchain.accounts; this.blockNumber = this.blockchain.fallback.blockNumber; + this.forkBlockNumber = this.blockNumber.toBigInt(); if (MetadataSingletons.has(db)) { this.metadata = new CheckpointDB(MetadataSingletons.get(db)); @@ -81,6 +84,7 @@ export class ForkTrie extends GanacheTrie { (this as any)._root = stateRoot; this.address = address; this.blockNumber = blockNumber; + this.preForkBlock = blockNumber.toBigInt() < this.forkBlockNumber; } async put(key: Buffer, val: Buffer): Promise { @@ -156,14 +160,24 @@ export class ForkTrie extends GanacheTrie { async del(key: Buffer) { await this.lock.wait(); - const 
delKey = this.createDelKey(key); - const metaDataPutPromise = this.metadata.put(delKey, DELETED_VALUE); + // we only track if the key was deleted (locally) for state tries _after_ + // the fork block because we can't possibly delete keys _before_ the fork + // block, since those happened before ganache was even started + // This little optimization can debug_traceTransaction time _in half_. + if (!this.preForkBlock) { + const delKey = this.createDelKey(key); + const metaDataPutPromise = this.metadata.put(delKey, DELETED_VALUE); - const hash = keccak(key); - const { node, stack } = await this.findPath(hash); - if (node) await this._deleteNode(hash, stack); + const hash = keccak(key); + const { node, stack } = await this.findPath(hash); + if (node) await this._deleteNode(hash, stack); - await metaDataPutPromise; + await metaDataPutPromise; + } else { + const hash = keccak(key); + const { node, stack } = await this.findPath(hash); + if (node) await this._deleteNode(hash, stack); + } this.lock.signal(); } @@ -254,7 +268,11 @@ export class ForkTrie extends GanacheTrie { // since we don't have this key in our local trie check if we've have // deleted it (locally) - if (await this.keyWasDeleted(key)) return null; + // we only check if the key was deleted (locally) for state tries _after_ + // the fork block because we can't possibly delete keys _before_ the fork + // block, since those happened before ganache was even started + // This little optimization can debug_traceTransaction time _in half_. 
+ if (!this.preForkBlock && (await this.keyWasDeleted(key))) return null; if (this.address === null) { // if the trie context's address isn't set, our key represents an address: From ffb1696d846074d19105be1c5da1add079444ff9 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 11:35:45 -0400 Subject: [PATCH 54/93] revert a change --- src/packages/ganache/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/packages/ganache/package.json b/src/packages/ganache/package.json index a11e2ab984..62cd6f50dc 100644 --- a/src/packages/ganache/package.json +++ b/src/packages/ganache/package.json @@ -31,7 +31,7 @@ "tsc": "ttsc --build", "test": "nyc npm run mocha", "mocha": "cross-env TS_NODE_COMPILER=ttypescript TS_NODE_FILES=true mocha --exit --check-leaks --throw-deprecation --trace-warnings --require ts-node/register 'tests/**/*.test.ts'", - "start": "cross-env TS_NODE_COMPILER=ttypescript node --require ts-node/register src/cli.ts" + "start": "cross-env TS_NODE_COMPILER=ttypescript node --require ts-node/register --inspect src/cli.ts" }, "bugs": { "url": "https://github.com/trufflesuite/ganache/issues" From 2b4041df2d182901cb771f85b616c12e8bdeb356 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 11:39:26 -0400 Subject: [PATCH 55/93] update comment --- src/chains/ethereum/ethereum/src/helpers/bufferify.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 1b34d7afb4..cfd2b0c99c 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -158,7 +158,8 @@ function* objectToBuffer(value: any, nameOrIndex: string) { * * This is a hack. It: * * Does not support circular references. 
- * * Does not support double quotes within Object keys; just stick with ascii + * * Does not support double quotes within Object keys; only alphanumerics are + * considered safe to use * * Probably doesn't support non-ASCII characters * * Is only tested on transaction traces * From 560abc4e6d8ab39834d27eb53661d9dce0c899fb Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 11:54:31 -0400 Subject: [PATCH 56/93] clarify comment --- src/chains/ethereum/ethereum/src/helpers/bufferify.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index cfd2b0c99c..a2e44c3ca0 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -31,8 +31,7 @@ function numberToBuffer(value: number) { function stringToQuotedBuffer(value: string) { const length = value.length; if (length > 0) { - const l = length + 2; - const buf = Buffer.allocUnsafe(l); + const buf = Buffer.allocUnsafe(length + 2); // + 2 for the quotation marks buf[0] = 34; // QUOTE buf[length + 1] = 34; // QUOTE (buf as any).utf8Write(value, 1, length); From acd10cd1084e0ca65a27f26531b98ffcc4206020 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 12:07:12 -0400 Subject: [PATCH 57/93] remove unncessary check --- src/chains/ethereum/ethereum/src/helpers/bufferify.ts | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index a2e44c3ca0..670540c47c 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -40,22 +40,18 @@ function stringToQuotedBuffer(value: string) { return QUOTE_PAIR; } } + function* arrayToBuffer(value: any[]) { const l = value.length; if (l === 0) { yield SQUARE_BRACKET_PAIR; 
return; } else { - let yieldPrefix = true; + yield SQUARE_BRACKET_OPEN; // sends the first array value: for (const chunkified of bufferify(value[0], "0")) { // if the value ends up being nothing (undefined), return null - const jsonVal = chunkified.length === 0 ? NULL : chunkified; - if (yieldPrefix) { - yield SQUARE_BRACKET_OPEN; - yieldPrefix = false; - } - yield jsonVal; + yield chunkified.length === 0 ? NULL : chunkified; } // sends the rest of the array values: if (l > 1) { @@ -80,6 +76,7 @@ function* arrayToBuffer(value: any[]) { return; } } + function bufferToQuotedBuffer(value: Buffer) { const length = value.length; const buf = Buffer.allocUnsafe(length + 2); From a6fcf37cc14209b037d8779bb247325a6c9cbe1a Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 12:09:37 -0400 Subject: [PATCH 58/93] simplify --- .../ethereum/ethereum/src/helpers/bufferify.ts | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 670540c47c..75225849e5 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -56,19 +56,10 @@ function* arrayToBuffer(value: any[]) { // sends the rest of the array values: if (l > 1) { for (let i = 1; i < l; i++) { - let yieldPrefix = true; + yield COMMA; for (const chunkified of bufferify(value[i], i.toString())) { - const chunkLength = chunkified.length; - if (yieldPrefix) { - yield COMMA; - yieldPrefix = false; - } - if (chunkLength === 0) { - // if the value ends up being nothing (undefined), return null - yield NULL; - } else { - yield chunkified; - } + // if the value ends up being nothing (undefined), return null + yield chunkified.length === 0 ? 
NULL : chunkified; } } } From 5441044043750cc4fc981a2cdfe78a08bd8d3913 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 12:11:23 -0400 Subject: [PATCH 59/93] simplify more --- .../ethereum/ethereum/src/helpers/bufferify.ts | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 75225849e5..f79cb22122 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -54,13 +54,11 @@ function* arrayToBuffer(value: any[]) { yield chunkified.length === 0 ? NULL : chunkified; } // sends the rest of the array values: - if (l > 1) { - for (let i = 1; i < l; i++) { - yield COMMA; - for (const chunkified of bufferify(value[i], i.toString())) { - // if the value ends up being nothing (undefined), return null - yield chunkified.length === 0 ? NULL : chunkified; - } + for (let i = 1; i < l; i++) { + yield COMMA; + for (const chunkified of bufferify(value[i], i.toString())) { + // if the value ends up being nothing (undefined), return null + yield chunkified.length === 0 ? 
NULL : chunkified; } } yield SQUARE_BRACKET_CLOSE; From 9c060ec931d236e220e5b471a7a1d2a5c5ecb2c6 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 12:29:59 -0400 Subject: [PATCH 60/93] refactor bufferify 's objectToBuffer --- .../ethereum/src/helpers/bufferify.ts | 75 +++++++------------ 1 file changed, 28 insertions(+), 47 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index f79cb22122..13a8bb96f7 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -75,65 +75,46 @@ function bufferToQuotedBuffer(value: Buffer) { return buf; } -function* objectToBuffer(value: any, nameOrIndex: string) { - if ("toJSON" in value) { - yield* bufferify(value.toJSON(nameOrIndex), nameOrIndex); +function* objectToBuffer(obj: any, nameOrIndex: string) { + if ("toJSON" in obj) { + yield* bufferify(obj.toJSON(nameOrIndex), nameOrIndex); return; } - const entries = Object.entries(value); - const l = entries.length; - if (l === 0) { - yield CURLY_BRACKET_PAIR; - return; - } else { - let i = 0; - yield CURLY_BRACKET_OPEN; - - // Find the first non-null property to start the object - // The difference betwwen the first property and the rest is is that the - // first property is *not* preceded by a comma - while (i < l) { - const [key, value] = entries[i]; - i++; + let yieldedOpen = false; + for (const key in obj) { + const value = obj[key]; - let yieldPrefix = true; - for (const chunkified of bufferify(value, key)) { - // if the chunkified value ends up being nothing (undefined) ignore - // the property - const chunkLength = chunkified.length; - if (chunkLength === 0) continue; + let yieldPrefix = true; + for (const chunkified of bufferify(value, key)) { + // if the chunkified value ends up being nothing (undefined) ignore + // the property + const chunkLength = chunkified.length; + if (chunkLength === 0) continue; 
- if (yieldPrefix) { - yield Buffer.concat([stringToQuotedBuffer(key), COLON]); - yieldPrefix = false; + // only yield the prefix once per `key` + if (yieldPrefix) { + yieldPrefix = false; + const quotedKey = stringToQuotedBuffer(key); + if (!yieldedOpen) { + yield Buffer.concat([CURLY_BRACKET_OPEN, quotedKey, chunkified]); + yieldedOpen = true; + } else { + yield Buffer.concat([COMMA, quotedKey, COLON, chunkified]); } + } else { yield chunkified; } - // if we sent the prefix we found a non-undefined entry and should break - if (yieldPrefix === false) break; } - // sends the rest of the object fields - if (l > 1) { - for (; i < l; i++) { - const [key, value] = entries[i]; - let yieldPrefix = true; - for (const chunkified of bufferify(value, key)) { - // if the chunkified value ends up being nothing (undefined) ignore - // the property - const chunkLength = chunkified.length; - if (chunkLength === 0) continue; + } - if (yieldPrefix) { - yield Buffer.concat([COMMA, stringToQuotedBuffer(key), COLON]); - yieldPrefix = false; - } - yield chunkified; - } - } - } + // if we yielded the + if (yieldedOpen) { yield CURLY_BRACKET_CLOSE; return; + } else { + yield CURLY_BRACKET_PAIR; + return; } } From d1cf676c184fea41fe41deb91e1bb5fa5d509b73 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 12:54:42 -0400 Subject: [PATCH 61/93] add more bufferification test props --- src/chains/ethereum/ethereum/src/helpers/bufferify.ts | 7 ++++++- src/chains/ethereum/ethereum/tests/connector.test.ts | 2 ++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 13a8bb96f7..0dd4bfbce2 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -97,7 +97,12 @@ function* objectToBuffer(obj: any, nameOrIndex: string) { yieldPrefix = false; const quotedKey = stringToQuotedBuffer(key); if 
(!yieldedOpen) { - yield Buffer.concat([CURLY_BRACKET_OPEN, quotedKey, chunkified]); + yield Buffer.concat([ + CURLY_BRACKET_OPEN, + quotedKey, + COLON, + chunkified + ]); yieldedOpen = true; } else { yield Buffer.concat([COMMA, quotedKey, COLON, chunkified]); diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index d1b89ff905..534226ab93 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -24,6 +24,8 @@ describe("connector", () => { defined: true, alsoNotDefined: undefined }, + allUndefinedArray: [undefined, undefined, undefined], + allUndefinedObject: { uno: undefined, dos: undefined, tres: undefined }, trickyArray: [...Object.values(primitives)], object: { ...primitives, From cadbd5d810b3f4e1f7d87b6559e65788665d269a Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 12:55:44 -0400 Subject: [PATCH 62/93] fix comment typo --- src/chains/ethereum/ethereum/tests/connector.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/tests/connector.test.ts b/src/chains/ethereum/ethereum/tests/connector.test.ts index 534226ab93..80ee62eb66 100644 --- a/src/chains/ethereum/ethereum/tests/connector.test.ts +++ b/src/chains/ethereum/ethereum/tests/connector.test.ts @@ -37,7 +37,7 @@ describe("connector", () => { }; let connector: Connector; // an arbitrary payload - // `debug_traceTransaction` is triggers an optimization in the connector + // `debug_traceTransaction` triggers an optimization in the connector const payload = { jsonrpc: "2.0", method: "debug_traceTransaction", From 3903441d874fa025fe27fc1fd747281410337610 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 15:36:28 -0400 Subject: [PATCH 63/93] code review changes --- .../ethereum/src/helpers/bufferify.ts | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git 
a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts index 0dd4bfbce2..21c620d19b 100644 --- a/src/chains/ethereum/ethereum/src/helpers/bufferify.ts +++ b/src/chains/ethereum/ethereum/src/helpers/bufferify.ts @@ -18,10 +18,10 @@ const isObj = (val: any) => toStr.call(val) === "[object Object]"; function numberToBuffer(value: number) { const str = value.toString(); - const l = str.length; - if (l > 0) { - const buf = Buffer.allocUnsafe(l); - (buf as any).utf8Write(str, 0, l); + const { length } = str; + if (length > 0) { + const buf = Buffer.allocUnsafe(length); + (buf as any).utf8Write(str, 0, length); return buf; } else { return _EMPTY; @@ -29,11 +29,11 @@ function numberToBuffer(value: number) { } function stringToQuotedBuffer(value: string) { - const length = value.length; + const { length } = value; if (length > 0) { const buf = Buffer.allocUnsafe(length + 2); // + 2 for the quotation marks - buf[0] = 34; // QUOTE - buf[length + 1] = 34; // QUOTE + buf[0] = 34; // DOUBLE QUOTE + buf[length + 1] = 34; // DOUBLE QUOTE (buf as any).utf8Write(value, 1, length); return buf; } else { @@ -42,8 +42,8 @@ function stringToQuotedBuffer(value: string) { } function* arrayToBuffer(value: any[]) { - const l = value.length; - if (l === 0) { + const { length } = value; + if (length === 0) { yield SQUARE_BRACKET_PAIR; return; } else { @@ -54,7 +54,7 @@ function* arrayToBuffer(value: any[]) { yield chunkified.length === 0 ? 
NULL : chunkified; } // sends the rest of the array values: - for (let i = 1; i < l; i++) { + for (let i = 1; i < length; i++) { yield COMMA; for (const chunkified of bufferify(value[i], i.toString())) { // if the value ends up being nothing (undefined), return null @@ -67,7 +67,7 @@ function* arrayToBuffer(value: any[]) { } function bufferToQuotedBuffer(value: Buffer) { - const length = value.length; + const { length } = value; const buf = Buffer.allocUnsafe(length + 2); buf[0] = 34; value.copy(buf, 1, 0, length); From 423945be58a2400736c0e7fdb3314a11c478f3e6 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 15:44:47 -0400 Subject: [PATCH 64/93] remove Buffer import from tests --- src/chains/ethereum/transaction/tests/index.test.ts | 1 - src/packages/core/tests/server.test.ts | 1 - 2 files changed, 2 deletions(-) diff --git a/src/chains/ethereum/transaction/tests/index.test.ts b/src/chains/ethereum/transaction/tests/index.test.ts index db6622b397..e9fe2db8db 100644 --- a/src/chains/ethereum/transaction/tests/index.test.ts +++ b/src/chains/ethereum/transaction/tests/index.test.ts @@ -18,7 +18,6 @@ import Wallet from "../../ethereum/src/wallet"; import { decode } from "@ganache/rlp"; import { EthereumOptionsConfig } from "../../options"; import { BUFFER_EMPTY, Quantity } from "@ganache/utils"; -import { Buffer } from "buffer"; describe("@ganache/ethereum-transaction", async () => { const common = Common.forCustomChain( diff --git a/src/packages/core/tests/server.test.ts b/src/packages/core/tests/server.test.ts index 6c8fcc9124..10bc9cf303 100644 --- a/src/packages/core/tests/server.test.ts +++ b/src/packages/core/tests/server.test.ts @@ -18,7 +18,6 @@ import { PromiEvent } from "@ganache/utils"; import { promisify } from "util"; import { ServerOptions } from "../src/options"; import { Connector, Provider as EthereumProvider } from "@ganache/ethereum"; -import { Buffer } from "buffer"; const IS_WINDOWS = process.platform === "win32"; From 
57e63cbf51c8842f9e8fb8d310dfbed05df21a04 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 15:59:04 -0400 Subject: [PATCH 65/93] add comments about ws-server --- src/packages/core/src/servers/ws-server.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index 2578fdb8f0..861bc60359 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -135,6 +135,8 @@ export default class WebsocketServer { if (done) { ws.send(first, useBinary); } else { + // fragment send: https://github.com/uNetworking/uWebSockets.js/issues/635 + // send the first fragment ws.sendFirstFragment(first, useBinary, COMPRESS); From e56e9af1aa07c6285a6f2ceade31dbffd066ffd1 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 16:53:05 -0400 Subject: [PATCH 66/93] clarify variable name --- src/packages/core/src/servers/ws-server.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index 861bc60359..bec76551a7 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -126,7 +126,7 @@ export default class WebsocketServer { const localData = data; ws.cork(() => { const { value: first } = localData.next(); - const COMPRESS = false; + const shouldCompress = false; // get the second fragment, if there is one let { value: next, done } = localData.next(); @@ -138,18 +138,18 @@ export default class WebsocketServer { // fragment send: https://github.com/uNetworking/uWebSockets.js/issues/635 // send the first fragment - ws.sendFirstFragment(first, useBinary, COMPRESS); + ws.sendFirstFragment(first, useBinary, shouldCompress); // Now send the rest of the data piece by piece. 
// We lag behind by one fragment because the last fragment needs // to be sent via the `sendLastFragment` method let prev = next; for (next of localData) { - ws.sendFragment(prev, COMPRESS); + ws.sendFragment(prev, shouldCompress); prev = next; } // finally, send the last fragment - ws.sendLastFragment(next, COMPRESS); + ws.sendLastFragment(next, shouldCompress); } }); } else { From 567e24f13d86deed7b54bcfb41d2d9c523f543cf Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 17:00:36 -0400 Subject: [PATCH 67/93] move this weird line up --- src/packages/core/src/servers/ws-server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index bec76551a7..9c0387622b 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -125,8 +125,8 @@ export default class WebsocketServer { if (types.isGeneratorObject(data)) { const localData = data; ws.cork(() => { - const { value: first } = localData.next(); const shouldCompress = false; + const { value: first } = localData.next(); // get the second fragment, if there is one let { value: next, done } = localData.next(); From 1f36cf9303a1eb5bd01b9b63083dff16e116779f Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 17:02:50 -0400 Subject: [PATCH 68/93] clarifying comments again --- src/packages/core/src/servers/ws-server.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index 9c0387622b..a2439541e5 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -129,6 +129,10 @@ export default class WebsocketServer { const { value: first } = localData.next(); // get the second fragment, if there is one + // Note: we lag behind by one fragment because the last fragment + // needs to be sent via the 
`sendLastFragment` method. + // This value acts as a lookahead so we know if we are at the last + // value or not. let { value: next, done } = localData.next(); // if there wasn't a second fragment, just send it the usual way. @@ -141,8 +145,6 @@ export default class WebsocketServer { ws.sendFirstFragment(first, useBinary, shouldCompress); // Now send the rest of the data piece by piece. - // We lag behind by one fragment because the last fragment needs - // to be sent via the `sendLastFragment` method let prev = next; for (next of localData) { ws.sendFragment(prev, shouldCompress); From 7afa74f9422435badd3f2c61fad3729ee31c43cc Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 17:03:25 -0400 Subject: [PATCH 69/93] move it again --- src/packages/core/src/servers/ws-server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/packages/core/src/servers/ws-server.ts b/src/packages/core/src/servers/ws-server.ts index a2439541e5..148bb9f693 100644 --- a/src/packages/core/src/servers/ws-server.ts +++ b/src/packages/core/src/servers/ws-server.ts @@ -125,7 +125,6 @@ export default class WebsocketServer { if (types.isGeneratorObject(data)) { const localData = data; ws.cork(() => { - const shouldCompress = false; const { value: first } = localData.next(); // get the second fragment, if there is one @@ -140,6 +139,7 @@ export default class WebsocketServer { ws.send(first, useBinary); } else { // fragment send: https://github.com/uNetworking/uWebSockets.js/issues/635 + const shouldCompress = false; // send the first fragment ws.sendFirstFragment(first, useBinary, shouldCompress); From 7ad162ded964cdf3cb3c23c8304f6591d07ba898 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 17:10:26 -0400 Subject: [PATCH 70/93] better with brackets --- src/packages/core/src/servers/http-server.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/packages/core/src/servers/http-server.ts 
b/src/packages/core/src/servers/http-server.ts index 0c113c0cda..5b7a361242 100644 --- a/src/packages/core/src/servers/http-server.ts +++ b/src/packages/core/src/servers/http-server.ts @@ -191,8 +191,9 @@ export default class HttpServer { writeHeaders(response); response.writeHeader("Content-Type", ContentTypes.JSON); - for (const datum of data) + for (const datum of data) { response.write(datum as RecognizedString); + } response.end(); }); } else { From 30747b24cc2beceb7753c085be34f1cbc01bd2cc Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 17:34:33 -0400 Subject: [PATCH 71/93] _ --- src/chains/ethereum/ethereum/src/forking/trie.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index fa8fbcf3b7..9f94eb41e3 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -33,7 +33,7 @@ const LEVELDOWN_OPTIONS = { function isEqualKey(encodedKey: Buffer, address: Buffer, key: Buffer) { const decodedKey = lexico.decode(encodedKey); - const [, keyAddress, deletedKey] = decodedKey; + const [_, keyAddress, deletedKey] = decodedKey; return keyAddress.equals(address) && deletedKey.equals(key); } From a3d8be4a77d42e6472fdc76d5e8c526f69841e0a Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 18:57:47 -0400 Subject: [PATCH 72/93] add clarifying comment --- .../ethereum/ethereum/src/forking/state-manager.ts | 2 ++ src/chains/ethereum/ethereum/src/forking/trie.ts | 11 +++++++++++ 2 files changed, 13 insertions(+) diff --git a/src/chains/ethereum/ethereum/src/forking/state-manager.ts b/src/chains/ethereum/ethereum/src/forking/state-manager.ts index 049d309dc1..cc5efa774e 100644 --- a/src/chains/ethereum/ethereum/src/forking/state-manager.ts +++ b/src/chains/ethereum/ethereum/src/forking/state-manager.ts @@ -63,6 +63,8 @@ export class ForkStateManager extends StateManager { 
address.buf, storageTrie.blockNumber ); + // we copy checkpoints over only for the metadata checkpoints, not the trie + // checkpoints. storageTrie.db.checkpoints = []; return storageTrie; } diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 9f94eb41e3..8314ab3a7b 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -295,6 +295,17 @@ export class ForkTrie extends GanacheTrie { secureTrie.blockNumber = this.blockNumber; if (includeCheckpoints && this.isCheckpoint) { secureTrie.db.checkpoints = [...this.db.checkpoints]; + + // Our `metadata.checkpoints` needs to be the same reference to the + // parent's metadata.checkpoints so that we can continue to track these + // changes on this copy, otherwise deletions made to a contract's storage + // may not be tracked. + // Note: db.checkpoints don't need this same treatment because of the way + // the statemanager uses a contract's trie: it doesn't ever save to it. + // Instead, it saves to it's own internal cache, which eventually gets + // reverted or committed (flushed). Our metadata doesn't utilize a central + // cache. + // secureTrie.metadata.checkpoints = this.metadata.checkpoints; secureTrie.metadata.checkpoints = this.metadata.checkpoints; } return secureTrie; From bb6c7eb6875bf2c473ca6cb0bceb36f1626a8f1e Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Fri, 5 Nov 2021 20:29:17 -0400 Subject: [PATCH 73/93] fix typo --- src/chains/ethereum/ethereum/src/forking/trie.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 8314ab3a7b..828023ec01 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -302,7 +302,7 @@ export class ForkTrie extends GanacheTrie { // may not be tracked. 
// Note: db.checkpoints don't need this same treatment because of the way // the statemanager uses a contract's trie: it doesn't ever save to it. - // Instead, it saves to it's own internal cache, which eventually gets + // Instead, it saves to its own internal cache, which eventually gets // reverted or committed (flushed). Our metadata doesn't utilize a central // cache. // secureTrie.metadata.checkpoints = this.metadata.checkpoints; From 2c06a7083e1429b8199cb7e50d908efc7b9d4edf Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Mon, 8 Nov 2021 17:53:13 -0500 Subject: [PATCH 74/93] partially switch to block confirmations for "latest" (some other relevant changes are pending a review submission) --- .../ethereum/ethereum/src/forking/fork.ts | 38 +++++++++---------- .../ethereum/tests/forking/helpers.ts | 2 +- .../ethereum/options/src/fork-options.ts | 7 ++-- 3 files changed, 23 insertions(+), 24 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index db55f3e6f3..3b4067ecae 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -82,9 +82,9 @@ export class Fork { } } - #setCommonFromChain = async () => { + #setCommonFromChain = async (chainIdPromise: Promise) => { const [chainId, networkId] = await Promise.all([ - fetchChainId(this), + chainIdPromise, fetchNetworkId(this) ]); @@ -101,27 +101,27 @@ export class Fork { (this.common as any).on = () => {}; }; - #setBlockDataFromChainAndOptions = async () => { + #setBlockDataFromChainAndOptions = async ( + chainIdPromise: Promise + ) => { const options = this.#options; if (options.blockNumber === Tag.LATEST) { - const latestBlock = await fetchBlock(this, Tag.LATEST); + const [latestBlock, chainId] = await Promise.all([ + fetchBlock(this, Tag.LATEST), + chainIdPromise + ]); let blockNumber = parseInt(latestBlock.number, 16); - const currentTime = BigInt((Date.now() / 1000) | 0); // 
current time in seconds - // if the "latest" block is less than `blockAge` seconds old we don't use it - // because it is possible that the node we connected to hasn't fully synced its - // state, so successive calls to this block - const useOlderBlock = - blockNumber > 0 && - currentTime - BigInt(latestBlock.timestamp) < options.blockAge; + const effectiveBlockNumber = KNOWN_CHAINIDS.has(chainId) + ? Math.max(blockNumber - options.blockAge, 0) + : blockNumber; let block; - if (useOlderBlock) { - blockNumber -= 1; - block = await fetchBlock(this, Quantity.from(blockNumber)); + if (effectiveBlockNumber !== blockNumber) { + block = await fetchBlock(this, Quantity.from(effectiveBlockNumber)); } else { block = latestBlock; } - options.blockNumber = parseInt(block.number, 16); - this.blockNumber = Quantity.from(blockNumber); + options.blockNumber = effectiveBlockNumber; + this.blockNumber = Quantity.from(effectiveBlockNumber); this.stateRoot = Data.from(block.stateRoot); await this.#syncAccounts(this.blockNumber); return block; @@ -176,11 +176,11 @@ export class Fork { } else { cacheProm = null; } - + const chainIdPromise = fetchChainId(this); const [block, cache] = await Promise.all([ - this.#setBlockDataFromChainAndOptions(), + this.#setBlockDataFromChainAndOptions(chainIdPromise), cacheProm, - this.#setCommonFromChain() + this.#setCommonFromChain(chainIdPromise) ]); this.block = new Block( BlockManager.rawFromJSON(block, this.common), diff --git a/src/chains/ethereum/ethereum/tests/forking/helpers.ts b/src/chains/ethereum/ethereum/tests/forking/helpers.ts index 019a281944..b4123858c8 100644 --- a/src/chains/ethereum/ethereum/tests/forking/helpers.ts +++ b/src/chains/ethereum/ethereum/tests/forking/helpers.ts @@ -54,7 +54,7 @@ export const startLocalChain = async ( ) => { const localProvider = await getProvider({ logging, - fork: { blockAge: 0, url: `ws://0.0.0.0:${port}`, ...options }, + fork: { url: `ws://0.0.0.0:${port}`, ...options }, wallet: { deterministic: true 
} }); return { diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 55de676d65..2bf110414d 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -78,8 +78,7 @@ export type ForkConfig = { * will be used instead. */ blockAge: { - type: bigint; - rawType: number; + type: number; hasDefault: true; }; @@ -306,9 +305,9 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` //implies: ["url"] }, blockAge: { - normalize: rawInput => BigInt(rawInput), + normalize, cliDescription: `Minimum age in seconds of the "latest" block. If the "latest" block is younger than this amount the block immediately preceding the latest block will be used instead.`, - default: () => 5n, + default: () => 5, defaultDescription: "5", cliType: "number" }, From c202b2e59ef06c2527c41818b8ef37733e36dd57 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:40:50 -0500 Subject: [PATCH 75/93] Update src/chains/ethereum/ethereum/src/forking/trie.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/ethereum/src/forking/trie.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 828023ec01..884e3e6179 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -285,7 +285,9 @@ export class ForkTrie extends GanacheTrie { /** * Returns a copy of the underlying trie with the interface of ForkTrie. - * @param includeCheckpoints - If true and during a checkpoint, the copy will contain the checkpointing metadata and will use the same scratch as underlying db. 
+ * @param includeCheckpoints - If true and during a checkpoint, the copy will + * contain the checkpointing metadata and will use the same scratch as + * underlying db. */ copy(includeCheckpoints: boolean = true) { const db = this.db.copy() as CheckpointDB; From 1566e285089ea5c8919b6b218df77a6519082ff2 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:40:58 -0500 Subject: [PATCH 76/93] Update src/chains/ethereum/ethereum/src/forking/trie.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/ethereum/src/forking/trie.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 884e3e6179..b2f8e406f4 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -163,7 +163,7 @@ export class ForkTrie extends GanacheTrie { // we only track if the key was deleted (locally) for state tries _after_ // the fork block because we can't possibly delete keys _before_ the fork // block, since those happened before ganache was even started - // This little optimization can debug_traceTransaction time _in half_. + // This little optimization can cut debug_traceTransaction time _in half_. 
if (!this.preForkBlock) { const delKey = this.createDelKey(key); const metaDataPutPromise = this.metadata.put(delKey, DELETED_VALUE); From 381ae915ebcb11472c8575ebe60fde22225e5554 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 9 Nov 2021 12:41:29 -0500 Subject: [PATCH 77/93] rename confusing variable --- src/chains/ethereum/ethereum/src/forking/trie.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index b2f8e406f4..45902f7e38 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -40,7 +40,7 @@ function isEqualKey(encodedKey: Buffer, address: Buffer, key: Buffer) { export class ForkTrie extends GanacheTrie { private accounts: AccountManager; private address: Buffer | null = null; - private preForkBlock = false; + private isPreForkBlock = false; private forkBlockNumber: bigint; public blockNumber: Quantity; private metadata: CheckpointDB; @@ -84,7 +84,7 @@ export class ForkTrie extends GanacheTrie { (this as any)._root = stateRoot; this.address = address; this.blockNumber = blockNumber; - this.preForkBlock = blockNumber.toBigInt() < this.forkBlockNumber; + this.isPreForkBlock = blockNumber.toBigInt() < this.forkBlockNumber; } async put(key: Buffer, val: Buffer): Promise { @@ -164,7 +164,7 @@ export class ForkTrie extends GanacheTrie { // the fork block because we can't possibly delete keys _before_ the fork // block, since those happened before ganache was even started // This little optimization can cut debug_traceTransaction time _in half_. 
- if (!this.preForkBlock) { + if (!this.isPreForkBlock) { const delKey = this.createDelKey(key); const metaDataPutPromise = this.metadata.put(delKey, DELETED_VALUE); @@ -272,7 +272,7 @@ export class ForkTrie extends GanacheTrie { // the fork block because we can't possibly delete keys _before_ the fork // block, since those happened before ganache was even started // This little optimization can debug_traceTransaction time _in half_. - if (!this.preForkBlock && (await this.keyWasDeleted(key))) return null; + if (!this.isPreForkBlock && (await this.keyWasDeleted(key))) return null; if (this.address === null) { // if the trie context's address isn't set, our key represents an address: @@ -285,7 +285,7 @@ export class ForkTrie extends GanacheTrie { /** * Returns a copy of the underlying trie with the interface of ForkTrie. - * @param includeCheckpoints - If true and during a checkpoint, the copy will + * @param includeCheckpoints - If true and during a checkpoint, the copy will * contain the checkpointing metadata and will use the same scratch as * underlying db. 
*/ From 5ba148fe480a53470be44f6f433d2a2cf4cfd4b9 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:41:47 -0500 Subject: [PATCH 78/93] Update src/chains/ethereum/ethereum/src/forking/trie.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/ethereum/src/forking/trie.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index 45902f7e38..3c9348f6f8 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -271,7 +271,7 @@ export class ForkTrie extends GanacheTrie { // we only check if the key was deleted (locally) for state tries _after_ // the fork block because we can't possibly delete keys _before_ the fork // block, since those happened before ganache was even started - // This little optimization can debug_traceTransaction time _in half_. + // This little optimization can cut debug_traceTransaction time _in half_. if (!this.isPreForkBlock && (await this.keyWasDeleted(key))) return null; if (this.address === null) { From d7d65ce10dfed9c322d3b16e7089734c5845e1c0 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 9 Nov 2021 12:42:45 -0500 Subject: [PATCH 79/93] Rename `blockAge` to `preLatestConfirmations` --- src/chains/ethereum/ethereum/src/forking/fork.ts | 2 +- src/chains/ethereum/options/src/fork-options.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 3b4067ecae..6466d44a3f 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -112,7 +112,7 @@ export class Fork { ]); let blockNumber = parseInt(latestBlock.number, 16); const effectiveBlockNumber = KNOWN_CHAINIDS.has(chainId) - ? 
Math.max(blockNumber - options.blockAge, 0) + ? Math.max(blockNumber - options.preLatestConfirmations, 0) : blockNumber; let block; if (effectiveBlockNumber !== blockNumber) { diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 2bf110414d..977dad05fc 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -77,7 +77,7 @@ export type ForkConfig = { * younger than this amount the block immediately preceding the latest block * will be used instead. */ - blockAge: { + preLatestConfirmations: { type: number; hasDefault: true; }; @@ -304,7 +304,7 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` defaultDescription: `Latest block number` //implies: ["url"] }, - blockAge: { + preLatestConfirmations: { normalize, cliDescription: `Minimum age in seconds of the "latest" block. If the "latest" block is younger than this amount the block immediately preceding the latest block will be used instead.`, default: () => 5, From 74eb2ed9b3a8a7b7185f4997d4b8bd40344f5fd5 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:42:58 -0500 Subject: [PATCH 80/93] Update src/chains/ethereum/options/src/fork-options.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/options/src/fork-options.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 2bf110414d..5ad1a46ddc 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -306,7 +306,7 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` }, blockAge: { normalize, - cliDescription: `Minimum age in seconds of the "latest" block. 
If the "latest" block is younger than this amount the block immediately preceding the latest block will be used instead.`, + cliDescription: `When the \`fork.blockNumber\` is set to "latest" (default), the number of blocks before the remote node's "latest" block to fork from.`, default: () => 5, defaultDescription: "5", cliType: "number" From 6302d166cfe69f061d56bbd9960eea83b4189632 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 9 Nov 2021 12:45:43 -0500 Subject: [PATCH 81/93] fix setDbVersion function --- .../ethereum/ethereum/src/forking/persistent-cache/helpers.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts index 1f150a1c2b..cfb158a7e7 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/helpers.ts @@ -60,8 +60,8 @@ export async function setDbVersion(db: LevelUp, version: Buffer) { // set the version if the DB was just created, or error if we already have // a version, but it isn't what we expected try { - const version = await db.get("version"); - if (!version.equals(version)) { + const recordedVersion = await db.get("version"); + if (!version.equals(recordedVersion)) { // in the future this is where database migrations would go throw new Error( `Persistent cache version "${version.toString()}"" is not understood.` From cf0947d1ef2ae446df9dae550a0bcebf295b1f4e Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:47:34 -0500 Subject: [PATCH 82/93] Update src/chains/ethereum/ethereum/src/forking/trie.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/ethereum/src/forking/trie.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/trie.ts b/src/chains/ethereum/ethereum/src/forking/trie.ts index
3c9348f6f8..d4019dab31 100644 --- a/src/chains/ethereum/ethereum/src/forking/trie.ts +++ b/src/chains/ethereum/ethereum/src/forking/trie.ts @@ -307,7 +307,6 @@ export class ForkTrie extends GanacheTrie { // Instead, it saves to its own internal cache, which eventually gets // reverted or committed (flushed). Our metadata doesn't utilize a central // cache. - // secureTrie.metadata.checkpoints = this.metadata.checkpoints; secureTrie.metadata.checkpoints = this.metadata.checkpoints; } return secureTrie; From 95b779e29897304275e9bf54589a6c28ccfe7288 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:47:46 -0500 Subject: [PATCH 83/93] Update src/chains/ethereum/options/src/fork-options.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/options/src/fork-options.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 943843bb81..72cf58d74e 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -407,7 +407,7 @@ Defaults to: \`["User-Agent: Ganache/VERSION (https://www.trufflesuite.com/ganac noCache: { normalize, default: () => false, - cliDescription: "Don't cache forking requests in a persistent db.", + cliDescription: "Disables caching of all forking requests.", cliType: "boolean" }, deleteCache: { From fdb7c770ca9bc72afa3e0aca75a54d85048fd34e Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 9 Nov 2021 12:50:47 -0500 Subject: [PATCH 84/93] rename noCache to disableCache --- .../ethereum/ethereum/src/forking/fork.ts | 8 ++-- .../src/forking/handlers/base-handler.ts | 6 +-- .../src/forking/handlers/http-handler.ts | 2 +- .../src/forking/handlers/provider-handler.ts | 2 +- .../src/forking/handlers/ws-handler.ts | 2 +- .../ethereum/ethereum/src/forking/types.ts | 2 +- .../ethereum/tests/forking/account.test.ts | 2 +- 
.../ethereum/tests/forking/block.test.ts | 2 +- .../ethereum/tests/forking/forking.test.ts | 40 ++++++++++++------- .../tests/forking/transaction.test.ts | 2 +- .../ethereum/options/src/fork-options.ts | 4 +- 11 files changed, 42 insertions(+), 30 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/fork.ts b/src/chains/ethereum/ethereum/src/forking/fork.ts index 6466d44a3f..e719bd8772 100644 --- a/src/chains/ethereum/ethereum/src/forking/fork.ts +++ b/src/chains/ethereum/ethereum/src/forking/fork.ts @@ -22,9 +22,9 @@ async function fetchNetworkId(fork: Fork) { return parseInt(networkIdStr, 10); } function fetchBlockNumber(fork: Fork) { - // {noCache: true} required so we never cache the blockNumber, as forking + // {disableCache: true} required so we never cache the blockNumber, as forking // shouldn't ever cache a method that can change! - return fork.request("eth_blockNumber", [], { noCache: true }); + return fork.request("eth_blockNumber", [], { disableCache: true }); } function fetchBlock(fork: Fork, blockNumber: Quantity | Tag.LATEST) { return fork.request("eth_getBlockByNumber", [blockNumber, true]); @@ -167,7 +167,7 @@ export class Fork { let cacheProm: Promise; const options = this.#options; if (options.deleteCache) await PersistentCache.deleteDb(); - if (options.noCache === false) { + if (options.disableCache === false) { // ignore cache start up errors as it is possible there is an `open` // conflict if another ganache fork is running at the time this one is // started. 
The cache isn't required (though performance will be @@ -200,7 +200,7 @@ export class Fork { public request( method: string, params: unknown[], - options = { noCache: false } + options = { disableCache: false } ): Promise { return this.#handler.request(method, params, options); } diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts index 00a4f2f7a4..6de72670bb 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts @@ -170,9 +170,9 @@ export class BaseHandler { response: { result: any } | { error: { message: string; code: number } }; raw: string | Buffer; }>, - options = { noCache: false } + options = { disableCache: false } ): Promise { - if (!options.noCache) { + if (!options.disableCache) { const memCached = this.getFromMemCache(key); if (memCached !== undefined) return memCached; @@ -189,7 +189,7 @@ export class BaseHandler { if (this.abortSignal.aborted) return Promise.reject(new AbortError()); if (hasOwn(response, "result")) { - if (!options.noCache) { + if (!options.disableCache) { // cache non-error responses only this.valueCache.set(key, raw); diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts index f1d4735908..1932e7c3cc 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/http-handler.ts @@ -88,7 +88,7 @@ export class HttpHandler extends BaseHandler implements Handler { public async request( method: string, params: unknown[], - options = { noCache: false } + options = { disableCache: false } ) { const key = JSON.stringify({ method, params }); const { protocol, hostname: host, port, pathname, search } = this.url; diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts 
b/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts index 5c67088f0b..0bc554cd49 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/provider-handler.ts @@ -64,7 +64,7 @@ export class ProviderHandler extends BaseHandler implements Handler { public async request( method: string, params: unknown[], - options = { noCache: false } + options = { disableCache: false } ) { // format params via JSON stringification because the params might // be Quantity or Data, which aren't valid as `params` themselves, diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts index c1e77a6edf..55a2ff5414 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/ws-handler.ts @@ -56,7 +56,7 @@ export class WsHandler extends BaseHandler implements Handler { public async request( method: string, params: unknown[], - options = { noCache: false } + options = { disableCache: false } ) { await this.open; if (this.abortSignal.aborted) return Promise.reject(new AbortError()); diff --git a/src/chains/ethereum/ethereum/src/forking/types.ts b/src/chains/ethereum/ethereum/src/forking/types.ts index 9d7f4781cc..0679c70fd6 100644 --- a/src/chains/ethereum/ethereum/src/forking/types.ts +++ b/src/chains/ethereum/ethereum/src/forking/types.ts @@ -4,7 +4,7 @@ export interface Handler { request: ( method: string, params: unknown[], - options: { noCache: boolean } + options: { disableCache: boolean } ) => Promise; setCache: (cache: PersistentCache) => void; close: () => Promise; diff --git a/src/chains/ethereum/ethereum/tests/forking/account.test.ts b/src/chains/ethereum/ethereum/tests/forking/account.test.ts index 8690d4bab3..f7938b903e 100644 --- a/src/chains/ethereum/ethereum/tests/forking/account.test.ts +++ 
b/src/chains/ethereum/ethereum/tests/forking/account.test.ts @@ -20,7 +20,7 @@ describe("forking", function () { fork: { url: URL, blockNumber, - noCache: true + disableCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/block.test.ts b/src/chains/ethereum/ethereum/tests/forking/block.test.ts index b0db24b0fd..1d2216abb7 100644 --- a/src/chains/ethereum/ethereum/tests/forking/block.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/block.test.ts @@ -19,7 +19,7 @@ describe("forking", function () { fork: { url: URL, blockNumber, - noCache: true + disableCache: true } }); }); diff --git a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts index 1e380647c8..531c5e9392 100644 --- a/src/chains/ethereum/ethereum/tests/forking/forking.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/forking.test.ts @@ -114,7 +114,7 @@ describe("forking", function () { it("handles invalid JSON-RPC responses", async () => { const { localProvider } = await startLocalChain(port, { url: `http://0.0.0.0:${port}`, - noCache: true + disableCache: true }); // some bad values to test const junks = [ @@ -152,7 +152,7 @@ describe("forking", function () { startLocalChain(PORT, { url: null, provider: { request: "not a function" } as any, - noCache: true + disableCache: true }), { message: "Forking `provider` must be EIP-1193 compatible" } ); @@ -161,7 +161,7 @@ describe("forking", function () { startLocalChain(PORT, { url: null, provider: { send: "also not a function" } as any, - noCache: true + disableCache: true }), { message: "Forking `provider` must be EIP-1193 compatible" } ); @@ -175,7 +175,7 @@ describe("forking", function () { const provider = await startLocalChain(PORT, { url: null, provider: remoteProvider as any, - noCache: true + disableCache: true }); localProvider = provider.localProvider; } @@ -252,7 +252,7 @@ describe("forking", function () { const provider = await 
startLocalChain(PORT, { url: null, provider: remoteProvider as any, - noCache: true + disableCache: true }); localProvider = provider.localProvider; @@ -321,7 +321,9 @@ describe("forking", function () { describe("initial state", () => { it("should get the Network ID of the forked chain", async () => { - const { localProvider } = await startLocalChain(PORT, { noCache: true }); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const [remoteNetworkId, localNetworkId] = await Promise.all( [remoteProvider, localProvider].map(p => p.send("net_version", [])) @@ -344,7 +346,7 @@ describe("forking", function () { const localStartBlockNum = blocks / 2; const { localProvider } = await startLocalChain(PORT, { blockNumber: localStartBlockNum, - noCache: true + disableCache: true }); const localBlockNumber = parseInt( @@ -367,7 +369,9 @@ describe("forking", function () { describe("block number", () => { let localProvider: EthereumProvider; beforeEach("start local chain", async () => { - ({ localProvider } = await startLocalChain(PORT, { noCache: true })); + ({ localProvider } = await startLocalChain(PORT, { + disableCache: true + })); }); it("local block number should be 1 after the remote block on start up", async () => { @@ -387,7 +391,9 @@ describe("forking", function () { }); beforeEach("start local chain", async () => { - ({ localProvider } = await startLocalChain(PORT, { noCache: true })); + ({ localProvider } = await startLocalChain(PORT, { + disableCache: true + })); }); it("should return the nonce of each account", async () => { @@ -413,7 +419,7 @@ describe("forking", function () { beforeEach("start local chain", async () => { ({ localProvider, localAccounts } = await startLocalChain(PORT, { - noCache: true + disableCache: true })); }); @@ -642,7 +648,9 @@ describe("forking", function () { }); it("should fetch contract code from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT, 
{ noCache: true }); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const { blockNumbersWithCode, blockNumbersWithoutCode @@ -672,7 +680,9 @@ describe("forking", function () { }); it("should fetch initial contract data from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT, { noCache: true }); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const { blockNum, blockNumbersWithCode, @@ -722,7 +732,9 @@ describe("forking", function () { }); it("should fetch changed contract data from the remote chain via the local chain", async () => { - const { localProvider } = await startLocalChain(PORT, { noCache: true }); + const { localProvider } = await startLocalChain(PORT, { + disableCache: true + }); const { blockNum, blockNumbersWithCode, @@ -833,7 +845,7 @@ describe("forking", function () { snapshotValues: number[] ) { const { localProvider } = await startLocalChain(PORT, { - noCache: true + disableCache: true }); const subId = await localProvider.send("eth_subscribe", ["newHeads"]); diff --git a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts index 3f94e355be..33edd8952a 100644 --- a/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts +++ b/src/chains/ethereum/ethereum/tests/forking/transaction.test.ts @@ -18,7 +18,7 @@ describe("forking", () => { fork: { url: URL, blockNumber, - noCache: true + disableCache: true } }); }); diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 72cf58d74e..284b7e50e1 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -177,7 +177,7 @@ export type ForkConfig = { * * @default false */ - noCache: { + disableCache: { type: boolean; hasDefault: true; }; @@ -404,7 +404,7 @@ Defaults to: 
\`["User-Agent: Ganache/VERSION (https://www.trufflesuite.com/ganac cliType: "number" //implies: ["url"] }, - noCache: { + disableCache: { normalize, default: () => false, cliDescription: "Disables caching of all forking requests.", From c4b36de072278c2f841755a7dbf8c54a4411d3dd Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:51:31 -0500 Subject: [PATCH 85/93] Update src/chains/ethereum/options/src/fork-options.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/options/src/fork-options.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 284b7e50e1..7dadf4db55 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -173,7 +173,7 @@ export type ForkConfig = { }; /** - * Don't cache forking requests in a persistent db. + * Disables caching of all forking requests. * * @default false */ From a2ac1e94574d6fd77a6a829237ea1f8a1774e7f6 Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Tue, 9 Nov 2021 12:51:48 -0500 Subject: [PATCH 86/93] Update src/chains/ethereum/options/src/fork-options.ts Co-authored-by: Micaiah Reid --- src/chains/ethereum/options/src/fork-options.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 7dadf4db55..864afadf05 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -73,9 +73,8 @@ export type ForkConfig = { }; /** - * Minimum age in seconds of the "latest" block. If the "latest" block is - * younger than this amount the block immediately preceding the latest block - * will be used instead. 
+ * When the `fork.blockNumber` is set to "latest" (default), the number + * of blocks before the remote node's "latest" block to fork from. */ preLatestConfirmations: { type: number; From 6faae0e133bb33a7a1888e1a1f599428f5075f2a Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 9 Nov 2021 13:01:54 -0500 Subject: [PATCH 87/93] fix commit suggestion typo --- src/chains/ethereum/options/src/fork-options.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index 864afadf05..e3265c2e3f 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -73,8 +73,9 @@ export type ForkConfig = { }; /** - * When the `fork.blockNumber` is set to "latest" (default), the number - * of blocks before the remote node's "latest" block to fork from. + * Minimum age in seconds of the "latest" block. If the "latest" block is + * younger than this amount the block immediately preceding the latest block + * will be used instead. */ preLatestConfirmations: { type: number; @@ -172,7 +173,7 @@ export type ForkConfig = { }; /** - * Disables caching of all forking requests. + * Don't cache forking requests in a persistent db. 
* * @default false */ @@ -305,7 +306,8 @@ Alternatively, you can use the \`fork.username\` and \`fork.password\` options.` }, preLatestConfirmations: { normalize, - cliDescription: `When the `fork.blockNumber` is set to "latest" (default), the number of blocks before the remote node's "latest" block to fork from.`, + cliDescription: + 'When the `fork.blockNumber` is set to "latest" (default), the number of blocks before the remote node\'s "latest" block to fork from.', default: () => 5, defaultDescription: "5", cliType: "number" From 25c38b53119d0afd7c3a855ad520818f6241fc0e Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Tue, 9 Nov 2021 13:47:07 -0500 Subject: [PATCH 88/93] fix error in slowCache --- .../ethereum/ethereum/src/forking/handlers/base-handler.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts index 6de72670bb..e5fd673788 100644 --- a/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts +++ b/src/chains/ethereum/ethereum/src/forking/handlers/base-handler.ts @@ -157,7 +157,7 @@ export class BaseHandler { // have lost integrity throw e; }); - if (raw !== undefined) return { result: JSON.parse(raw).result as T, raw }; + if (raw != null) return { result: JSON.parse(raw).result as T, raw }; } async queueRequest( From 7cb9cf60f54ca93af1892e24e8bbc86235343e91 Mon Sep 17 00:00:00 2001 From: MicaiahReid Date: Wed, 10 Nov 2021 15:53:54 -0500 Subject: [PATCH 89/93] remove mock-provider.ts --- .../tests/forking/cache/mock-provider.ts | 55 ------------------- 1 file changed, 55 deletions(-) delete mode 100644 src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts diff --git a/src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts b/src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts deleted file mode 100644 index 06902337ac..0000000000 --- 
a/src/chains/ethereum/ethereum/tests/forking/cache/mock-provider.ts +++ /dev/null @@ -1,55 +0,0 @@ -type Provider = { - send: (payload: any, callback: any) => void; -}; - -import { Batch, Model } from "./arbitraries"; - -export const mockProvider = (options: { - model: Model; - batch: Batch; -}): Provider => { - const { model, batch } = options; - - const { networkId, getBlockByNumber } = model.networks[batch.descendantIndex]; - - return { - send(payload, callback) { - const { jsonrpc, id, method, params } = payload; - - switch (method) { - case "eth_getBlockByNumber": { - let [blockNumber] = params; - if (blockNumber === "earliest") { - blockNumber = 0; - } - - const height = parseInt(blockNumber); - - (getBlockByNumber(height) as any).then(block => { - const result = block - ? { - number: `0x${height.toString(16)}`, - hash: block.hash - } - : undefined; - - return callback(null, { - jsonrpc, - id, - result - }); - }); - } - case "net_version": { - const result = networkId; - - return callback(null, { - jsonrpc, - id, - result - }); - } - } - } - }; -}; From ed038fe4a8a665762ace238467877d71d279df3c Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 10 Nov 2021 17:09:32 -0500 Subject: [PATCH 90/93] code review changes --- .../ethereum/tests/api/eth/getBlockByNumber.test.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts index fcc3c8e45f..7f6eea6268 100644 --- a/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts +++ b/src/chains/ethereum/ethereum/tests/api/eth/getBlockByNumber.test.ts @@ -47,12 +47,16 @@ describe("api", () => { const block = await provider.send("eth_getBlockByNumber", [ `0x${numberOfBlocksToMine.toString(16)}` ]); + assert( + block, + `\`block\` is \`null\`; didn't correctly mine ${numberOfBlocksToMine} blocks` + ); assert.strictEqual( block.totalDifficulty, 
`0x${((numberOfBlocksToMine + 1) * DEFAULT_DIFFICULTY).toString( 16 )}`, - `Didn't correctly mine ${numberOfBlocksToMine} blocks` + `Mined total difficulty, ${block.totalDifficulty} differs from sum of preceding block's difficulties.` ); }); }); @@ -89,7 +93,7 @@ describe("api", () => { assert.strictEqual( block.totalDifficulty, `0x${((numberOfBlocksToMine + 1) * difficulty).toString(16)}`, - `Didn't correctly mine ${numberOfBlocksToMine} blocks` + `Mined total difficulty, ${block.totalDifficulty} differs from sum of preceding block's difficulties.` ); }); }); From 259b3a2f424ca2eb0ead6d35714ae692bffa4042 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 10 Nov 2021 17:15:26 -0500 Subject: [PATCH 91/93] fix up --- src/chains/ethereum/options/src/fork-options.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/chains/ethereum/options/src/fork-options.ts b/src/chains/ethereum/options/src/fork-options.ts index e3265c2e3f..33ddb20e77 100644 --- a/src/chains/ethereum/options/src/fork-options.ts +++ b/src/chains/ethereum/options/src/fork-options.ts @@ -73,9 +73,8 @@ export type ForkConfig = { }; /** - * Minimum age in seconds of the "latest" block. If the "latest" block is - * younger than this amount the block immediately preceding the latest block - * will be used instead. + * When the `fork.blockNumber` is set to "latest" (default), the number of + * blocks before the remote node's "latest" block to fork from. */ preLatestConfirmations: { type: number; @@ -173,7 +172,7 @@ export type ForkConfig = { }; /** - * Don't cache forking requests in a persistent db. + * Disables caching of all forking requests. 
* * @default false */ From 5d60810102b90cc046cc7e1f622931e347653b93 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 10 Nov 2021 17:15:31 -0500 Subject: [PATCH 92/93] better var name --- .../src/forking/persistent-cache/ancestry.ts | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts index a75f85210f..95f1210f29 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/ancestry.ts @@ -6,7 +6,10 @@ export class Ancestry { private db: LevelUp; private next: Buffer; private knownAncestors: Set; - private lock: Map> = new Map(); + /** + * Prevents fetching the same key from the database simultaneously. + */ + private cacheLock: Map> = new Map(); constructor(db: LevelUp, parent: Tree) { this.db = db; if (parent == null) { @@ -22,11 +25,11 @@ export class Ancestry { private async loadNextAncestor(next: Buffer) { const k = next.toString("hex"); - if (this.lock.has(k)) { - throw new Error("could not obtain lock"); + if (this.cacheLock.has(k)) { + throw new Error("couldn't load next ancestor as it is locked"); } let resolver: () => void; - this.lock.set( + this.cacheLock.set( k, new Promise(resolve => { resolver = resolve; @@ -38,7 +41,7 @@ export class Ancestry { ? 
null : node.closestKnownAncestor; this.knownAncestors.add(node.hash.toString("hex")); - this.lock.delete(k); + this.cacheLock.delete(k); resolver(); } @@ -47,9 +50,9 @@ export class Ancestry { if (this.knownAncestors.has(strKey)) { return true; } else if (this.next) { - const lock = this.lock.get(this.next.toString("hex")); - if (lock) { - await lock; + const cacheLock = this.cacheLock.get(this.next.toString("hex")); + if (cacheLock) { + await cacheLock; return this.has(key); } await this.loadNextAncestor(this.next); From 09dd1f664ebba1dd7163bf7082df5a79b3aeb870 Mon Sep 17 00:00:00 2001 From: David Murdoch Date: Wed, 10 Nov 2021 17:17:02 -0500 Subject: [PATCH 93/93] nice cache! --- .../ethereum/ethereum/src/forking/persistent-cache/tree.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts b/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts index 33393f464a..789a4def28 100644 --- a/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts +++ b/src/chains/ethereum/ethereum/src/forking/persistent-cache/tree.ts @@ -10,18 +10,16 @@ export class Tree { public key: Buffer; public hash: Buffer; public closestKnownAncestor: Buffer; - public closestKnownDescendants: Buffer[]; + public closestKnownDescendants: Buffer[] = []; constructor( height: Quantity, hash: Data, - closestKnownAncestor: Buffer = BUFFER_EMPTY, - closestKnownDescendants: Buffer[] = [] + closestKnownAncestor: Buffer = BUFFER_EMPTY ) { this.key = Tree.encodeKey(height, hash); this.hash = hash.toBuffer(); this.closestKnownAncestor = closestKnownAncestor; - this.closestKnownDescendants = closestKnownDescendants; } public serialize() {