diff --git a/docs/docs/developers/sandbox/references/sandbox-reference.md b/docs/docs/developers/sandbox/references/sandbox-reference.md index 408572796eb..c454b16f9b9 100644 --- a/docs/docs/developers/sandbox/references/sandbox-reference.md +++ b/docs/docs/developers/sandbox/references/sandbox-reference.md @@ -59,7 +59,7 @@ cd ~/.aztec && docker-compose up If you wish to run components of the Aztec network stack separately, you can use the `aztec start` command with various options for enabling components. ```bash -aztec start --node [nodeOptions] --pxe [pxeOptions] --archiver [archiverOptions] --sequencer [sequencerOptions] ----p2p-bootstrap [p2pOptions] +aztec start --node [nodeOptions] --pxe [pxeOptions] --archiver [archiverOptions] --sequencer [sequencerOptions] --prover [proverOptions] --p2p-bootstrap [p2pOptions] ``` Starting the aztec node alongside a PXE, sequencer or archiver, will attach the components to the node. If you want to e.g. run a PXE separately to a node, you can: diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index 2c0dd26296c..f49c5c58920 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -42,7 +42,9 @@ "@aztec/l1-artifacts": "workspace:^", "@aztec/merkle-tree": "workspace:^", "@aztec/p2p": "workspace:^", + "@aztec/prover-client": "workspace:^", "@aztec/sequencer-client": "workspace:^", + "@aztec/simulator": "workspace:^", "@aztec/types": "workspace:^", "@aztec/world-state": "workspace:^", "koa": "^2.14.2", diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 73f14115804..07f79f82383 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -12,6 +12,9 @@ export type AztecNodeConfig = ArchiverConfig & /** Whether the sequencer is disabled for this node. 
*/ disableSequencer: boolean; + /** Whether the prover is disabled for this node. */ + disableProver: boolean; + /** A URL for an archiver service that the node will use. */ archiverUrl?: string; }; @@ -21,13 +24,14 @@ export type AztecNodeConfig = ArchiverConfig & * @returns A valid aztec node config. */ export function getConfigEnvVars(): AztecNodeConfig { - const { SEQ_DISABLED } = process.env; + const { SEQ_DISABLED, PROVER_DISABLED } = process.env; const allEnvVars: AztecNodeConfig = { ...getSequencerVars(), ...getArchiverVars(), ...getP2PConfigEnvVars(), ...getWorldStateVars(), disableSequencer: !!SEQ_DISABLED, + disableProver: !!PROVER_DISABLED, archiverUrl: process.env.ARCHIVER_URL, }; diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 46f9aef2146..43a1aefd8bc 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -12,6 +12,7 @@ import { LogType, MerkleTreeId, NullifierMembershipWitness, + ProverClient, PublicDataWitness, SequencerConfig, SiblingPath, @@ -20,6 +21,7 @@ import { TxHash, TxReceipt, TxStatus, + partitionReverts, } from '@aztec/circuit-types'; import { ARCHIVE_HEIGHT, @@ -44,14 +46,14 @@ import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; import { initStoreForRollup, openTmpStore } from '@aztec/kv-store/utils'; import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; import { AztecKVTxPool, P2P, createP2PClient } from '@aztec/p2p'; +import { DummyProver, TxProver } from '@aztec/prover-client'; import { GlobalVariableBuilder, PublicProcessorFactory, SequencerClient, - WASMSimulator, getGlobalVariableBuilder, - partitionReverts, } from '@aztec/sequencer-client'; +import { WASMSimulator } from '@aztec/simulator'; import { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/types/contracts'; import { MerkleTrees, @@ -62,6 +64,7 @@ import { } from '@aztec/world-state'; import { 
AztecNodeConfig } from './config.js'; +import { getSimulationProvider } from './simulator-factory.js'; /** * The aztec node. @@ -81,6 +84,7 @@ export class AztecNodeService implements AztecNode { protected readonly version: number, protected readonly globalVariableBuilder: GlobalVariableBuilder, protected readonly merkleTreesDb: AztecKVStore, + private readonly prover: ProverClient, private log = createDebugLogger('aztec:node'), ) { const message = @@ -139,10 +143,25 @@ export class AztecNodeService implements AztecNode { // start both and wait for them to sync from the block source await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); + // start the prover if we have been told to + const simulationProvider = await getSimulationProvider(config, log); + const prover = config.disableProver + ? await DummyProver.new() + : await TxProver.new(config, worldStateSynchronizer, simulationProvider); + // now create the sequencer const sequencer = config.disableSequencer ? undefined - : await SequencerClient.new(config, p2pClient, worldStateSynchronizer, archiver, archiver, archiver); + : await SequencerClient.new( + config, + p2pClient, + worldStateSynchronizer, + archiver, + archiver, + archiver, + prover, + simulationProvider, + ); return new AztecNodeService( config, @@ -158,6 +177,7 @@ export class AztecNodeService implements AztecNode { config.version, getGlobalVariableBuilder(config), store, + prover, log, ); } @@ -299,6 +319,7 @@ export class AztecNodeService implements AztecNode { await this.p2pClient.stop(); await this.worldStateSynchronizer.stop(); await this.blockSource.stop(); + await this.prover.stop(); this.log.info(`Stopped`); } diff --git a/yarn-project/aztec-node/src/aztec-node/simulator-factory.ts b/yarn-project/aztec-node/src/aztec-node/simulator-factory.ts new file mode 100644 index 00000000000..8cf8de0b01f --- /dev/null +++ b/yarn-project/aztec-node/src/aztec-node/simulator-factory.ts @@ -0,0 +1,24 @@ +import { DebugLogger } from 
'@aztec/foundation/log'; +import { NativeACVMSimulator, SimulationProvider, WASMSimulator } from '@aztec/simulator'; + +import * as fs from 'fs/promises'; + +import { AztecNodeConfig } from './config.js'; + +export async function getSimulationProvider( + config: AztecNodeConfig, + logger?: DebugLogger, +): Promise { + if (config.acvmBinaryPath && config.acvmWorkingDirectory) { + try { + await fs.access(config.acvmBinaryPath, fs.constants.R_OK); + await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); + logger?.(`Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`); + return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath); + } catch { + logger?.(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`); + } + } + logger?.('Using WASM ACVM simulation'); + return new WASMSimulator(); +} diff --git a/yarn-project/aztec-node/terraform/main.tf b/yarn-project/aztec-node/terraform/main.tf index 5bf9187d744..fffdb2991aa 100644 --- a/yarn-project/aztec-node/terraform/main.tf +++ b/yarn-project/aztec-node/terraform/main.tf @@ -155,7 +155,7 @@ resource "aws_ecs_task_definition" "aztec-node" { { "name": "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}", "image": "${var.DOCKERHUB_ACCOUNT}/aztec:${var.DEPLOY_TAG}", - "command": ["start", "--node", "--archiver", "--sequencer"], + "command": ["start", "--node", "--archiver", "--sequencer", "--prover"], "essential": true, "memoryReservation": 3776, "portMappings": [ diff --git a/yarn-project/aztec-node/tsconfig.json b/yarn-project/aztec-node/tsconfig.json index 9979b01f137..811c082824f 100644 --- a/yarn-project/aztec-node/tsconfig.json +++ b/yarn-project/aztec-node/tsconfig.json @@ -33,9 +33,15 @@ { "path": "../p2p" }, + { + "path": "../prover-client" + }, { "path": "../sequencer-client" }, + { + "path": "../simulator" + }, { "path": "../types" }, diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts 
index a6ad675c617..a373f6e97ba 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -36,6 +36,7 @@ export function getProgram(userLog: LogFn, debugLogger: DebugLogger): Command { .option('-px, --pxe [options]', cliTexts.pxe) .option('-a, --archiver [options]', cliTexts.archiver) .option('-s, --sequencer [options]', cliTexts.sequencer) + .option('-r, --prover [options]', cliTexts.prover) .option('-p2p, --p2p-bootstrap [options]', cliTexts.p2pBootstrap) .action(async options => { // list of 'stop' functions to call when process ends diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 04e066a39ab..f498b42fdef 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -59,6 +59,10 @@ export const startNode = async ( nodeConfig.publisherPrivateKey = `0x${Buffer.from(privKey!).toString('hex')}`; } + if (!options.prover) { + nodeConfig.disableProver = true; + } + // Create and start Aztec Node. const node = await createAztecNode(nodeConfig); const nodeServer = createAztecNodeRpcServer(node); diff --git a/yarn-project/aztec/src/cli/texts.ts b/yarn-project/aztec/src/cli/texts.ts index 4c37b30ff9a..0ee661350c3 100644 --- a/yarn-project/aztec/src/cli/texts.ts +++ b/yarn-project/aztec/src/cli/texts.ts @@ -63,7 +63,14 @@ export const cliTexts = { 'requiredConfirmations:SEQ_REQUIRED_CONFIRMATIONS - number - The number of confirmations required before publishing a block. Default: 1\n' + 'l1BlockPublishRetryIntervalMS:SEQ_PUBLISH_RETRY_INTERVAL_MS - number - The interval in ms to wait before retrying to publish a block. Default: 1000\n' + 'transactionPollingIntervalMS:SEQ_TX_POLLING_INTERVAL_MS - number - The interval in ms to wait before polling for new transactions. Default: 1000\n' + + 'acvmBinaryPath:ACVM_BINARY_PATH - string - The full path to an instance of the acvm cli application. 
If not provided will fallback to WASM circuit simulation\n' + + 'acvmWorkingDirectory:ACVM_WORKING_DIRECTORY - string - A directory to use for temporary files used by the acvm application. If not provided WASM circuit simulation will be used\n' + contractAddresses, + prover: + 'Starts a Prover with options. If started additionally to --node, the Prover will attach to that node.\n' + + 'Available options are listed below as cliProperty:ENV_VARIABLE_NAME.\n' + + 'acvmBinaryPath:ACVM_BINARY_PATH - string - The full path to an instance of the acvm cli application. If not provided will fallback to WASM circuit simulation\n' + + 'acvmWorkingDirectory:ACVM_WORKING_DIRECTORY - string - A directory to use for temporary files used by the acvm application. If not provided WASM circuit simulation will be used\n', p2pBootstrap: 'Starts a P2P bootstrap node with options.\n' + 'Available options are listed below as cliProperty:ENV_VARIABLE_NAME.\n' + diff --git a/yarn-project/circuit-types/src/body.ts b/yarn-project/circuit-types/src/body.ts index fcabae1c013..79b794a9697 100644 --- a/yarn-project/circuit-types/src/body.ts +++ b/yarn-project/circuit-types/src/body.ts @@ -95,4 +95,8 @@ export class Body { return new Body(txEffects); } + + static empty() { + return new Body([]); + } } diff --git a/yarn-project/circuit-types/src/interfaces/block-prover.ts b/yarn-project/circuit-types/src/interfaces/block-prover.ts new file mode 100644 index 00000000000..9e02a117b22 --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/block-prover.ts @@ -0,0 +1,41 @@ +import { Fr, GlobalVariables, Proof } from '@aztec/circuits.js'; + +import { L2Block } from '../l2_block.js'; +import { ProcessedTx } from '../tx/processed_tx.js'; + +export enum PROVING_STATUS { + SUCCESS, + FAILURE, +} + +export type ProvingSuccess = { + status: PROVING_STATUS.SUCCESS; + block: L2Block; + proof: Proof; +}; + +export type ProvingFailure = { + status: PROVING_STATUS.FAILURE; + reason: string; +}; + +export type 
ProvingResult = ProvingSuccess | ProvingFailure; + +export type ProvingTicket = { + provingPromise: Promise; +}; + +/** + * The interface to the block prover. + * Provides the ability to generate proofs and build rollups. + */ +export interface BlockProver { + startNewBlock( + numTxs: number, + globalVariables: GlobalVariables, + l1ToL2Messages: Fr[], + emptyTx: ProcessedTx, + ): Promise; + + addNewTx(tx: ProcessedTx): Promise; +} diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 8997ca87955..25d6f63bd82 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -5,3 +5,5 @@ export * from './sync-status.js'; export * from './configs.js'; export * from './nullifier_tree.js'; export * from './public_data_tree.js'; +export * from './prover-client.js'; +export * from './block-prover.js'; diff --git a/yarn-project/circuit-types/src/interfaces/prover-client.ts b/yarn-project/circuit-types/src/interfaces/prover-client.ts new file mode 100644 index 00000000000..ac803d0e94b --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/prover-client.ts @@ -0,0 +1,11 @@ +import { BlockProver } from './block-prover.js'; + +/** + * The interface to the prover client. + * Provides the ability to generate proofs and build rollups. + */ +export interface ProverClient extends BlockProver { + start(): Promise; + + stop(): Promise; +} diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index b8535432dfd..e02ea43329b 100644 --- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -114,6 +114,18 @@ export class L2Block { }); } + /** + * Creates an L2 block containing empty data. + * @returns The L2 block. 
+ */ + static empty(): L2Block { + return L2Block.fromFields({ + archive: AppendOnlyTreeSnapshot.zero(), + header: Header.empty(), + body: Body.empty(), + }); + } + get number(): number { return Number(this.header.globalVariables.blockNumber.toBigInt()); } diff --git a/yarn-project/circuit-types/src/tx/index.ts b/yarn-project/circuit-types/src/tx/index.ts index 12409f062b8..f1ca9d6f805 100644 --- a/yarn-project/circuit-types/src/tx/index.ts +++ b/yarn-project/circuit-types/src/tx/index.ts @@ -1,3 +1,4 @@ export * from './tx.js'; export * from './tx_hash.js'; export * from './tx_receipt.js'; +export * from './processed_tx.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts similarity index 100% rename from yarn-project/sequencer-client/src/sequencer/processed_tx.ts rename to yarn-project/circuit-types/src/tx/processed_tx.ts diff --git a/yarn-project/circuit-types/src/tx_effect.ts b/yarn-project/circuit-types/src/tx_effect.ts index cf51aca0ed1..c80d0cee28d 100644 --- a/yarn-project/circuit-types/src/tx_effect.ts +++ b/yarn-project/circuit-types/src/tx_effect.ts @@ -142,6 +142,18 @@ export class TxEffect { ); } + static empty(): TxEffect { + return new TxEffect( + RevertCode.OK, + makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, Fr.zero), + makeTuple(MAX_NEW_NULLIFIERS_PER_TX, Fr.zero), + makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.zero), + makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataWrite.empty), + TxL2Logs.empty(), + TxL2Logs.empty(), + ); + } + /** * Returns a string representation of the TxEffect object. 
*/ diff --git a/yarn-project/circuits.js/package.json b/yarn-project/circuits.js/package.json index f6192beede0..4584f83c848 100644 --- a/yarn-project/circuits.js/package.json +++ b/yarn-project/circuits.js/package.json @@ -11,7 +11,8 @@ "./types": "./dest/types/index.js", "./constants": "./dest/constants.gen.js", "./contract": "./dest/contract/index.js", - "./merkle": "./dest/merkle/index.js" + "./merkle": "./dest/merkle/index.js", + "./simulation": "./dest/simulator/index.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/circuits.js/src/structs/proof.ts b/yarn-project/circuits.js/src/structs/proof.ts index c2a16b9616b..ff339c9345f 100644 --- a/yarn-project/circuits.js/src/structs/proof.ts +++ b/yarn-project/circuits.js/src/structs/proof.ts @@ -1,5 +1,7 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +const EMPTY_PROOF_SIZE = 42; + /** * The Proof class is a wrapper around the circuits proof. * Underlying it is a buffer of proof data in a form a barretenberg prover understands. @@ -47,5 +49,5 @@ export class Proof { * @returns The empty "proof". 
*/ export function makeEmptyProof() { - return new Proof(Buffer.alloc(0)); + return new Proof(Buffer.alloc(EMPTY_PROOF_SIZE, 0)); } diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index a5aedd32dcd..f61000a26d3 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -32,8 +32,10 @@ "@aztec/noir-contracts.js": "workspace:^", "@aztec/p2p": "workspace:^", "@aztec/protocol-contracts": "workspace:^", + "@aztec/prover-client": "workspace:^", "@aztec/pxe": "workspace:^", "@aztec/sequencer-client": "workspace:^", + "@aztec/simulator": "workspace:^", "@aztec/types": "workspace:^", "@aztec/world-state": "workspace:^", "@jest/globals": "^29.5.0", @@ -50,6 +52,7 @@ "crypto-browserify": "^3.12.0", "glob": "^10.3.10", "jest": "^29.5.0", + "jest-mock-extended": "^3.0.5", "koa": "^2.14.2", "koa-static": "^5.0.0", "levelup": "^5.1.1", diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index 415d3aca702..d6a568def0b 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -1,5 +1,13 @@ +import { ArchiveSource } from '@aztec/archiver'; import { getConfigEnvVars } from '@aztec/aztec-node'; import { AztecAddress, Body, Fr, GlobalVariables, L2Actor, L2Block, createDebugLogger, mockTx } from '@aztec/aztec.js'; +// eslint-disable-next-line no-restricted-imports +import { + ProcessedTx, + ProvingSuccess, + makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, + makeProcessedTx, +} from '@aztec/circuit-types'; import { EthAddress, Header, @@ -20,21 +28,14 @@ import { toTruncField } from '@aztec/foundation/serialize'; import { openTmpStore } from '@aztec/kv-store/utils'; import { AvailabilityOracleAbi, InboxAbi, OutboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; 
-import { - EmptyRollupProver, - L1Publisher, - RealRollupCircuitSimulator, - SoloBlockBuilder, - WASMSimulator, - getL1Publisher, - getVerificationKeys, - makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, - makeProcessedTx, -} from '@aztec/sequencer-client'; -import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; +import { TxProver } from '@aztec/prover-client'; +import { L1Publisher, getL1Publisher } from '@aztec/sequencer-client'; +import { WASMSimulator } from '@aztec/simulator'; +import { MerkleTrees, ServerWorldStateSynchronizer, WorldStateConfig } from '@aztec/world-state'; import { beforeEach, describe, expect, it } from '@jest/globals'; import * as fs from 'fs'; +import { MockProxy, mock } from 'jest-mock-extended'; import { Account, Address, @@ -80,12 +81,14 @@ describe('L1Publisher integration', () => { let publisher: L1Publisher; let l2Proof: Buffer; - let builder: SoloBlockBuilder; - let builderDb: MerkleTreeOperations; + let builder: TxProver; + let builderDb: MerkleTrees; // The header of the last block let prevHeader: Header; + let blockSource: MockProxy; + const chainId = createEthereumChain(config.rpcUrl, config.apiKey).chainInfo.id; let coinbase: EthAddress; @@ -123,12 +126,16 @@ describe('L1Publisher integration', () => { client: publicClient, }); - builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - const vks = getVerificationKeys(); - const simulator = new RealRollupCircuitSimulator(new WASMSimulator()); - const prover = new EmptyRollupProver(); - builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); - + const tmpStore = openTmpStore(); + builderDb = await MerkleTrees.new(tmpStore); + blockSource = mock(); + blockSource.getBlocks.mockResolvedValue([]); + const worldStateConfig: WorldStateConfig = { + worldStateBlockCheckIntervalMS: 10000, + l2QueueSize: 10, + }; + const worldStateSynchronizer = new ServerWorldStateSynchronizer(tmpStore, builderDb, blockSource, 
worldStateConfig); + builder = await TxProver.new({}, worldStateSynchronizer, new WASMSimulator()); l2Proof = Buffer.alloc(0); publisher = getL1Publisher({ @@ -143,7 +150,7 @@ describe('L1Publisher integration', () => { coinbase = config.coinbase || EthAddress.random(); feeRecipient = config.feeRecipient || AztecAddress.random(); - prevHeader = await builderDb.buildInitialHeader(); + prevHeader = await builderDb.buildInitialHeader(false); }, 100_000); const makeEmptyProcessedTx = () => { @@ -297,6 +304,19 @@ describe('L1Publisher integration', () => { fs.writeFileSync(path, output, 'utf8'); }; + const buildBlock = async ( + globalVariables: GlobalVariables, + txs: ProcessedTx[], + l1ToL2Messages: Fr[], + emptyTx: ProcessedTx, + ) => { + const blockTicket = await builder.startNewBlock(txs.length, globalVariables, l1ToL2Messages, emptyTx); + for (const tx of txs) { + await builder.addNewTx(tx); + } + return blockTicket; + }; + it('Block body is correctly published to AvailabilityOracle', async () => { const body = Body.random(); // `sendPublishTx` function is private so I am hacking around TS here. I think it's ok for test purposes. 
@@ -360,7 +380,9 @@ describe('L1Publisher integration', () => { coinbase, feeRecipient, ); - const [block] = await builder.buildL2Block(globalVariables, txs, currentL1ToL2Messages); + const ticket = await buildBlock(globalVariables, txs, currentL1ToL2Messages, makeEmptyProcessedTx()); + const result = await ticket.provingPromise; + const block = (result as ProvingSuccess).block; prevHeader = block.header; const newL2ToL1MsgsArray = block.body.txEffects.flatMap(txEffect => txEffect.l2ToL1Msgs); @@ -441,7 +463,9 @@ describe('L1Publisher integration', () => { coinbase, feeRecipient, ); - const [block] = await builder.buildL2Block(globalVariables, txs, l1ToL2Messages); + const blockTicket = await buildBlock(globalVariables, txs, l1ToL2Messages, makeEmptyProcessedTx()); + const result = await blockTicket.provingPromise; + const block = (result as ProvingSuccess).block; prevHeader = block.header; writeJson(`empty_block_${i}`, block, [], AztecAddress.ZERO, deployerAccount.address); diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 1b1651c944b..159d8f7cea1 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -54,12 +54,18 @@ { "path": "../protocol-contracts" }, + { + "path": "../prover-client" + }, { "path": "../pxe" }, { "path": "../sequencer-client" }, + { + "path": "../simulator" + }, { "path": "../types" }, diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 49b0dba2623..a1b03a69775 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -30,14 +30,23 @@ "rootDir": "./src" }, "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/circuits.js": "workspace:^", "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:^", + "@aztec/noir-protocol-circuits-types": "workspace:^", + "@aztec/simulator": "workspace:^", + "@aztec/world-state": "workspace:^", + 
"lodash.chunk": "^4.2.0", "tslib": "^2.4.0" }, "devDependencies": { "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", "@types/node": "^18.7.23", "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", "ts-jest": "^29.1.0", "ts-node": "^10.9.1", "typescript": "^5.0.4" diff --git a/yarn-project/prover-client/src/config.ts b/yarn-project/prover-client/src/config.ts new file mode 100644 index 00000000000..a6d2cefb83c --- /dev/null +++ b/yarn-project/prover-client/src/config.ts @@ -0,0 +1,22 @@ +/** + * The prover configuration. + */ +export interface ProverConfig { + /** The working directory to use for simulation/proving */ + acvmWorkingDirectory?: string; + /** The path to the ACVM binary */ + acvmBinaryPath?: string; +} + +/** + * Returns the prover configuration from the environment variables. + * Note: If an environment variable is not set, the default value is used. + * @returns The prover configuration. + */ +export function getConfigEnvVars(): ProverConfig { + const { ACVM_WORKING_DIRECTORY, ACVM_BINARY_PATH } = process.env; + return { + acvmWorkingDirectory: ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : undefined, + acvmBinaryPath: ACVM_BINARY_PATH ? 
ACVM_BINARY_PATH : undefined, + }; +} diff --git a/yarn-project/prover-client/src/dummy-prover.ts b/yarn-project/prover-client/src/dummy-prover.ts new file mode 100644 index 00000000000..d2c1f4842e4 --- /dev/null +++ b/yarn-project/prover-client/src/dummy-prover.ts @@ -0,0 +1,45 @@ +import { + L2Block, + PROVING_STATUS, + ProcessedTx, + ProverClient, + ProvingSuccess, + ProvingTicket, +} from '@aztec/circuit-types'; +import { GlobalVariables, makeEmptyProof } from '@aztec/circuits.js'; +import { Fr } from '@aztec/foundation/fields'; + +export class DummyProver implements ProverClient { + public start(): Promise { + return Promise.resolve(); + } + + public stop(): Promise { + return Promise.resolve(); + } + + public static new(): Promise { + return Promise.resolve(new DummyProver()); + } + + startNewBlock( + _numTxs: number, + _globalVariables: GlobalVariables, + _newL1ToL2Messages: Fr[], + _emptyTx: ProcessedTx, + ): Promise { + const result: ProvingSuccess = { + status: PROVING_STATUS.SUCCESS, + proof: makeEmptyProof(), + block: L2Block.empty(), + }; + const ticket: ProvingTicket = { + provingPromise: Promise.resolve(result), + }; + return Promise.resolve(ticket); + } + + addNewTx(_tx: ProcessedTx): Promise { + return Promise.resolve(); + } +} diff --git a/yarn-project/prover-client/src/index.ts b/yarn-project/prover-client/src/index.ts index 90fe8eacdf0..46368c53575 100644 --- a/yarn-project/prover-client/src/index.ts +++ b/yarn-project/prover-client/src/index.ts @@ -1,4 +1,8 @@ -/** - * A placeholder for the Prover Client. 
- */ -export class ProverClient {} +export * from './tx-prover/tx-prover.js'; +export * from './config.js'; +export * from './dummy-prover.js'; + +// Exported for integration_l1_publisher.test.ts +export { getVerificationKeys } from './mocks/verification_keys.js'; +export { EmptyRollupProver } from './prover/empty.js'; +export { RealRollupCircuitSimulator } from './simulator/rollup.js'; diff --git a/yarn-project/sequencer-client/src/mocks/verification_keys.ts b/yarn-project/prover-client/src/mocks/verification_keys.ts similarity index 100% rename from yarn-project/sequencer-client/src/mocks/verification_keys.ts rename to yarn-project/prover-client/src/mocks/verification_keys.ts diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts new file mode 100644 index 00000000000..07ed89f7a6c --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -0,0 +1,595 @@ +import { MerkleTreeId, ProcessedTx } from '@aztec/circuit-types'; +import { + ARCHIVE_HEIGHT, + AppendOnlyTreeSnapshot, + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BaseRollupInputs, + ConstantRollupData, + Fr, + GlobalVariables, + L1_TO_L2_MSG_SUBTREE_HEIGHT, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + MAX_NEW_NULLIFIERS_PER_TX, + MAX_PUBLIC_DATA_READS_PER_TX, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MembershipWitness, + MergeRollupInputs, + NOTE_HASH_SUBTREE_HEIGHT, + NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, + NULLIFIER_SUBTREE_HEIGHT, + NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, + NULLIFIER_TREE_HEIGHT, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NullifierLeafPreimage, + PUBLIC_DATA_SUBTREE_HEIGHT, + PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, + PUBLIC_DATA_TREE_HEIGHT, + PartialStateReference, + PreviousRollupData, + Proof, + PublicDataTreeLeaf, + PublicDataTreeLeafPreimage, + ROLLUP_VK_TREE_HEIGHT, + RollupKernelCircuitPublicInputs, + RollupKernelData, + RollupTypes, + 
RootParityInput, + RootParityInputs, + RootRollupInputs, + RootRollupPublicInputs, + StateDiffHints, + StateReference, + VK_TREE_HEIGHT, + VerificationKey, +} from '@aztec/circuits.js'; +import { assertPermutation, makeTuple } from '@aztec/foundation/array'; +import { DebugLogger } from '@aztec/foundation/log'; +import { Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize'; +import { MerkleTreeOperations } from '@aztec/world-state'; + +import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; +import { RollupProver } from '../prover/index.js'; +import { RollupSimulator } from '../simulator/rollup.js'; + +// Denotes fields that are not used now, but will be in the future +const FUTURE_FR = new Fr(0n); +const FUTURE_NUM = 0; + +// Denotes fields that should be deleted +const DELETE_FR = new Fr(0n); + +/** + * Type representing the names of the trees for the base rollup. + */ +type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'PublicDataTree'; +/** + * Type representing the names of the trees. 
+ */ +export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive'; + +// Builds the base rollup inputs, updating the contract, nullifier, and data trees in the process +export async function buildBaseRollupInput( + tx: ProcessedTx, + globalVariables: GlobalVariables, + db: MerkleTreeOperations, +) { + // Get trees info before any changes hit + const constants = await getConstantRollupData(globalVariables, db); + const start = new PartialStateReference( + await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), + await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), + await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db), + ); + // Get the subtree sibling paths for the circuit + const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath( + MerkleTreeId.NOTE_HASH_TREE, + NOTE_HASH_SUBTREE_HEIGHT, + db, + ); + + const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, i => + i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO, + ); + + // Update the note hash trees with the new items being inserted to get the new roots + // that will be used by the next iteration of the base rollup circuit, skipping the empty ones + const newNoteHashes = tx.data.combinedData.newNoteHashes.map(x => x.value.toBuffer()); + await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, newNoteHashes); + + // The read witnesses for a given TX should be generated before the writes of the same TX are applied. + // All reads that refer to writes in the same tx are transient and can be simplified out. 
+ const txPublicDataReadsInfo = await getPublicDataReadsInfo(tx, db); + const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db); + + // Update the nullifier tree, capturing the low nullifier info for each individual operation + const { + lowLeavesWitnessData: nullifierWitnessLeaves, + newSubtreeSiblingPath: newNullifiersSubtreeSiblingPath, + sortedNewLeaves: sortedNewNullifiers, + sortedNewLeavesIndexes, + } = await db.batchInsert( + MerkleTreeId.NULLIFIER_TREE, + tx.data.combinedData.newNullifiers.map(sideEffectLinkedToNoteHash => sideEffectLinkedToNoteHash.value.toBuffer()), + NULLIFIER_SUBTREE_HEIGHT, + ); + if (nullifierWitnessLeaves === undefined) { + throw new Error(`Could not craft nullifier batch insertion proofs`); + } + + // Extract witness objects from returned data + const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness[] = + nullifierWitnessLeaves.map(l => + MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)), + ); + + const nullifierSubtreeSiblingPathArray = newNullifiersSubtreeSiblingPath.toFields(); + + const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, i => + i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO, + ); + + const publicDataSiblingPath = txPublicDataUpdateRequestInfo.newPublicDataSubtreeSiblingPath; + + const stateDiffHints = StateDiffHints.from({ + nullifierPredecessorPreimages: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => + i < nullifierWitnessLeaves.length + ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage) + : NullifierLeafPreimage.empty(), + ), + nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => + i < nullifierPredecessorMembershipWitnessesWithoutPadding.length + ? 
nullifierPredecessorMembershipWitnessesWithoutPadding[i] + : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), + ), + sortedNullifiers: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortedNewNullifiers[i])), + sortedNullifierIndexes: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]), + noteHashSubtreeSiblingPath, + nullifierSubtreeSiblingPath, + publicDataSiblingPath, + }); + + const blockHash = tx.data.constants.historicalHeader.hash(); + const archiveRootMembershipWitness = await getMembershipWitnessFor( + blockHash, + MerkleTreeId.ARCHIVE, + ARCHIVE_HEIGHT, + db, + ); + + return BaseRollupInputs.from({ + kernelData: getKernelDataFor(tx, getVerificationKeys()), + start, + stateDiffHints, + + sortedPublicDataWrites: txPublicDataUpdateRequestInfo.sortedPublicDataWrites, + sortedPublicDataWritesIndexes: txPublicDataUpdateRequestInfo.sortedPublicDataWritesIndexes, + lowPublicDataWritesPreimages: txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages, + lowPublicDataWritesMembershipWitnesses: txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses, + publicDataReadsPreimages: txPublicDataReadsInfo.newPublicDataReadsPreimages, + publicDataReadsMembershipWitnesses: txPublicDataReadsInfo.newPublicDataReadsWitnesses, + + archiveRootMembershipWitness, + + constants, + }); +} + +export function createMergeRollupInputs( + left: [BaseOrMergeRollupPublicInputs, Proof], + right: [BaseOrMergeRollupPublicInputs, Proof], +) { + const vks = getVerificationKeys(); + const vk = left[0].rollupType === RollupTypes.Base ? 
vks.baseRollupCircuit : vks.mergeRollupCircuit; + const mergeInputs = new MergeRollupInputs([ + getPreviousRollupDataFromPublicInputs(left[0], left[1], vk), + getPreviousRollupDataFromPublicInputs(right[0], right[1], vk), + ]); + return mergeInputs; +} + +export async function executeMergeRollupCircuit( + mergeInputs: MergeRollupInputs, + simulator: RollupSimulator, + prover: RollupProver, + logger?: DebugLogger, +): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { + logger?.debug(`Running merge rollup circuit`); + const output = await simulator.mergeRollupCircuit(mergeInputs); + const proof = await prover.getMergeRollupProof(mergeInputs, output); + return [output, proof]; +} + +export async function executeRootRollupCircuit( + left: [BaseOrMergeRollupPublicInputs, Proof], + right: [BaseOrMergeRollupPublicInputs, Proof], + l1ToL2Roots: RootParityInput, + newL1ToL2Messages: Tuple, + simulator: RollupSimulator, + prover: RollupProver, + db: MerkleTreeOperations, + logger?: DebugLogger, +): Promise<[RootRollupPublicInputs, Proof]> { + logger?.debug(`Running root rollup circuit`); + const rootInput = await getRootRollupInput(...left, ...right, l1ToL2Roots, newL1ToL2Messages, db); + + // Update the local trees to include the new l1 to l2 messages + await db.appendLeaves( + MerkleTreeId.L1_TO_L2_MESSAGE_TREE, + newL1ToL2Messages.map(m => m.toBuffer()), + ); + + // Simulate and get proof for the root circuit + const rootOutput = await simulator.rootRollupCircuit(rootInput); + + const rootProof = await prover.getRootRollupProof(rootInput, rootOutput); + + //TODO(@PhilWindle) Move this to orchestrator to ensure that we are still on the same block + // Update the archive with the latest block header + logger?.debug(`Updating and validating root trees`); + await db.updateArchive(rootOutput.header); + + await validateRootOutput(rootOutput, db); + + return [rootOutput, rootProof]; +} + +// Validate that the roots of all local trees match the output of the root circuit 
simulation +export async function validateRootOutput(rootOutput: RootRollupPublicInputs, db: MerkleTreeOperations) { + await Promise.all([ + validateState(rootOutput.header.state, db), + validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), rootOutput.archive, 'Archive'), + ]); +} + +export async function validateState(state: StateReference, db: MerkleTreeOperations) { + const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map( + async (id: MerkleTreeId) => { + return { key: id, value: await getTreeSnapshot(id, db) }; + }, + ); + const snapshots: Map = new Map( + (await Promise.all(promises)).map(obj => [obj.key, obj.value]), + ); + validatePartialState(state.partial, snapshots); + validateSimulatedTree( + await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), + state.l1ToL2MessageTree, + 'L1ToL2MessageTree', + ); +} + +// Builds the inputs for the root rollup circuit, without making any changes to trees +export async function getRootRollupInput( + rollupOutputLeft: BaseOrMergeRollupPublicInputs, + rollupProofLeft: Proof, + rollupOutputRight: BaseOrMergeRollupPublicInputs, + rollupProofRight: Proof, + l1ToL2Roots: RootParityInput, + newL1ToL2Messages: Tuple, + db: MerkleTreeOperations, +) { + const vks = getVerificationKeys(); + const vk = rollupOutputLeft.rollupType === RollupTypes.Base ? 
vks.baseRollupCircuit : vks.mergeRollupCircuit; + const previousRollupData: RootRollupInputs['previousRollupData'] = [ + getPreviousRollupDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, vk), + getPreviousRollupDataFromPublicInputs(rollupOutputRight, rollupProofRight, vk), + ]; + + const getRootTreeSiblingPath = async (treeId: MerkleTreeId) => { + const { size } = await db.getTreeInfo(treeId); + const path = await db.getSiblingPath(treeId, size); + return path.toFields(); + }; + + const newL1ToL2MessageTreeRootSiblingPathArray = await getSubtreeSiblingPath( + MerkleTreeId.L1_TO_L2_MESSAGE_TREE, + L1_TO_L2_MSG_SUBTREE_HEIGHT, + db, + ); + + const newL1ToL2MessageTreeRootSiblingPath = makeTuple( + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + i => (i < newL1ToL2MessageTreeRootSiblingPathArray.length ? newL1ToL2MessageTreeRootSiblingPathArray[i] : Fr.ZERO), + 0, + ); + + // Get tree snapshots + const startL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db); + + // Get blocks tree + const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); + const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE); + + const newArchiveSiblingPath = makeTuple( + ARCHIVE_HEIGHT, + i => (i < newArchiveSiblingPathArray.length ? 
newArchiveSiblingPathArray[i] : Fr.ZERO), + 0, + ); + + return RootRollupInputs.from({ + previousRollupData, + l1ToL2Roots, + newL1ToL2Messages, + newL1ToL2MessageTreeRootSiblingPath, + startL1ToL2MessageTreeSnapshot, + startArchiveSnapshot, + newArchiveSiblingPath, + }); +} + +export function getPreviousRollupDataFromPublicInputs( + rollupOutput: BaseOrMergeRollupPublicInputs, + rollupProof: Proof, + vk: VerificationKey, +) { + return new PreviousRollupData( + rollupOutput, + rollupProof, + vk, + + // MembershipWitness for a VK tree to be implemented in the future + FUTURE_NUM, + new MembershipWitness( + ROLLUP_VK_TREE_HEIGHT, + BigInt(FUTURE_NUM), + makeTuple(ROLLUP_VK_TREE_HEIGHT, () => FUTURE_FR), + ), + ); +} + +export async function getConstantRollupData( + globalVariables: GlobalVariables, + db: MerkleTreeOperations, +): Promise { + return ConstantRollupData.from({ + baseRollupVkHash: DELETE_FR, + mergeRollupVkHash: DELETE_FR, + privateKernelVkTreeRoot: FUTURE_FR, + publicKernelVkTreeRoot: FUTURE_FR, + lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), + globalVariables, + }); +} + +export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeOperations): Promise { + const treeInfo = await db.getTreeInfo(id); + return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size)); +} + +export function getKernelDataFor(tx: ProcessedTx, vks: VerificationKeys): RollupKernelData { + const inputs = new RollupKernelCircuitPublicInputs( + tx.data.aggregationObject, + tx.data.combinedData, + tx.data.constants, + ); + return new RollupKernelData( + inputs, + tx.proof, + + // VK for the kernel circuit + vks.privateKernelCircuit, + + // MembershipWitness for a VK tree to be implemented in the future + FUTURE_NUM, + assertLength(Array(VK_TREE_HEIGHT).fill(FUTURE_FR), VK_TREE_HEIGHT), + ); +} + +export function makeEmptyMembershipWitness(height: N) { + return new MembershipWitness( + height, + 0n, + makeTuple(height, () => Fr.ZERO), 
+ ); +} + +export async function getPublicDataReadsInfo(tx: ProcessedTx, db: MerkleTreeOperations) { + const newPublicDataReadsWitnesses: Tuple< + MembershipWitness, + typeof MAX_PUBLIC_DATA_READS_PER_TX + > = makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, () => MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT, 0n)); + + const newPublicDataReadsPreimages: Tuple = makeTuple( + MAX_PUBLIC_DATA_READS_PER_TX, + () => PublicDataTreeLeafPreimage.empty(), + ); + + for (const i in tx.data.validationRequests.publicDataReads) { + const leafSlot = tx.data.validationRequests.publicDataReads[i].leafSlot.value; + const lowLeafResult = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); + if (!lowLeafResult) { + throw new Error(`Public data tree should have one initial leaf`); + } + const preimage = await db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, lowLeafResult.index); + const path = await db.getSiblingPath(MerkleTreeId.PUBLIC_DATA_TREE, lowLeafResult.index); + newPublicDataReadsWitnesses[i] = new MembershipWitness( + PUBLIC_DATA_TREE_HEIGHT, + BigInt(lowLeafResult.index), + path.toTuple(), + ); + newPublicDataReadsPreimages[i] = preimage! 
as PublicDataTreeLeafPreimage; + } + return { + newPublicDataReadsWitnesses, + newPublicDataReadsPreimages, + }; +} + +export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: MerkleTreeOperations) { + const combinedPublicDataUpdateRequests = tx.data.combinedData.publicDataUpdateRequests.map(updateRequest => { + return new PublicDataTreeLeaf(updateRequest.leafSlot, updateRequest.newValue); + }); + const { lowLeavesWitnessData, newSubtreeSiblingPath, sortedNewLeaves, sortedNewLeavesIndexes } = await db.batchInsert( + MerkleTreeId.PUBLIC_DATA_TREE, + combinedPublicDataUpdateRequests.map(x => x.toBuffer()), + // TODO(#3675) remove oldValue from update requests + PUBLIC_DATA_SUBTREE_HEIGHT, + ); + + if (lowLeavesWitnessData === undefined) { + throw new Error(`Could not craft public data batch insertion proofs`); + } + + const sortedPublicDataWrites = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { + return PublicDataTreeLeaf.fromBuffer(sortedNewLeaves[i]); + }); + + const sortedPublicDataWritesIndexes = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { + return sortedNewLeavesIndexes[i]; + }); + + const subtreeSiblingPathAsFields = newSubtreeSiblingPath.toFields(); + const newPublicDataSubtreeSiblingPath = makeTuple(PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, i => { + return subtreeSiblingPathAsFields[i]; + }); + + const lowPublicDataWritesMembershipWitnesses: Tuple< + MembershipWitness, + typeof MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + > = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { + const witness = lowLeavesWitnessData[i]; + return MembershipWitness.fromBufferArray( + witness.index, + assertLength(witness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT), + ); + }); + + const lowPublicDataWritesPreimages: Tuple = + makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { + return lowLeavesWitnessData[i].leafPreimage as PublicDataTreeLeafPreimage; + }); + + // validate that the sortedPublicDataWrites and 
sortedPublicDataWritesIndexes are in the correct order + // otherwise it will just fail in the circuit + assertPermutation(combinedPublicDataUpdateRequests, sortedPublicDataWrites, sortedPublicDataWritesIndexes, (a, b) => + a.equals(b), + ); + + return { + lowPublicDataWritesPreimages, + lowPublicDataWritesMembershipWitnesses, + newPublicDataSubtreeSiblingPath, + sortedPublicDataWrites, + sortedPublicDataWritesIndexes, + }; +} + +export async function getSubtreeSiblingPath( + treeId: MerkleTreeId, + subtreeHeight: number, + db: MerkleTreeOperations, +): Promise { + const nextAvailableLeafIndex = await db.getTreeInfo(treeId).then(t => t.size); + const fullSiblingPath = await db.getSiblingPath(treeId, nextAvailableLeafIndex); + + // Drop the first subtreeHeight items since we only care about the path to the subtree root + return fullSiblingPath.getSubtreeSiblingPath(subtreeHeight).toFields(); +} + +// Scan a tree searching for a specific value and return a membership witness proof for it +export async function getMembershipWitnessFor( + value: Fr, + treeId: MerkleTreeId, + height: N, + db: MerkleTreeOperations, +): Promise> { + // If this is an empty tx, then just return zeroes + if (value.isZero()) { + return makeEmptyMembershipWitness(height); + } + + const index = await db.findLeafIndex(treeId, value.toBuffer()); + if (index === undefined) { + throw new Error(`Leaf with value ${value} not found in tree ${MerkleTreeId[treeId]}`); + } + const path = await db.getSiblingPath(treeId, index); + return new MembershipWitness(height, index, assertLength(path.toFields(), height)); +} + +export async function executeBaseRollupCircuit( + tx: ProcessedTx, + inputs: BaseRollupInputs, + treeSnapshots: Map, + simulator: RollupSimulator, + prover: RollupProver, + logger?: DebugLogger, +): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { + logger?.(`Running base rollup for ${tx.hash}`); + const rollupOutput = await simulator.baseRollupCircuit(inputs); + 
validatePartialState(rollupOutput.end, treeSnapshots); + const proof = await prover.getBaseRollupProof(inputs, rollupOutput); + return [rollupOutput, proof]; +} + +export function validatePartialState( + partialState: PartialStateReference, + treeSnapshots: Map, +) { + validateSimulatedTree(treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)!, partialState.noteHashTree, 'NoteHashTree'); + validateSimulatedTree(treeSnapshots.get(MerkleTreeId.NULLIFIER_TREE)!, partialState.nullifierTree, 'NullifierTree'); + validateSimulatedTree( + treeSnapshots.get(MerkleTreeId.PUBLIC_DATA_TREE)!, + partialState.publicDataTree, + 'PublicDataTree', + ); +} + +// Helper for comparing two trees snapshots +export function validateSimulatedTree( + localTree: AppendOnlyTreeSnapshot, + simulatedTree: AppendOnlyTreeSnapshot, + name: TreeNames, + label?: string, +) { + if (!simulatedTree.root.toBuffer().equals(localTree.root.toBuffer())) { + throw new Error(`${label ?? name} tree root mismatch (local ${localTree.root}, simulated ${simulatedTree.root})`); + } + if (simulatedTree.nextAvailableLeafIndex !== localTree.nextAvailableLeafIndex) { + throw new Error( + `${label ?? 
name} tree next available leaf index mismatch (local ${localTree.nextAvailableLeafIndex}, simulated ${ + simulatedTree.nextAvailableLeafIndex + })`, + ); + } +} + +export async function executeBaseParityCircuit( + inputs: BaseParityInputs, + simulator: RollupSimulator, + prover: RollupProver, + logger?: DebugLogger, +): Promise { + logger?.debug(`Running base parity circuit`); + const parityPublicInputs = await simulator.baseParityCircuit(inputs); + const proof = await prover.getBaseParityProof(inputs, parityPublicInputs); + return new RootParityInput(proof, parityPublicInputs); +} + +export async function executeRootParityCircuit( + inputs: RootParityInputs, + simulator: RollupSimulator, + prover: RollupProver, + logger?: DebugLogger, +): Promise { + logger?.debug(`Running root parity circuit`); + const parityPublicInputs = await simulator.rootParityCircuit(inputs); + const proof = await prover.getRootParityProof(inputs, parityPublicInputs); + return new RootParityInput(proof, parityPublicInputs); +} + +export function validateTx(tx: ProcessedTx) { + const txHeader = tx.data.constants.historicalHeader; + if (txHeader.state.l1ToL2MessageTree.isZero()) { + throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`); + } + if (txHeader.state.partial.noteHashTree.isZero()) { + throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`); + } + if (txHeader.state.partial.nullifierTree.isZero()) { + throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`); + } + if (txHeader.state.partial.publicDataTree.isZero()) { + throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`); + } +} diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts new file mode 100644 index 00000000000..73a2bd9fe2d --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.test.ts @@ -0,0 +1,596 @@ +import { + MerkleTreeId, + PROVING_STATUS, + 
ProcessedTx, + ProvingSuccess, + makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, + makeProcessedTx, + mockTx, +} from '@aztec/circuit-types'; +import { + AztecAddress, + BaseOrMergeRollupPublicInputs, + EthAddress, + Fr, + GlobalVariables, + MAX_NEW_L2_TO_L1_MSGS_PER_TX, + MAX_NEW_NOTE_HASHES_PER_TX, + MAX_NEW_NULLIFIERS_PER_TX, + MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, + MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, + MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_REVERTIBLE_NOTE_HASHES_PER_TX, + MAX_REVERTIBLE_NULLIFIERS_PER_TX, + MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + NULLIFIER_SUBTREE_HEIGHT, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + PUBLIC_DATA_SUBTREE_HEIGHT, + Proof, + PublicDataTreeLeaf, + PublicDataUpdateRequest, + PublicKernelCircuitPublicInputs, + RootRollupPublicInputs, + SideEffect, + SideEffectLinkedToNoteHash, + sideEffectCmp, +} from '@aztec/circuits.js'; +import { + fr, + makeBaseOrMergeRollupPublicInputs, + makeNewSideEffect, + makeNewSideEffectLinkedToNoteHash, + makeParityPublicInputs, + makeProof, + makeRootRollupPublicInputs, +} from '@aztec/circuits.js/testing'; +import { makeTuple, range } from '@aztec/foundation/array'; +import { padArrayEnd, times } from '@aztec/foundation/collection'; +import { toTruncField } from '@aztec/foundation/serialize'; +import { sleep } from '@aztec/foundation/sleep'; +import { openTmpStore } from '@aztec/kv-store/utils'; +import { WASMSimulator } from '@aztec/simulator'; +import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; + +import { MockProxy, mock } from 'jest-mock-extended'; +import { type MemDown, default as memdown } from 'memdown'; + +import { getVerificationKeys } from '../mocks/verification_keys.js'; +import { RollupProver } from '../prover/index.js'; +import { RollupSimulator } from '../simulator/rollup.js'; +import { ProvingOrchestrator } from './orchestrator.js'; + +export const createMemDown = () => (memdown as any)() as MemDown; + 
+describe('prover/tx-prover', () => { + let builder: ProvingOrchestrator; + let builderDb: MerkleTreeOperations; + let expectsDb: MerkleTreeOperations; + + let simulator: MockProxy; + let prover: MockProxy; + + let blockNumber: number; + let baseRollupOutputLeft: BaseOrMergeRollupPublicInputs; + let baseRollupOutputRight: BaseOrMergeRollupPublicInputs; + let rootRollupOutput: RootRollupPublicInputs; + let mockL1ToL2Messages: Fr[]; + + let globalVariables: GlobalVariables; + + const emptyProof = new Proof(Buffer.alloc(32, 0)); + + const chainId = Fr.ZERO; + const version = Fr.ZERO; + const coinbase = EthAddress.ZERO; + const feeRecipient = AztecAddress.ZERO; + + beforeEach(async () => { + blockNumber = 3; + globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); + + builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); + simulator = mock(); + prover = mock(); + builder = new ProvingOrchestrator(builderDb, new WASMSimulator(), getVerificationKeys(), prover); + + // Create mock l1 to L2 messages + mockL1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)); + + // Create mock outputs for simulator + baseRollupOutputLeft = makeBaseOrMergeRollupPublicInputs(0, globalVariables); + baseRollupOutputRight = makeBaseOrMergeRollupPublicInputs(0, globalVariables); + rootRollupOutput = makeRootRollupPublicInputs(0); + rootRollupOutput.header.globalVariables = globalVariables; + + // Set up mocks + prover.getBaseParityProof.mockResolvedValue(emptyProof); + prover.getRootParityProof.mockResolvedValue(emptyProof); + prover.getBaseRollupProof.mockResolvedValue(emptyProof); + prover.getMergeRollupProof.mockResolvedValue(emptyProof); + prover.getRootRollupProof.mockResolvedValue(emptyProof); + simulator.baseParityCircuit + .mockResolvedValueOnce(makeParityPublicInputs(1)) + 
.mockResolvedValue(makeParityPublicInputs(2)) + .mockResolvedValue(makeParityPublicInputs(3)) + .mockResolvedValueOnce(makeParityPublicInputs(4)); + simulator.rootParityCircuit.mockResolvedValueOnce(makeParityPublicInputs(5)); + simulator.baseRollupCircuit + .mockResolvedValueOnce(baseRollupOutputLeft) + .mockResolvedValueOnce(baseRollupOutputRight); + simulator.rootRollupCircuit.mockResolvedValue(rootRollupOutput); + }, 20_000); + + const makeEmptyProcessedTx = async () => { + const header = await builderDb.buildInitialHeader(); + return makeEmptyProcessedTxFromHistoricalTreeRoots(header, chainId, version); + }; + + // Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs + const updateExpectedTreesFromTxs = async (txs: ProcessedTx[]) => { + await expectsDb.appendLeaves( + MerkleTreeId.NOTE_HASH_TREE, + txs.flatMap(tx => + padArrayEnd( + [...tx.data.endNonRevertibleData.newNoteHashes, ...tx.data.end.newNoteHashes] + .filter(x => !x.isEmpty()) + .sort(sideEffectCmp), + SideEffect.empty(), + MAX_NEW_NOTE_HASHES_PER_TX, + ).map(l => l.value.toBuffer()), + ), + ); + await expectsDb.batchInsert( + MerkleTreeId.NULLIFIER_TREE, + txs.flatMap(tx => + padArrayEnd( + [...tx.data.endNonRevertibleData.newNullifiers, ...tx.data.end.newNullifiers] + .filter(x => !x.isEmpty()) + .sort(sideEffectCmp), + SideEffectLinkedToNoteHash.empty(), + MAX_NEW_NULLIFIERS_PER_TX, + ).map(x => x.value.toBuffer()), + ), + NULLIFIER_SUBTREE_HEIGHT, + ); + for (const tx of txs) { + await expectsDb.batchInsert( + MerkleTreeId.PUBLIC_DATA_TREE, + [...tx.data.endNonRevertibleData.publicDataUpdateRequests, ...tx.data.end.publicDataUpdateRequests].map( + write => { + return new PublicDataTreeLeaf(write.leafSlot, write.newValue).toBuffer(); + }, + ), + PUBLIC_DATA_SUBTREE_HEIGHT, + ); + } + }; + + // const updateL1ToL2MessageTree = async (l1ToL2Messages: Fr[]) => { + // const asBuffer = l1ToL2Messages.map(m => m.toBuffer()); + // await 
expectsDb.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, asBuffer); + // }; + + // const updateArchive = async () => { + // const blockHash = rootRollupOutput.header.hash(); + // await expectsDb.appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); + // }; + + // const getTreeSnapshot = async (tree: MerkleTreeId) => { + // const treeInfo = await expectsDb.getTreeInfo(tree); + // return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size)); + // }; + + // const getPartialStateReference = async () => { + // return new PartialStateReference( + // await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), + // await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE), + // await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), + // ); + // }; + + // const getStateReference = async () => { + // return new StateReference( + // await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), + // await getPartialStateReference(), + // ); + // }; + + // const buildMockSimulatorInputs = async () => { + // const kernelOutput = makePrivateKernelTailCircuitPublicInputs(); + // kernelOutput.constants.historicalHeader = await expectsDb.buildInitialHeader(); + // kernelOutput.needsAppLogic = false; + // kernelOutput.needsSetup = false; + // kernelOutput.needsTeardown = false; + + // const tx = makeProcessedTx( + // new Tx( + // kernelOutput, + // emptyProof, + // makeEmptyLogs(), + // makeEmptyLogs(), + // times(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makePublicCallRequest), + // ), + // ); + + // const txs = [tx, await makeEmptyProcessedTx()]; + + // // Calculate what would be the tree roots after the first tx and update mock circuit output + // await updateExpectedTreesFromTxs([txs[0]]); + // baseRollupOutputLeft.end = await getPartialStateReference(); + // baseRollupOutputLeft.txsEffectsHash = to2Fields(toTxEffect(tx).hash()); + + // // Same for the tx on the right + // await updateExpectedTreesFromTxs([txs[1]]); + // baseRollupOutputRight.end = await 
getPartialStateReference(); + // baseRollupOutputRight.txsEffectsHash = to2Fields(toTxEffect(tx).hash()); + + // // Update l1 to l2 message tree + // await updateL1ToL2MessageTree(mockL1ToL2Messages); + + // // Collect all new nullifiers, commitments, and contracts from all txs in this block + // const txEffects: TxEffect[] = txs.map(tx => toTxEffect(tx)); + + // const body = new Body(padArrayEnd(mockL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP), txEffects); + // // We are constructing the block here just to get body hash/calldata hash so we can pass in an empty archive and header + // const l2Block = L2Block.fromFields({ + // archive: AppendOnlyTreeSnapshot.zero(), + // header: Header.empty(), + // // Only the values below go to body hash/calldata hash + // body, + // }); + + // // Now we update can make the final header, compute the block hash and update archive + // rootRollupOutput.header.globalVariables = globalVariables; + // rootRollupOutput.header.contentCommitment.txsEffectsHash = l2Block.body.getTxsEffectsHash(); + // rootRollupOutput.header.state = await getStateReference(); + + // await updateArchive(); + // rootRollupOutput.archive = await getTreeSnapshot(MerkleTreeId.ARCHIVE); + + // return txs; + // }; + + describe('error handling', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, new WASMSimulator(), prover); + }); + + it.each([ + [ + 'Base Rollup Failed', + () => { + prover.getBaseRollupProof.mockRejectedValue('Base Rollup Failed'); + }, + ], + [ + 'Merge Rollup Failed', + () => { + prover.getMergeRollupProof.mockRejectedValue('Merge Rollup Failed'); + }, + ], + [ + 'Root Rollup Failed', + () => { + prover.getRootRollupProof.mockRejectedValue('Root Rollup Failed'); + }, + ], + [ + 'Base Parity Failed', + () => { + prover.getBaseParityProof.mockRejectedValue('Base Parity Failed'); + }, + ], + [ + 'Root Parity Failed', + () => { + prover.getRootParityProof.mockRejectedValue('Root Parity Failed'); 
+ }, + ], + ] as const)( + 'handles a %s error', + async (message: string, fn: () => void) => { + fn(); + const txs = await Promise.all([ + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + ]); + + const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + await expect(blockTicket.provingPromise).resolves.toEqual({ status: PROVING_STATUS.FAILURE, reason: message }); + }, + 60000, + ); + + afterEach(async () => { + await builder.stop(); + }); + }); + + describe('circuits simulator', () => { + beforeEach(async () => { + builder = await ProvingOrchestrator.new(builderDb, new WASMSimulator(), prover); + }); + + afterEach(async () => { + await builder.stop(); + }); + + const makeBloatedProcessedTx = async (seed = 0x1) => { + seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds + const tx = mockTx(seed); + const kernelOutput = PublicKernelCircuitPublicInputs.empty(); + kernelOutput.constants.historicalHeader = await builderDb.buildInitialHeader(); + kernelOutput.end.publicDataUpdateRequests = makeTuple( + MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), + seed + 0x500, + ); + kernelOutput.endNonRevertibleData.publicDataUpdateRequests = makeTuple( + MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), + seed + 0x600, + ); + + const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); + + processedTx.data.end.newNoteHashes = makeTuple( + MAX_REVERTIBLE_NOTE_HASHES_PER_TX, + makeNewSideEffect, + seed + 0x100, + ); + processedTx.data.endNonRevertibleData.newNoteHashes = makeTuple( + MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, + makeNewSideEffect, + seed + 0x100, + ); + processedTx.data.end.newNullifiers = makeTuple( + MAX_REVERTIBLE_NULLIFIERS_PER_TX, + 
makeNewSideEffectLinkedToNoteHash, + seed + 0x100000, + ); + + processedTx.data.endNonRevertibleData.newNullifiers = makeTuple( + MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, + makeNewSideEffectLinkedToNoteHash, + seed + 0x100000 + MAX_REVERTIBLE_NULLIFIERS_PER_TX, + ); + + processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); + + processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); + processedTx.data.end.encryptedLogsHash = toTruncField(processedTx.encryptedLogs.hash()); + processedTx.data.end.unencryptedLogsHash = toTruncField(processedTx.unencryptedLogs.hash()); + + return processedTx; + }; + + it.each([ + [0, 4], + [1, 4], + [4, 4], + [0, 16], + [4, 16], + ] as const)( + 'builds an L2 block with %i bloated txs and %i txs total', + async (bloatedCount: number, totalCount: number) => { + const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); + const txs = [ + ...(await Promise.all(times(bloatedCount, makeBloatedProcessedTx))), + ...(await Promise.all(times(totalCount - bloatedCount, makeEmptyProcessedTx))), + ]; + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + mockL1ToL2Messages, + await makeEmptyProcessedTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + + expect((result as ProvingSuccess).block.number).toEqual(blockNumber); + + await updateExpectedTreesFromTxs(txs); + const noteHashTreeAfter = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); + + if (bloatedCount > 0) { + expect(noteHashTreeAfter.root).not.toEqual(noteHashTreeBefore.root); + } + + const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); + expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); + }, + 60000, + ); + + it('builds an empty 
L2 block', async () => { + const txs = await Promise.all([ + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + ]); + + const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + expect((result as ProvingSuccess).block.number).toEqual(blockNumber); + }, 30_000); + + it('builds a block with 1 transaction', async () => { + const txs = await Promise.all([makeEmptyProcessedTx()]); + + const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + expect((result as ProvingSuccess).block.number).toEqual(blockNumber); + }, 30_000); + + it('builds a mixed L2 block', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(1), + makeBloatedProcessedTx(2), + makeBloatedProcessedTx(3), + makeBloatedProcessedTx(4), + ]); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + expect((result as ProvingSuccess).block.number).toEqual(blockNumber); + }, 200_000); + + it('builds a block concurrently with transactions', async () => { + const txs = await Promise.all([ + makeBloatedProcessedTx(1), + makeBloatedProcessedTx(2), + makeBloatedProcessedTx(3), + makeBloatedProcessedTx(4), + ]); + + const l1ToL2Messages = 
range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + await sleep(1000); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + expect((result as ProvingSuccess).block.number).toEqual(blockNumber); + }, 200_000); + + // it('cancels current blocks and switches to new ones', async () => { + // const txs = await Promise.all([ + // makeBloatedProcessedTx(1), + // makeBloatedProcessedTx(2), + // makeBloatedProcessedTx(3), + // makeBloatedProcessedTx(4), + // ]); + + // const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + // const blockPromise1 = await builder.startNewBlock( + // txs.length, + // globalVariables, + // l1ToL2Messages, + // await makeEmptyProcessedTx(), + // ); + + // builder.addNewTx(txs[0]); + + // const blockPromise2 = await builder.startNewBlock( + // txs.length, + // globalVariables, + // l1ToL2Messages, + // await makeEmptyProcessedTx(), + // ); + + // builder.addNewTx(txs[0]); + + // await expect(blockPromise1).rejects.toEqual('Block cancelled'); + + // const result = await blockPromise2; + // expect(result.block.number).toEqual(blockNumber); + // }, 200_000); + + it('builds an unbalanced L2 block', async () => { + const txs = await Promise.all([makeEmptyProcessedTx(), makeEmptyProcessedTx(), makeEmptyProcessedTx()]); + + const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); + + const blockTicket = await builder.startNewBlock( + txs.length, + globalVariables, + l1ToL2Messages, + await makeEmptyProcessedTx(), + ); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + expect((result as 
ProvingSuccess).block.number).toEqual(blockNumber); + }, 200_000); + + it('throws if adding too many transactions', async () => { + const txs = await Promise.all([ + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + makeEmptyProcessedTx(), + ]); + + const blockTicket = await builder.startNewBlock(txs.length, globalVariables, [], await makeEmptyProcessedTx()); + + for (const tx of txs) { + await builder.addNewTx(tx); + } + + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTx())).rejects.toThrow( + `Rollup already contains 4 transactions`, + ); + + const result = await blockTicket.provingPromise; + expect(result.status).toBe(PROVING_STATUS.SUCCESS); + expect((result as ProvingSuccess).block.number).toEqual(blockNumber); + }, 30_000); + + it('throws if adding a transaction before start', async () => { + await expect(async () => await builder.addNewTx(await makeEmptyProcessedTx())).rejects.toThrow( + `Invalid proving state, call startNewBlock before adding transactions`, + ); + }, 30_000); + + it('rejects if too many l1 to l2 messages are provided', async () => { + // Assemble a fake transaction + const l1ToL2Messages = new Array(100).fill(new Fr(0n)); + await expect( + async () => await builder.startNewBlock(1, globalVariables, l1ToL2Messages, await makeEmptyProcessedTx()), + ).rejects.toThrow('Too many L1 to L2 messages'); + }); + }); +}); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts new file mode 100644 index 00000000000..d813a9dc0d1 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -0,0 +1,522 @@ +import { Body, L2Block, MerkleTreeId, ProcessedTx, TxEffect, toTxEffect } from '@aztec/circuit-types'; +import { PROVING_STATUS, ProvingResult, ProvingTicket } from '@aztec/circuit-types/interfaces'; +import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; +import { + 
AppendOnlyTreeSnapshot, + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BaseRollupInputs, + Fr, + GlobalVariables, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NUM_BASE_PARITY_PER_ROOT_PARITY, + Proof, + RootParityInput, + RootParityInputs, +} from '@aztec/circuits.js'; +import { padArrayEnd } from '@aztec/foundation/collection'; +import { MemoryFifo } from '@aztec/foundation/fifo'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { Tuple } from '@aztec/foundation/serialize'; +import { sleep } from '@aztec/foundation/sleep'; +import { elapsed } from '@aztec/foundation/timer'; +import { SimulationProvider } from '@aztec/simulator'; +import { MerkleTreeOperations } from '@aztec/world-state'; + +import { inspect } from 'util'; + +import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; +import { RollupProver } from '../prover/index.js'; +import { RealRollupCircuitSimulator, RollupSimulator } from '../simulator/rollup.js'; +import { + buildBaseRollupInput, + createMergeRollupInputs, + executeBaseParityCircuit, + executeBaseRollupCircuit, + executeMergeRollupCircuit, + executeRootParityCircuit, + executeRootRollupCircuit, + getTreeSnapshot, + validateTx, +} from './block-building-helpers.js'; +import { MergeRollupInputData, PROVING_JOB_TYPE, ProvingJob, ProvingState } from './proving-state.js'; + +const logger = createDebugLogger('aztec:prover:proving-orchestrator'); + +/** + * Implements an event driven proving scheduler to build the recursive proof tree. The idea being: + * 1. Transactions are provided to the scheduler post simulation. + * 2. Tree insertions are performed as required to generate transaction specific proofs + * 3. Those transaction specific proofs are generated in the necessary order accounting for dependencies + * 4. Once a transaction is proven, it will be incorporated into a merge proof + * 5. 
Merge proofs are produced at each level of the tree until the root proof is produced + * + * The proving implementation is determined by the provided prover implementation. This could be for example a local prover or a remote prover pool. + */ + +const SLEEP_TIME = 50; +const MAX_CONCURRENT_JOBS = 64; + +enum PROMISE_RESULT { + SLEEP, + OPERATIONS, +} + +/** + * The orchestrator, managing the flow of recursive proving operations required to build the rollup proof tree. + */ +export class ProvingOrchestrator { + private provingState: ProvingState | undefined = undefined; + private jobQueue: MemoryFifo = new MemoryFifo(); + private simulator: RollupSimulator; + private jobProcessPromise?: Promise; + private stopped = false; + constructor( + private db: MerkleTreeOperations, + simulationProvider: SimulationProvider, + protected vks: VerificationKeys, + private prover: RollupProver, + private maxConcurrentJobs = MAX_CONCURRENT_JOBS, + ) { + this.simulator = new RealRollupCircuitSimulator(simulationProvider); + } + + public static new(db: MerkleTreeOperations, simulationProvider: SimulationProvider, prover: RollupProver) { + const orchestrator = new ProvingOrchestrator(db, simulationProvider, getVerificationKeys(), prover); + orchestrator.start(); + return Promise.resolve(orchestrator); + } + + public start() { + this.jobProcessPromise = this.processJobQueue(); + } + + public async stop() { + this.stopped = true; + this.jobQueue.cancel(); + await this.jobProcessPromise; + } + + /** + * Starts off a new block + * @param numTxs - The number of real transactions in the block + * @param globalVariables - The global variables for the block + * @param l1ToL2Messages - The l1 to l2 messages for the block + * @param emptyTx - The instance of an empty transaction to be used to pad this block + * @returns A proving ticket, containing a promise notifying of proving completion + */ + public async startNewBlock( + numTxs: number, + globalVariables: GlobalVariables, + l1ToL2Messages: 
Fr[], + emptyTx: ProcessedTx, + ): Promise { + if (this.provingState && !this.provingState.isFinished()) { + throw new Error("Can't start a new block until the previous block is finished"); + } + logger.info(`Starting new block with ${numTxs} transactions`); + // we start the block by enqueueing all of the base parity circuits + let baseParityInputs: BaseParityInputs[] = []; + let l1ToL2MessagesPadded: Tuple; + try { + l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + } catch (err) { + throw new Error('Too many L1 to L2 messages'); + } + baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, (_, i) => + BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i), + ); + + //TODO:(@PhilWindle) Temporary until we figure out when to perform L1 to L2 insertions to make state consistency easier. + await Promise.resolve(); + + const promise = new Promise((resolve, reject) => { + this.provingState = new ProvingState( + numTxs, + resolve, + reject, + globalVariables, + l1ToL2MessagesPadded, + baseParityInputs.length, + emptyTx, + ); + }).catch((reason: string) => ({ status: PROVING_STATUS.FAILURE, reason } as const)); + + for (let i = 0; i < baseParityInputs.length; i++) { + this.enqueueJob(this.provingState!.Id, PROVING_JOB_TYPE.BASE_PARITY, () => + this.runBaseParityCircuit(baseParityInputs[i], i, this.provingState!.Id), + ); + } + + const ticket: ProvingTicket = { + provingPromise: promise, + }; + return ticket; + } + + /** + * The interface to add a simulated transaction to the scheduler + * @param tx - The transaction to be proven + */ + public async addNewTx(tx: ProcessedTx): Promise { + if (!this.provingState) { + throw new Error(`Invalid proving state, call startNewBlock before adding transactions`); + } + + if (this.provingState.numTxs === this.provingState.transactionsReceived) { + throw new Error(`Rollup already contains ${this.provingState.transactionsReceived} transactions`); + } + + validateTx(tx); 
+ + logger.info(`Received transaction :${tx.hash}`); + + // We start the transaction by enqueueing the state updates + + const txIndex = this.provingState!.addNewTx(tx); + // we start this transaction off by performing its tree insertions and preparing the base rollup inputs + await this.prepareBaseRollupInputs(BigInt(txIndex), tx, this.provingState!.globalVariables, this.provingState!.Id); + + if (this.provingState.transactionsReceived === this.provingState.numTxs) { + // we need to pad the rollup with empty transactions + const numPaddingTxs = this.provingState.numPaddingTxs; + for (let i = 0; i < numPaddingTxs; i++) { + const paddingTxIndex = this.provingState.addNewTx(this.provingState.emptyTx); + await this.prepareBaseRollupInputs( + BigInt(paddingTxIndex), + this.provingState!.emptyTx, + this.provingState!.globalVariables, + this.provingState!.Id, + ); + } + } + } + + /** + * Enqueue a job to be scheduled + * @param stateIdentifier - For state Id verification + * @param jobType - The type of job to be queued + * @param job - The actual job, returns a promise notifying of the job's completion + */ + private enqueueJob(stateIdentifier: string, jobType: PROVING_JOB_TYPE, job: () => Promise) { + if (!this.provingState!.verifyState(stateIdentifier)) { + logger(`Discarding job for state ID: ${stateIdentifier}`); + return; + } + // We use a 'safeJob'. We don't want promise rejections in the proving pool, we want to capture the error here + // and reject the proving job whilst keeping the event loop free of rejections + const safeJob = async () => { + try { + await job(); + } catch (err) { + logger.error(`Error thrown when proving job type ${PROVING_JOB_TYPE[jobType]}: ${err}`); + this.provingState!.reject(`${err}`, stateIdentifier); + } + }; + const provingJob: ProvingJob = { + type: jobType, + operation: safeJob, + }; + this.jobQueue.put(provingJob); + } + + // Updates the merkle trees for a transaction. 
The first enqueued job for a transaction + private async prepareBaseRollupInputs( + index: bigint, + tx: ProcessedTx, + globalVariables: GlobalVariables, + stateIdentifier: string, + ) { + const inputs = await buildBaseRollupInput(tx, globalVariables, this.db); + const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map( + async (id: MerkleTreeId) => { + return { key: id, value: await getTreeSnapshot(id, this.db) }; + }, + ); + const treeSnapshots: Map = new Map( + (await Promise.all(promises)).map(obj => [obj.key, obj.value]), + ); + + if (!this.provingState?.verifyState(stateIdentifier)) { + logger(`Discarding job for state ID: ${stateIdentifier}`); + return; + } + + this.enqueueJob(stateIdentifier, PROVING_JOB_TYPE.BASE_ROLLUP, () => + this.runBaseRollup(index, tx, inputs, treeSnapshots, stateIdentifier), + ); + } + + // Stores the intermediate inputs prepared for a merge proof + private storeMergeInputs( + currentLevel: bigint, + currentIndex: bigint, + mergeInputs: [BaseOrMergeRollupPublicInputs, Proof], + ) { + const mergeLevel = currentLevel - 1n; + const indexWithinMergeLevel = currentIndex >> 1n; + const mergeIndex = 2n ** mergeLevel - 1n + indexWithinMergeLevel; + const subscript = Number(mergeIndex); + const indexWithinMerge = Number(currentIndex & 1n); + const ready = this.provingState!.storeMergeInputs(mergeInputs, indexWithinMerge, subscript); + return { ready, indexWithinMergeLevel, mergeLevel, mergeInputData: this.provingState!.getMergeInputs(subscript) }; + } + + // Executes the base rollup circuit and stores the output as intermediate state for the parent merge/root circuit + // Executes the next level of merge if all inputs are available + private async runBaseRollup( + index: bigint, + tx: ProcessedTx, + inputs: BaseRollupInputs, + treeSnapshots: Map, + stateIdentifier: string, + ) { + const [duration, baseRollupOutputs] = await elapsed(() => + executeBaseRollupCircuit(tx, inputs, 
treeSnapshots, this.simulator, this.prover, logger), + ); + logger.debug(`Simulated base rollup circuit`, { + eventName: 'circuit-simulation', + circuitName: 'base-rollup', + duration, + inputSize: inputs.toBuffer().length, + outputSize: baseRollupOutputs[0].toBuffer().length, + } satisfies CircuitSimulationStats); + if (!this.provingState?.verifyState(stateIdentifier)) { + logger(`Discarding job for state ID: ${stateIdentifier}`); + return; + } + const currentLevel = this.provingState!.numMergeLevels + 1n; + logger.info(`Completed base rollup at index ${index}, current level ${currentLevel}`); + this.storeAndExecuteNextMergeLevel(currentLevel, index, baseRollupOutputs, stateIdentifier); + } + + // Executes the merge rollup circuit and stores the output as intermediate state for the parent merge/root circuit + // Executes the next level of merge if all inputs are available + private async runMergeRollup( + level: bigint, + index: bigint, + mergeInputData: MergeRollupInputData, + stateIdentifier: string, + ) { + const circuitInputs = createMergeRollupInputs( + [mergeInputData.inputs[0]!, mergeInputData.proofs[0]!], + [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], + ); + const [duration, circuitOutputs] = await elapsed(() => + executeMergeRollupCircuit(circuitInputs, this.simulator, this.prover, logger), + ); + logger.debug(`Simulated merge rollup circuit`, { + eventName: 'circuit-simulation', + circuitName: 'merge-rollup', + duration, + inputSize: circuitInputs.toBuffer().length, + outputSize: circuitOutputs[0].toBuffer().length, + } satisfies CircuitSimulationStats); + if (!this.provingState?.verifyState(stateIdentifier)) { + logger(`Discarding job for state ID: ${stateIdentifier}`); + return; + } + logger.info(`Completed merge rollup at level ${level}, index ${index}`); + this.storeAndExecuteNextMergeLevel(level, index, circuitOutputs, stateIdentifier); + } + + // Executes the root rollup circuit + private async runRootRollup( + mergeInputData: 
MergeRollupInputData, + rootParityInput: RootParityInput, + stateIdentifier: string, + ) { + const [circuitsOutput, proof] = await executeRootRollupCircuit( + [mergeInputData.inputs[0]!, mergeInputData.proofs[0]!], + [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], + rootParityInput, + this.provingState!.newL1ToL2Messages, + this.simulator, + this.prover, + this.db, + logger, + ); + logger.info(`Completed root rollup`); + // Collect all new nullifiers, commitments, and contracts from all txs in this block + const txEffects: TxEffect[] = this.provingState!.allTxs.map(tx => toTxEffect(tx)); + + const blockBody = new Body(txEffects); + + const l2Block = L2Block.fromFields({ + archive: circuitsOutput.archive, + header: circuitsOutput.header, + body: blockBody, + }); + + if (!l2Block.body.getTxsEffectsHash().equals(circuitsOutput.header.contentCommitment.txsEffectsHash)) { + logger(inspect(blockBody)); + throw new Error( + `Txs effects hash mismatch, ${l2Block.body + .getTxsEffectsHash() + .toString('hex')} == ${circuitsOutput.header.contentCommitment.txsEffectsHash.toString('hex')} `, + ); + } + + const provingResult: ProvingResult = { + status: PROVING_STATUS.SUCCESS, + block: l2Block, + proof, + }; + logger.info(`Successfully proven block ${l2Block.number}!`); + this.provingState!.resolve(provingResult, stateIdentifier); + } + + // Executes the base parity circuit and stores the intermediate state for the root parity circuit + // Enqueues the root parity circuit if all inputs are available + private async runBaseParityCircuit(inputs: BaseParityInputs, index: number, stateIdentifier: string) { + const [duration, circuitOutputs] = await elapsed(() => + executeBaseParityCircuit(inputs, this.simulator, this.prover, logger), + ); + logger.debug(`Simulated base parity circuit`, { + eventName: 'circuit-simulation', + circuitName: 'base-parity', + duration, + inputSize: inputs.toBuffer().length, + outputSize: circuitOutputs.toBuffer().length, + } satisfies 
CircuitSimulationStats); + if (!this.provingState?.verifyState(stateIdentifier)) { + logger(`Discarding job for state ID: ${stateIdentifier}`); + return; + } + this.provingState!.setRootParityInputs(circuitOutputs, index); + + if (!this.provingState!.areRootParityInputsReady()) { + // not ready to run the root parity circuit yet + return; + } + const rootParityInputs = new RootParityInputs( + this.provingState!.rootParityInput as Tuple, + ); + this.enqueueJob(stateIdentifier, PROVING_JOB_TYPE.ROOT_PARITY, () => + this.runRootParityCircuit(rootParityInputs, stateIdentifier), + ); + } + + // Runs the root parity circuit and stores the outputs + // Enqueues the root rollup proof if all inputs are available + private async runRootParityCircuit(inputs: RootParityInputs, stateIdentifier: string) { + const [duration, circuitOutputs] = await elapsed(() => + executeRootParityCircuit(inputs, this.simulator, this.prover, logger), + ); + logger.debug(`Simulated root parity circuit`, { + eventName: 'circuit-simulation', + circuitName: 'root-parity', + duration, + inputSize: inputs.toBuffer().length, + outputSize: circuitOutputs.toBuffer().length, + } satisfies CircuitSimulationStats); + if (!this.provingState?.verifyState(stateIdentifier)) { + logger(`Discarding job for state ID: ${stateIdentifier}`); + return; + } + this.provingState!.finalRootParityInput = circuitOutputs; + this.checkAndExecuteRootRollup(stateIdentifier); + } + + private checkAndExecuteRootRollup(stateIdentifier: string) { + if (!this.provingState!.isReadyForRootRollup()) { + logger('Not ready for root'); + return; + } + this.enqueueJob(stateIdentifier, PROVING_JOB_TYPE.ROOT_ROLLUP, () => + this.runRootRollup( + this.provingState!.getMergeInputs(0)!, + this.provingState!.finalRootParityInput!, + stateIdentifier, + ), + ); + } + + private storeAndExecuteNextMergeLevel( + currentLevel: bigint, + currentIndex: bigint, + mergeInputData: [BaseOrMergeRollupPublicInputs, Proof], + stateIdentifier: string, + ) { + 
const result = this.storeMergeInputs(currentLevel, currentIndex, mergeInputData); + + // Are we ready to execute the next circuit? + if (!result.ready) { + return; + } + + if (result.mergeLevel === 0n) { + this.checkAndExecuteRootRollup(stateIdentifier); + } else { + // onto the next merge level + this.enqueueJob(stateIdentifier, PROVING_JOB_TYPE.MERGE_ROLLUP, () => + this.runMergeRollup(result.mergeLevel, result.indexWithinMergeLevel, result.mergeInputData, stateIdentifier), + ); + } + } + + /** + * Process the job queue + * Works by managing an input queue of proof requests and an active pool of proving 'jobs' + */ + private async processJobQueue() { + // Used for determining the current state of a proving job + const promiseState = (p: Promise) => { + const t = {}; + return Promise.race([p, t]).then( + v => (v === t ? 'pending' : 'fulfilled'), + () => 'rejected', + ); + }; + + // Just a short break between managing the sets of requests and active jobs + const createSleepPromise = () => + sleep(SLEEP_TIME).then(_ => { + return PROMISE_RESULT.SLEEP; + }); + + let sleepPromise = createSleepPromise(); + let promises: Promise[] = []; + while (!this.stopped) { + // first look for more work + if (this.jobQueue.length() && promises.length < this.maxConcurrentJobs) { + // more work could be available + const job = await this.jobQueue.get(); + if (job !== null) { + // a proving job, add it to the pool of outstanding jobs + promises.push(job.operation()); + } + // continue adding more work + continue; + } + + // no more work to add, here we wait for any outstanding jobs to finish and/or sleep a little + try { + const ops = Promise.race(promises).then(_ => { + return PROMISE_RESULT.OPERATIONS; + }); + const result = await Promise.race([sleepPromise, ops]); + if (result === PROMISE_RESULT.SLEEP) { + // this is the sleep promise + // we simply setup the promise again and go round the loop checking for more work + sleepPromise = createSleepPromise(); + continue; + } + } catch 
(err) { + // We shouldn't get here as all jobs should be wrapped in a 'safeJob' meaning they don't fail! + logger.error(`Unexpected error in proving orchestrator ${err}`); + } + + // one or more of the jobs completed, remove them + const pendingPromises = []; + for (const jobPromise of promises) { + const state = await promiseState(jobPromise); + if (state === 'pending') { + pendingPromises.push(jobPromise); + } + } + // eslint-disable-next-line @typescript-eslint/no-floating-promises + promises = pendingPromises; + } + } +} diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts new file mode 100644 index 00000000000..232f7ad40e5 --- /dev/null +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -0,0 +1,182 @@ +import { ProcessedTx, ProvingResult } from '@aztec/circuit-types'; +import { + BaseOrMergeRollupPublicInputs, + Fr, + GlobalVariables, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + Proof, + RootParityInput, +} from '@aztec/circuits.js'; +import { randomBytes } from '@aztec/foundation/crypto'; +import { Tuple } from '@aztec/foundation/serialize'; + +/** + * Enums and structs to communicate the type of work required in each request. + */ +export enum PROVING_JOB_TYPE { + STATE_UPDATE, + BASE_ROLLUP, + MERGE_ROLLUP, + ROOT_ROLLUP, + BASE_PARITY, + ROOT_PARITY, +} + +export type ProvingJob = { + type: PROVING_JOB_TYPE; + operation: () => Promise; +}; + +export type MergeRollupInputData = { + inputs: [BaseOrMergeRollupPublicInputs | undefined, BaseOrMergeRollupPublicInputs | undefined]; + proofs: [Proof | undefined, Proof | undefined]; +}; + +/** + * The current state of the proving schedule. Contains the raw inputs (txs) and intermediate state to generate every constituent proof in the tree. + * Carries an identifier so we can identify if the proving state is discarded and a new one started. 
+ * Captures resolve and reject callbacks to provide a promise base interface to the consumer of our proving. + */ +export class ProvingState { + private stateIdentifier: string; + private mergeRollupInputs: MergeRollupInputData[] = []; + private rootParityInputs: Array = []; + private finalRootParityInputs: RootParityInput | undefined; + private finished = false; + private txs: ProcessedTx[] = []; + constructor( + public readonly numTxs: number, + private completionCallback: (result: ProvingResult) => void, + private rejectionCallback: (reason: string) => void, + public readonly globalVariables: GlobalVariables, + public readonly newL1ToL2Messages: Tuple, + numRootParityInputs: number, + public readonly emptyTx: ProcessedTx, + ) { + this.stateIdentifier = randomBytes(32).toString('hex'); + this.rootParityInputs = Array.from({ length: numRootParityInputs }).map(_ => undefined); + } + + public get baseMergeLevel() { + return BigInt(Math.ceil(Math.log2(this.totalNumTxs)) - 1); + } + + public get numMergeLevels() { + return this.baseMergeLevel; + } + + public get Id() { + return this.stateIdentifier; + } + + public get numPaddingTxs() { + return this.totalNumTxs - this.numTxs; + } + + public get totalNumTxs() { + const realTxs = Math.max(2, this.numTxs); + const pow2Txs = Math.ceil(Math.log2(realTxs)); + return 2 ** pow2Txs; + } + + public addNewTx(tx: ProcessedTx) { + this.txs.push(tx); + return this.txs.length - 1; + } + + public get transactionsReceived() { + return this.txs.length; + } + + public get finalRootParityInput() { + return this.finalRootParityInputs; + } + + public set finalRootParityInput(input: RootParityInput | undefined) { + this.finalRootParityInputs = input; + } + + public get rootParityInput() { + return this.rootParityInputs; + } + + public verifyState(stateId: string) { + return stateId === this.stateIdentifier && !this.finished; + } + + public get allTxs() { + return this.txs; + } + + public storeMergeInputs( + mergeInputs: 
[BaseOrMergeRollupPublicInputs, Proof], + indexWithinMerge: number, + indexOfMerge: number, + ) { + if (!this.mergeRollupInputs[indexOfMerge]) { + const mergeInputData: MergeRollupInputData = { + inputs: [undefined, undefined], + proofs: [undefined, undefined], + }; + mergeInputData.inputs[indexWithinMerge] = mergeInputs[0]; + mergeInputData.proofs[indexWithinMerge] = mergeInputs[1]; + this.mergeRollupInputs[indexOfMerge] = mergeInputData; + return false; + } + const mergeInputData = this.mergeRollupInputs[indexOfMerge]; + mergeInputData.inputs[indexWithinMerge] = mergeInputs[0]; + mergeInputData.proofs[indexWithinMerge] = mergeInputs[1]; + return true; + } + + public getMergeInputs(indexOfMerge: number) { + return this.mergeRollupInputs[indexOfMerge]; + } + + public isReadyForRootRollup() { + if (this.mergeRollupInputs[0] === undefined) { + return false; + } + if (this.mergeRollupInputs[0].inputs.findIndex(p => !p) !== -1) { + return false; + } + if (this.finalRootParityInput === undefined) { + return false; + } + return true; + } + + public setRootParityInputs(inputs: RootParityInput, index: number) { + this.rootParityInputs[index] = inputs; + } + + public areRootParityInputsReady() { + return this.rootParityInputs.findIndex(p => !p) === -1; + } + + public reject(reason: string, stateIdentifier: string) { + if (!this.verifyState(stateIdentifier)) { + return; + } + if (this.finished) { + return; + } + this.finished = true; + this.rejectionCallback(reason); + } + + public resolve(result: ProvingResult, stateIdentifier: string) { + if (!this.verifyState(stateIdentifier)) { + return; + } + if (this.finished) { + return; + } + this.finished = true; + this.completionCallback(result); + } + + public isFinished() { + return this.finished; + } +} diff --git a/yarn-project/sequencer-client/src/prover/empty.ts b/yarn-project/prover-client/src/prover/empty.ts similarity index 100% rename from yarn-project/sequencer-client/src/prover/empty.ts rename to 
yarn-project/prover-client/src/prover/empty.ts diff --git a/yarn-project/sequencer-client/src/prover/index.ts b/yarn-project/prover-client/src/prover/index.ts similarity index 100% rename from yarn-project/sequencer-client/src/prover/index.ts rename to yarn-project/prover-client/src/prover/index.ts diff --git a/yarn-project/sequencer-client/src/simulator/rollup.ts b/yarn-project/prover-client/src/simulator/rollup.ts similarity index 75% rename from yarn-project/sequencer-client/src/simulator/rollup.ts rename to yarn-project/prover-client/src/simulator/rollup.ts index e87f6e38816..5101c07b2d9 100644 --- a/yarn-project/sequencer-client/src/simulator/rollup.ts +++ b/yarn-project/prover-client/src/simulator/rollup.ts @@ -28,9 +28,43 @@ import { convertRootRollupInputsToWitnessMap, convertRootRollupOutputsFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; +import { SimulationProvider, WASMSimulator } from '@aztec/simulator'; -import { RollupSimulator, WASMSimulator } from './index.js'; -import { SimulationProvider } from './simulation_provider.js'; +/** + * Circuit simulator for the rollup circuits. + */ +export interface RollupSimulator { + /** + * Simulates the base parity circuit from its inputs. + * @param inputs - Inputs to the circuit. + * @returns The public inputs of the parity circuit. + */ + baseParityCircuit(inputs: BaseParityInputs): Promise; + /** + * Simulates the root parity circuit from its inputs. + * @param inputs - Inputs to the circuit. + * @returns The public inputs of the parity circuit. + */ + rootParityCircuit(inputs: RootParityInputs): Promise; + /** + * Simulates the base rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + baseRollupCircuit(input: BaseRollupInputs): Promise; + /** + * Simulates the merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. 
+ */ + mergeRollupCircuit(input: MergeRollupInputs): Promise; + /** + * Simulates the root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + rootRollupCircuit(input: RootRollupInputs): Promise; +} /** * Implements the rollup circuit simulator. @@ -121,7 +155,6 @@ export class RealRollupCircuitSimulator implements RollupSimulator { inputSize: input.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - return result; } } diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts new file mode 100644 index 00000000000..782b65d14c2 --- /dev/null +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -0,0 +1,73 @@ +import { ProcessedTx } from '@aztec/circuit-types'; +import { ProverClient, ProvingTicket } from '@aztec/circuit-types/interfaces'; +import { Fr, GlobalVariables } from '@aztec/circuits.js'; +import { SimulationProvider } from '@aztec/simulator'; +import { WorldStateSynchronizer } from '@aztec/world-state'; + +import { ProverConfig } from '../config.js'; +import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; +import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; +import { EmptyRollupProver } from '../prover/empty.js'; + +/** + * A prover accepting individual transaction requests + */ +export class TxProver implements ProverClient { + private orchestrator: ProvingOrchestrator; + constructor( + worldStateSynchronizer: WorldStateSynchronizer, + simulationProvider: SimulationProvider, + protected vks: VerificationKeys, + ) { + this.orchestrator = new ProvingOrchestrator( + worldStateSynchronizer.getLatest(), + simulationProvider, + getVerificationKeys(), + new EmptyRollupProver(), + ); + } + + /** + * Starts the prover instance + */ + public start() { + this.orchestrator.start(); + return Promise.resolve(); + } 
+ + /** + * Stops the prover instance + */ + public async stop() { + await this.orchestrator.stop(); + } + + /** + * + * @param config - The prover configuration. + * @param worldStateSynchronizer - An instance of the world state + * @returns An instance of the prover, constructed and started. + */ + public static async new( + config: ProverConfig, + worldStateSynchronizer: WorldStateSynchronizer, + simulationProvider: SimulationProvider, + ) { + const prover = new TxProver(worldStateSynchronizer, simulationProvider, getVerificationKeys()); + await prover.start(); + return prover; + } + + public startNewBlock( + numTxs: number, + globalVariables: GlobalVariables, + newL1ToL2Messages: Fr[], + emptyTx: ProcessedTx, + ): Promise { + return this.orchestrator.startNewBlock(numTxs, globalVariables, newL1ToL2Messages, emptyTx); + } + + public addNewTx(tx: ProcessedTx): Promise { + return this.orchestrator.addNewTx(tx); + } +} diff --git a/yarn-project/prover-client/tsconfig.json b/yarn-project/prover-client/tsconfig.json index 63f8ab3e9f7..a9fab4069e1 100644 --- a/yarn-project/prover-client/tsconfig.json +++ b/yarn-project/prover-client/tsconfig.json @@ -6,8 +6,26 @@ "tsBuildInfoFile": ".tsbuildinfo" }, "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../circuits.js" + }, { "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../noir-protocol-circuits-types" + }, + { + "path": "../simulator" + }, + { + "path": "../world-state" } ], "include": ["src"] diff --git a/yarn-project/sequencer-client/src/block_builder/index.ts b/yarn-project/sequencer-client/src/block_builder/index.ts deleted file mode 100644 index 7f2fefca322..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { L2Block } from '@aztec/circuit-types'; -import { GlobalVariables, Proof } from '@aztec/circuits.js'; -import { Fr } from '@aztec/foundation/fields'; - -import { ProcessedTx } from 
'../sequencer/processed_tx.js'; - -/** - * Assembles an L2Block from a set of processed transactions. - */ -export interface BlockBuilder { - /** - * Creates a new L2Block with the given number, containing the set of processed txs. - * Note that the number of txs need to be a power of two. - * @param globalVariables - Global variables to include in the block. - * @param txs - Processed txs to include. - * @param l1ToL2Messages - L1 to L2 messages to be part of the block. - * @returns The new L2 block along with its proof from the root circuit. - */ - buildL2Block(globalVariables: GlobalVariables, txs: ProcessedTx[], l1ToL2Messages: Fr[]): Promise<[L2Block, Proof]>; -} diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts deleted file mode 100644 index 568dc8e56dc..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts +++ /dev/null @@ -1,453 +0,0 @@ -import { Body, L2Block, MerkleTreeId, Tx, TxEffect, makeEmptyLogs, mockTx } from '@aztec/circuit-types'; -import { - AppendOnlyTreeSnapshot, - AztecAddress, - BaseOrMergeRollupPublicInputs, - EthAddress, - Fr, - GlobalVariables, - Header, - MAX_NEW_L2_TO_L1_MSGS_PER_TX, - MAX_NEW_NOTE_HASHES_PER_TX, - MAX_NEW_NULLIFIERS_PER_TX, - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, - MAX_REVERTIBLE_NOTE_HASHES_PER_TX, - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - NULLIFIER_SUBTREE_HEIGHT, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - PUBLIC_DATA_SUBTREE_HEIGHT, - PartialStateReference, - Proof, - PublicDataTreeLeaf, - PublicDataUpdateRequest, - PublicKernelCircuitPublicInputs, - RootRollupPublicInputs, - SideEffect, - SideEffectLinkedToNoteHash, - StateReference, - sideEffectCmp, -} from '@aztec/circuits.js'; 
-import { - fr, - makeBaseOrMergeRollupPublicInputs, - makeNewSideEffect, - makeNewSideEffectLinkedToNoteHash, - makeParityPublicInputs, - makePrivateKernelTailCircuitPublicInputs, - makeProof, - makePublicCallRequest, - makeRootRollupPublicInputs, -} from '@aztec/circuits.js/testing'; -import { makeTuple, range } from '@aztec/foundation/array'; -import { toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { padArrayEnd, times } from '@aztec/foundation/collection'; -import { toTruncField } from '@aztec/foundation/serialize'; -import { openTmpStore } from '@aztec/kv-store/utils'; -import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state'; - -import { jest } from '@jest/globals'; -import { MockProxy, mock } from 'jest-mock-extended'; -import { type MemDown, default as memdown } from 'memdown'; - -import { VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; -import { EmptyRollupProver } from '../prover/empty.js'; -import { RollupProver } from '../prover/index.js'; -import { - ProcessedTx, - makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, - makeProcessedTx, - toTxEffect, -} from '../sequencer/processed_tx.js'; -import { WASMSimulator } from '../simulator/acvm_wasm.js'; -import { RollupSimulator } from '../simulator/index.js'; -import { RealRollupCircuitSimulator } from '../simulator/rollup.js'; -import { SoloBlockBuilder } from './solo_block_builder.js'; - -export const createMemDown = () => (memdown as any)() as MemDown; - -describe('sequencer/solo_block_builder', () => { - let builder: SoloBlockBuilder; - let builderDb: MerkleTreeOperations; - let expectsDb: MerkleTreeOperations; - let vks: VerificationKeys; - - let simulator: MockProxy; - let prover: MockProxy; - - let blockNumber: number; - let baseRollupOutputLeft: BaseOrMergeRollupPublicInputs; - let baseRollupOutputRight: BaseOrMergeRollupPublicInputs; - let rootRollupOutput: RootRollupPublicInputs; - let mockL1ToL2Messages: Fr[]; - - let 
globalVariables: GlobalVariables; - - const emptyProof = new Proof(Buffer.alloc(32, 0)); - - const chainId = Fr.ZERO; - const version = Fr.ZERO; - const coinbase = EthAddress.ZERO; - const feeRecipient = AztecAddress.ZERO; - - beforeEach(async () => { - blockNumber = 3; - globalVariables = new GlobalVariables(chainId, version, new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient); - - builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - expectsDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); - vks = getVerificationKeys(); - simulator = mock(); - prover = mock(); - builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); - - // Create mock l1 to L2 messages - mockL1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)); - - // Create mock outputs for simulator - baseRollupOutputLeft = makeBaseOrMergeRollupPublicInputs(0, globalVariables); - baseRollupOutputRight = makeBaseOrMergeRollupPublicInputs(0, globalVariables); - rootRollupOutput = makeRootRollupPublicInputs(0); - rootRollupOutput.header.globalVariables = globalVariables; - - // Set up mocks - prover.getBaseParityProof.mockResolvedValue(emptyProof); - prover.getRootParityProof.mockResolvedValue(emptyProof); - prover.getBaseRollupProof.mockResolvedValue(emptyProof); - prover.getRootRollupProof.mockResolvedValue(emptyProof); - simulator.baseParityCircuit - .mockResolvedValueOnce(makeParityPublicInputs(1)) - .mockResolvedValue(makeParityPublicInputs(2)) - .mockResolvedValue(makeParityPublicInputs(3)) - .mockResolvedValueOnce(makeParityPublicInputs(4)); - simulator.rootParityCircuit.mockResolvedValueOnce(makeParityPublicInputs(5)); - simulator.baseRollupCircuit - .mockResolvedValueOnce(baseRollupOutputLeft) - .mockResolvedValueOnce(baseRollupOutputRight); - simulator.rootRollupCircuit.mockResolvedValue(rootRollupOutput); - }, 20_000); - - const makeEmptyProcessedTx = async () => { - const header = await builderDb.buildInitialHeader(); - 
return makeEmptyProcessedTxFromHistoricalTreeRoots(header, chainId, version); - }; - - // Updates the expectedDb trees based on the new note hashes, contracts, and nullifiers from these txs - const updateExpectedTreesFromTxs = async (txs: ProcessedTx[]) => { - await expectsDb.appendLeaves( - MerkleTreeId.NOTE_HASH_TREE, - txs.flatMap(tx => - padArrayEnd( - [...tx.data.endNonRevertibleData.newNoteHashes, ...tx.data.end.newNoteHashes] - .filter(x => !x.isEmpty()) - .sort(sideEffectCmp), - SideEffect.empty(), - MAX_NEW_NOTE_HASHES_PER_TX, - ).map(l => l.value.toBuffer()), - ), - ); - await expectsDb.batchInsert( - MerkleTreeId.NULLIFIER_TREE, - txs.flatMap(tx => - padArrayEnd( - [...tx.data.endNonRevertibleData.newNullifiers, ...tx.data.end.newNullifiers] - .filter(x => !x.isEmpty()) - .sort(sideEffectCmp), - SideEffectLinkedToNoteHash.empty(), - MAX_NEW_NULLIFIERS_PER_TX, - ).map(x => x.value.toBuffer()), - ), - NULLIFIER_SUBTREE_HEIGHT, - ); - for (const tx of txs) { - await expectsDb.batchInsert( - MerkleTreeId.PUBLIC_DATA_TREE, - [...tx.data.endNonRevertibleData.publicDataUpdateRequests, ...tx.data.end.publicDataUpdateRequests].map( - write => { - return new PublicDataTreeLeaf(write.leafSlot, write.newValue).toBuffer(); - }, - ), - PUBLIC_DATA_SUBTREE_HEIGHT, - ); - } - }; - - const updateL1ToL2MessageTree = async (l1ToL2Messages: Fr[]) => { - const asBuffer = l1ToL2Messages.map(m => m.toBuffer()); - await expectsDb.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, asBuffer); - }; - - const updateArchive = async () => { - const blockHash = rootRollupOutput.header.hash(); - await expectsDb.appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); - }; - - const getTreeSnapshot = async (tree: MerkleTreeId) => { - const treeInfo = await expectsDb.getTreeInfo(tree); - return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size)); - }; - - const getPartialStateReference = async () => { - return new PartialStateReference( - await 
getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), - await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE), - await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), - ); - }; - - const getStateReference = async () => { - return new StateReference( - await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), - await getPartialStateReference(), - ); - }; - - const buildMockSimulatorInputs = async () => { - const kernelOutput = makePrivateKernelTailCircuitPublicInputs(); - kernelOutput.constants.historicalHeader = await expectsDb.buildInitialHeader(); - kernelOutput.needsAppLogic = false; - kernelOutput.needsSetup = false; - kernelOutput.needsTeardown = false; - - const tx = makeProcessedTx( - new Tx( - kernelOutput, - emptyProof, - makeEmptyLogs(), - makeEmptyLogs(), - times(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makePublicCallRequest), - ), - ); - - const txs = [tx, await makeEmptyProcessedTx()]; - - // Calculate what would be the tree roots after the first tx and update mock circuit output - await updateExpectedTreesFromTxs([txs[0]]); - baseRollupOutputLeft.end = await getPartialStateReference(); - baseRollupOutputLeft.txsEffectsHash = toTruncField(toTxEffect(tx).hash()); - - // Same for the tx on the right - await updateExpectedTreesFromTxs([txs[1]]); - baseRollupOutputRight.end = await getPartialStateReference(); - baseRollupOutputRight.txsEffectsHash = toTruncField(toTxEffect(tx).hash()); - - // Update l1 to l2 message tree - await updateL1ToL2MessageTree(mockL1ToL2Messages); - - // Collect all new nullifiers, commitments, and contracts from all txs in this block - const txEffects: TxEffect[] = txs.map(tx => toTxEffect(tx)); - - const body = new Body(txEffects); - // We are constructing the block here just to get body hash/calldata hash so we can pass in an empty archive and header - const l2Block = L2Block.fromFields({ - archive: AppendOnlyTreeSnapshot.zero(), - header: Header.empty(), - // Only the values below go to body hash/calldata hash - body, - }); - - // Now we 
update can make the final header, compute the block hash and update archive - rootRollupOutput.header.globalVariables = globalVariables; - rootRollupOutput.header.contentCommitment.txsEffectsHash = l2Block.body.getTxsEffectsHash(); - rootRollupOutput.header.state = await getStateReference(); - - await updateArchive(); - rootRollupOutput.archive = await getTreeSnapshot(MerkleTreeId.ARCHIVE); - - return txs; - }; - - describe('mock simulator', () => { - beforeAll(() => { - jest.spyOn(TxEffect.prototype, 'hash').mockImplementation(() => { - return Buffer.alloc(32, 0); - }); - }); - - afterAll(() => { - jest.restoreAllMocks(); - }); - - beforeEach(() => { - // Create instance to test - builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); - // since we now assert on the hash of the tx effect while running the base rollup, - // we need to mock the hash function to return a constant value - }); - - it('builds an L2 block using mock simulator', async () => { - // Assemble a fake transaction - const txs = await buildMockSimulatorInputs(); - - // Actually build a block! 
- const [l2Block, proof] = await builder.buildL2Block(globalVariables, txs, mockL1ToL2Messages); - - expect(l2Block.number).toEqual(blockNumber); - expect(proof).toEqual(emptyProof); - }, 20000); - - it('rejects if too many l1 to l2 messages are provided', async () => { - // Assemble a fake transaction - const txs = await buildMockSimulatorInputs(); - const l1ToL2Messages = new Array(100).fill(new Fr(0n)); - await expect(builder.buildL2Block(globalVariables, txs, l1ToL2Messages)).rejects.toThrow(); - }); - }); - - describe('circuits simulator', () => { - beforeEach(() => { - const simulator = new RealRollupCircuitSimulator(new WASMSimulator()); - const prover = new EmptyRollupProver(); - builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); - }); - - const makeBloatedProcessedTx = async (seed = 0x1) => { - seed *= MAX_NEW_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds - const tx = mockTx(seed); - const kernelOutput = PublicKernelCircuitPublicInputs.empty(); - kernelOutput.constants.historicalHeader = await builderDb.buildInitialHeader(); - kernelOutput.end.publicDataUpdateRequests = makeTuple( - MAX_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), - seed + 0x500, - ); - kernelOutput.endNonRevertibleData.publicDataUpdateRequests = makeTuple( - MAX_NON_REVERTIBLE_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10)), - seed + 0x600, - ); - - const processedTx = makeProcessedTx(tx, kernelOutput, makeProof()); - - processedTx.data.end.newNoteHashes = makeTuple( - MAX_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.endNonRevertibleData.newNoteHashes = makeTuple( - MAX_NON_REVERTIBLE_NOTE_HASHES_PER_TX, - makeNewSideEffect, - seed + 0x100, - ); - processedTx.data.end.newNullifiers = makeTuple( - MAX_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000, - ); - - 
processedTx.data.endNonRevertibleData.newNullifiers = makeTuple( - MAX_NON_REVERTIBLE_NULLIFIERS_PER_TX, - makeNewSideEffectLinkedToNoteHash, - seed + 0x100000 + MAX_REVERTIBLE_NULLIFIERS_PER_TX, - ); - - processedTx.data.end.newNullifiers[tx.data.end.newNullifiers.length - 1] = SideEffectLinkedToNoteHash.empty(); - - processedTx.data.end.newL2ToL1Msgs = makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x300); - processedTx.data.end.encryptedLogsHash = toTruncField(processedTx.encryptedLogs.hash()); - processedTx.data.end.unencryptedLogsHash = toTruncField(processedTx.unencryptedLogs.hash()); - - return processedTx; - }; - - it.each([ - [0, 4], - [1, 4], - [4, 4], - [0, 16], - [16, 16], - ] as const)( - 'builds an L2 block with %i bloated txs and %i txs total', - async (bloatedCount: number, totalCount: number) => { - const noteHashTreeBefore = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - const txs = [ - ...(await Promise.all(times(bloatedCount, makeBloatedProcessedTx))), - ...(await Promise.all(times(totalCount - bloatedCount, makeEmptyProcessedTx))), - ]; - - const [l2Block] = await builder.buildL2Block(globalVariables, txs, mockL1ToL2Messages); - expect(l2Block.number).toEqual(blockNumber); - - await updateExpectedTreesFromTxs(txs); - const noteHashTreeAfter = await builderDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE); - - if (bloatedCount > 0) { - expect(noteHashTreeAfter.root).not.toEqual(noteHashTreeBefore.root); - } - - const expectedNoteHashTreeAfter = await expectsDb.getTreeInfo(MerkleTreeId.NOTE_HASH_TREE).then(t => t.root); - expect(noteHashTreeAfter.root).toEqual(expectedNoteHashTreeAfter); - }, - 60000, - ); - - it('builds an empty L2 block', async () => { - const txs = await Promise.all([ - makeEmptyProcessedTx(), - makeEmptyProcessedTx(), - makeEmptyProcessedTx(), - makeEmptyProcessedTx(), - ]); - - const [l2Block] = await builder.buildL2Block(globalVariables, txs, mockL1ToL2Messages); - expect(l2Block.number).toEqual(blockNumber); - 
}, 30_000); - - it('builds a mixed L2 block', async () => { - const txs = await Promise.all([ - makeBloatedProcessedTx(1), - makeBloatedProcessedTx(2), - makeBloatedProcessedTx(3), - makeBloatedProcessedTx(4), - ]); - - const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - - const [l2Block] = await builder.buildL2Block(globalVariables, txs, l1ToL2Messages); - expect(l2Block.number).toEqual(blockNumber); - }, 200_000); - - // This test specifically tests nullifier values which previously caused e2e_private_token test to fail - it('e2e_private_token edge case regression test on nullifier values', async () => { - const simulator = new RealRollupCircuitSimulator(new WASMSimulator()); - const prover = new EmptyRollupProver(); - builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); - // update the starting tree - const updateVals = Array(4 * MAX_NEW_NULLIFIERS_PER_TX).fill(0n); - updateVals[0] = 19777494491628650244807463906174285795660759352776418619064841306523677458742n; - updateVals[1] = 10246291467305176436335175657884940686778521321101740385288169037814567547848n; - - // new added values - const tx = await makeEmptyProcessedTx(); - tx.data.end.newNullifiers[0] = new SideEffectLinkedToNoteHash( - new Fr(10336601644835972678500657502133589897705389664587188571002640950065546264856n), - Fr.ZERO, - Fr.ZERO, - ); - tx.data.end.newNullifiers[1] = new SideEffectLinkedToNoteHash( - new Fr(17490072961923661940560522096125238013953043065748521735636170028491723851741n), - Fr.ZERO, - Fr.ZERO, - ); - - const txs = [tx, await makeEmptyProcessedTx(), await makeEmptyProcessedTx(), await makeEmptyProcessedTx()]; - - // Must be built after the txs are created - await builderDb.batchInsert( - MerkleTreeId.NULLIFIER_TREE, - updateVals.map(v => toBufferBE(v, 32)), - NULLIFIER_SUBTREE_HEIGHT, - ); - - const [l2Block] = await builder.buildL2Block(globalVariables, txs, mockL1ToL2Messages); - - expect(l2Block.number).toEqual(blockNumber); - 
}, 20000); - }); - - // describe("Input guard tests", () => { - // }) -}); diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts deleted file mode 100644 index 48acbde1230..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ /dev/null @@ -1,812 +0,0 @@ -import { Body, L2Block, MerkleTreeId, TxEffect } from '@aztec/circuit-types'; -import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; -import { - ARCHIVE_HEIGHT, - AppendOnlyTreeSnapshot, - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - ConstantRollupData, - GlobalVariables, - L1_TO_L2_MSG_SUBTREE_HEIGHT, - L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - MAX_NEW_NULLIFIERS_PER_TX, - MAX_PUBLIC_DATA_READS_PER_TX, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MembershipWitness, - MergeRollupInputs, - NOTE_HASH_SUBTREE_HEIGHT, - NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, - NULLIFIER_SUBTREE_HEIGHT, - NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, - NULLIFIER_TREE_HEIGHT, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - NUM_BASE_PARITY_PER_ROOT_PARITY, - NullifierLeafPreimage, - PUBLIC_DATA_SUBTREE_HEIGHT, - PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, - PUBLIC_DATA_TREE_HEIGHT, - PartialStateReference, - PreviousRollupData, - Proof, - PublicDataTreeLeaf, - PublicDataTreeLeafPreimage, - ROLLUP_VK_TREE_HEIGHT, - RollupKernelCircuitPublicInputs, - RollupKernelData, - RollupTypes, - RootParityInput, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, - StateDiffHints, - StateReference, - VK_TREE_HEIGHT, - VerificationKey, -} from '@aztec/circuits.js'; -import { assertPermutation, makeTuple } from '@aztec/foundation/array'; -import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; -import { padArrayEnd } from '@aztec/foundation/collection'; -import { Fr } from '@aztec/foundation/fields'; -import { createDebugLogger } from '@aztec/foundation/log'; -import { 
Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize'; -import { elapsed } from '@aztec/foundation/timer'; -import { MerkleTreeOperations } from '@aztec/world-state'; - -import chunk from 'lodash.chunk'; -import { inspect } from 'util'; - -import { VerificationKeys } from '../mocks/verification_keys.js'; -import { RollupProver } from '../prover/index.js'; -import { ProcessedTx, toTxEffect } from '../sequencer/processed_tx.js'; -import { RollupSimulator } from '../simulator/index.js'; -import { BlockBuilder } from './index.js'; -import { TreeNames } from './types.js'; - -const frToBigInt = (fr: Fr) => toBigIntBE(fr.toBuffer()); - -// Denotes fields that are not used now, but will be in the future -const FUTURE_FR = new Fr(0n); -const FUTURE_NUM = 0; - -// Denotes fields that should be deleted -const DELETE_FR = new Fr(0n); - -/** - * Builds an L2 block out of a set of ProcessedTx's, - * using the base, merge, and root rollup circuits. - */ -export class SoloBlockBuilder implements BlockBuilder { - constructor( - protected db: MerkleTreeOperations, - protected vks: VerificationKeys, - protected simulator: RollupSimulator, - protected prover: RollupProver, - protected debug = createDebugLogger('aztec:sequencer:solo-block-builder'), - ) {} - - /** - * Builds an L2 block with the given number containing the given txs, updating state trees. - * @param globalVariables - Global variables to be used in the block. - * @param txs - Processed transactions to include in the block. - * @param l1ToL2Messages - L1 to L2 messages to be part of the block. - * @param timestamp - Timestamp of the block. - * @returns The new L2 block and a correctness proof as returned by the root rollup circuit. 
- */ - public async buildL2Block( - globalVariables: GlobalVariables, - txs: ProcessedTx[], - l1ToL2Messages: Fr[], - ): Promise<[L2Block, Proof]> { - // Check txs are good for processing by checking if all the tree snapshots in header are non-empty - this.validateTxs(txs); - - // We fill the tx batch with empty txs, we process only one tx at a time for now - const [circuitsOutput, proof] = await this.runCircuits(globalVariables, txs, l1ToL2Messages); - - // Collect all new nullifiers, commitments, and contracts from all txs in this block - const txEffects: TxEffect[] = txs.map(tx => toTxEffect(tx)); - - const blockBody = new Body(txEffects); - - const l2Block = L2Block.fromFields({ - archive: circuitsOutput.archive, - header: circuitsOutput.header, - body: blockBody, - }); - - if (!l2Block.body.getTxsEffectsHash().equals(circuitsOutput.header.contentCommitment.txsEffectsHash)) { - this.debug(inspect(blockBody)); - throw new Error( - `Txs effects hash mismatch, ${l2Block.body - .getTxsEffectsHash() - .toString('hex')} == ${circuitsOutput.header.contentCommitment.txsEffectsHash.toString('hex')} `, - ); - } - - return [l2Block, proof]; - } - - protected validateTxs(txs: ProcessedTx[]) { - for (const tx of txs) { - const txHeader = tx.data.constants.historicalHeader; - if (txHeader.state.l1ToL2MessageTree.isZero()) { - throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`); - } - if (txHeader.state.partial.noteHashTree.isZero()) { - throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`); - } - if (txHeader.state.partial.nullifierTree.isZero()) { - throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`); - } - if (txHeader.state.partial.publicDataTree.isZero()) { - throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`); - } - } - } - - protected async getTreeSnapshot(id: MerkleTreeId): Promise { - const treeInfo = await this.db.getTreeInfo(id); - return new 
AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size)); - } - - protected async runCircuits( - globalVariables: GlobalVariables, - txs: ProcessedTx[], - l1ToL2Messages: Fr[], - ): Promise<[RootRollupPublicInputs, Proof]> { - // TODO(#5357): Instead of performing the check bellow pad the txs here. - // Check that the length of the array of txs is a power of two - // See https://graphics.stanford.edu/~seander/bithacks.html#DetermineIfPowerOf2 - if (txs.length < 2 || (txs.length & (txs.length - 1)) !== 0) { - throw new Error(`Length of txs for the block should be a power of two and at least two (got ${txs.length})`); - } - - // We pad the messages as the circuits expect that. - const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - - // BASE PARITY CIRCUIT (run in parallel) - // Note: In the future we will want to cache the results of empty base and root parity circuits so that we don't - // have to run them. (It will most likely be quite common that some base parity circuits will be "empty") - let baseParityInputs: BaseParityInputs[] = []; - let elapsedBaseParityOutputsPromise: Promise<[number, RootParityInput[]]>; - { - baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, (_, i) => - BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i), - ); - - const baseParityOutputs: Promise[] = []; - for (const inputs of baseParityInputs) { - baseParityOutputs.push(this.baseParityCircuit(inputs)); - } - elapsedBaseParityOutputsPromise = elapsed(() => Promise.all(baseParityOutputs)); - } - - // BASE ROLLUP CIRCUIT (run in parallel) - let elapsedBaseRollupOutputsPromise: Promise<[number, [BaseOrMergeRollupPublicInputs, Proof][]]>; - const baseRollupInputs: BaseRollupInputs[] = []; - { - // Perform all tree insertions and retrieve snapshots for all base rollups - const treeSnapshots: Map[] = []; - for (const tx of txs) { - const input = await this.buildBaseRollupInput(tx, globalVariables); 
- baseRollupInputs.push(input); - const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map( - async (id: MerkleTreeId) => { - return { key: id, value: await this.getTreeSnapshot(id) }; - }, - ); - const snapshots: Map = new Map( - (await Promise.all(promises)).map(obj => [obj.key, obj.value]), - ); - treeSnapshots.push(snapshots); - } - - // Run the base rollup circuits for the txs in parallel - const baseRollupOutputs: Promise<[BaseOrMergeRollupPublicInputs, Proof]>[] = []; - for (let i = 0; i < txs.length; i++) { - baseRollupOutputs.push(this.baseRollupCircuit(txs[i], baseRollupInputs[i], treeSnapshots[i])); - } - - elapsedBaseRollupOutputsPromise = elapsed(() => Promise.all(baseRollupOutputs)); - } - - // ROOT PARITY CIRCUIT - let elapsedRootParityOutputPromise: Promise<[number, RootParityInput]>; - let rootParityInputs: RootParityInputs; - { - // First we await the base parity outputs - const [duration, baseParityOutputs] = await elapsedBaseParityOutputsPromise; - - // We emit stats for base parity circuits - for (let i = 0; i < baseParityOutputs.length; i++) { - this.debug(`Simulated base parity circuit`, { - eventName: 'circuit-simulation', - circuitName: 'base-parity', - duration: duration / baseParityOutputs.length, - inputSize: baseParityInputs[i].toBuffer().length, - outputSize: baseParityOutputs[i].toBuffer().length, - } satisfies CircuitSimulationStats); - } - - rootParityInputs = new RootParityInputs( - baseParityOutputs as Tuple, - ); - elapsedRootParityOutputPromise = elapsed(() => this.rootParityCircuit(rootParityInputs)); - } - - // MERGE ROLLUP CIRCUIT (each layer run in parallel) - let mergeOutputLeft: [BaseOrMergeRollupPublicInputs, Proof]; - let mergeOutputRight: [BaseOrMergeRollupPublicInputs, Proof]; - { - // Run merge rollups in layers until we have only two outputs - const [duration, mergeInputs] = await elapsedBaseRollupOutputsPromise; - - // We emit stats for base rollup circuits - for 
(let i = 0; i < mergeInputs.length; i++) { - this.debug(`Simulated base rollup circuit`, { - eventName: 'circuit-simulation', - circuitName: 'base-rollup', - duration: duration / mergeInputs.length, - inputSize: baseRollupInputs[i].toBuffer().length, - outputSize: mergeInputs[i][0].toBuffer().length, - } satisfies CircuitSimulationStats); - } - - let mergeRollupInputs: [BaseOrMergeRollupPublicInputs, Proof][] = mergeInputs; - while (mergeRollupInputs.length > 2) { - const mergeInputStructs: MergeRollupInputs[] = []; - for (const pair of chunk(mergeRollupInputs, 2)) { - const [r1, r2] = pair; - mergeInputStructs.push(this.createMergeRollupInputs(r1, r2)); - } - - const [duration, mergeOutputs] = await elapsed(() => - Promise.all(mergeInputStructs.map(async input => await this.mergeRollupCircuit(input))), - ); - - // We emit stats for merge rollup circuits - for (let i = 0; i < mergeOutputs.length; i++) { - this.debug(`Simulated merge rollup circuit`, { - eventName: 'circuit-simulation', - circuitName: 'merge-rollup', - duration: duration / mergeOutputs.length, - inputSize: mergeInputStructs[i].toBuffer().length, - outputSize: mergeOutputs[i][0].toBuffer().length, - } satisfies CircuitSimulationStats); - } - mergeRollupInputs = mergeOutputs; - } - - // Run the root rollup with the last two merge rollups (or base, if no merge layers) - [mergeOutputLeft, mergeOutputRight] = mergeRollupInputs; - } - - // Finally, we emit stats for root parity circuit - const [duration, rootParityOutput] = await elapsedRootParityOutputPromise; - this.debug(`Simulated root parity circuit`, { - eventName: 'circuit-simulation', - circuitName: 'root-parity', - duration: duration, - inputSize: rootParityInputs.toBuffer().length, - outputSize: rootParityOutput.toBuffer().length, - } satisfies CircuitSimulationStats); - - return this.rootRollupCircuit(mergeOutputLeft, mergeOutputRight, rootParityOutput, l1ToL2MessagesPadded); - } - - protected async baseParityCircuit(inputs: BaseParityInputs): 
Promise { - this.debug(`Running base parity circuit`); - const parityPublicInputs = await this.simulator.baseParityCircuit(inputs); - const proof = await this.prover.getBaseParityProof(inputs, parityPublicInputs); - return new RootParityInput(proof, parityPublicInputs); - } - - protected async rootParityCircuit(inputs: RootParityInputs): Promise { - this.debug(`Running root parity circuit`); - const parityPublicInputs = await this.simulator.rootParityCircuit(inputs); - const proof = await this.prover.getRootParityProof(inputs, parityPublicInputs); - return new RootParityInput(proof, parityPublicInputs); - } - - protected async baseRollupCircuit( - tx: ProcessedTx, - inputs: BaseRollupInputs, - treeSnapshots: Map, - ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { - this.debug(`Running base rollup for ${tx.hash}`); - const rollupOutput = await this.simulator.baseRollupCircuit(inputs); - this.validatePartialState(rollupOutput.end, treeSnapshots); - const proof = await this.prover.getBaseRollupProof(inputs, rollupOutput); - return [rollupOutput, proof]; - } - - protected createMergeRollupInputs( - left: [BaseOrMergeRollupPublicInputs, Proof], - right: [BaseOrMergeRollupPublicInputs, Proof], - ) { - const vk = this.getVerificationKey(left[0].rollupType); - const mergeInputs = new MergeRollupInputs([ - this.getPreviousRollupDataFromPublicInputs(left[0], left[1], vk), - this.getPreviousRollupDataFromPublicInputs(right[0], right[1], vk), - ]); - return mergeInputs; - } - - protected async mergeRollupCircuit(mergeInputs: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { - this.debug(`Running merge rollup circuit`); - const output = await this.simulator.mergeRollupCircuit(mergeInputs); - const proof = await this.prover.getMergeRollupProof(mergeInputs, output); - return [output, proof]; - } - - protected getVerificationKey(type: RollupTypes) { - switch (type) { - case RollupTypes.Base: - return this.vks.baseRollupCircuit; - case RollupTypes.Merge: - 
return this.vks.mergeRollupCircuit; - default: - throw new Error(`No verification key available for ${type}`); - } - } - - protected async rootRollupCircuit( - left: [BaseOrMergeRollupPublicInputs, Proof], - right: [BaseOrMergeRollupPublicInputs, Proof], - l1ToL2Roots: RootParityInput, - l1ToL2Messages: Tuple, - ): Promise<[RootRollupPublicInputs, Proof]> { - this.debug(`Running root rollup circuit`); - const rootInput = await this.getRootRollupInput(...left, ...right, l1ToL2Roots, l1ToL2Messages); - - // Update the local trees to include the l1 to l2 messages - await this.db.appendLeaves( - MerkleTreeId.L1_TO_L2_MESSAGE_TREE, - l1ToL2Messages.map(m => m.toBuffer()), - ); - - // Simulate and get proof for the root circuit - const rootOutput = await this.simulator.rootRollupCircuit(rootInput); - - const rootProof = await this.prover.getRootRollupProof(rootInput, rootOutput); - - this.debug(`Updating archive with new header`); - await this.db.updateArchive(rootOutput.header); - - await this.validateRootOutput(rootOutput); - - return [rootOutput, rootProof]; - } - - protected validatePartialState( - partialState: PartialStateReference, - treeSnapshots: Map, - ) { - this.validateSimulatedTree( - treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)!, - partialState.noteHashTree, - 'NoteHashTree', - ); - this.validateSimulatedTree( - treeSnapshots.get(MerkleTreeId.NULLIFIER_TREE)!, - partialState.nullifierTree, - 'NullifierTree', - ); - this.validateSimulatedTree( - treeSnapshots.get(MerkleTreeId.PUBLIC_DATA_TREE)!, - partialState.publicDataTree, - 'PublicDataTree', - ); - } - - protected async validateState(state: StateReference) { - const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map( - async (id: MerkleTreeId) => { - return { key: id, value: await this.getTreeSnapshot(id) }; - }, - ); - const snapshots: Map = new Map( - (await Promise.all(promises)).map(obj => [obj.key, obj.value]), - ); - 
this.validatePartialState(state.partial, snapshots); - this.validateSimulatedTree( - await this.getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), - state.l1ToL2MessageTree, - 'L1ToL2MessageTree', - ); - } - - // Validate that the roots of all local trees match the output of the root circuit simulation - protected async validateRootOutput(rootOutput: RootRollupPublicInputs) { - await Promise.all([ - this.validateState(rootOutput.header.state), - this.validateSimulatedTree(await this.getTreeSnapshot(MerkleTreeId.ARCHIVE), rootOutput.archive, 'Archive'), - ]); - } - - // Helper for comparing two trees snapshots - protected validateSimulatedTree( - localTree: AppendOnlyTreeSnapshot, - simulatedTree: AppendOnlyTreeSnapshot, - name: TreeNames, - label?: string, - ) { - if (!simulatedTree.root.toBuffer().equals(localTree.root.toBuffer())) { - throw new Error(`${label ?? name} tree root mismatch (local ${localTree.root}, simulated ${simulatedTree.root})`); - } - if (simulatedTree.nextAvailableLeafIndex !== localTree.nextAvailableLeafIndex) { - throw new Error( - `${label ?? 
name} tree next available leaf index mismatch (local ${ - localTree.nextAvailableLeafIndex - }, simulated ${simulatedTree.nextAvailableLeafIndex})`, - ); - } - } - - // Builds the inputs for the root rollup circuit, without making any changes to trees - protected async getRootRollupInput( - rollupOutputLeft: BaseOrMergeRollupPublicInputs, - rollupProofLeft: Proof, - rollupOutputRight: BaseOrMergeRollupPublicInputs, - rollupProofRight: Proof, - l1ToL2Roots: RootParityInput, - newL1ToL2Messages: Tuple, - ) { - const vk = this.getVerificationKey(rollupOutputLeft.rollupType); - const previousRollupData: RootRollupInputs['previousRollupData'] = [ - this.getPreviousRollupDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, vk), - this.getPreviousRollupDataFromPublicInputs(rollupOutputRight, rollupProofRight, vk), - ]; - - const getRootTreeSiblingPath = async (treeId: MerkleTreeId) => { - const { size } = await this.db.getTreeInfo(treeId); - const path = await this.db.getSiblingPath(treeId, size); - return path.toFields(); - }; - - const newL1ToL2MessageTreeRootSiblingPathArray = await this.getSubtreeSiblingPath( - MerkleTreeId.L1_TO_L2_MESSAGE_TREE, - L1_TO_L2_MSG_SUBTREE_HEIGHT, - ); - - const newL1ToL2MessageTreeRootSiblingPath = makeTuple( - L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - i => - i < newL1ToL2MessageTreeRootSiblingPathArray.length ? newL1ToL2MessageTreeRootSiblingPathArray[i] : Fr.ZERO, - 0, - ); - - // Get tree snapshots - const startL1ToL2MessageTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE); - - // Get blocks tree - const startArchiveSnapshot = await this.getTreeSnapshot(MerkleTreeId.ARCHIVE); - const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE); - - const newArchiveSiblingPath = makeTuple( - ARCHIVE_HEIGHT, - i => (i < newArchiveSiblingPathArray.length ? 
newArchiveSiblingPathArray[i] : Fr.ZERO), - 0, - ); - - return RootRollupInputs.from({ - previousRollupData, - l1ToL2Roots, - newL1ToL2Messages, - newL1ToL2MessageTreeRootSiblingPath, - startL1ToL2MessageTreeSnapshot, - startArchiveSnapshot, - newArchiveSiblingPath, - }); - } - - protected getPreviousRollupDataFromPublicInputs( - rollupOutput: BaseOrMergeRollupPublicInputs, - rollupProof: Proof, - vk: VerificationKey, - ) { - return new PreviousRollupData( - rollupOutput, - rollupProof, - vk, - - // MembershipWitness for a VK tree to be implemented in the future - FUTURE_NUM, - new MembershipWitness( - ROLLUP_VK_TREE_HEIGHT, - BigInt(FUTURE_NUM), - makeTuple(ROLLUP_VK_TREE_HEIGHT, () => FUTURE_FR), - ), - ); - } - - protected getKernelDataFor(tx: ProcessedTx): RollupKernelData { - const inputs = new RollupKernelCircuitPublicInputs( - tx.data.aggregationObject, - tx.data.combinedData, - tx.data.constants, - ); - return new RollupKernelData( - inputs, - tx.proof, - - // VK for the kernel circuit - this.vks.privateKernelCircuit, - - // MembershipWitness for a VK tree to be implemented in the future - FUTURE_NUM, - assertLength(Array(VK_TREE_HEIGHT).fill(FUTURE_FR), VK_TREE_HEIGHT), - ); - } - - // Scan a tree searching for a specific value and return a membership witness proof for it - protected async getMembershipWitnessFor( - value: Fr, - treeId: MerkleTreeId, - height: N, - ): Promise> { - // If this is an empty tx, then just return zeroes - if (value.isZero()) { - return this.makeEmptyMembershipWitness(height); - } - - const index = await this.db.findLeafIndex(treeId, value.toBuffer()); - if (index === undefined) { - throw new Error(`Leaf with value ${value} not found in tree ${MerkleTreeId[treeId]}`); - } - const path = await this.db.getSiblingPath(treeId, index); - return new MembershipWitness(height, index, assertLength(path.toFields(), height)); - } - - protected async getConstantRollupData(globalVariables: GlobalVariables): Promise { - return 
ConstantRollupData.from({ - baseRollupVkHash: DELETE_FR, - mergeRollupVkHash: DELETE_FR, - privateKernelVkTreeRoot: FUTURE_FR, - publicKernelVkTreeRoot: FUTURE_FR, - lastArchive: await this.getTreeSnapshot(MerkleTreeId.ARCHIVE), - globalVariables, - }); - } - - protected async getLowNullifierInfo(nullifier: Fr) { - // Return empty nullifier info for an empty tx - if (nullifier.value === 0n) { - return { - index: 0, - leafPreimage: NullifierLeafPreimage.empty(), - witness: this.makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), - }; - } - - const tree = MerkleTreeId.NULLIFIER_TREE; - const prevValueIndex = await this.db.getPreviousValueIndex(tree, frToBigInt(nullifier)); - if (!prevValueIndex) { - throw new Error(`Nullifier tree should have one initial leaf`); - } - const prevValuePreimage = (await this.db.getLeafPreimage(tree, prevValueIndex.index))!; - - const prevValueSiblingPath = await this.db.getSiblingPath(tree, BigInt(prevValueIndex.index)); - - return { - index: prevValueIndex, - leafPreimage: prevValuePreimage, - witness: new MembershipWitness( - NULLIFIER_TREE_HEIGHT, - BigInt(prevValueIndex.index), - assertLength(prevValueSiblingPath.toFields(), NULLIFIER_TREE_HEIGHT), - ), - }; - } - - protected async getSubtreeSiblingPath(treeId: MerkleTreeId, subtreeHeight: number): Promise { - const nextAvailableLeafIndex = await this.db.getTreeInfo(treeId).then(t => t.size); - const fullSiblingPath = await this.db.getSiblingPath(treeId, nextAvailableLeafIndex); - - // Drop the first subtreeHeight items since we only care about the path to the subtree root - return fullSiblingPath.getSubtreeSiblingPath(subtreeHeight).toFields(); - } - - protected async processPublicDataUpdateRequests(tx: ProcessedTx) { - const combinedPublicDataUpdateRequests = tx.data.combinedData.publicDataUpdateRequests.map(updateRequest => { - return new PublicDataTreeLeaf(updateRequest.leafSlot, updateRequest.newValue); - }); - const { lowLeavesWitnessData, newSubtreeSiblingPath, 
sortedNewLeaves, sortedNewLeavesIndexes } = - await this.db.batchInsert( - MerkleTreeId.PUBLIC_DATA_TREE, - combinedPublicDataUpdateRequests.map(x => x.toBuffer()), - // TODO(#3675) remove oldValue from update requests - PUBLIC_DATA_SUBTREE_HEIGHT, - ); - - if (lowLeavesWitnessData === undefined) { - throw new Error(`Could not craft public data batch insertion proofs`); - } - - const sortedPublicDataWrites = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { - return PublicDataTreeLeaf.fromBuffer(sortedNewLeaves[i]); - }); - - const sortedPublicDataWritesIndexes = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { - return sortedNewLeavesIndexes[i]; - }); - - const subtreeSiblingPathAsFields = newSubtreeSiblingPath.toFields(); - const newPublicDataSubtreeSiblingPath = makeTuple(PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, i => { - return subtreeSiblingPathAsFields[i]; - }); - - const lowPublicDataWritesMembershipWitnesses: Tuple< - MembershipWitness, - typeof MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX - > = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { - const witness = lowLeavesWitnessData[i]; - return MembershipWitness.fromBufferArray( - witness.index, - assertLength(witness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT), - ); - }); - - const lowPublicDataWritesPreimages: Tuple< - PublicDataTreeLeafPreimage, - typeof MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX - > = makeTuple(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => { - return lowLeavesWitnessData[i].leafPreimage as PublicDataTreeLeafPreimage; - }); - - // validate that the sortedPublicDataWrites and sortedPublicDataWritesIndexes are in the correct order - // otherwise it will just fail in the circuit - assertPermutation(combinedPublicDataUpdateRequests, sortedPublicDataWrites, sortedPublicDataWritesIndexes, (a, b) => - a.equals(b), - ); - - return { - lowPublicDataWritesPreimages, - lowPublicDataWritesMembershipWitnesses, - newPublicDataSubtreeSiblingPath, - sortedPublicDataWrites, - 
sortedPublicDataWritesIndexes, - }; - } - - protected async getPublicDataReadsInfo(tx: ProcessedTx) { - const newPublicDataReadsWitnesses: Tuple< - MembershipWitness, - typeof MAX_PUBLIC_DATA_READS_PER_TX - > = makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, () => MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT, 0n)); - - const newPublicDataReadsPreimages: Tuple = - makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, () => PublicDataTreeLeafPreimage.empty()); - - for (const i in tx.data.validationRequests.publicDataReads) { - const leafSlot = tx.data.validationRequests.publicDataReads[i].leafSlot.value; - const lowLeafResult = await this.db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); - if (!lowLeafResult) { - throw new Error(`Public data tree should have one initial leaf`); - } - const preimage = await this.db.getLeafPreimage(MerkleTreeId.PUBLIC_DATA_TREE, lowLeafResult.index); - const path = await this.db.getSiblingPath(MerkleTreeId.PUBLIC_DATA_TREE, lowLeafResult.index); - newPublicDataReadsWitnesses[i] = new MembershipWitness( - PUBLIC_DATA_TREE_HEIGHT, - BigInt(lowLeafResult.index), - path.toTuple(), - ); - newPublicDataReadsPreimages[i] = preimage! 
as PublicDataTreeLeafPreimage; - } - return { - newPublicDataReadsWitnesses, - newPublicDataReadsPreimages, - }; - } - - // Builds the base rollup inputs, updating the contract, nullifier, and data trees in the process - protected async buildBaseRollupInput(tx: ProcessedTx, globalVariables: GlobalVariables) { - // Get trees info before any changes hit - const constants = await this.getConstantRollupData(globalVariables); - const start = new PartialStateReference( - await this.getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), - await this.getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE), - await this.getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), - ); - - // Get the subtree sibling paths for the circuit - const noteHashSubtreeSiblingPathArray = await this.getSubtreeSiblingPath( - MerkleTreeId.NOTE_HASH_TREE, - NOTE_HASH_SUBTREE_HEIGHT, - ); - - const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, i => - i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO, - ); - - // Update the note hash trees with the new items being inserted to get the new roots - // that will be used by the next iteration of the base rollup circuit, skipping the empty ones - const newNoteHashes = tx.data.combinedData.newNoteHashes.map(x => x.value.toBuffer()); - await this.db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, newNoteHashes); - - // The read witnesses for a given TX should be generated before the writes of the same TX are applied. - // All reads that refer to writes in the same tx are transient and can be simplified out. 
- const txPublicDataReadsInfo = await this.getPublicDataReadsInfo(tx); - const txPublicDataUpdateRequestInfo = await this.processPublicDataUpdateRequests(tx); - - // Update the nullifier tree, capturing the low nullifier info for each individual operation - const { - lowLeavesWitnessData: nullifierWitnessLeaves, - newSubtreeSiblingPath: newNullifiersSubtreeSiblingPath, - sortedNewLeaves: sortedNewNullifiers, - sortedNewLeavesIndexes, - } = await this.db.batchInsert( - MerkleTreeId.NULLIFIER_TREE, - tx.data.combinedData.newNullifiers.map(sideEffectLinkedToNoteHash => sideEffectLinkedToNoteHash.value.toBuffer()), - NULLIFIER_SUBTREE_HEIGHT, - ); - if (nullifierWitnessLeaves === undefined) { - throw new Error(`Could not craft nullifier batch insertion proofs`); - } - - // Extract witness objects from returned data - const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness[] = - nullifierWitnessLeaves.map(l => - MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)), - ); - - const nullifierSubtreeSiblingPathArray = newNullifiersSubtreeSiblingPath.toFields(); - - const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, i => - i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO, - ); - - const publicDataSiblingPath = txPublicDataUpdateRequestInfo.newPublicDataSubtreeSiblingPath; - - const stateDiffHints = StateDiffHints.from({ - nullifierPredecessorPreimages: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => - i < nullifierWitnessLeaves.length - ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage) - : NullifierLeafPreimage.empty(), - ), - nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => - i < nullifierPredecessorMembershipWitnessesWithoutPadding.length - ? 
nullifierPredecessorMembershipWitnessesWithoutPadding[i] - : this.makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), - ), - sortedNullifiers: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortedNewNullifiers[i])), - sortedNullifierIndexes: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]), - noteHashSubtreeSiblingPath, - nullifierSubtreeSiblingPath, - publicDataSiblingPath, - }); - - const blockHash = tx.data.constants.historicalHeader.hash(); - const archiveRootMembershipWitness = await this.getMembershipWitnessFor( - blockHash, - MerkleTreeId.ARCHIVE, - ARCHIVE_HEIGHT, - ); - - return BaseRollupInputs.from({ - kernelData: this.getKernelDataFor(tx), - start, - stateDiffHints, - - sortedPublicDataWrites: txPublicDataUpdateRequestInfo.sortedPublicDataWrites, - sortedPublicDataWritesIndexes: txPublicDataUpdateRequestInfo.sortedPublicDataWritesIndexes, - lowPublicDataWritesPreimages: txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages, - lowPublicDataWritesMembershipWitnesses: txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses, - publicDataReadsPreimages: txPublicDataReadsInfo.newPublicDataReadsPreimages, - publicDataReadsMembershipWitnesses: txPublicDataReadsInfo.newPublicDataReadsWitnesses, - - archiveRootMembershipWitness, - - constants, - }); - } - - protected makeEmptyMembershipWitness(height: N) { - return new MembershipWitness( - height, - 0n, - makeTuple(height, () => Fr.ZERO), - ); - } -} diff --git a/yarn-project/sequencer-client/src/block_builder/types.ts b/yarn-project/sequencer-client/src/block_builder/types.ts deleted file mode 100644 index b687864c69d..00000000000 --- a/yarn-project/sequencer-client/src/block_builder/types.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Type representing the names of the trees for the base rollup. - */ -type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'PublicDataTree'; -/** - * Type representing the names of the trees. 
- */ -export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive'; diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index b85130d5716..ba7ab1ec918 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,45 +1,15 @@ import { L1ToL2MessageSource, L2BlockSource } from '@aztec/circuit-types'; -import { createDebugLogger } from '@aztec/foundation/log'; +import { BlockProver } from '@aztec/circuit-types/interfaces'; import { P2P } from '@aztec/p2p'; +import { SimulationProvider } from '@aztec/simulator'; import { ContractDataSource } from '@aztec/types/contracts'; import { WorldStateSynchronizer } from '@aztec/world-state'; -import * as fs from 'fs/promises'; - -import { SoloBlockBuilder } from '../block_builder/solo_block_builder.js'; import { SequencerClientConfig } from '../config.js'; import { getGlobalVariableBuilder } from '../global_variable_builder/index.js'; -import { getVerificationKeys } from '../mocks/verification_keys.js'; -import { EmptyRollupProver } from '../prover/empty.js'; import { getL1Publisher } from '../publisher/index.js'; import { Sequencer, SequencerConfig } from '../sequencer/index.js'; import { PublicProcessorFactory } from '../sequencer/public_processor.js'; -import { NativeACVMSimulator } from '../simulator/acvm_native.js'; -import { WASMSimulator } from '../simulator/acvm_wasm.js'; -import { RealRollupCircuitSimulator } from '../simulator/rollup.js'; -import { SimulationProvider } from '../simulator/simulation_provider.js'; - -const logger = createDebugLogger('aztec:sequencer-client'); - -/** - * Factory function to create a simulation provider. Will attempt to use native binary simulation falling back to WASM if unavailable. 
- * @param config - The provided sequencer client configuration - * @returns The constructed simulation provider - */ -async function getSimulationProvider(config: SequencerClientConfig): Promise { - if (config.acvmBinaryPath && config.acvmWorkingDirectory) { - try { - await fs.access(config.acvmBinaryPath, fs.constants.R_OK); - await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); - logger(`Using native ACVM at ${config.acvmBinaryPath}`); - return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath); - } catch { - logger(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`); - } - } - logger('Using WASM ACVM simulation'); - return new WASMSimulator(); -} /** * Encapsulates the full sequencer and publisher. @@ -55,6 +25,8 @@ export class SequencerClient { * @param contractDataSource - Provides access to contract bytecode for public executions. * @param l2BlockSource - Provides information about the previously published blocks. * @param l1ToL2MessageSource - Provides access to L1 to L2 messages. + * @param prover - An instance of a block prover + * @param simulationProvider - An instance of a simulation provider * @returns A new running instance. 
*/ public static async new( @@ -64,20 +36,13 @@ export class SequencerClient { contractDataSource: ContractDataSource, l2BlockSource: L2BlockSource, l1ToL2MessageSource: L1ToL2MessageSource, + prover: BlockProver, + simulationProvider: SimulationProvider, ) { const publisher = getL1Publisher(config); const globalsBuilder = getGlobalVariableBuilder(config); const merkleTreeDb = worldStateSynchronizer.getLatest(); - const simulationProvider = await getSimulationProvider(config); - - const blockBuilder = new SoloBlockBuilder( - merkleTreeDb, - getVerificationKeys(), - new RealRollupCircuitSimulator(simulationProvider), - new EmptyRollupProver(), - ); - const publicProcessorFactory = new PublicProcessorFactory( merkleTreeDb, contractDataSource, @@ -90,7 +55,7 @@ export class SequencerClient { globalsBuilder, p2pClient, worldStateSynchronizer, - blockBuilder, + prover, l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index 7d4538bf476..ca2c6f3e5a2 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -1,17 +1,8 @@ export * from './client/index.js'; export * from './config.js'; -export * from './mocks/verification_keys.js'; export * from './publisher/index.js'; export * from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? 
export * from './global_variable_builder/index.js'; export * from './sequencer/public_processor.js'; - -// Used by publisher test in e2e -export { SoloBlockBuilder } from './block_builder/solo_block_builder.js'; -export { EmptyRollupProver } from './prover/empty.js'; -export { makeEmptyProcessedTx, makeProcessedTx, partitionReverts } from './sequencer/processed_tx.js'; -export { WASMSimulator } from './simulator/acvm_wasm.js'; -export { RealRollupCircuitSimulator } from './simulator/rollup.js'; -export { SimulationProvider } from './simulator/simulation_provider.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts index 8d143ed4f0a..0ecd9e56cae 100644 --- a/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/abstract_phase_manager.ts @@ -38,6 +38,8 @@ import { SideEffect, SideEffectLinkedToNoteHash, VK_TREE_HEIGHT, + VerificationKey, + makeEmptyProof, } from '@aztec/circuits.js'; import { computeVarArgsHash } from '@aztec/circuits.js/hash'; import { arrayNonEmptyLength, padArrayEnd } from '@aztec/foundation/collection'; @@ -55,8 +57,6 @@ import { MerkleTreeOperations } from '@aztec/world-state'; import { env } from 'process'; -import { getVerificationKeys } from '../mocks/verification_keys.js'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { HintsBuilder } from './hints_builder.js'; import { lastSideEffectCounter } from './utils.js'; @@ -82,7 +82,6 @@ export abstract class AbstractPhaseManager { protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, protected publicKernel: PublicKernelCircuitSimulator, - protected publicProver: PublicProver, protected globalVariables: GlobalVariables, protected historicalHeader: Header, public phase: PublicKernelPhase, @@ -279,8 +278,7 @@ export abstract 
class AbstractPhaseManager { callData?: PublicCallData, ): Promise<[PublicKernelCircuitPublicInputs, Proof]> { const output = await this.getKernelCircuitOutput(previousOutput, previousProof, callData); - const proof = await this.publicProver.getPublicKernelCircuitProof(output); - return [output, proof]; + return [output, makeEmptyProof()]; } protected async getKernelCircuitOutput( @@ -331,7 +329,8 @@ export abstract class AbstractPhaseManager { previousOutput: PublicKernelCircuitPublicInputs, previousProof: Proof, ): PublicKernelData { - const vk = getVerificationKeys().publicKernelCircuit; + // TODO(@PhilWindle) Fix once we move this to prover-client + const vk = VerificationKey.makeFake(); const vkIndex = 0; const vkSiblingPath = MembershipWitness.random(VK_TREE_HEIGHT).siblingPath; return new PublicKernelData(previousOutput, previousProof, vk, vkIndex, vkSiblingPath); @@ -434,8 +433,7 @@ export abstract class AbstractPhaseManager { ); const publicCallStack = padArrayEnd(publicCallRequests, CallRequest.empty(), MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL); const portalContractAddress = result.execution.callContext.portalContractAddress.toField(); - const proof = await this.publicProver.getPublicCircuitProof(callStackItem.publicInputs); - return new PublicCallData(callStackItem, publicCallStack, proof, portalContractAddress, bytecodeHash); + return new PublicCallData(callStackItem, publicCallStack, makeEmptyProof(), portalContractAddress, bytecodeHash); } } diff --git a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts index 6c0ab1d6f56..9ba3cf97196 100644 --- a/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/app_logic_phase_manager.ts @@ -3,7 +3,6 @@ import { GlobalVariables, Header, Proof, PublicKernelCircuitPublicInputs } from import { PublicExecutor, PublicStateDB } from '@aztec/simulator'; import { 
MerkleTreeOperations } from '@aztec/world-state'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB } from '../simulator/public_executor.js'; import { AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; @@ -16,14 +15,13 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, protected publicKernel: PublicKernelCircuitSimulator, - protected publicProver: PublicProver, protected globalVariables: GlobalVariables, protected historicalHeader: Header, protected publicContractsDB: ContractsDataSourcePublicDB, protected publicStateDB: PublicStateDB, public phase: PublicKernelPhase = PublicKernelPhase.APP_LOGIC, ) { - super(db, publicExecutor, publicKernel, publicProver, globalVariables, historicalHeader, phase); + super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); } override async handle( diff --git a/yarn-project/sequencer-client/src/sequencer/phase_manager_factory.ts b/yarn-project/sequencer-client/src/sequencer/phase_manager_factory.ts index 129fdc88129..9c34ee17e1d 100644 --- a/yarn-project/sequencer-client/src/sequencer/phase_manager_factory.ts +++ b/yarn-project/sequencer-client/src/sequencer/phase_manager_factory.ts @@ -3,7 +3,6 @@ import { GlobalVariables, Header, PublicKernelCircuitPublicInputs } from '@aztec import { PublicExecutor, PublicStateDB } from '@aztec/simulator'; import { MerkleTreeOperations } from '@aztec/world-state'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB } from '../simulator/public_executor.js'; import { AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; @@ -30,7 +29,6 @@ export class PhaseManagerFactory { db: MerkleTreeOperations, publicExecutor: 
PublicExecutor, publicKernel: PublicKernelCircuitSimulator, - publicProver: PublicProver, globalVariables: GlobalVariables, historicalHeader: Header, publicContractsDB: ContractsDataSourcePublicDB, @@ -41,7 +39,6 @@ export class PhaseManagerFactory { db, publicExecutor, publicKernel, - publicProver, globalVariables, historicalHeader, publicContractsDB, @@ -52,7 +49,6 @@ export class PhaseManagerFactory { db, publicExecutor, publicKernel, - publicProver, globalVariables, historicalHeader, publicContractsDB, @@ -63,7 +59,6 @@ export class PhaseManagerFactory { db, publicExecutor, publicKernel, - publicProver, globalVariables, historicalHeader, publicContractsDB, @@ -80,7 +75,6 @@ export class PhaseManagerFactory { db: MerkleTreeOperations, publicExecutor: PublicExecutor, publicKernel: PublicKernelCircuitSimulator, - publicProver: PublicProver, globalVariables: GlobalVariables, historicalHeader: Header, publicContractsDB: ContractsDataSourcePublicDB, @@ -96,7 +90,6 @@ export class PhaseManagerFactory { db, publicExecutor, publicKernel, - publicProver, globalVariables, historicalHeader, publicContractsDB, @@ -110,7 +103,6 @@ export class PhaseManagerFactory { db, publicExecutor, publicKernel, - publicProver, globalVariables, historicalHeader, publicContractsDB, @@ -121,7 +113,6 @@ export class PhaseManagerFactory { db, publicExecutor, publicKernel, - publicProver, globalVariables, historicalHeader, publicContractsDB, diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts index 126237a3f72..802a7d98ff0 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts @@ -1,12 +1,14 @@ import { FunctionCall, FunctionL2Logs, + ProcessedTx, PublicDataWrite, SiblingPath, SimulationError, Tx, TxL2Logs, mockTx, + toTxEffect, } from '@aztec/circuit-types'; import { ARGS_LENGTH, @@ -42,24 
+44,20 @@ import { } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; import { arrayNonEmptyLength, padArrayEnd, times } from '@aztec/foundation/collection'; -import { PublicExecution, PublicExecutionResult, PublicExecutor } from '@aztec/simulator'; +import { PublicExecution, PublicExecutionResult, PublicExecutor, WASMSimulator } from '@aztec/simulator'; import { MerkleTreeOperations, TreeInfo } from '@aztec/world-state'; import { jest } from '@jest/globals'; import { MockProxy, mock } from 'jest-mock-extended'; -import { PublicProver } from '../prover/index.js'; -import { WASMSimulator } from '../simulator/acvm_wasm.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB, WorldStatePublicDB } from '../simulator/public_executor.js'; import { RealPublicKernelCircuitSimulator } from '../simulator/public_kernel.js'; -import { ProcessedTx, toTxEffect } from './processed_tx.js'; import { PublicProcessor } from './public_processor.js'; describe('public_processor', () => { let db: MockProxy; let publicExecutor: MockProxy; - let publicProver: MockProxy; let publicContractsDB: MockProxy; let publicWorldStateDB: MockProxy; @@ -71,15 +69,12 @@ describe('public_processor', () => { beforeEach(() => { db = mock(); publicExecutor = mock(); - publicProver = mock(); publicContractsDB = mock(); publicWorldStateDB = mock(); proof = makeEmptyProof(); root = Buffer.alloc(32, 5); - publicProver.getPublicCircuitProof.mockResolvedValue(proof); - publicProver.getPublicKernelCircuitProof.mockResolvedValue(proof); db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); }); @@ -92,7 +87,6 @@ describe('public_processor', () => { db, publicExecutor, publicKernel, - publicProver, GlobalVariables.empty(), Header.empty(), publicContractsDB, @@ -180,7 +174,6 @@ describe('public_processor', () => { db, publicExecutor, publicKernel, - publicProver, GlobalVariables.empty(), Header.empty(), publicContractsDB, 
diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.ts index 1c620bd2787..98a1b4a4a71 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.ts @@ -1,28 +1,27 @@ -import { L1ToL2MessageSource, SimulationError, Tx } from '@aztec/circuit-types'; +import { + FailedTx, + L1ToL2MessageSource, + ProcessedTx, + SimulationError, + Tx, + getPreviousOutputAndProof, + makeEmptyProcessedTx, + makeProcessedTx, + validateProcessedTx, +} from '@aztec/circuit-types'; import { TxSequencerProcessingStats } from '@aztec/circuit-types/stats'; import { GlobalVariables, Header } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; -import { PublicExecutor, PublicStateDB } from '@aztec/simulator'; +import { PublicExecutor, PublicStateDB, SimulationProvider } from '@aztec/simulator'; import { ContractDataSource } from '@aztec/types/contracts'; import { MerkleTreeOperations } from '@aztec/world-state'; -import { EmptyPublicProver } from '../prover/empty.js'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB, WorldStateDB, WorldStatePublicDB } from '../simulator/public_executor.js'; import { RealPublicKernelCircuitSimulator } from '../simulator/public_kernel.js'; -import { SimulationProvider } from '../simulator/simulation_provider.js'; import { AbstractPhaseManager } from './abstract_phase_manager.js'; import { PhaseManagerFactory } from './phase_manager_factory.js'; -import { - FailedTx, - ProcessedTx, - getPreviousOutputAndProof, - makeEmptyProcessedTx, - makeProcessedTx, - validateProcessedTx, -} from './processed_tx.js'; /** * Creates new instances of PublicProcessor given the provided merkle tree db and contract data 
source. @@ -56,7 +55,6 @@ export class PublicProcessorFactory { this.merkleTree, publicExecutor, new RealPublicKernelCircuitSimulator(this.simulator), - new EmptyPublicProver(), globalVariables, historicalHeader, publicContractsDB, @@ -74,7 +72,6 @@ export class PublicProcessor { protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, protected publicKernel: PublicKernelCircuitSimulator, - protected publicProver: PublicProver, protected globalVariables: GlobalVariables, protected historicalHeader: Header, protected publicContractsDB: ContractsDataSourcePublicDB, @@ -100,7 +97,6 @@ export class PublicProcessor { this.db, this.publicExecutor, this.publicKernel, - this.publicProver, this.globalVariables, this.historicalHeader, this.publicContractsDB, @@ -122,7 +118,6 @@ export class PublicProcessor { this.db, this.publicExecutor, this.publicKernel, - this.publicProver, this.globalVariables, this.historicalHeader, this.publicContractsDB, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 61ccb97a346..10d5878e81f 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -1,4 +1,16 @@ -import { L1ToL2MessageSource, L2Block, L2BlockSource, MerkleTreeId, Tx, TxHash, mockTx } from '@aztec/circuit-types'; +import { + L1ToL2MessageSource, + L2Block, + L2BlockSource, + MerkleTreeId, + PROVING_STATUS, + ProverClient, + ProvingSuccess, + ProvingTicket, + makeEmptyProcessedTx, + makeProcessedTx, + mockTx, +} from '@aztec/circuit-types'; import { AztecAddress, EthAddress, @@ -8,16 +20,13 @@ import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, makeEmptyProof, } from '@aztec/circuits.js'; -import { times } from '@aztec/foundation/collection'; import { P2P, P2PClientState } from '@aztec/p2p'; import { MerkleTreeOperations, WorldStateRunningState, WorldStateSynchronizer } from 
'@aztec/world-state'; import { MockProxy, mock, mockFn } from 'jest-mock-extended'; -import { BlockBuilder } from '../block_builder/index.js'; import { GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { L1Publisher } from '../index.js'; -import { makeEmptyProcessedTx, makeProcessedTx } from './processed_tx.js'; import { PublicProcessor, PublicProcessorFactory } from './public_processor.js'; import { Sequencer } from './sequencer.js'; @@ -26,7 +35,7 @@ describe('sequencer', () => { let globalVariableBuilder: MockProxy; let p2p: MockProxy; let worldState: MockProxy; - let blockBuilder: MockProxy; + let proverClient: MockProxy; let merkleTreeOps: MockProxy; let publicProcessor: MockProxy; let l2BlockSource: MockProxy; @@ -48,7 +57,7 @@ describe('sequencer', () => { publisher = mock(); globalVariableBuilder = mock(); merkleTreeOps = mock(); - blockBuilder = mock(); + proverClient = mock(); p2p = mock({ getStatus: () => Promise.resolve({ state: P2PClientState.IDLE, syncedToL2Block: lastBlockNumber }), @@ -82,7 +91,7 @@ describe('sequencer', () => { globalVariableBuilder, p2p, worldState, - blockBuilder, + proverClient, l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, @@ -96,9 +105,17 @@ describe('sequencer', () => { tx.data.needsTeardown = false; const block = L2Block.random(lastBlockNumber + 1); const proof = makeEmptyProof(); + const result: ProvingSuccess = { + status: PROVING_STATUS.SUCCESS, + proof, + block, + }; + const ticket: ProvingTicket = { + provingPromise: Promise.resolve(result), + }; p2p.getTxs.mockResolvedValueOnce([tx]); - blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); + proverClient.startNewBlock.mockResolvedValueOnce(ticket); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), @@ -107,13 +124,13 @@ describe('sequencer', () 
=> { await sequencer.initialSync(); await sequencer.work(); - const expectedTxHashes = [...Tx.getHashes([tx]), ...times(1, () => TxHash.ZERO)]; - - expect(blockBuilder.buildL2Block).toHaveBeenCalledWith( + expect(proverClient.startNewBlock).toHaveBeenCalledWith( + 1, new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), - expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + publicProcessor.makeEmptyProcessedTx(), ); + expect(proverClient.addNewTx).toHaveBeenCalledWith(expect.objectContaining({ hash: tx.getTxHash() })); expect(publisher.processL2Block).toHaveBeenCalledWith(block); }); @@ -127,9 +144,17 @@ describe('sequencer', () => { const doubleSpendTx = txs[1]; const block = L2Block.random(lastBlockNumber + 1); const proof = makeEmptyProof(); + const result: ProvingSuccess = { + status: PROVING_STATUS.SUCCESS, + proof, + block, + }; + const ticket: ProvingTicket = { + provingPromise: Promise.resolve(result), + }; p2p.getTxs.mockResolvedValueOnce(txs); - blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); + proverClient.startNewBlock.mockResolvedValueOnce(ticket); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), @@ -146,13 +171,14 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - const expectedTxHashes = Tx.getHashes([txs[0], txs[2]]); - - expect(blockBuilder.buildL2Block).toHaveBeenCalledWith( + expect(proverClient.startNewBlock).toHaveBeenCalledWith( + 2, new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), - expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + publicProcessor.makeEmptyProcessedTx(), 
); + expect(proverClient.addNewTx).toHaveBeenCalledWith(expect.objectContaining({ hash: txs[0].getTxHash() })); + expect(proverClient.addNewTx).toHaveBeenCalledWith(expect.objectContaining({ hash: txs[2].getTxHash() })); expect(publisher.processL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); }); @@ -167,9 +193,17 @@ describe('sequencer', () => { const invalidChainTx = txs[1]; const block = L2Block.random(lastBlockNumber + 1); const proof = makeEmptyProof(); + const result: ProvingSuccess = { + status: PROVING_STATUS.SUCCESS, + proof, + block, + }; + const ticket: ProvingTicket = { + provingPromise: Promise.resolve(result), + }; p2p.getTxs.mockResolvedValueOnce(txs); - blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); + proverClient.startNewBlock.mockResolvedValueOnce(ticket); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), @@ -181,13 +215,14 @@ describe('sequencer', () => { await sequencer.initialSync(); await sequencer.work(); - const expectedTxHashes = Tx.getHashes([txs[0], txs[2]]); - - expect(blockBuilder.buildL2Block).toHaveBeenCalledWith( + expect(proverClient.startNewBlock).toHaveBeenCalledWith( + 2, new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), - expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + publicProcessor.makeEmptyProcessedTx(), ); + expect(proverClient.addNewTx).toHaveBeenCalledWith(expect.objectContaining({ hash: txs[0].getTxHash() })); + expect(proverClient.addNewTx).toHaveBeenCalledWith(expect.objectContaining({ hash: txs[2].getTxHash() })); expect(publisher.processL2Block).toHaveBeenCalledWith(block); 
expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); }); @@ -197,9 +232,17 @@ describe('sequencer', () => { tx.data.constants.txContext.chainId = chainId; const block = L2Block.random(lastBlockNumber + 1); const proof = makeEmptyProof(); + const result: ProvingSuccess = { + status: PROVING_STATUS.SUCCESS, + proof, + block, + }; + const ticket: ProvingTicket = { + provingPromise: Promise.resolve(result), + }; p2p.getTxs.mockResolvedValueOnce([tx]); - blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); + proverClient.startNewBlock.mockResolvedValueOnce(ticket); publisher.processL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO, coinbase, feeRecipient), diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 84ce6bf3dfe..f4c3481a272 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -1,7 +1,7 @@ -import { L1ToL2MessageSource, L2Block, L2BlockSource, MerkleTreeId, Tx } from '@aztec/circuit-types'; +import { L1ToL2MessageSource, L2Block, L2BlockSource, MerkleTreeId, ProcessedTx, Tx } from '@aztec/circuit-types'; +import { BlockProver, PROVING_STATUS } from '@aztec/circuit-types/interfaces'; import { L2BlockBuiltStats } from '@aztec/circuit-types/stats'; import { AztecAddress, EthAddress, GlobalVariables } from '@aztec/circuits.js'; -import { times } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; @@ -9,13 +9,10 @@ import { Timer, elapsed } from '@aztec/foundation/timer'; import { P2P } from '@aztec/p2p'; import { WorldStateStatus, WorldStateSynchronizer } from '@aztec/world-state'; -import 
{ BlockBuilder } from '../block_builder/index.js'; import { GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { L1Publisher } from '../publisher/l1-publisher.js'; import { WorldStatePublicDB } from '../simulator/public_executor.js'; -import { ceilPowerOfTwo } from '../utils.js'; import { SequencerConfig } from './config.js'; -import { ProcessedTx } from './processed_tx.js'; import { PublicProcessorFactory } from './public_processor.js'; import { TxValidator } from './tx_validator.js'; @@ -44,7 +41,7 @@ export class Sequencer { private globalsBuilder: GlobalVariableBuilder, private p2pClient: P2P, private worldState: WorldStateSynchronizer, - private blockBuilder: BlockBuilder, + private prover: BlockProver, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, private publicProcessorFactory: PublicProcessorFactory, @@ -305,15 +302,17 @@ export class Sequencer { emptyTx: ProcessedTx, globalVariables: GlobalVariables, ) { - // Pad the txs array with empty txs to be a power of two, at least 2 - const txsTargetSize = Math.max(ceilPowerOfTwo(txs.length), 2); - const emptyTxCount = txsTargetSize - txs.length; + const blockTicket = await this.prover.startNewBlock(txs.length, globalVariables, l1ToL2Messages, emptyTx); - const allTxs = [...txs, ...times(emptyTxCount, () => emptyTx)]; - this.log(`Building block ${globalVariables.blockNumber.toBigInt()}`); + for (const tx of txs) { + await this.prover.addNewTx(tx); + } - const [block] = await this.blockBuilder.buildL2Block(globalVariables, allTxs, l1ToL2Messages); - return block; + const result = await blockTicket.provingPromise; + if (result.status === PROVING_STATUS.FAILURE) { + throw new Error(`Block proving failed, reason: ${result.reason}`); + } + return result.block; } get coinbase(): EthAddress { diff --git a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.test.ts b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.test.ts 
index 853bd0e8d1a..9b198177c62 100644 --- a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.test.ts @@ -5,8 +5,6 @@ import { Header, MAX_NON_REVERTIBLE_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_REVERTIBLE_PUBLIC_CALL_STACK_LENGTH_PER_TX, - Proof, - makeEmptyProof, } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { PublicExecutor } from '@aztec/simulator'; @@ -15,7 +13,6 @@ import { MerkleTreeOperations, TreeInfo } from '@aztec/world-state'; import { it } from '@jest/globals'; import { MockProxy, mock } from 'jest-mock-extended'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB, WorldStatePublicDB } from '../simulator/public_executor.js'; import { SetupPhaseManager } from './setup_phase_manager.js'; @@ -29,12 +26,10 @@ class TestSetupPhaseManager extends SetupPhaseManager { describe('setup_phase_manager', () => { let db: MockProxy; let publicExecutor: MockProxy; - let publicProver: MockProxy; let publicContractsDB: MockProxy; let publicWorldStateDB: MockProxy; let publicKernel: MockProxy; - let proof: Proof; let root: Buffer; let phaseManager: TestSetupPhaseManager; @@ -42,22 +37,16 @@ describe('setup_phase_manager', () => { beforeEach(() => { db = mock(); publicExecutor = mock(); - publicProver = mock(); publicContractsDB = mock(); publicWorldStateDB = mock(); - proof = makeEmptyProof(); root = Buffer.alloc(32, 5); - - publicProver.getPublicCircuitProof.mockResolvedValue(proof); - publicProver.getPublicKernelCircuitProof.mockResolvedValue(proof); db.getTreeInfo.mockResolvedValue({ root } as TreeInfo); publicKernel = mock(); phaseManager = new TestSetupPhaseManager( db, publicExecutor, publicKernel, - publicProver, GlobalVariables.empty(), Header.empty(), publicContractsDB, diff --git 
a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts index f30c50ee3e4..76da4e232a6 100644 --- a/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/setup_phase_manager.ts @@ -3,7 +3,6 @@ import { GlobalVariables, Header, Proof, PublicKernelCircuitPublicInputs } from import { PublicExecutor, PublicStateDB } from '@aztec/simulator'; import { MerkleTreeOperations } from '@aztec/world-state'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB } from '../simulator/public_executor.js'; import { AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; @@ -16,14 +15,13 @@ export class SetupPhaseManager extends AbstractPhaseManager { protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, protected publicKernel: PublicKernelCircuitSimulator, - protected publicProver: PublicProver, protected globalVariables: GlobalVariables, protected historicalHeader: Header, protected publicContractsDB: ContractsDataSourcePublicDB, protected publicStateDB: PublicStateDB, public phase: PublicKernelPhase = PublicKernelPhase.SETUP, ) { - super(db, publicExecutor, publicKernel, publicProver, globalVariables, historicalHeader, phase); + super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); } override async handle( diff --git a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts index 804623c13c1..0fe236452d9 100644 --- a/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/tail_phase_manager.ts @@ -3,7 +3,6 @@ import { GlobalVariables, Header, Proof, PublicKernelCircuitPublicInputs } from import { PublicExecutor, PublicStateDB 
} from '@aztec/simulator'; import { MerkleTreeOperations } from '@aztec/world-state'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB } from '../simulator/public_executor.js'; import { AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; @@ -13,14 +12,13 @@ export class TailPhaseManager extends AbstractPhaseManager { protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, protected publicKernel: PublicKernelCircuitSimulator, - protected publicProver: PublicProver, protected globalVariables: GlobalVariables, protected historicalHeader: Header, protected publicContractsDB: ContractsDataSourcePublicDB, protected publicStateDB: PublicStateDB, public readonly phase: PublicKernelPhase = PublicKernelPhase.TAIL, ) { - super(db, publicExecutor, publicKernel, publicProver, globalVariables, historicalHeader, phase); + super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); } async handle(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs, previousPublicKernelProof: Proof) { diff --git a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts index f263806caf5..ddaaa7c8943 100644 --- a/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts +++ b/yarn-project/sequencer-client/src/sequencer/teardown_phase_manager.ts @@ -3,7 +3,6 @@ import { GlobalVariables, Header, Proof, PublicKernelCircuitPublicInputs } from import { PublicExecutor, PublicStateDB } from '@aztec/simulator'; import { MerkleTreeOperations } from '@aztec/world-state'; -import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB } from '../simulator/public_executor.js'; import { AbstractPhaseManager, PublicKernelPhase } 
from './abstract_phase_manager.js'; @@ -16,14 +15,13 @@ export class TeardownPhaseManager extends AbstractPhaseManager { protected db: MerkleTreeOperations, protected publicExecutor: PublicExecutor, protected publicKernel: PublicKernelCircuitSimulator, - protected publicProver: PublicProver, protected globalVariables: GlobalVariables, protected historicalHeader: Header, protected publicContractsDB: ContractsDataSourcePublicDB, protected publicStateDB: PublicStateDB, public phase: PublicKernelPhase = PublicKernelPhase.TEARDOWN, ) { - super(db, publicExecutor, publicKernel, publicProver, globalVariables, historicalHeader, phase); + super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); } override async handle( diff --git a/yarn-project/sequencer-client/src/sequencer/tx_validator.ts b/yarn-project/sequencer-client/src/sequencer/tx_validator.ts index 271b9a2928c..621ec88abde 100644 --- a/yarn-project/sequencer-client/src/sequencer/tx_validator.ts +++ b/yarn-project/sequencer-client/src/sequencer/tx_validator.ts @@ -1,11 +1,10 @@ -import { Tx } from '@aztec/circuit-types'; +import { ProcessedTx, Tx } from '@aztec/circuit-types'; import { AztecAddress, EthAddress, Fr, GlobalVariables } from '@aztec/circuits.js'; import { pedersenHash } from '@aztec/foundation/crypto'; import { Logger, createDebugLogger } from '@aztec/foundation/log'; import { getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token'; import { AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; -import { ProcessedTx } from './processed_tx.js'; /** A source of what nullifiers have been committed to the state trees */ export interface NullifierSource { diff --git a/yarn-project/sequencer-client/src/simulator/index.ts b/yarn-project/sequencer-client/src/simulator/index.ts index 9f25ce30388..ba9106b6b1a 100644 --- a/yarn-project/sequencer-client/src/simulator/index.ts +++ b/yarn-project/sequencer-client/src/simulator/index.ts @@ -1,53 +1,9 
@@ import { - BaseOrMergeRollupPublicInputs, - BaseParityInputs, - BaseRollupInputs, - MergeRollupInputs, - ParityPublicInputs, PublicKernelCircuitPrivateInputs, PublicKernelCircuitPublicInputs, PublicKernelTailCircuitPrivateInputs, - RootParityInputs, - RootRollupInputs, - RootRollupPublicInputs, } from '@aztec/circuits.js'; -/** - * Circuit simulator for the rollup circuits. - */ -export interface RollupSimulator { - /** - * Simulates the base parity circuit from its inputs. - * @param inputs - Inputs to the circuit. - * @returns The public inputs of the parity circuit. - */ - baseParityCircuit(inputs: BaseParityInputs): Promise; - /** - * Simulates the root parity circuit from its inputs. - * @param inputs - Inputs to the circuit. - * @returns The public inputs of the parity circuit. - */ - rootParityCircuit(inputs: RootParityInputs): Promise; - /** - * Simulates the base rollup circuit from its inputs. - * @param input - Inputs to the circuit. - * @returns The public inputs as outputs of the simulation. - */ - baseRollupCircuit(input: BaseRollupInputs): Promise; - /** - * Simulates the merge rollup circuit from its inputs. - * @param input - Inputs to the circuit. - * @returns The public inputs as outputs of the simulation. - */ - mergeRollupCircuit(input: MergeRollupInputs): Promise; - /** - * Simulates the root rollup circuit from its inputs. - * @param input - Inputs to the circuit. - * @returns The public inputs as outputs of the simulation. - */ - rootRollupCircuit(input: RootRollupInputs): Promise; -} - /** * Circuit simulator for the public kernel circuits. 
*/ @@ -77,4 +33,3 @@ export interface PublicKernelCircuitSimulator { */ publicKernelCircuitTail(inputs: PublicKernelTailCircuitPrivateInputs): Promise; } -export * from './acvm_wasm.js'; diff --git a/yarn-project/sequencer-client/src/simulator/public_kernel.ts b/yarn-project/sequencer-client/src/simulator/public_kernel.ts index 0d6a3efa015..c60e74b63ae 100644 --- a/yarn-project/sequencer-client/src/simulator/public_kernel.ts +++ b/yarn-project/sequencer-client/src/simulator/public_kernel.ts @@ -20,9 +20,9 @@ import { convertPublicTeardownRollupInputsToWitnessMap, convertPublicTeardownRollupOutputFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; +import { SimulationProvider, WASMSimulator } from '@aztec/simulator'; -import { PublicKernelCircuitSimulator, WASMSimulator } from './index.js'; -import { SimulationProvider } from './simulation_provider.js'; +import { PublicKernelCircuitSimulator } from './index.js'; /** * Implements the PublicKernelCircuitSimulator. diff --git a/yarn-project/simulator/package.json b/yarn-project/simulator/package.json index 18e69ceb7d9..3999470cf4d 100644 --- a/yarn-project/simulator/package.json +++ b/yarn-project/simulator/package.json @@ -35,6 +35,7 @@ "@aztec/foundation": "workspace:^", "@aztec/types": "workspace:^", "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js", + "@noir-lang/types": "portal:../../noir/packages/types", "levelup": "^5.1.1", "memdown": "^6.1.1", "tslib": "^2.4.0" diff --git a/yarn-project/simulator/src/index.ts b/yarn-project/simulator/src/index.ts index 83725c7d2be..e55ef8e402e 100644 --- a/yarn-project/simulator/src/index.ts +++ b/yarn-project/simulator/src/index.ts @@ -2,3 +2,4 @@ export * from './acvm/index.js'; export * from './client/index.js'; export * from './common/index.js'; export * from './public/index.js'; +export * from './simulator/index.js'; diff --git a/yarn-project/sequencer-client/src/simulator/acvm_native.ts b/yarn-project/simulator/src/simulator/acvm_native.ts similarity 
index 100% rename from yarn-project/sequencer-client/src/simulator/acvm_native.ts rename to yarn-project/simulator/src/simulator/acvm_native.ts diff --git a/yarn-project/sequencer-client/src/simulator/acvm_wasm.ts b/yarn-project/simulator/src/simulator/acvm_wasm.ts similarity index 100% rename from yarn-project/sequencer-client/src/simulator/acvm_wasm.ts rename to yarn-project/simulator/src/simulator/acvm_wasm.ts diff --git a/yarn-project/simulator/src/simulator/index.ts b/yarn-project/simulator/src/simulator/index.ts new file mode 100644 index 00000000000..936e33be0fa --- /dev/null +++ b/yarn-project/simulator/src/simulator/index.ts @@ -0,0 +1,3 @@ +export * from './acvm_native.js'; +export * from './acvm_wasm.js'; +export * from './simulation_provider.js'; diff --git a/yarn-project/sequencer-client/src/simulator/simulation_provider.ts b/yarn-project/simulator/src/simulator/simulation_provider.ts similarity index 100% rename from yarn-project/sequencer-client/src/simulator/simulation_provider.ts rename to yarn-project/simulator/src/simulator/simulation_provider.ts diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 83d063e6450..a7437ba5451 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -150,7 +150,9 @@ __metadata: "@aztec/l1-artifacts": "workspace:^" "@aztec/merkle-tree": "workspace:^" "@aztec/p2p": "workspace:^" + "@aztec/prover-client": "workspace:^" "@aztec/sequencer-client": "workspace:^" + "@aztec/simulator": "workspace:^" "@aztec/types": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 @@ -384,8 +386,10 @@ __metadata: "@aztec/noir-contracts.js": "workspace:^" "@aztec/p2p": "workspace:^" "@aztec/protocol-contracts": "workspace:^" + "@aztec/prover-client": "workspace:^" "@aztec/pxe": "workspace:^" "@aztec/sequencer-client": "workspace:^" + "@aztec/simulator": "workspace:^" "@aztec/types": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 @@ -403,6 +407,7 @@ __metadata: 
crypto-browserify: ^3.12.0 glob: ^10.3.10 jest: ^29.5.0 + jest-mock-extended: ^3.0.5 koa: ^2.14.2 koa-static: ^5.0.0 levelup: ^5.1.1 @@ -746,15 +751,24 @@ __metadata: languageName: unknown linkType: soft -"@aztec/prover-client@workspace:prover-client": +"@aztec/prover-client@workspace:^, @aztec/prover-client@workspace:prover-client": version: 0.0.0-use.local resolution: "@aztec/prover-client@workspace:prover-client" dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:^" + "@aztec/noir-protocol-circuits-types": "workspace:^" + "@aztec/simulator": "workspace:^" + "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 "@types/jest": ^29.5.0 + "@types/memdown": ^3.0.0 "@types/node": ^18.7.23 jest: ^29.5.0 + jest-mock-extended: ^3.0.3 + lodash.chunk: ^4.2.0 ts-jest: ^29.1.0 ts-node: ^10.9.1 tslib: ^2.4.0 @@ -879,6 +893,7 @@ __metadata: "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js" + "@noir-lang/types": "portal:../../noir/packages/types" "@types/jest": ^29.5.0 "@types/levelup": ^5.1.3 "@types/lodash.merge": ^4.6.9